From 6cdc7fdcb5af7c9865f505c99eb51fe658077bb8 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 29 Jul 2025 12:04:13 -0700 Subject: [PATCH 01/18] Workflow init commit --- python/packages/workflow/LICENSE | 21 ++ python/packages/workflow/README.md | 9 + .../agent_framework_workflow/__init__.py | 0 .../agent_framework_workflow/core/__init__.py | 0 .../agent_framework_workflow/core/_edge.py | 105 +++++++++ .../agent_framework_workflow/core/_runner.py | 114 ++++++++++ .../core/_shared_state.py | 68 ++++++ .../core/_typing_utils.py | 42 ++++ .../agent_framework_workflow/core/events.py | 95 ++++++++ .../core/execution_context.py | 129 +++++++++++ .../agent_framework_workflow/core/executor.py | 122 +++++++++++ .../agent_framework_workflow/core/workflow.py | 204 ++++++++++++++++++ python/packages/workflow/pyproject.toml | 83 +++++++ 13 files changed, 992 insertions(+) create mode 100644 python/packages/workflow/LICENSE create mode 100644 python/packages/workflow/README.md create mode 100644 python/packages/workflow/agent_framework_workflow/__init__.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/__init__.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/_edge.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/_runner.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/_shared_state.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/_typing_utils.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/events.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/execution_context.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/executor.py create mode 100644 python/packages/workflow/agent_framework_workflow/core/workflow.py create mode 100644 python/packages/workflow/pyproject.toml diff --git a/python/packages/workflow/LICENSE b/python/packages/workflow/LICENSE new file 
mode 100644 index 0000000000..9e841e7a26 --- /dev/null +++ b/python/packages/workflow/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/python/packages/workflow/README.md b/python/packages/workflow/README.md new file mode 100644 index 0000000000..1133074818 --- /dev/null +++ b/python/packages/workflow/README.md @@ -0,0 +1,9 @@ +# Get Started with Microsoft Agent Framework Workflow + +Please install this package as the extra for `agent-framework`: + +```bash +pip install agent-framework[workflow] +``` + +and see the [README](https://github.com/microsoft/agent-framework/tree/main/python/README.md) for more information. 
class Edge:
    """A directed connection between two executors in a workflow graph."""

    ID_SEPARATOR: ClassVar[str] = "->"

    def __init__(
        self,
        source: Executor[Any],
        target: Executor[Any],
        condition: Callable[[Any], bool] | None = None,
    ):
        """Create an edge from ``source`` to ``target`` with an optional routing condition."""
        self.source = source
        self.target = target
        self._condition = condition
        # IDs of the sibling edges in this edge's fan-in group (excluding this
        # edge itself). When non-empty, payloads are buffered in shared state
        # until every edge in the group has delivered, then sent as one batch.
        self._edge_group_ids: list[str] = []

    @property
    def source_id(self) -> str:
        """ID of the source executor."""
        return self.source.id

    @property
    def target_id(self) -> str:
        """ID of the target executor."""
        return self.target.id

    @property
    def id(self) -> str:
        """Unique edge ID, formed as ``<source_id>-><target_id>``."""
        return f"{self.source_id}{self.ID_SEPARATOR}{self.target_id}"

    @classmethod
    def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]:
        """Split an edge ID back into its ``(source_id, target_id)`` pair."""
        parts = edge_id.split(cls.ID_SEPARATOR)
        if len(parts) != 2:
            raise ValueError(f"Invalid edge ID format: {edge_id}")
        return parts[0], parts[1]

    async def send_message(self, data: Any, shared_state: SharedState, ctx: ExecutionContext) -> None:
        """Deliver ``data`` toward the target executor, honoring condition and fan-in grouping."""
        if not self._edge_group_ids:
            # Plain edge: route directly when the type/condition checks pass.
            if self._should_route(data):
                await self.target.execute(data, ExecutorContext(self.target.id, shared_state, ctx))
            return

        # Fan-in edge:
        # 1. If any sibling edge has not delivered yet, park this payload in
        #    the shared state under this edge's ID.
        # 2. Once every sibling has delivered, collect all buffered payloads
        #    plus this one and release the whole batch to the target.
        batch: list[Any] = []
        async with shared_state.hold() as state:
            ready = await asyncio.gather(
                *(state.has_within_hold(edge_id) for edge_id in self._edge_group_ids)
            )
            if all(ready):
                batch = [await state.get_within_hold(edge_id) for edge_id in self._edge_group_ids]
                batch.append(data)
                # Consume the buffered payloads so the group can fire again later.
                await asyncio.gather(
                    *(state.delete_within_hold(edge_id) for edge_id in self._edge_group_ids)
                )
            else:
                await state.set_within_hold(self.id, data)

        if batch:
            await self.target.execute(batch, ExecutorContext(self.target.id, shared_state, ctx))

    def _should_route(self, data: Any) -> bool:
        """Return True when the target accepts ``data`` and the condition (if any) holds."""
        if not self.target.can_handle(data):
            return False
        return self._condition is None or self._condition(data)

    def set_edge_group(self, edge_group_ids: list[str]) -> None:
        """Register the sibling edges that form this edge's fan-in group.

        Every edge in the group must share this edge's target executor.
        """
        # TODO: An edge cannot be part of multiple edge groups.
        # TODO: Can an edge have both a condition and an edge group?
        for edge_id in edge_group_ids:
            if Edge.source_and_target_from_id(edge_id)[1] != self.target.id:
                raise ValueError("All edges in the group must have the same target executor.")
        self._edge_group_ids = edge_group_ids
class Runner:
    """Executes a workflow as a sequence of Pregel-style supersteps."""

    def __init__(
        self,
        edges: list[Edge],
        shared_state: SharedState,
        ctx: ExecutionContext,
        max_iterations: int = 100,
    ):
        """Prepare the runner with the workflow's edges, shared state, and context."""
        self._edge_map = self._parse_edges(edges)
        self._ctx = ctx
        self._iteration = 0
        self._max_iterations = max_iterations
        self._shared_state = shared_state
        # Index every executor appearing on either end of an edge by its ID.
        self._executors = {edge.source_id: edge.source for edge in edges} | {
            edge.target_id: edge.target for edge in edges
        }

    @property
    def execution_context(self) -> ExecutionContext:
        """The execution context this runner delivers messages through."""
        return self._ctx

    async def run_until_convergence(self) -> AsyncIterable[WorkflowEvent]:
        """Run supersteps until no messages remain, yielding events as they occur.

        Stops early (without error) when the iteration cap is reached.
        """
        while self._iteration < self._max_iterations:
            await self._run_iteration()
            self._iteration += 1

            # Surface any events produced during this superstep.
            if await self._ctx.has_events():
                for event in await self._ctx.drain_events():
                    yield event

            # Converged: nothing left to deliver.
            if not await self._ctx.has_messages():
                break

        self._iteration = 0

    async def _run_iteration(self):
        """Run one superstep: drain pending messages and push them along their edges."""

        async def deliver_along_edge(edge: Edge, pending: list[Any]) -> None:
            # Messages on a single edge are delivered sequentially to keep order.
            for item in pending:
                await edge.send_message(item, self._shared_state, self._ctx)

        async def deliver_from_source(source_id: str, pending: list[Any]) -> None:
            # Fan this source's messages out over all of its edges concurrently.
            outgoing = self._edge_map.get(source_id, [])
            await asyncio.gather(
                *(asyncio.create_task(deliver_along_edge(edge, pending)) for edge in outgoing)
            )

        drained = await self._ctx.drain_messages()
        await asyncio.gather(
            *(asyncio.create_task(deliver_from_source(src, msgs)) for src, msgs in drained.items())
        )

    def _parse_edges(self, edges: list[Edge]) -> dict[str, list[Edge]]:
        """Group edges by their source executor ID.

        Args:
            edges: All edges in the workflow.

        Returns:
            A mapping from source executor ID to that source's outgoing edges.
        """
        grouped: defaultdict[str, list[Edge]] = defaultdict(list)
        for edge in edges:
            grouped[edge.source_id].append(edge)
        return grouped

    def get_executor_by_id(self, executor_id: str) -> Executor[Any]:
        """Look up an executor by its ID.

        Args:
            executor_id: The executor's unique ID.

        Returns:
            The matching Executor instance.

        Raises:
            ValueError: If no executor with that ID is registered.
        """
        try:
            return self._executors[executor_id]
        except KeyError:
            raise ValueError(f"Executor with ID {executor_id} not found.") from None
class SharedState:
    """Async-safe key/value store shared across workflow executors."""

    def __init__(self):
        """Create an empty store guarded by a single asyncio lock."""
        self._state: dict[str, Any] = {}
        self._shared_state_lock = asyncio.Lock()

    async def set(self, key: str, value: Any) -> None:
        """Store ``value`` under ``key``, acquiring the lock for the operation."""
        async with self._shared_state_lock:
            await self.set_within_hold(key, value)

    async def get(self, key: str) -> Any:
        """Return the value for ``key``, acquiring the lock; KeyError if absent."""
        async with self._shared_state_lock:
            return await self.get_within_hold(key)

    async def has(self, key: str) -> bool:
        """Return whether ``key`` is present, acquiring the lock for the check."""
        async with self._shared_state_lock:
            return await self.has_within_hold(key)

    async def delete(self, key: str) -> None:
        """Remove ``key``, acquiring the lock; KeyError if absent."""
        async with self._shared_state_lock:
            await self.delete_within_hold(key)

    @asynccontextmanager
    async def hold(self):
        """Hold the lock across several operations.

        Usage:
            async with shared_state.hold():
                await shared_state.set_within_hold("key", value)
                value = await shared_state.get_within_hold("key")
        """
        async with self._shared_state_lock:
            yield self

    # Lock-free variants: only call these while inside a hold() block.
    async def set_within_hold(self, key: str, value: Any) -> None:
        """Set a value without acquiring the lock (unsafe - use within hold() context)."""
        self._state[key] = value

    async def get_within_hold(self, key: str) -> Any:
        """Get a value without acquiring the lock (unsafe - use within hold() context)."""
        try:
            return self._state[key]
        except KeyError:
            raise KeyError(f"Key '{key}' not found in shared state.") from None

    async def has_within_hold(self, key: str) -> bool:
        """Check a key without acquiring the lock (unsafe - use within hold() context)."""
        return key in self._state

    async def delete_within_hold(self, key: str) -> None:
        """Delete a key without acquiring the lock (unsafe - use within hold() context)."""
        if key not in self._state:
            raise KeyError(f"Key '{key}' not found in shared state.")
        del self._state[key]
+ # Optional[T] is really just as Union[T, None] + if origin is Union: + return any(is_instance_of(data, arg) for arg in args) + + # Case 3: target_type is a generic type + if origin in [list, set]: + return isinstance(data, origin) and all(is_instance_of(item, args[0]) for item in data) # type: ignore + + # Case 4: target_type is a tuple + if origin is tuple: + if len(args) == 1 and args[0] is Ellipsis: # Tuple[...] case + return isinstance(data, tuple) + return ( + isinstance(data, tuple) + and len(data) == len(args) # type: ignore + and all(is_instance_of(item, arg) for item, arg in zip(data, args, strict=False)) # type: ignore + ) + + # Case 5: target_type is a dict + if origin is dict: + return isinstance(data, dict) and all( + is_instance_of(key, args[0]) and is_instance_of(value, args[1]) + for key, value in data.items() # type: ignore + ) + + # Fallback: if we reach here, we assume data is an instance of the target_type + return isinstance(data, target_type) diff --git a/python/packages/workflow/agent_framework_workflow/core/events.py b/python/packages/workflow/agent_framework_workflow/core/events.py new file mode 100644 index 0000000000..eb5c1f18f4 --- /dev/null +++ b/python/packages/workflow/agent_framework_workflow/core/events.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any + +from agent_framework import ChatResponse, ChatResponseUpdate + + +class WorkflowEvent: + """Base class for workflow events.""" + + def __init__(self, data: Any | None = None): + """Initialize the workflow event with optional data.""" + self.data = data + + def __repr__(self): + """Return a string representation of the workflow event.""" + return f"{self.__class__.__name__}(data={self.data if self.data is not None else 'None'})" + + +class WorkflowStartedEvent(WorkflowEvent): + """Event triggered when a workflow starts.""" + + ... 
class WorkflowCompletedEvent(WorkflowEvent):
    """Signals that the workflow has finished running."""


class ExecutorEvent(WorkflowEvent):
    """Base class for events scoped to a single executor."""

    def __init__(self, executor_id: str, data: Any | None = None):
        """Create the event, recording which executor it belongs to."""
        super().__init__(data)
        self.executor_id = executor_id

    def __repr__(self):
        """Render the event with its executor ID and payload."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data})"


class HumanInTheLoopEvent(ExecutorEvent):
    """Signals that human intervention is required in the workflow."""

    def __init__(self, executor_id: str, data: Any | None = None, **kwargs: Any):
        """Create the event, keeping any extra keyword arguments for the handler."""
        super().__init__(executor_id, data)
        self.kwargs = kwargs

    def __repr__(self):
        """Render the event including its extra keyword arguments."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data}, kwargs={self.kwargs})"


class ExecutorInvokeEvent(ExecutorEvent):
    """Signals that an executor handler has been invoked."""

    def __repr__(self):
        """Render the event with just the executor ID."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id})"


class ExecutorCompleteEvent(ExecutorEvent):
    """Signals that an executor handler has completed."""

    def __repr__(self):
        """Render the event with just the executor ID."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id})"


class AgentRunStreamingEvent(ExecutorEvent):
    """Signals a streaming update from a running agent."""

    def __init__(self, executor_id: str, data: ChatResponseUpdate | None = None):
        """Create the streaming event carrying a chat response update."""
        super().__init__(executor_id, data)

    def __repr__(self):
        """Render the streaming event (payload labelled ``messages``)."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id}, messages={self.data})"


class AgentRunEvent(ExecutorEvent):
    """Signals that an agent run has completed."""

    def __init__(self, executor_id: str, data: ChatResponse | None = None):
        """Create the run-completed event carrying the final chat response."""
        super().__init__(executor_id, data)

    def __repr__(self):
        """Render the run-completed event with its payload."""
        return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data})"
class InProcExecutionContext(ExecutionContext):
    """Execution context that buffers messages and events in process memory."""

    def __init__(self):
        """Start with empty message and event buffers."""
        self._messages: defaultdict[str, list[Any]] = defaultdict(list)
        self._events: list[WorkflowEvent] = []

    async def send_message(self, source_id: str, message: Any) -> None:
        """Queue ``message`` under the sending executor's ID."""
        self._messages[source_id].append(message)

    async def drain_messages(self) -> dict[str, list[Any]]:
        """Return all buffered messages and clear the buffer."""
        drained = dict(self._messages)
        self._messages.clear()
        return drained

    async def has_messages(self) -> bool:
        """Report whether any messages are waiting."""
        return bool(self._messages)

    async def add_event(self, event: WorkflowEvent) -> None:
        """Append ``event`` to the event buffer.

        Args:
            event: The event to record.
        """
        self._events.append(event)

    async def drain_events(self) -> list[WorkflowEvent]:
        """Return all buffered events and clear the buffer."""
        drained = list(self._events)
        self._events.clear()
        return drained

    async def has_events(self) -> bool:
        """Report whether any events are waiting."""
        return bool(self._events)


class NoopExecutionContext(ExecutionContext):
    """Execution context that discards everything; useful as a stand-in."""

    async def send_message(self, source_id: str, message: Any) -> None:
        """Discard the message."""

    async def drain_messages(self) -> dict[str, list[Any]]:
        """There is never anything to drain."""
        return {}

    async def has_messages(self) -> bool:
        """Never has messages."""
        return False

    async def add_event(self, event: WorkflowEvent) -> None:
        """Discard the event."""

    async def drain_events(self) -> list[WorkflowEvent]:
        """There is never anything to drain."""
        return []

    async def has_events(self) -> bool:
        """Never has events."""
        return False
+ """ + + def __init__(self, executor_id: str, shared_state: SharedState, execution_context: ExecutionContext): + """Initialize the executor context with the given execution context.""" + self._execution_context = execution_context + self._executor_id = executor_id + self._shared_state = shared_state + + async def send_message(self, message: Any) -> None: + """Send a message to the execution context.""" + await self._execution_context.send_message(self._executor_id, message) + + async def add_event(self, event: WorkflowEvent) -> None: + """Add an event to the execution context.""" + await self._execution_context.add_event(event) + + async def get_shared_state(self, key: str) -> Any: + """Get a value from the shared state.""" + return await self._shared_state.get(key) + + async def set_shared_state(self, key: str, value: Any) -> None: + """Set a value in the shared state.""" + await self._shared_state.set(key, value) + + +class NoopExecutorContext(ExecutorContext): + """A no-operation executor context that does nothing.""" + + def __init__(self): + """Initialize the noop executor context.""" + super().__init__(executor_id="", shared_state=SharedState(), execution_context=NoopExecutionContext()) + + +class Executor(Generic[T], ABC): + """An abstract base class for executing tasks in a workflow. + + Args: + T: The type of the task to be executed. + """ + + def __init__(self, id: str | None = None): + """Initialize the executor with a unique identifier.""" + self._id = id or str(uuid.uuid4()) + + args = get_args(self.__orig_bases__[0]) # type: ignore + if len(args) != 1: + raise ValueError(f"Executor must be parameterized with a single type, got {args}") + self._input_type = args[0] + + @abstractmethod + async def _execute(self, data: T, ctx: ExecutorContext) -> Any: + """Execute the task using the registered handlers. + + Args: + data: The data of type T to be processed. + ctx: The execution context containing additional information. 
+ """ + raise NotImplementedError("Subclasses must implement this method.") + + async def execute(self, data: T, ctx: ExecutorContext | None = None) -> Any: + """Execute a task with the given data and context. + + Args: + data: The data of type T to be processed. + ctx: The execution context containing additional information. + """ + if ctx is None: + ctx = NoopExecutorContext() + + await ctx.add_event(ExecutorInvokeEvent(executor_id=self._id, data=data)) + result = await self._execute(data, ctx) + await ctx.add_event(ExecutorCompleteEvent(executor_id=self._id, data=result)) + + return result + + @property + def id(self) -> str: + """Get the unique identifier of the executor.""" + return self._id + + def can_handle(self, data: Any) -> bool: + """Determine if the executor can handle the given data. + + Args: + data: The data to check. + + Returns: + bool: True if the executor can handle the data, False otherwise. + """ + return is_instance_of(data, self._input_type) + + +TExecutor = TypeVar("TExecutor", bound=Executor[Any]) + + +def output_message_types(*output_types: type): + """Decorator to specify the output types of an executor.""" + + def decorator(cls: TExecutor) -> TExecutor: + cls._declare_output_types = output_types # type: ignore + return cls + + return decorator diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow.py b/python/packages/workflow/agent_framework_workflow/core/workflow.py new file mode 100644 index 0000000000..76eb1d063c --- /dev/null +++ b/python/packages/workflow/agent_framework_workflow/core/workflow.py @@ -0,0 +1,204 @@ +# Copyright (c) Microsoft. All rights reserved. 
class Workflow:
    """An executable graph of executors connected by directed edges."""

    def __init__(
        self,
        edges: list[Edge],
        start_executor: Executor[Any] | str,
        execution_context: ExecutionContext,
    ):
        """Set up the workflow graph and its runner.

        Args:
            edges: Directed edges connecting the workflow's executors.
            start_executor: The entry-point executor, as an instance or its ID.
            execution_context: The ExecutionContext used during execution.
        """
        self._edges = edges
        self._start_executor = start_executor

        self._shared_state = SharedState()
        self._runner = Runner(self._edges, self._shared_state, execution_context)

    async def run_stream(
        self,
        message: Any,
        executor: Executor[Any] | str | None = None,
    ) -> AsyncIterable[WorkflowEvent]:
        """Feed ``message`` into the workflow and stream the resulting events.

        Args:
            message: The initial message to inject.
            executor: Executor (instance or ID) to receive it; defaults to the
                workflow's start executor.
        """
        target = executor if executor else self._start_executor
        if isinstance(target, str):
            target = self._runner.get_executor_by_id(target)

        await target.execute(
            message,
            ExecutorContext(
                target.id,
                self._shared_state,
                self._runner.execution_context,
            ),
        )
        async for event in self._runner.run_until_convergence():
            yield event


class Activation(Enum):
    """How a fan-in target is activated by its incoming edges."""

    WhenAll = "when_all"
    WhenAny = "when_any"


class WorkflowBuilder:
    """Fluent builder that assembles edges into a Workflow."""

    def __init__(self):
        """Start with no edges, no start executor, and no execution context."""
        self._edges: list[Edge] = []
        self._start_executor: Executor[Any] | str | None = None
        self._execution_context: ExecutionContext | None = None

    def add_edge(
        self,
        source: Executor[Any],
        target: Executor[Any],
        condition: Callable[[Any], bool] | None = None,
    ) -> "Self":
        """Add a directed edge between two executors.

        Args:
            source: The source executor of the edge.
            target: The target executor of the edge.
            condition: Optional predicate that gates traversal of this edge.
        """
        # TODO(@taochen): Support executor factories for lazy initialization
        self._edges.append(Edge(source, target, condition))
        return self

    def add_fan_out_edges(self, source: Executor[Any], targets: list[Executor[Any]]) -> "Self":
        """Add one edge from ``source`` to each executor in ``targets``.

        Args:
            source: The source executor of the edges.
            targets: The target executors, one edge per entry.
        """
        self._edges.extend(Edge(source, target) for target in targets)
        return self

    def add_fan_in_edges(
        self,
        sources: list[Executor[Any]],
        target: Executor[Any],
        activation: Activation = Activation.WhenAll,
    ) -> "Self":
        """Add one edge from each source executor to a single target.

        Args:
            sources: The source executors, one edge per entry.
            target: The shared target executor.
            activation: Whether the target fires when all sources have
                delivered (WhenAll) or as each one delivers (WhenAny).
        """
        edges = [Edge(source, target) for source in sources]

        if activation == Activation.WhenAll:
            # Group each edge with its siblings so the target fires only once
            # every source has delivered.
            for edge in edges:
                siblings = [other.id for other in edges if other is not edge]
                edge.set_edge_group(siblings)

        self._edges.extend(edges)

        return self

    def add_loop(
        self,
        source: Executor[Any],
        target: Executor[Any],
        condition: Callable[[Any], bool] | None = None,
    ) -> "Self":
        """Add a two-way loop between ``source`` and ``target``.

        Args:
            source: The source executor of the loop edge.
            target: The target executor of the loop edge.
            condition: Optional predicate applied only to the source -> target edge.
        """
        self._edges.append(Edge(source, target, condition))
        self._edges.append(Edge(target, source))

        return self

    def add_chain(
        self,
        executors: list[Executor[Any]],
    ) -> "Self":
        """Connect the executors sequentially, each to the next.

        Args:
            executors: The executors to chain, in order.
        """
        for left, right in zip(executors, executors[1:]):
            self.add_edge(left, right)
        return self

    def set_start_executor(self, executor: Executor[Any] | str) -> "Self":
        """Choose the workflow's entry-point executor.

        Args:
            executor: The starting executor, as an instance or its ID.
        """
        self._start_executor = executor
        return self

    def set_execution_context(self, execution_context: ExecutionContext) -> "Self":
        """Choose the execution context used when the workflow runs.

        Args:
            execution_context: The ExecutionContext instance to use.
        """
        self._execution_context = execution_context
        return self

    def build(self) -> Workflow:
        """Build and return the configured Workflow.

        Returns:
            A Workflow wired with the collected edges and start executor.

        Raises:
            ValueError: If no start executor has been set.
        """
        if not self._start_executor:
            raise ValueError("Starting executor must be set before building the workflow.")

        return Workflow(
            self._edges,
            self._start_executor,
            self._execution_context or InProcExecutionContext(),
        )
+authors = [{ name = "Microsoft", email = "SK-Support@microsoft.com"}] +readme = "README.md" +requires-python = ">=3.10" +version = "0.1.0b1" +license-files = ["LICENSE"] +urls.homepage = "https://learn.microsoft.com/en-us/semantic-kernel/overview/" +urls.source = "https://github.com/microsoft/agent-framework/tree/main/python" +urls.release_notes = "https://github.com/microsoft/agent-framework/releases?q=tag%3Apython-1&expanded=true" +urls.issues = "https://github.com/microsoft/agent-framework/issues" +classifiers = [ + "License :: OSI Approved :: MIT License", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Framework :: Pydantic :: 2", + "Typing :: Typed", +] +dependencies = [ + "agent-framework", +] + +[tool.uv] +prerelease = "if-necessary-or-explicit" +environments = [ + "sys_platform == 'darwin'", + "sys_platform == 'linux'", + "sys_platform == 'win32'" +] + +[tool.uv-dynamic-versioning] +fallback-version = "0.0.0" +[tool.pytest.ini_options] +testpaths = 'tests' +addopts = "-ra -q -r fEX" +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" +filterwarnings = [] +timeout = 120 + +[tool.ruff] +extend = "../../pyproject.toml" + +[tool.pyright] +extend = "../../pyproject.toml" +exclude = ['tests'] + +[tool.mypy] +plugins = ['pydantic.mypy'] +strict = true +python_version = "3.10" +ignore_missing_imports = true +disallow_untyped_defs = true +no_implicit_optional = true +check_untyped_defs = true +warn_return_any = true +show_error_codes = true +warn_unused_ignores = false +disallow_incomplete_defs = true +disallow_untyped_decorators = true +disallow_any_unimported = true + +[tool.bandit] +targets = ["agent_framework_workflow"] +exclude_dirs = ["tests"] + +[tool.poe] +executor.type = "uv" 
+include = "../../shared_tasks.toml" + +[tool.uv.build-backend] +module-name = "agent_framework_workflow" +module-root = "" + +[build-system] +requires = ["uv_build>=0.8.2,<0.9.0"] +build-backend = "uv_build" From 68186dcb639e7aa29c550e1c3ab5141c6c8aeb73 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 29 Jul 2025 15:47:48 -0700 Subject: [PATCH 02/18] Add samples and clean up --- .../main/agent_framework/workflow/__init__.py | 40 ++ .../agent_framework/workflow/__init__.pyi | 37 ++ python/packages/main/pyproject.toml | 3 + .../agent_framework_workflow/__init__.py | 43 ++ .../agent_framework_workflow/core/_edge.py | 4 +- .../agent_framework_workflow/core/executor.py | 2 +- .../agent_framework_workflow/core/workflow.py | 3 +- python/pyproject.toml | 2 + .../workflow/step_01_simple_executor.py | 31 + .../step_02_simple_workflow_sequential.py | 70 ++ .../step_02a_simple_workflow_condition.py | 90 +++ .../workflow/step_02b_simple_workflow_loop.py | 124 ++++ .../workflow/step_03_simple_group_chat.py | 137 ++++ .../step_04_simple_group_chat_with_hil.py | 203 ++++++ python/uv.lock | 609 +++++++++--------- 15 files changed, 1097 insertions(+), 301 deletions(-) create mode 100644 python/packages/main/agent_framework/workflow/__init__.py create mode 100644 python/packages/main/agent_framework/workflow/__init__.pyi create mode 100644 python/samples/getting_started/workflow/step_01_simple_executor.py create mode 100644 python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py create mode 100644 python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py create mode 100644 python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py create mode 100644 python/samples/getting_started/workflow/step_03_simple_group_chat.py create mode 100644 python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py diff --git a/python/packages/main/agent_framework/workflow/__init__.py 
b/python/packages/main/agent_framework/workflow/__init__.py new file mode 100644 index 0000000000..3590050f91 --- /dev/null +++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -0,0 +1,40 @@ +# Copyright (c) Microsoft. All rights reserved. + +import importlib +from typing import Any + +PACKAGE_NAME = "agent_framework_workflow" +PACKAGE_EXTRA = "workflow" +_IMPORTS = [ + "Executor", + "ExecutorContext", + "__version__", + "events", + "WorkflowBuilder", + "output_message_types", + "ExecutorCompleteEvent", + "ExecutorEvent", + "ExecutorInvokeEvent", + "HumanInTheLoopEvent", + "WorkflowCompletedEvent", + "WorkflowEvent", + "WorkflowStartedEvent", + "AgentRunEvent", + "AgentRunStreamingEvent", +] + + +def __getattr__(name: str) -> Any: + if name in _IMPORTS: + try: + return getattr(importlib.import_module(PACKAGE_NAME), name) + except ModuleNotFoundError as exc: + raise ModuleNotFoundError( + f"The '{PACKAGE_EXTRA}' extra is not installed, " + f"please do `pip install agent-framework[{PACKAGE_EXTRA}]`" + ) from exc + raise AttributeError(f"Module {PACKAGE_NAME} has no attribute {name}.") + + +def __dir__() -> list[str]: + return _IMPORTS diff --git a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi new file mode 100644 index 0000000000..18119932ee --- /dev/null +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -0,0 +1,37 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from agent_framework_workflow import ( + AgentRunEvent, + AgentRunStreamingEvent, + Executor, + ExecutorCompleteEvent, + ExecutorContext, + ExecutorEvent, + ExecutorInvokeEvent, + HumanInTheLoopEvent, + WorkflowBuilder, + WorkflowCompletedEvent, + WorkflowEvent, + WorkflowStartedEvent, + __version__, + events, + output_message_types, +) + +__all__ = [ + "AgentRunEvent", + "AgentRunStreamingEvent", + "Executor", + "ExecutorCompleteEvent", + "ExecutorContext", + "ExecutorEvent", + "ExecutorInvokeEvent", + "HumanInTheLoopEvent", + "WorkflowBuilder", + "WorkflowCompletedEvent", + "WorkflowEvent", + "WorkflowStartedEvent", + "__version__", + "events", + "output_message_types", +] diff --git a/python/packages/main/pyproject.toml b/python/packages/main/pyproject.toml index d36d5633a7..c0bec33dce 100644 --- a/python/packages/main/pyproject.toml +++ b/python/packages/main/pyproject.toml @@ -38,6 +38,9 @@ azure = [ foundry = [ "agent-framework-foundry" ] +workflow = [ + "agent-framework-workflow" +] [tool.uv] prerelease = "if-necessary-or-explicit" diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index e69de29bb2..b9cbf88c43 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -0,0 +1,43 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import importlib.metadata + +from .core import events +from .core.events import ( + AgentRunEvent, + AgentRunStreamingEvent, + ExecutorCompleteEvent, + ExecutorEvent, + ExecutorInvokeEvent, + HumanInTheLoopEvent, + WorkflowCompletedEvent, + WorkflowEvent, + WorkflowStartedEvent, +) +from .core.executor import Executor, ExecutorContext, output_message_types +from .core.workflow import WorkflowBuilder + +try: + __version__ = importlib.metadata.version(__name__) +except importlib.metadata.PackageNotFoundError: + __version__ = "0.0.0" # Fallback for development mode + + +__all__ = [ + "AgentRunEvent", + "AgentRunStreamingEvent", + "Executor", + "ExecutorCompleteEvent", + "ExecutorContext", + "ExecutorEvent", + "ExecutorInvokeEvent", + "HumanInTheLoopEvent", + "WorkflowBuilder", + # NOTE(review): removed duplicate "WorkflowBuilder" entry; comment line kept so the hunk stays 43 lines + "WorkflowCompletedEvent", + "WorkflowEvent", + "WorkflowStartedEvent", + "__version__", + "events", + "output_message_types", +] diff --git a/python/packages/workflow/agent_framework_workflow/core/_edge.py b/python/packages/workflow/agent_framework_workflow/core/_edge.py index bc4e4ce669..c0d3a63a73 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/core/_edge.py @@ -96,8 +96,8 @@ def _should_route(self, data: Any) -> bool: def set_edge_group(self, edge_group_ids: list[str]) -> None: """Set the edge group IDs for this edge.""" # Validate that the edges in the edge group contain the same target executor as this edge - # TODO: An edge cannot be part of multiple edge groups. - # TODO: Can an edge have both a condition and an edge group? + # TODO(@taochen): An edge cannot be part of multiple edge groups. + # TODO(@taochen): Can an edge have both a condition and an edge group?
if edge_group_ids: for edge_id in edge_group_ids: if Edge.source_and_target_from_id(edge_id)[1] != self.target.id: diff --git a/python/packages/workflow/agent_framework_workflow/core/executor.py b/python/packages/workflow/agent_framework_workflow/core/executor.py index 74f94a6f02..60558d44cc 100644 --- a/python/packages/workflow/agent_framework_workflow/core/executor.py +++ b/python/packages/workflow/agent_framework_workflow/core/executor.py @@ -115,7 +115,7 @@ def can_handle(self, data: Any) -> bool: def output_message_types(*output_types: type): """Decorator to specify the output types of an executor.""" - def decorator(cls: TExecutor) -> TExecutor: + def decorator(cls: type[TExecutor]) -> type[TExecutor]: cls._declare_output_types = output_types # type: ignore return cls diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow.py b/python/packages/workflow/agent_framework_workflow/core/workflow.py index 76eb1d063c..165a400389 100644 --- a/python/packages/workflow/agent_framework_workflow/core/workflow.py +++ b/python/packages/workflow/agent_framework_workflow/core/workflow.py @@ -97,7 +97,8 @@ def add_edge( Args: source: The source executor of the edge. target: The target executor of the edge. - condition: An optional condition function that determines whether the edge should be traversed based on the message type. + condition: An optional condition function that determines whether the edge + should be traversed based on the message type. 
""" # TODO(@taochen): Support executor factories for lazy initialization self._edges.append(Edge(source, target, condition)) diff --git a/python/pyproject.toml b/python/pyproject.toml index 2ee0a1e95d..3860577006 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -7,6 +7,7 @@ dependencies = [ "agent-framework", "agent-framework-azure", "agent-framework-foundry", + "agent_framework-workflow", ] [dependency-groups] @@ -62,6 +63,7 @@ exclude = [ "packages/agent_framework_project.egg-info" ] agent-framework = { workspace = true } agent-framework-azure = { workspace = true } agent-framework-foundry = { workspace = true } +agent-framework-workflow = { workspace = true } [tool.ruff] line-length = 120 diff --git a/python/samples/getting_started/workflow/step_01_simple_executor.py b/python/samples/getting_started/workflow/step_01_simple_executor.py new file mode 100644 index 0000000000..59dd39a88b --- /dev/null +++ b/python/samples/getting_started/workflow/step_01_simple_executor.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import sys + +from agent_framework.workflow import Executor, ExecutorContext + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +class SimpleExecutor(Executor[str]): + """A simple executor that processes string messages.""" + + @override + async def _execute(self, data: str, ctx: ExecutorContext) -> str: + """Execute the task by converting the input string to uppercase.""" + return data.upper() + + +async def main(): + """Main function to run the SimpleExecutor.""" + executor = SimpleExecutor() + result = await executor.execute("hello world") + print(result) # Output: HELLO WORLD + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py new file mode 100644 index 0000000000..a6025098be --- /dev/null +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py @@ -0,0 +1,70 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import sys + +from agent_framework.workflow import ( + Executor, + ExecutorContext, + WorkflowBuilder, + WorkflowCompletedEvent, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +@output_message_types(str) +class UpperCaseExecutor(Executor[str]): + """An executor that converts text to uppercase.""" + + @override + async def _execute(self, data: str, ctx: ExecutorContext) -> str: + """Execute the task by converting the input string to uppercase.""" + result = data.upper() + + await ctx.send_message(result) + return result + + +@output_message_types(str) +class ReverseTextExecutor(Executor[str]): + """An executor that reverses text.""" + + @override + async def _execute(self, data: str, ctx: ExecutorContext) -> str: + """Execute the task by reversing the input string.""" + result = data[::-1] + + await ctx.send_message(result) + await ctx.add_event(WorkflowCompletedEvent(result)) + return result + + +async def main(): + """Main function to run the workflow.""" + upper_case_executor = UpperCaseExecutor() + reverse_text_executor = ReverseTextExecutor() + + workflow = ( + WorkflowBuilder() + .add_edge(upper_case_executor, reverse_text_executor) + .set_start_executor(upper_case_executor) + .build() + ) + + completion_event = None + async for event in workflow.run_stream("hello world"): + print(f"Event: {event}") + if isinstance(event, WorkflowCompletedEvent): + completion_event = event + + if completion_event: + print(f"Workflow completed with result: {completion_event.data}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py new file mode 100644 index 0000000000..cff7c4b487 --- /dev/null +++ 
b/python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py @@ -0,0 +1,90 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import sys + +from agent_framework.workflow import ( + Executor, + ExecutorContext, + WorkflowBuilder, + WorkflowCompletedEvent, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +@output_message_types(bool) +class DetectSpamExecutor(Executor[str]): + """An executor that determines if a message is spam.""" + + def __init__(self, spam_keywords: list[str], id: str | None = None): + """Initialize the executor with spam keywords.""" + super().__init__(id=id) + self._spam_keywords = spam_keywords + + @override + async def _execute(self, data: str, ctx: ExecutorContext) -> bool: + """Determine if the input string is spam.""" + result = any(keyword in data.lower() for keyword in self._spam_keywords) + + await ctx.send_message(result) + return result + + +@output_message_types() +class RespondToMessageExecutor(Executor[bool]): + """An executor that responds to a message based on spam detection.""" + + @override + async def _execute(self, data: bool, ctx: ExecutorContext) -> None: + """Respond with a message based on whether the input is spam.""" + if data is True: + raise RuntimeError("Input is spam, cannot respond.") + + # Simulate processing delay + await asyncio.sleep(1) + + await ctx.add_event(WorkflowCompletedEvent("Message processed successfully.")) + + +@output_message_types() +class RemoveSpamExecutor(Executor[bool]): + """An executor that removes spam messages.""" + + @override + async def _execute(self, data: bool, ctx: ExecutorContext) -> None: + """Remove the spam message.""" + if data is False: + raise RuntimeError("Input is not spam, cannot remove.") + + # Simulate processing delay + await asyncio.sleep(1) + + await ctx.add_event(WorkflowCompletedEvent("Spam message 
removed.")) + + +async def main(): + """Main function to run the workflow.""" + spam_keywords = ["spam", "advertisement", "offer"] + detect_spam_executor = DetectSpamExecutor(spam_keywords) + respond_to_message_executor = RespondToMessageExecutor() + remove_spam_executor = RemoveSpamExecutor() + + workflow = ( + WorkflowBuilder() + .set_start_executor(detect_spam_executor) + .add_edge(detect_spam_executor, respond_to_message_executor, condition=lambda x: x is False) + .add_edge(detect_spam_executor, remove_spam_executor, condition=lambda x: x is True) + .build() + ) + + async for event in workflow.run_stream("This is a spam."): + print(f"Event: {event}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py new file mode 100644 index 0000000000..2266ea2c68 --- /dev/null +++ b/python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py @@ -0,0 +1,124 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import sys +from enum import Enum + +from agent_framework.workflow import ( + Executor, + ExecutorCompleteEvent, + ExecutorContext, + WorkflowBuilder, + WorkflowCompletedEvent, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +class NumberSignal(Enum): + """Enum to represent number signals for the workflow.""" + + # The target number is above the guess. + ABOVE = "above" + # The target number is below the guess. + BELOW = "below" + # The guess matches the target number. + MATCHED = "matched" + # Initial signal to start the guessing process. 
+ INIT = "init" + + +@output_message_types(int) +class GuessNumberExecutor(Executor[NumberSignal]): + """An executor that guesses a number.""" + + def __init__(self, bound: tuple[int, int], id: str | None = None): + """Initialize the executor with a target number.""" + super().__init__(id=id) + self._lower = bound[0] + self._upper = bound[1] + + @override + async def _execute(self, data: NumberSignal, ctx: ExecutorContext) -> int: + """Execute the task by guessing a number.""" + if data == NumberSignal.INIT: + self._guess = (self._lower + self._upper) // 2 + await ctx.send_message(self._guess) + return self._guess + + if data == NumberSignal.MATCHED: + # The previous guess was correct. + await ctx.add_event(WorkflowCompletedEvent(f"Guessed the number: {self._guess}")) + return self._guess + + if data == NumberSignal.ABOVE: + # The previous guess was too low. + # Update the lower bound to the previous guess. + # Generate a new number that is between the new bounds. + self._lower = self._guess + 1 + self._guess = (self._lower + self._upper) // 2 + await ctx.send_message(self._guess) + return self._guess + + # The previous guess was too high. + # Update the upper bound to the previous guess. + # Generate a new number that is between the new bounds. 
+ self._upper = self._guess - 1 + self._guess = (self._lower + self._upper) // 2 + await ctx.send_message(self._guess) + return self._guess + + +@output_message_types(NumberSignal) +class JudgeExecutor(Executor[int]): + """An executor that judges the guessed number.""" + + def __init__(self, target: int, id: str | None = None): + """Initialize the executor with a target number.""" + super().__init__(id=id) + self._target = target + + @override + async def _execute(self, data: int, ctx: ExecutorContext) -> NumberSignal: + """Judge the guessed number.""" + if data == self._target: + result = NumberSignal.MATCHED + elif data < self._target: + result = NumberSignal.ABOVE + else: + result = NumberSignal.BELOW + + await ctx.send_message(result) + return result + + +async def main(): + """Main function to run the workflow.""" + guess_number_executor = GuessNumberExecutor((1, 100)) + judge_executor = JudgeExecutor(30) + + workflow = ( + WorkflowBuilder() + .add_loop(guess_number_executor, judge_executor) + .set_start_executor(guess_number_executor) + .build() + ) + + iterations = 0 + async for event in workflow.run_stream(NumberSignal.INIT): + if isinstance(event, ExecutorCompleteEvent) and event.executor_id == guess_number_executor.id: + iterations += 1 + print(f"Event: {event}") + + # This is essentially a binary search, so the number of iterations should be logarithmic. + # The maximum number of iterations is [log2(range size)]. For a range of 1 to 100, this is log2(100) which is 7. + # Subtract because the last round is the MATCHED event. + print(f"Guessed {iterations - 1} times.") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_03_simple_group_chat.py b/python/samples/getting_started/workflow/step_03_simple_group_chat.py new file mode 100644 index 0000000000..47601edb11 --- /dev/null +++ b/python/samples/getting_started/workflow/step_03_simple_group_chat.py @@ -0,0 +1,137 @@ +# Copyright (c) Microsoft. 
All rights reserved. + +import asyncio +import sys +from dataclasses import dataclass + +from agent_framework import ChatMessage, ChatResponse, ChatRole +from agent_framework.workflow import ( + AgentRunEvent, + Executor, + ExecutorContext, + WorkflowBuilder, + WorkflowCompletedEvent, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +@dataclass +class GroupChatMessage: + """A data class to hold the messages in a group chat.""" + + messages: list[ChatMessage] + + +@dataclass +class AgentSelectionDecision(GroupChatMessage): + """A data class to hold the decision made by the HILDe executor.""" + + selection: str + + +@output_message_types(AgentSelectionDecision) +class RoundRobinGroupChatManager(Executor[list[ChatMessage]]): + """An executor that manages a round-robin group chat.""" + + def __init__(self, members: list[str], max_round: int, id: str | None = None): + """Initialize the executor with a unique identifier.""" + super().__init__(id) + self._members = members + self._max_round = max_round + self._current_round = 0 + self._chat_history: list[ChatMessage] = [] + + @override + async def _execute(self, data: list[ChatMessage], ctx: ExecutorContext) -> AgentSelectionDecision | None: + """Execute the task by sending messages to the next executor in the round-robin sequence.""" + self._chat_history.extend(data) + + if self._should_terminate(): + await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) + return None + + self._current_round += 1 + selection_decision = AgentSelectionDecision( + messages=self._chat_history, + selection=self._get_next_member(), + ) + await ctx.send_message(selection_decision) + + return selection_decision + + def _should_terminate(self) -> bool: + """Determine if the group chat should terminate based on the current round.""" + return self._current_round >= self._max_round + + def 
_get_next_member(self) -> str: + """Get the next member in the round-robin sequence.""" + return self._members[(self._current_round - 1) % len(self._members)] + + +@output_message_types(list[ChatMessage]) +class FakeAgentExecutor(Executor[AgentSelectionDecision]): + """An executor that simulates a group chat agent A.""" + + @override + async def _execute(self, data: AgentSelectionDecision, ctx: ExecutorContext) -> list[ChatMessage]: + """Simulate a response.""" + response = ChatResponse( + messages=[ + ChatMessage( + ChatRole.ASSISTANT, + text=f"{self.id} received request. Current message size: {len(data.messages)}", + author_name=f"{self.id}", + ) + ] + ) + + await ctx.add_event(AgentRunEvent(self.id, data=response)) + await ctx.send_message(response.messages) + + return response.messages + + +async def main(): + executor_a = FakeAgentExecutor(id="executor_a") + executor_b = FakeAgentExecutor(id="executor_b") + executor_c = FakeAgentExecutor(id="executor_c") + + group_chat_manager = RoundRobinGroupChatManager( + members=[executor_a.id, executor_b.id, executor_c.id], + max_round=3, + id="group_chat_manager", + ) + # The workflow graph: + # + # GroupChatManager -> executor_a -> GroupChatManager + # GroupChatManager -> executor_b -> GroupChatManager + # GroupChatManager -> executor_c -> GroupChatManager + + workflow = ( + WorkflowBuilder() + .set_start_executor(group_chat_manager) + .add_loop(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) + .add_loop(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) + .add_loop(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) + .build() + ) + + completion_event = None + async for event in workflow.run_stream([ChatMessage(ChatRole.USER, text="Start group chat")]): + if isinstance(event, AgentRunEvent): + print(f"{event}") + + if isinstance(event, WorkflowCompletedEvent): + completion_event = event + + if completion_event: + 
print(f"Completion Event: {completion_event}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py new file mode 100644 index 0000000000..4be653b33d --- /dev/null +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py @@ -0,0 +1,203 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import sys +from dataclasses import dataclass + +from agent_framework import ChatMessage, ChatResponse, ChatRole +from agent_framework.workflow import ( + AgentRunEvent, + Executor, + ExecutorContext, + HumanInTheLoopEvent, + WorkflowBuilder, + WorkflowCompletedEvent, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +@dataclass +class GroupChatMessage: + """A data class to hold the messages in a group chat.""" + + messages: list[ChatMessage] + + +@dataclass +class AgentSelectionDecision(GroupChatMessage): + """A data class to hold the decision made by the Human-in-the-Loop executor.""" + + selection: str + + +@output_message_types(AgentSelectionDecision, list[ChatMessage]) +class CriticGroupChatManagerWithHIL(Executor[list[ChatMessage]]): + """An executor that manages a round-robin group chat.""" + + def __init__(self, members: list[str], id: str | None = None): + """Initialize the executor with a unique identifier.""" + super().__init__(id) + self._members = members + self._current_round = 0 + self._chat_history: list[ChatMessage] = [] + + @override + async def _execute( + self, + data: list[ChatMessage], + ctx: ExecutorContext, + ) -> AgentSelectionDecision | list[ChatMessage] | None: + """Execute the task by sending messages to the next executor in the round-robin sequence.""" + self._chat_history.extend(data) + + if 
self._should_terminate(): + await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) + return None + + if self._should_request_hil(): + # Request human intervention if the last message was from the assistant + await ctx.send_message(self._chat_history) + return self._chat_history + + self._current_round += 1 + selection_decision = AgentSelectionDecision( + messages=self._chat_history, + selection=self._get_next_member(), + ) + await ctx.send_message(selection_decision) + + return selection_decision + + def _should_terminate(self) -> bool: + """Determine if the group chat should terminate based on the last message.""" + if len(self._chat_history) == 0: + return False + + last_message = self._chat_history[-1] + return bool(last_message.role == ChatRole.USER and "stop" in last_message.text.lower()) + + def _should_request_hil(self) -> bool: + """Determine if the group chat should request HIL based on the last message.""" + if len(self._chat_history) == 0: + return True + + last_message = self._chat_history[-1] + return last_message.role == ChatRole.ASSISTANT + + def _get_next_member(self) -> str: + """Get the next member in the round-robin sequence.""" + return self._members[(self._current_round - 1) % len(self._members)] + + +@output_message_types(list[ChatMessage]) +class HumanInTheLoopExecutor(Executor[list[ChatMessage]]): + """An executor that simulates a human-in-the-loop decision-making process.""" + + def __init__(self, id: str | None = None): + """Initialize the executor with a unique identifier.""" + super().__init__(id) + + self._is_waiting_for_human_input = False + + @override + async def _execute(self, data: list[ChatMessage], ctx: ExecutorContext) -> list[ChatMessage] | None: + """Simulate a human-in-the-loop response.""" + if not self._is_waiting_for_human_input: + # If it's not waiting but received a message, it means it should prompt for human input. 
+ self._is_waiting_for_human_input = True + await ctx.add_event(HumanInTheLoopEvent(executor_id=self.id)) + return None + + self._is_waiting_for_human_input = False + # If it is waiting, it means the human has provided input. It should return the messages. + await ctx.send_message(data) + return data + + +@output_message_types(list[ChatMessage]) +class FakeAgentExecutor(Executor[AgentSelectionDecision]): + """An executor that simulates a group chat agent A.""" + + @override + async def _execute(self, data: AgentSelectionDecision, ctx: ExecutorContext) -> None: + """Simulate a response.""" + response = ChatResponse( + messages=[ + ChatMessage( + ChatRole.ASSISTANT, + text=f"{self.id} received request. Current message size: {len(data.messages)}", + author_name=f"{self.id}", + ) + ] + ) + await ctx.add_event(AgentRunEvent(self.id, data=response)) + await ctx.send_message(response.messages) + + +async def main(): + executor_a = FakeAgentExecutor(id="executor_a") + executor_b = FakeAgentExecutor(id="executor_b") + executor_c = FakeAgentExecutor(id="executor_c") + + hil_executor = HumanInTheLoopExecutor(id="hil_executor") + + group_chat_manager = CriticGroupChatManagerWithHIL( + members=[executor_a.id, executor_b.id, executor_c.id], + id="group_chat_manager", + ) + # The workflow graph: + # + # CriticGroupChatManagerWithHIL -> executor_a <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor + # CriticGroupChatManagerWithHIL -> executor_b <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor + # CriticGroupChatManagerWithHIL -> executor_c <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor + + workflow = ( + WorkflowBuilder() + .set_start_executor(group_chat_manager) + .add_loop(group_chat_manager, hil_executor) + .add_loop(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) + .add_loop(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) + .add_loop(group_chat_manager, executor_c, 
condition=lambda x: x.selection == executor_c.id) + .build() + ) + + completion_event: WorkflowCompletedEvent | None = None + human_in_the_loop_event: HumanInTheLoopEvent | None = None + user_input = "Start group chat" + + while True: + # Depending on whether we have a human-in-the-loop event, we either + # run the workflow normally or send the message to the HIL executor. + if not human_in_the_loop_event: + response = workflow.run_stream([ChatMessage(ChatRole.USER, text=user_input)]) + else: + response = workflow.run_stream( + [ChatMessage(ChatRole.USER, text=user_input)], + executor=human_in_the_loop_event.executor_id, + ) + human_in_the_loop_event = None + + async for event in response: + print(f"{event}") + + if isinstance(event, WorkflowCompletedEvent): + completion_event = event + elif isinstance(event, HumanInTheLoopEvent): + human_in_the_loop_event = event + + # Prompt for user input if we are waiting for human intervention + if human_in_the_loop_event: + user_input = input("Human intervention required. 
Type 'stop' to end the loop or any message to continue: ") + elif completion_event: + break + + print(f"Completion Event: {completion_event}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/uv.lock b/python/uv.lock index e015c2d749..809576fe9a 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -21,6 +21,7 @@ members = [ "agent-framework-azure", "agent-framework-foundry", "agent-framework-project", + "agent-framework-workflow", ] [[package]] @@ -55,11 +56,15 @@ azure = [ foundry = [ { name = "agent-framework-foundry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] +workflow = [ + { name = "agent-framework-workflow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] [package.metadata] requires-dist = [ { name = "agent-framework-azure", marker = "extra == 'azure'", editable = "packages/azure" }, { name = "agent-framework-foundry", marker = "extra == 'foundry'", editable = "packages/foundry" }, + { name = "agent-framework-workflow", marker = "extra == 'workflow'", editable = "packages/workflow" }, { name = "openai", specifier = ">=1.94.0" }, { name = "opentelemetry-api", specifier = "~=1.24" }, { name = "opentelemetry-sdk", specifier = "~=1.24" }, @@ -67,7 +72,7 @@ requires-dist = [ { name = "pydantic-settings", specifier = ">=2.10.1" }, { name = "typing-extensions", specifier = ">=4.14.0" }, ] -provides-extras = ["azure", "foundry"] +provides-extras = ["azure", "foundry", "workflow"] [[package]] name = "agent-framework-azure" @@ -111,6 +116,7 @@ dependencies = [ { name = "agent-framework", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "agent-framework-azure", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "agent-framework-foundry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"agent-framework-workflow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] [package.dev-dependencies] @@ -152,6 +158,7 @@ requires-dist = [ { name = "agent-framework", editable = "packages/main" }, { name = "agent-framework-azure", editable = "packages/azure" }, { name = "agent-framework-foundry", editable = "packages/foundry" }, + { name = "agent-framework-workflow", editable = "packages/workflow" }, ] [package.metadata.requires-dev] @@ -187,6 +194,17 @@ dev = [ { name = "uv", specifier = ">=0.8.2,<0.9.0" }, ] +[[package]] +name = "agent-framework-workflow" +version = "0.1.0b1" +source = { editable = "packages/workflow" } +dependencies = [ + { name = "agent-framework", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[package.metadata] +requires-dist = [{ name = "agent-framework", editable = "packages/main" }] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -198,7 +216,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.14" +version = "3.12.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -210,76 +228,76 @@ dependencies = [ { name = "propcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "yarl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/0b/e39ad954107ebf213a2325038a3e7a506be3d98e1435e1f82086eec4cde2/aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2", size = 7822921, upload-time = "2025-07-10T13:05:33.968Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0c/88/f161f429f9de391eee6a5c2cffa54e2ecd5b7122ae99df247f7734dfefcb/aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248", size = 702641, upload-time = "2025-07-10T13:02:38.98Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b5/24fa382a69a25d242e2baa3e56d5ea5227d1b68784521aaf3a1a8b34c9a4/aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb", size = 479005, upload-time = "2025-07-10T13:02:42.714Z" }, - { url = "https://files.pythonhosted.org/packages/09/67/fda1bc34adbfaa950d98d934a23900918f9d63594928c70e55045838c943/aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd", size = 466781, upload-time = "2025-07-10T13:02:44.639Z" }, - { url = "https://files.pythonhosted.org/packages/36/96/3ce1ea96d3cf6928b87cfb8cdd94650367f5c2f36e686a1f5568f0f13754/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c", size = 1648841, upload-time = "2025-07-10T13:02:46.356Z" }, - { url = "https://files.pythonhosted.org/packages/be/04/ddea06cb4bc7d8db3745cf95e2c42f310aad485ca075bd685f0e4f0f6b65/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95", size = 1622896, upload-time = "2025-07-10T13:02:48.422Z" }, - { url = "https://files.pythonhosted.org/packages/73/66/63942f104d33ce6ca7871ac6c1e2ebab48b88f78b2b7680c37de60f5e8cd/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663", size = 1695302, upload-time = "2025-07-10T13:02:50.078Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/00/aab615742b953f04b48cb378ee72ada88555b47b860b98c21c458c030a23/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1", size = 1737617, upload-time = "2025-07-10T13:02:52.123Z" }, - { url = "https://files.pythonhosted.org/packages/d6/4f/ef6d9f77225cf27747368c37b3d69fac1f8d6f9d3d5de2d410d155639524/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61", size = 1642282, upload-time = "2025-07-10T13:02:53.899Z" }, - { url = "https://files.pythonhosted.org/packages/37/e1/e98a43c15aa52e9219a842f18c59cbae8bbe2d50c08d298f17e9e8bafa38/aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656", size = 1582406, upload-time = "2025-07-10T13:02:55.515Z" }, - { url = "https://files.pythonhosted.org/packages/71/5c/29c6dfb49323bcdb0239bf3fc97ffcf0eaf86d3a60426a3287ec75d67721/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3", size = 1626255, upload-time = "2025-07-10T13:02:57.343Z" }, - { url = "https://files.pythonhosted.org/packages/79/60/ec90782084090c4a6b459790cfd8d17be2c5662c9c4b2d21408b2f2dc36c/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288", size = 1637041, upload-time = "2025-07-10T13:02:59.008Z" }, - { url = "https://files.pythonhosted.org/packages/22/89/205d3ad30865c32bc472ac13f94374210745b05bd0f2856996cb34d53396/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda", size = 1612494, upload-time = "2025-07-10T13:03:00.618Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/ae/2f66edaa8bd6db2a4cba0386881eb92002cdc70834e2a93d1d5607132c7e/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc", size = 1692081, upload-time = "2025-07-10T13:03:02.154Z" }, - { url = "https://files.pythonhosted.org/packages/08/3a/fa73bfc6e21407ea57f7906a816f0dc73663d9549da703be05dbd76d2dc3/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8", size = 1715318, upload-time = "2025-07-10T13:03:04.322Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3", size = 1643660, upload-time = "2025-07-10T13:03:06.406Z" }, - { url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c", size = 428289, upload-time = "2025-07-10T13:03:08.274Z" }, - { url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db", size = 451328, upload-time = "2025-07-10T13:03:10.146Z" }, - { url = "https://files.pythonhosted.org/packages/53/e1/8029b29316971c5fa89cec170274582619a01b3d82dd1036872acc9bc7e8/aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597", size = 709960, upload-time = "2025-07-10T13:03:11.936Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/bd/4f204cf1e282041f7b7e8155f846583b19149e0872752711d0da5e9cc023/aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393", size = 482235, upload-time = "2025-07-10T13:03:14.118Z" }, - { url = "https://files.pythonhosted.org/packages/d6/0f/2a580fcdd113fe2197a3b9df30230c7e85bb10bf56f7915457c60e9addd9/aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179", size = 470501, upload-time = "2025-07-10T13:03:16.153Z" }, - { url = "https://files.pythonhosted.org/packages/38/78/2c1089f6adca90c3dd74915bafed6d6d8a87df5e3da74200f6b3a8b8906f/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb", size = 1740696, upload-time = "2025-07-10T13:03:18.4Z" }, - { url = "https://files.pythonhosted.org/packages/4a/c8/ce6c7a34d9c589f007cfe064da2d943b3dee5aabc64eaecd21faf927ab11/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245", size = 1689365, upload-time = "2025-07-10T13:03:20.629Z" }, - { url = "https://files.pythonhosted.org/packages/18/10/431cd3d089de700756a56aa896faf3ea82bee39d22f89db7ddc957580308/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b", size = 1788157, upload-time = "2025-07-10T13:03:22.44Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b2/26f4524184e0f7ba46671c512d4b03022633bcf7d32fa0c6f1ef49d55800/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641", size = 1827203, upload-time = "2025-07-10T13:03:24.628Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/30/aadcdf71b510a718e3d98a7bfeaea2396ac847f218b7e8edb241b09bd99a/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe", size = 1729664, upload-time = "2025-07-10T13:03:26.412Z" }, - { url = "https://files.pythonhosted.org/packages/67/7f/7ccf11756ae498fdedc3d689a0c36ace8fc82f9d52d3517da24adf6e9a74/aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7", size = 1666741, upload-time = "2025-07-10T13:03:28.167Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4d/35ebc170b1856dd020c92376dbfe4297217625ef4004d56587024dc2289c/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635", size = 1715013, upload-time = "2025-07-10T13:03:30.018Z" }, - { url = "https://files.pythonhosted.org/packages/7b/24/46dc0380146f33e2e4aa088b92374b598f5bdcde1718c77e8d1a0094f1a4/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da", size = 1710172, upload-time = "2025-07-10T13:03:31.821Z" }, - { url = "https://files.pythonhosted.org/packages/2f/0a/46599d7d19b64f4d0fe1b57bdf96a9a40b5c125f0ae0d8899bc22e91fdce/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419", size = 1690355, upload-time = "2025-07-10T13:03:34.754Z" }, - { url = "https://files.pythonhosted.org/packages/08/86/b21b682e33d5ca317ef96bd21294984f72379454e689d7da584df1512a19/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab", size = 1783958, upload-time = "2025-07-10T13:03:36.53Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/45/f639482530b1396c365f23c5e3b1ae51c9bc02ba2b2248ca0c855a730059/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0", size = 1804423, upload-time = "2025-07-10T13:03:38.504Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e5/39635a9e06eed1d73671bd4079a3caf9cf09a49df08490686f45a710b80e/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28", size = 1717479, upload-time = "2025-07-10T13:03:40.158Z" }, - { url = "https://files.pythonhosted.org/packages/51/e1/7f1c77515d369b7419c5b501196526dad3e72800946c0099594c1f0c20b4/aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b", size = 427907, upload-time = "2025-07-10T13:03:41.801Z" }, - { url = "https://files.pythonhosted.org/packages/06/24/a6bf915c85b7a5b07beba3d42b3282936b51e4578b64a51e8e875643c276/aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced", size = 452334, upload-time = "2025-07-10T13:03:43.485Z" }, - { url = "https://files.pythonhosted.org/packages/c3/0d/29026524e9336e33d9767a1e593ae2b24c2b8b09af7c2bd8193762f76b3e/aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22", size = 701055, upload-time = "2025-07-10T13:03:45.59Z" }, - { url = "https://files.pythonhosted.org/packages/0a/b8/a5e8e583e6c8c1056f4b012b50a03c77a669c2e9bf012b7cf33d6bc4b141/aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a", size = 475670, upload-time = "2025-07-10T13:03:47.249Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/e8/5202890c9e81a4ec2c2808dd90ffe024952e72c061729e1d49917677952f/aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff", size = 468513, upload-time = "2025-07-10T13:03:49.377Z" }, - { url = "https://files.pythonhosted.org/packages/23/e5/d11db8c23d8923d3484a27468a40737d50f05b05eebbb6288bafcb467356/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d", size = 1715309, upload-time = "2025-07-10T13:03:51.556Z" }, - { url = "https://files.pythonhosted.org/packages/53/44/af6879ca0eff7a16b1b650b7ea4a827301737a350a464239e58aa7c387ef/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869", size = 1697961, upload-time = "2025-07-10T13:03:53.511Z" }, - { url = "https://files.pythonhosted.org/packages/bb/94/18457f043399e1ec0e59ad8674c0372f925363059c276a45a1459e17f423/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c", size = 1753055, upload-time = "2025-07-10T13:03:55.368Z" }, - { url = "https://files.pythonhosted.org/packages/26/d9/1d3744dc588fafb50ff8a6226d58f484a2242b5dd93d8038882f55474d41/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7", size = 1799211, upload-time = "2025-07-10T13:03:57.216Z" }, - { url = "https://files.pythonhosted.org/packages/73/12/2530fb2b08773f717ab2d249ca7a982ac66e32187c62d49e2c86c9bba9b4/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660", size = 1718649, upload-time = 
"2025-07-10T13:03:59.469Z" }, - { url = "https://files.pythonhosted.org/packages/b9/34/8d6015a729f6571341a311061b578e8b8072ea3656b3d72329fa0faa2c7c/aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088", size = 1634452, upload-time = "2025-07-10T13:04:01.698Z" }, - { url = "https://files.pythonhosted.org/packages/ff/4b/08b83ea02595a582447aeb0c1986792d0de35fe7a22fb2125d65091cbaf3/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7", size = 1695511, upload-time = "2025-07-10T13:04:04.165Z" }, - { url = "https://files.pythonhosted.org/packages/b5/66/9c7c31037a063eec13ecf1976185c65d1394ded4a5120dd5965e3473cb21/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9", size = 1716967, upload-time = "2025-07-10T13:04:06.132Z" }, - { url = "https://files.pythonhosted.org/packages/ba/02/84406e0ad1acb0fb61fd617651ab6de760b2d6a31700904bc0b33bd0894d/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3", size = 1657620, upload-time = "2025-07-10T13:04:07.944Z" }, - { url = "https://files.pythonhosted.org/packages/07/53/da018f4013a7a179017b9a274b46b9a12cbeb387570f116964f498a6f211/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb", size = 1737179, upload-time = "2025-07-10T13:04:10.182Z" }, - { url = "https://files.pythonhosted.org/packages/49/e8/ca01c5ccfeaafb026d85fa4f43ceb23eb80ea9c1385688db0ef322c751e9/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425", size = 1765156, upload-time = "2025-07-10T13:04:12.029Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0", size = 1724766, upload-time = "2025-07-10T13:04:13.961Z" }, - { url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729", size = 422641, upload-time = "2025-07-10T13:04:16.018Z" }, - { url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338", size = 449316, upload-time = "2025-07-10T13:04:18.289Z" }, - { url = "https://files.pythonhosted.org/packages/06/48/e0d2fa8ac778008071e7b79b93ab31ef14ab88804d7ba71b5c964a7c844e/aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767", size = 695471, upload-time = "2025-07-10T13:04:20.124Z" }, - { url = "https://files.pythonhosted.org/packages/8d/e7/f73206afa33100804f790b71092888f47df65fd9a4cd0e6800d7c6826441/aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e", size = 473128, upload-time = "2025-07-10T13:04:21.928Z" }, - { url = "https://files.pythonhosted.org/packages/df/e2/4dd00180be551a6e7ee979c20fc7c32727f4889ee3fd5b0586e0d47f30e1/aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63", size = 465426, upload-time = "2025-07-10T13:04:24.071Z" }, - { url = 
"https://files.pythonhosted.org/packages/de/dd/525ed198a0bb674a323e93e4d928443a680860802c44fa7922d39436b48b/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d", size = 1704252, upload-time = "2025-07-10T13:04:26.049Z" }, - { url = "https://files.pythonhosted.org/packages/d8/b1/01e542aed560a968f692ab4fc4323286e8bc4daae83348cd63588e4f33e3/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab", size = 1685514, upload-time = "2025-07-10T13:04:28.186Z" }, - { url = "https://files.pythonhosted.org/packages/b3/06/93669694dc5fdabdc01338791e70452d60ce21ea0946a878715688d5a191/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4", size = 1737586, upload-time = "2025-07-10T13:04:30.195Z" }, - { url = "https://files.pythonhosted.org/packages/a5/3a/18991048ffc1407ca51efb49ba8bcc1645961f97f563a6c480cdf0286310/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026", size = 1786958, upload-time = "2025-07-10T13:04:32.482Z" }, - { url = "https://files.pythonhosted.org/packages/30/a8/81e237f89a32029f9b4a805af6dffc378f8459c7b9942712c809ff9e76e5/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd", size = 1709287, upload-time = "2025-07-10T13:04:34.493Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e3/bd67a11b0fe7fc12c6030473afd9e44223d456f500f7cf526dbaa259ae46/aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88", size = 1622990, upload-time = "2025-07-10T13:04:36.433Z" }, - { url = "https://files.pythonhosted.org/packages/83/ba/e0cc8e0f0d9ce0904e3cf2d6fa41904e379e718a013c721b781d53dcbcca/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086", size = 1676015, upload-time = "2025-07-10T13:04:38.958Z" }, - { url = "https://files.pythonhosted.org/packages/d8/b3/1e6c960520bda094c48b56de29a3d978254637ace7168dd97ddc273d0d6c/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933", size = 1707678, upload-time = "2025-07-10T13:04:41.275Z" }, - { url = "https://files.pythonhosted.org/packages/0a/19/929a3eb8c35b7f9f076a462eaa9830b32c7f27d3395397665caa5e975614/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151", size = 1650274, upload-time = "2025-07-10T13:04:43.483Z" }, - { url = "https://files.pythonhosted.org/packages/22/e5/81682a6f20dd1b18ce3d747de8eba11cbef9b270f567426ff7880b096b48/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8", size = 1726408, upload-time = "2025-07-10T13:04:45.577Z" }, - { url = "https://files.pythonhosted.org/packages/8c/17/884938dffaa4048302985483f77dfce5ac18339aad9b04ad4aaa5e32b028/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3", size = 1759879, upload-time = "2025-07-10T13:04:47.663Z" }, - { url = "https://files.pythonhosted.org/packages/95/78/53b081980f50b5cf874359bde707a6eacd6c4be3f5f5c93937e48c9d0025/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758", size = 1708770, upload-time = 
"2025-07-10T13:04:49.944Z" }, - { url = "https://files.pythonhosted.org/packages/ed/91/228eeddb008ecbe3ffa6c77b440597fdf640307162f0c6488e72c5a2d112/aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5", size = 421688, upload-time = "2025-07-10T13:04:51.993Z" }, - { url = "https://files.pythonhosted.org/packages/66/5f/8427618903343402fdafe2850738f735fd1d9409d2a8f9bcaae5e630d3ba/aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa", size = 448098, upload-time = "2025-07-10T13:04:53.999Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/dc/ef9394bde9080128ad401ac7ede185267ed637df03b51f05d14d1c99ad67/aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc", size = 703921, upload-time = "2025-07-29T05:49:43.584Z" }, + { url = "https://files.pythonhosted.org/packages/8f/42/63fccfc3a7ed97eb6e1a71722396f409c46b60a0552d8a56d7aad74e0df5/aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af", size = 480288, upload-time = "2025-07-29T05:49:47.851Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a2/7b8a020549f66ea2a68129db6960a762d2393248f1994499f8ba9728bbed/aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421", size = 468063, upload-time = "2025-07-29T05:49:49.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/f5/d11e088da9176e2ad8220338ae0000ed5429a15f3c9dfd983f39105399cd/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79", size = 1650122, upload-time = "2025-07-29T05:49:51.874Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6b/b60ce2757e2faed3d70ed45dafee48cee7bfb878785a9423f7e883f0639c/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77", size = 1624176, upload-time = "2025-07-29T05:49:53.805Z" }, + { url = "https://files.pythonhosted.org/packages/dd/de/8c9fde2072a1b72c4fadecf4f7d4be7a85b1d9a4ab333d8245694057b4c6/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c", size = 1696583, upload-time = "2025-07-29T05:49:55.338Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ad/07f863ca3d895a1ad958a54006c6dafb4f9310f8c2fdb5f961b8529029d3/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4", size = 1738896, upload-time = "2025-07-29T05:49:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/20/43/2bd482ebe2b126533e8755a49b128ec4e58f1a3af56879a3abdb7b42c54f/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6", size = 1643561, upload-time = "2025-07-29T05:49:58.762Z" }, + { url = "https://files.pythonhosted.org/packages/23/40/2fa9f514c4cf4cbae8d7911927f81a1901838baf5e09a8b2c299de1acfe5/aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2", size = 1583685, upload-time = "2025-07-29T05:50:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c3/94dc7357bc421f4fb978ca72a201a6c604ee90148f1181790c129396ceeb/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d", size = 1627533, upload-time = "2025-07-29T05:50:02.306Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3f/1f8911fe1844a07001e26593b5c255a685318943864b27b4e0267e840f95/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb", size = 1638319, upload-time = "2025-07-29T05:50:04.282Z" }, + { url = "https://files.pythonhosted.org/packages/4e/46/27bf57a99168c4e145ffee6b63d0458b9c66e58bb70687c23ad3d2f0bd17/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5", size = 1613776, upload-time = "2025-07-29T05:50:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7e/1d2d9061a574584bb4ad3dbdba0da90a27fdc795bc227def3a46186a8bc1/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b", size = 1693359, upload-time = "2025-07-29T05:50:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/bee429b52233c4a391980a5b3b196b060872a13eadd41c3a34be9b1469ed/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065", size = 1716598, upload-time = "2025-07-29T05:50:09.33Z" }, + { url = "https://files.pythonhosted.org/packages/57/39/b0314c1ea774df3392751b686104a3938c63ece2b7ce0ba1ed7c0b4a934f/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1", size = 1644940, upload-time = 
"2025-07-29T05:50:11.334Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/3dacb8d3f8f512c8ca43e3fa8a68b20583bd25636ffa4e56ee841ffd79ae/aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a", size = 429239, upload-time = "2025-07-29T05:50:12.803Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f9/470b5daba04d558c9673ca2034f28d067f3202a40e17804425f0c331c89f/aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830", size = 452297, upload-time = "2025-07-29T05:50:14.266Z" }, + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, + { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, 
upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, + { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, 
+ { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, ] [[package]] @@ -643,78 +661,96 @@ wheels = [ [[package]] name = "comm" -version = "0.2.2" +version = "0.2.3" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = 
"sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, upload-time = "2024-03-12T16:53:41.133Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, + { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" }, ] [[package]] name = "coverage" -version = "7.9.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/b7/c0465ca253df10a9e8dae0692a4ae6e9726d245390aaef92360e1d6d3832/coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", size = 813556, upload-time = "2025-07-03T10:54:15.101Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/0d/5c2114fd776c207bd55068ae8dc1bef63ecd1b767b3389984a8e58f2b926/coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912", size = 212039, upload-time = "2025-07-03T10:52:38.955Z" }, - { url = "https://files.pythonhosted.org/packages/cf/ad/dc51f40492dc2d5fcd31bb44577bc0cc8920757d6bc5d3e4293146524ef9/coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f", size = 212428, upload-time = 
"2025-07-03T10:52:41.36Z" }, - { url = "https://files.pythonhosted.org/packages/a2/a3/55cb3ff1b36f00df04439c3993d8529193cdf165a2467bf1402539070f16/coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f", size = 241534, upload-time = "2025-07-03T10:52:42.956Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c9/a8410b91b6be4f6e9c2e9f0dce93749b6b40b751d7065b4410bf89cb654b/coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf", size = 239408, upload-time = "2025-07-03T10:52:44.199Z" }, - { url = "https://files.pythonhosted.org/packages/ff/c4/6f3e56d467c612b9070ae71d5d3b114c0b899b5788e1ca3c93068ccb7018/coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547", size = 240552, upload-time = "2025-07-03T10:52:45.477Z" }, - { url = "https://files.pythonhosted.org/packages/fd/20/04eda789d15af1ce79bce5cc5fd64057c3a0ac08fd0576377a3096c24663/coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45", size = 240464, upload-time = "2025-07-03T10:52:46.809Z" }, - { url = "https://files.pythonhosted.org/packages/a9/5a/217b32c94cc1a0b90f253514815332d08ec0812194a1ce9cca97dda1cd20/coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2", size = 239134, upload-time = "2025-07-03T10:52:48.149Z" }, - { url = "https://files.pythonhosted.org/packages/34/73/1d019c48f413465eb5d3b6898b6279e87141c80049f7dbf73fd020138549/coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e", size = 239405, 
upload-time = "2025-07-03T10:52:49.687Z" }, - { url = "https://files.pythonhosted.org/packages/49/6c/a2beca7aa2595dad0c0d3f350382c381c92400efe5261e2631f734a0e3fe/coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e", size = 214519, upload-time = "2025-07-03T10:52:51.036Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c8/91e5e4a21f9a51e2c7cdd86e587ae01a4fcff06fc3fa8cde4d6f7cf68df4/coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c", size = 215400, upload-time = "2025-07-03T10:52:52.313Z" }, - { url = "https://files.pythonhosted.org/packages/39/40/916786453bcfafa4c788abee4ccd6f592b5b5eca0cd61a32a4e5a7ef6e02/coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba", size = 212152, upload-time = "2025-07-03T10:52:53.562Z" }, - { url = "https://files.pythonhosted.org/packages/9f/66/cc13bae303284b546a030762957322bbbff1ee6b6cb8dc70a40f8a78512f/coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa", size = 212540, upload-time = "2025-07-03T10:52:55.196Z" }, - { url = "https://files.pythonhosted.org/packages/0f/3c/d56a764b2e5a3d43257c36af4a62c379df44636817bb5f89265de4bf8bd7/coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a", size = 245097, upload-time = "2025-07-03T10:52:56.509Z" }, - { url = "https://files.pythonhosted.org/packages/b1/46/bd064ea8b3c94eb4ca5d90e34d15b806cba091ffb2b8e89a0d7066c45791/coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc", size = 242812, upload-time = "2025-07-03T10:52:57.842Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/02/d91992c2b29bc7afb729463bc918ebe5f361be7f1daae93375a5759d1e28/coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2", size = 244617, upload-time = "2025-07-03T10:52:59.239Z" }, - { url = "https://files.pythonhosted.org/packages/b7/4f/8fadff6bf56595a16d2d6e33415841b0163ac660873ed9a4e9046194f779/coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c", size = 244263, upload-time = "2025-07-03T10:53:00.601Z" }, - { url = "https://files.pythonhosted.org/packages/9b/d2/e0be7446a2bba11739edb9f9ba4eff30b30d8257370e237418eb44a14d11/coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd", size = 242314, upload-time = "2025-07-03T10:53:01.932Z" }, - { url = "https://files.pythonhosted.org/packages/9d/7d/dcbac9345000121b8b57a3094c2dfcf1ccc52d8a14a40c1d4bc89f936f80/coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74", size = 242904, upload-time = "2025-07-03T10:53:03.478Z" }, - { url = "https://files.pythonhosted.org/packages/41/58/11e8db0a0c0510cf31bbbdc8caf5d74a358b696302a45948d7c768dfd1cf/coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6", size = 214553, upload-time = "2025-07-03T10:53:05.174Z" }, - { url = "https://files.pythonhosted.org/packages/3a/7d/751794ec8907a15e257136e48dc1021b1f671220ecccfd6c4eaf30802714/coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7", size = 215441, upload-time = "2025-07-03T10:53:06.472Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/5b/34abcedf7b946c1c9e15b44f326cb5b0da852885312b30e916f674913428/coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62", size = 213873, upload-time = "2025-07-03T10:53:07.699Z" }, - { url = "https://files.pythonhosted.org/packages/53/d7/7deefc6fd4f0f1d4c58051f4004e366afc9e7ab60217ac393f247a1de70a/coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0", size = 212344, upload-time = "2025-07-03T10:53:09.3Z" }, - { url = "https://files.pythonhosted.org/packages/95/0c/ee03c95d32be4d519e6a02e601267769ce2e9a91fc8faa1b540e3626c680/coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3", size = 212580, upload-time = "2025-07-03T10:53:11.52Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9f/826fa4b544b27620086211b87a52ca67592622e1f3af9e0a62c87aea153a/coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1", size = 246383, upload-time = "2025-07-03T10:53:13.134Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b3/4477aafe2a546427b58b9c540665feff874f4db651f4d3cb21b308b3a6d2/coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615", size = 243400, upload-time = "2025-07-03T10:53:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/f8/c2/efffa43778490c226d9d434827702f2dfbc8041d79101a795f11cbb2cf1e/coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b", size = 245591, upload-time = "2025-07-03T10:53:15.872Z" }, - { url = 
"https://files.pythonhosted.org/packages/c6/e7/a59888e882c9a5f0192d8627a30ae57910d5d449c80229b55e7643c078c4/coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9", size = 245402, upload-time = "2025-07-03T10:53:17.124Z" }, - { url = "https://files.pythonhosted.org/packages/92/a5/72fcd653ae3d214927edc100ce67440ed8a0a1e3576b8d5e6d066ed239db/coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f", size = 243583, upload-time = "2025-07-03T10:53:18.781Z" }, - { url = "https://files.pythonhosted.org/packages/5c/f5/84e70e4df28f4a131d580d7d510aa1ffd95037293da66fd20d446090a13b/coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d", size = 244815, upload-time = "2025-07-03T10:53:20.168Z" }, - { url = "https://files.pythonhosted.org/packages/39/e7/d73d7cbdbd09fdcf4642655ae843ad403d9cbda55d725721965f3580a314/coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355", size = 214719, upload-time = "2025-07-03T10:53:21.521Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d6/7486dcc3474e2e6ad26a2af2db7e7c162ccd889c4c68fa14ea8ec189c9e9/coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0", size = 215509, upload-time = "2025-07-03T10:53:22.853Z" }, - { url = "https://files.pythonhosted.org/packages/b7/34/0439f1ae2593b0346164d907cdf96a529b40b7721a45fdcf8b03c95fcd90/coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b", size = 213910, upload-time = "2025-07-03T10:53:24.472Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/9d/7a8edf7acbcaa5e5c489a646226bed9591ee1c5e6a84733c0140e9ce1ae1/coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", size = 212367, upload-time = "2025-07-03T10:53:25.811Z" }, - { url = "https://files.pythonhosted.org/packages/e8/9e/5cd6f130150712301f7e40fb5865c1bc27b97689ec57297e568d972eec3c/coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", size = 212632, upload-time = "2025-07-03T10:53:27.075Z" }, - { url = "https://files.pythonhosted.org/packages/a8/de/6287a2c2036f9fd991c61cefa8c64e57390e30c894ad3aa52fac4c1e14a8/coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", size = 245793, upload-time = "2025-07-03T10:53:28.408Z" }, - { url = "https://files.pythonhosted.org/packages/06/cc/9b5a9961d8160e3cb0b558c71f8051fe08aa2dd4b502ee937225da564ed1/coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", size = 243006, upload-time = "2025-07-03T10:53:29.754Z" }, - { url = "https://files.pythonhosted.org/packages/49/d9/4616b787d9f597d6443f5588619c1c9f659e1f5fc9eebf63699eb6d34b78/coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", size = 244990, upload-time = "2025-07-03T10:53:31.098Z" }, - { url = "https://files.pythonhosted.org/packages/48/83/801cdc10f137b2d02b005a761661649ffa60eb173dcdaeb77f571e4dc192/coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", size = 245157, upload-time = "2025-07-03T10:53:32.717Z" 
}, - { url = "https://files.pythonhosted.org/packages/c8/a4/41911ed7e9d3ceb0ffb019e7635468df7499f5cc3edca5f7dfc078e9c5ec/coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", size = 243128, upload-time = "2025-07-03T10:53:34.009Z" }, - { url = "https://files.pythonhosted.org/packages/10/41/344543b71d31ac9cb00a664d5d0c9ef134a0fe87cb7d8430003b20fa0b7d/coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", size = 244511, upload-time = "2025-07-03T10:53:35.434Z" }, - { url = "https://files.pythonhosted.org/packages/d5/81/3b68c77e4812105e2a060f6946ba9e6f898ddcdc0d2bfc8b4b152a9ae522/coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", size = 214765, upload-time = "2025-07-03T10:53:36.787Z" }, - { url = "https://files.pythonhosted.org/packages/06/a2/7fac400f6a346bb1a4004eb2a76fbff0e242cd48926a2ce37a22a6a1d917/coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", size = 215536, upload-time = "2025-07-03T10:53:38.188Z" }, - { url = "https://files.pythonhosted.org/packages/08/47/2c6c215452b4f90d87017e61ea0fd9e0486bb734cb515e3de56e2c32075f/coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", size = 213943, upload-time = "2025-07-03T10:53:39.492Z" }, - { url = "https://files.pythonhosted.org/packages/a3/46/e211e942b22d6af5e0f323faa8a9bc7c447a1cf1923b64c47523f36ed488/coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", size = 213088, upload-time = "2025-07-03T10:53:40.874Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/2f/762551f97e124442eccd907bf8b0de54348635b8866a73567eb4e6417acf/coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", size = 213298, upload-time = "2025-07-03T10:53:42.218Z" }, - { url = "https://files.pythonhosted.org/packages/7a/b7/76d2d132b7baf7360ed69be0bcab968f151fa31abe6d067f0384439d9edb/coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", size = 256541, upload-time = "2025-07-03T10:53:43.823Z" }, - { url = "https://files.pythonhosted.org/packages/a0/17/392b219837d7ad47d8e5974ce5f8dc3deb9f99a53b3bd4d123602f960c81/coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", size = 252761, upload-time = "2025-07-03T10:53:45.19Z" }, - { url = "https://files.pythonhosted.org/packages/d5/77/4256d3577fe1b0daa8d3836a1ebe68eaa07dd2cbaf20cf5ab1115d6949d4/coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", size = 254917, upload-time = "2025-07-03T10:53:46.931Z" }, - { url = "https://files.pythonhosted.org/packages/53/99/fc1a008eef1805e1ddb123cf17af864743354479ea5129a8f838c433cc2c/coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", size = 256147, upload-time = "2025-07-03T10:53:48.289Z" }, - { url = "https://files.pythonhosted.org/packages/92/c0/f63bf667e18b7f88c2bdb3160870e277c4874ced87e21426128d70aa741f/coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", size = 254261, upload-time = 
"2025-07-03T10:53:49.99Z" }, - { url = "https://files.pythonhosted.org/packages/8c/32/37dd1c42ce3016ff8ec9e4b607650d2e34845c0585d3518b2a93b4830c1a/coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", size = 255099, upload-time = "2025-07-03T10:53:51.354Z" }, - { url = "https://files.pythonhosted.org/packages/da/2e/af6b86f7c95441ce82f035b3affe1cd147f727bbd92f563be35e2d585683/coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", size = 215440, upload-time = "2025-07-03T10:53:52.808Z" }, - { url = "https://files.pythonhosted.org/packages/4d/bb/8a785d91b308867f6b2e36e41c569b367c00b70c17f54b13ac29bcd2d8c8/coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", size = 216537, upload-time = "2025-07-03T10:53:54.273Z" }, - { url = "https://files.pythonhosted.org/packages/1d/a0/a6bffb5e0f41a47279fd45a8f3155bf193f77990ae1c30f9c224b61cacb0/coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", size = 214398, upload-time = "2025-07-03T10:53:56.715Z" }, - { url = "https://files.pythonhosted.org/packages/d7/85/f8bbefac27d286386961c25515431482a425967e23d3698b75a250872924/coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050", size = 204013, upload-time = "2025-07-03T10:54:12.084Z" }, - { url = "https://files.pythonhosted.org/packages/3c/38/bbe2e63902847cf79036ecc75550d0698af31c91c7575352eb25190d0fb3/coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", size = 204005, upload-time = "2025-07-03T10:54:13.491Z" }, +version = "7.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/87/0e/66dbd4c6a7f0758a8d18044c048779ba21fb94856e1edcf764bd5403e710/coverage-7.10.1.tar.gz", hash = "sha256:ae2b4856f29ddfe827106794f3589949a57da6f0d38ab01e24ec35107979ba57", size = 819938, upload-time = "2025-07-27T14:13:39.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e7/0f4e35a15361337529df88151bddcac8e8f6d6fd01da94a4b7588901c2fe/coverage-7.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c86eb388bbd609d15560e7cc0eb936c102b6f43f31cf3e58b4fd9afe28e1372", size = 214627, upload-time = "2025-07-27T14:11:01.211Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/17872e762c408362072c936dbf3ca28c67c609a1f5af434b1355edcb7e12/coverage-7.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b4ba0f488c1bdb6bd9ba81da50715a372119785458831c73428a8566253b86b", size = 215015, upload-time = "2025-07-27T14:11:03.988Z" }, + { url = "https://files.pythonhosted.org/packages/54/50/c9d445ba38ee5f685f03876c0f8223469e2e46c5d3599594dca972b470c8/coverage-7.10.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083442ecf97d434f0cb3b3e3676584443182653da08b42e965326ba12d6b5f2a", size = 241995, upload-time = "2025-07-27T14:11:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/cc/83/4ae6e0f60376af33de543368394d21b9ac370dc86434039062ef171eebf8/coverage-7.10.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c1a40c486041006b135759f59189385da7c66d239bad897c994e18fd1d0c128f", size = 243253, upload-time = "2025-07-27T14:11:07.424Z" }, + { url = "https://files.pythonhosted.org/packages/49/90/17a4d9ac7171be364ce8c0bb2b6da05e618ebfe1f11238ad4f26c99f5467/coverage-7.10.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3beb76e20b28046989300c4ea81bf690df84ee98ade4dc0bbbf774a28eb98440", size = 245110, upload-time = "2025-07-27T14:11:09.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/f7/edc3f485d536ed417f3af2b4969582bcb5fab456241721825fa09354161e/coverage-7.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc265a7945e8d08da28999ad02b544963f813a00f3ed0a7a0ce4165fd77629f8", size = 243056, upload-time = "2025-07-27T14:11:10.586Z" }, + { url = "https://files.pythonhosted.org/packages/58/2c/c4c316a57718556b8d0cc8304437741c31b54a62934e7c8c551a7915c2f4/coverage-7.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:47c91f32ba4ac46f1e224a7ebf3f98b4b24335bad16137737fe71a5961a0665c", size = 241731, upload-time = "2025-07-27T14:11:12.145Z" }, + { url = "https://files.pythonhosted.org/packages/f7/93/c78e144c6f086043d0d7d9237c5b880e71ac672ed2712c6f8cca5544481f/coverage-7.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1a108dd78ed185020f66f131c60078f3fae3f61646c28c8bb4edd3fa121fc7fc", size = 242023, upload-time = "2025-07-27T14:11:13.573Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e1/34e8505ca81fc144a612e1cc79fadd4a78f42e96723875f4e9f1f470437e/coverage-7.10.1-cp310-cp310-win32.whl", hash = "sha256:7092cc82382e634075cc0255b0b69cb7cada7c1f249070ace6a95cb0f13548ef", size = 217130, upload-time = "2025-07-27T14:11:15.11Z" }, + { url = "https://files.pythonhosted.org/packages/75/2b/82adfce6edffc13d804aee414e64c0469044234af9296e75f6d13f92f6a2/coverage-7.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:ac0c5bba938879c2fc0bc6c1b47311b5ad1212a9dcb8b40fe2c8110239b7faed", size = 218015, upload-time = "2025-07-27T14:11:16.836Z" }, + { url = "https://files.pythonhosted.org/packages/20/8e/ef088112bd1b26e2aa931ee186992b3e42c222c64f33e381432c8ee52aae/coverage-7.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b45e2f9d5b0b5c1977cb4feb5f594be60eb121106f8900348e29331f553a726f", size = 214747, upload-time = "2025-07-27T14:11:18.217Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/76/a1e46f3c6e0897758eb43af88bb3c763cb005f4950769f7b553e22aa5f89/coverage-7.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a7a4d74cb0f5e3334f9aa26af7016ddb94fb4bfa11b4a573d8e98ecba8c34f1", size = 215128, upload-time = "2025-07-27T14:11:19.706Z" }, + { url = "https://files.pythonhosted.org/packages/78/4d/903bafb371a8c887826ecc30d3977b65dfad0e1e66aa61b7e173de0828b0/coverage-7.10.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d4b0aab55ad60ead26159ff12b538c85fbab731a5e3411c642b46c3525863437", size = 245140, upload-time = "2025-07-27T14:11:21.261Z" }, + { url = "https://files.pythonhosted.org/packages/55/f1/1f8f09536f38394a8698dd08a0e9608a512eacee1d3b771e2d06397f77bf/coverage-7.10.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dcc93488c9ebd229be6ee1f0d9aad90da97b33ad7e2912f5495804d78a3cd6b7", size = 246977, upload-time = "2025-07-27T14:11:23.15Z" }, + { url = "https://files.pythonhosted.org/packages/57/cc/ed6bbc5a3bdb36ae1bca900bbbfdcb23b260ef2767a7b2dab38b92f61adf/coverage-7.10.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa309df995d020f3438407081b51ff527171cca6772b33cf8f85344b8b4b8770", size = 249140, upload-time = "2025-07-27T14:11:24.743Z" }, + { url = "https://files.pythonhosted.org/packages/10/f5/e881ade2d8e291b60fa1d93d6d736107e940144d80d21a0d4999cff3642f/coverage-7.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cfb8b9d8855c8608f9747602a48ab525b1d320ecf0113994f6df23160af68262", size = 246869, upload-time = "2025-07-27T14:11:26.156Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/6a5665cb8996e3cd341d184bb11e2a8edf01d8dadcf44eb1e742186cf243/coverage-7.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:320d86da829b012982b414c7cdda65f5d358d63f764e0e4e54b33097646f39a3", size = 244899, upload-time = "2025-07-27T14:11:27.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/11/24156776709c4e25bf8a33d6bb2ece9a9067186ddac19990f6560a7f8130/coverage-7.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dc60ddd483c556590da1d9482a4518292eec36dd0e1e8496966759a1f282bcd0", size = 245507, upload-time = "2025-07-27T14:11:29.544Z" }, + { url = "https://files.pythonhosted.org/packages/43/db/a6f0340b7d6802a79928659c9a32bc778ea420e87a61b568d68ac36d45a8/coverage-7.10.1-cp311-cp311-win32.whl", hash = "sha256:4fcfe294f95b44e4754da5b58be750396f2b1caca8f9a0e78588e3ef85f8b8be", size = 217167, upload-time = "2025-07-27T14:11:31.349Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/1990eb4fd05cea4cfabdf1d587a997ac5f9a8bee883443a1d519a2a848c9/coverage-7.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:efa23166da3fe2915f8ab452dde40319ac84dc357f635737174a08dbd912980c", size = 218054, upload-time = "2025-07-27T14:11:33.202Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/5e061d6020251b20e9b4303bb0b7900083a1a384ec4e5db326336c1c4abd/coverage-7.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:d12b15a8c3759e2bb580ffa423ae54be4f184cf23beffcbd641f4fe6e1584293", size = 216483, upload-time = "2025-07-27T14:11:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3f/b051feeb292400bd22d071fdf933b3ad389a8cef5c80c7866ed0c7414b9e/coverage-7.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6b7dc7f0a75a7eaa4584e5843c873c561b12602439d2351ee28c7478186c4da4", size = 214934, upload-time = "2025-07-27T14:11:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e4/a61b27d5c4c2d185bdfb0bfe9d15ab4ac4f0073032665544507429ae60eb/coverage-7.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:607f82389f0ecafc565813aa201a5cade04f897603750028dd660fb01797265e", size = 215173, upload-time = "2025-07-27T14:11:38.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/01/40a6ee05b60d02d0bc53742ad4966e39dccd450aafb48c535a64390a3552/coverage-7.10.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f7da31a1ba31f1c1d4d5044b7c5813878adae1f3af8f4052d679cc493c7328f4", size = 246190, upload-time = "2025-07-27T14:11:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/11/ef/a28d64d702eb583c377255047281305dc5a5cfbfb0ee36e721f78255adb6/coverage-7.10.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51fe93f3fe4f5d8483d51072fddc65e717a175490804e1942c975a68e04bf97a", size = 248618, upload-time = "2025-07-27T14:11:41.841Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ad/73d018bb0c8317725370c79d69b5c6e0257df84a3b9b781bda27a438a3be/coverage-7.10.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e59d00830da411a1feef6ac828b90bbf74c9b6a8e87b8ca37964925bba76dbe", size = 250081, upload-time = "2025-07-27T14:11:43.705Z" }, + { url = "https://files.pythonhosted.org/packages/2d/dd/496adfbbb4503ebca5d5b2de8bed5ec00c0a76558ffc5b834fd404166bc9/coverage-7.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:924563481c27941229cb4e16eefacc35da28563e80791b3ddc5597b062a5c386", size = 247990, upload-time = "2025-07-27T14:11:45.244Z" }, + { url = "https://files.pythonhosted.org/packages/18/3c/a9331a7982facfac0d98a4a87b36ae666fe4257d0f00961a3a9ef73e015d/coverage-7.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ca79146ee421b259f8131f153102220b84d1a5e6fb9c8aed13b3badfd1796de6", size = 246191, upload-time = "2025-07-27T14:11:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/62/0c/75345895013b83f7afe92ec595e15a9a525ede17491677ceebb2ba5c3d85/coverage-7.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2b225a06d227f23f386fdc0eab471506d9e644be699424814acc7d114595495f", size = 247400, upload-time = "2025-07-27T14:11:48.643Z" }, + { 
url = "https://files.pythonhosted.org/packages/e2/a9/98b268cfc5619ef9df1d5d34fee408ecb1542d9fd43d467e5c2f28668cd4/coverage-7.10.1-cp312-cp312-win32.whl", hash = "sha256:5ba9a8770effec5baaaab1567be916c87d8eea0c9ad11253722d86874d885eca", size = 217338, upload-time = "2025-07-27T14:11:50.258Z" }, + { url = "https://files.pythonhosted.org/packages/fe/31/22a5440e4d1451f253c5cd69fdcead65e92ef08cd4ec237b8756dc0b20a7/coverage-7.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:9eb245a8d8dd0ad73b4062135a251ec55086fbc2c42e0eb9725a9b553fba18a3", size = 218125, upload-time = "2025-07-27T14:11:52.034Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2b/40d9f0ce7ee839f08a43c5bfc9d05cec28aaa7c9785837247f96cbe490b9/coverage-7.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:7718060dd4434cc719803a5e526838a5d66e4efa5dc46d2b25c21965a9c6fcc4", size = 216523, upload-time = "2025-07-27T14:11:53.965Z" }, + { url = "https://files.pythonhosted.org/packages/ef/72/135ff5fef09b1ffe78dbe6fcf1e16b2e564cd35faeacf3d63d60d887f12d/coverage-7.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebb08d0867c5a25dffa4823377292a0ffd7aaafb218b5d4e2e106378b1061e39", size = 214960, upload-time = "2025-07-27T14:11:55.959Z" }, + { url = "https://files.pythonhosted.org/packages/b1/aa/73a5d1a6fc08ca709a8177825616aa95ee6bf34d522517c2595484a3e6c9/coverage-7.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f32a95a83c2e17422f67af922a89422cd24c6fa94041f083dd0bb4f6057d0bc7", size = 215220, upload-time = "2025-07-27T14:11:57.899Z" }, + { url = "https://files.pythonhosted.org/packages/8d/40/3124fdd45ed3772a42fc73ca41c091699b38a2c3bd4f9cb564162378e8b6/coverage-7.10.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c4c746d11c8aba4b9f58ca8bfc6fbfd0da4efe7960ae5540d1a1b13655ee8892", size = 245772, upload-time = "2025-07-27T14:12:00.422Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/62/a77b254822efa8c12ad59e8039f2bc3df56dc162ebda55e1943e35ba31a5/coverage-7.10.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7f39edd52c23e5c7ed94e0e4bf088928029edf86ef10b95413e5ea670c5e92d7", size = 248116, upload-time = "2025-07-27T14:12:03.099Z" }, + { url = "https://files.pythonhosted.org/packages/1d/01/8101f062f472a3a6205b458d18ef0444a63ae5d36a8a5ed5dd0f6167f4db/coverage-7.10.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab6e19b684981d0cd968906e293d5628e89faacb27977c92f3600b201926b994", size = 249554, upload-time = "2025-07-27T14:12:04.668Z" }, + { url = "https://files.pythonhosted.org/packages/8f/7b/e51bc61573e71ff7275a4f167aecbd16cb010aefdf54bcd8b0a133391263/coverage-7.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5121d8cf0eacb16133501455d216bb5f99899ae2f52d394fe45d59229e6611d0", size = 247766, upload-time = "2025-07-27T14:12:06.234Z" }, + { url = "https://files.pythonhosted.org/packages/4b/71/1c96d66a51d4204a9d6d12df53c4071d87e110941a2a1fe94693192262f5/coverage-7.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df1c742ca6f46a6f6cbcaef9ac694dc2cb1260d30a6a2f5c68c5f5bcfee1cfd7", size = 245735, upload-time = "2025-07-27T14:12:08.305Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/efbc2ac4d35ae2f22ef6df2ca084c60e13bd9378be68655e3268c80349ab/coverage-7.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40f9a38676f9c073bf4b9194707aa1eb97dca0e22cc3766d83879d72500132c7", size = 247118, upload-time = "2025-07-27T14:12:09.903Z" }, + { url = "https://files.pythonhosted.org/packages/d1/22/073848352bec28ca65f2b6816b892fcf9a31abbef07b868487ad15dd55f1/coverage-7.10.1-cp313-cp313-win32.whl", hash = "sha256:2348631f049e884839553b9974f0821d39241c6ffb01a418efce434f7eba0fe7", size = 217381, upload-time = "2025-07-27T14:12:11.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/df/df6a0ff33b042f000089bd11b6bb034bab073e2ab64a56e78ed882cba55d/coverage-7.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:4072b31361b0d6d23f750c524f694e1a417c1220a30d3ef02741eed28520c48e", size = 218152, upload-time = "2025-07-27T14:12:13.182Z" }, + { url = "https://files.pythonhosted.org/packages/30/e3/5085ca849a40ed6b47cdb8f65471c2f754e19390b5a12fa8abd25cbfaa8f/coverage-7.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:3e31dfb8271937cab9425f19259b1b1d1f556790e98eb266009e7a61d337b6d4", size = 216559, upload-time = "2025-07-27T14:12:14.807Z" }, + { url = "https://files.pythonhosted.org/packages/cc/93/58714efbfdeb547909feaabe1d67b2bdd59f0597060271b9c548d5efb529/coverage-7.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1c4f679c6b573a5257af6012f167a45be4c749c9925fd44d5178fd641ad8bf72", size = 215677, upload-time = "2025-07-27T14:12:16.68Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0c/18eaa5897e7e8cb3f8c45e563e23e8a85686b4585e29d53cacb6bc9cb340/coverage-7.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:871ebe8143da284bd77b84a9136200bd638be253618765d21a1fce71006d94af", size = 215899, upload-time = "2025-07-27T14:12:18.758Z" }, + { url = "https://files.pythonhosted.org/packages/84/c1/9d1affacc3c75b5a184c140377701bbf14fc94619367f07a269cd9e4fed6/coverage-7.10.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:998c4751dabf7d29b30594af416e4bf5091f11f92a8d88eb1512c7ba136d1ed7", size = 257140, upload-time = "2025-07-27T14:12:20.357Z" }, + { url = "https://files.pythonhosted.org/packages/3d/0f/339bc6b8fa968c346df346068cca1f24bdea2ddfa93bb3dc2e7749730962/coverage-7.10.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:780f750a25e7749d0af6b3631759c2c14f45de209f3faaa2398312d1c7a22759", size = 259005, upload-time = "2025-07-27T14:12:22.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/22/89390864b92ea7c909079939b71baba7e5b42a76bf327c1d615bd829ba57/coverage-7.10.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:590bdba9445df4763bdbebc928d8182f094c1f3947a8dc0fc82ef014dbdd8324", size = 261143, upload-time = "2025-07-27T14:12:23.746Z" }, + { url = "https://files.pythonhosted.org/packages/2c/56/3d04d89017c0c41c7a71bd69b29699d919b6bbf2649b8b2091240b97dd6a/coverage-7.10.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b2df80cb6a2af86d300e70acb82e9b79dab2c1e6971e44b78dbfc1a1e736b53", size = 258735, upload-time = "2025-07-27T14:12:25.73Z" }, + { url = "https://files.pythonhosted.org/packages/cb/40/312252c8afa5ca781063a09d931f4b9409dc91526cd0b5a2b84143ffafa2/coverage-7.10.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d6a558c2725bfb6337bf57c1cd366c13798bfd3bfc9e3dd1f4a6f6fc95a4605f", size = 256871, upload-time = "2025-07-27T14:12:27.767Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2b/564947d5dede068215aaddb9e05638aeac079685101462218229ddea9113/coverage-7.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e6150d167f32f2a54690e572e0a4c90296fb000a18e9b26ab81a6489e24e78dd", size = 257692, upload-time = "2025-07-27T14:12:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/93/1b/c8a867ade85cb26d802aea2209b9c2c80613b9c122baa8c8ecea6799648f/coverage-7.10.1-cp313-cp313t-win32.whl", hash = "sha256:d946a0c067aa88be4a593aad1236493313bafaa27e2a2080bfe88db827972f3c", size = 218059, upload-time = "2025-07-27T14:12:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/cd4ab40570ae83a516bf5e754ea4388aeedd48e660e40c50b7713ed4f930/coverage-7.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e37c72eaccdd5ed1130c67a92ad38f5b2af66eeff7b0abe29534225db2ef7b18", size = 219150, upload-time = "2025-07-27T14:12:32.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/16/6e5ed5854be6d70d0c39e9cb9dd2449f2c8c34455534c32c1a508c7dbdb5/coverage-7.10.1-cp313-cp313t-win_arm64.whl", hash = "sha256:89ec0ffc215c590c732918c95cd02b55c7d0f569d76b90bb1a5e78aa340618e4", size = 217014, upload-time = "2025-07-27T14:12:34.406Z" }, + { url = "https://files.pythonhosted.org/packages/54/8e/6d0bfe9c3d7121cf936c5f8b03e8c3da1484fb801703127dba20fb8bd3c7/coverage-7.10.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:166d89c57e877e93d8827dac32cedae6b0277ca684c6511497311249f35a280c", size = 214951, upload-time = "2025-07-27T14:12:36.069Z" }, + { url = "https://files.pythonhosted.org/packages/f2/29/e3e51a8c653cf2174c60532aafeb5065cea0911403fa144c9abe39790308/coverage-7.10.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bed4a2341b33cd1a7d9ffc47df4a78ee61d3416d43b4adc9e18b7d266650b83e", size = 215229, upload-time = "2025-07-27T14:12:37.759Z" }, + { url = "https://files.pythonhosted.org/packages/e0/59/3c972080b2fa18b6c4510201f6d4dc87159d450627d062cd9ad051134062/coverage-7.10.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddca1e4f5f4c67980533df01430184c19b5359900e080248bbf4ed6789584d8b", size = 245738, upload-time = "2025-07-27T14:12:39.453Z" }, + { url = "https://files.pythonhosted.org/packages/2e/04/fc0d99d3f809452654e958e1788454f6e27b34e43f8f8598191c8ad13537/coverage-7.10.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:37b69226001d8b7de7126cad7366b0778d36777e4d788c66991455ba817c5b41", size = 248045, upload-time = "2025-07-27T14:12:41.387Z" }, + { url = "https://files.pythonhosted.org/packages/5e/2e/afcbf599e77e0dfbf4c97197747250d13d397d27e185b93987d9eaac053d/coverage-7.10.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2f22102197bcb1722691296f9e589f02b616f874e54a209284dd7b9294b0b7f", size = 249666, upload-time = "2025-07-27T14:12:43.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/ae/bc47f7f8ecb7a06cbae2bf86a6fa20f479dd902bc80f57cff7730438059d/coverage-7.10.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1e0c768b0f9ac5839dac5cf88992a4bb459e488ee8a1f8489af4cb33b1af00f1", size = 247692, upload-time = "2025-07-27T14:12:44.83Z" }, + { url = "https://files.pythonhosted.org/packages/b6/26/cbfa3092d31ccba8ba7647e4d25753263e818b4547eba446b113d7d1efdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:991196702d5e0b120a8fef2664e1b9c333a81d36d5f6bcf6b225c0cf8b0451a2", size = 245536, upload-time = "2025-07-27T14:12:46.527Z" }, + { url = "https://files.pythonhosted.org/packages/56/77/9c68e92500e6a1c83d024a70eadcc9a173f21aadd73c4675fe64c9c43fdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ae8e59e5f4fd85d6ad34c2bb9d74037b5b11be072b8b7e9986beb11f957573d4", size = 246954, upload-time = "2025-07-27T14:12:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a5/ba96671c5a669672aacd9877a5987c8551501b602827b4e84256da2a30a7/coverage-7.10.1-cp314-cp314-win32.whl", hash = "sha256:042125c89cf74a074984002e165d61fe0e31c7bd40ebb4bbebf07939b5924613", size = 217616, upload-time = "2025-07-27T14:12:51.214Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3c/e1e1eb95fc1585f15a410208c4795db24a948e04d9bde818fe4eb893bc85/coverage-7.10.1-cp314-cp314-win_amd64.whl", hash = "sha256:a22c3bfe09f7a530e2c94c87ff7af867259c91bef87ed2089cd69b783af7b84e", size = 218412, upload-time = "2025-07-27T14:12:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/b0/85/7e1e5be2cb966cba95566ba702b13a572ca744fbb3779df9888213762d67/coverage-7.10.1-cp314-cp314-win_arm64.whl", hash = "sha256:ee6be07af68d9c4fca4027c70cea0c31a0f1bc9cb464ff3c84a1f916bf82e652", size = 216776, upload-time = "2025-07-27T14:12:55.482Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/0f/5bb8f29923141cca8560fe2217679caf4e0db643872c1945ac7d8748c2a7/coverage-7.10.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d24fb3c0c8ff0d517c5ca5de7cf3994a4cd559cde0315201511dbfa7ab528894", size = 215698, upload-time = "2025-07-27T14:12:57.225Z" }, + { url = "https://files.pythonhosted.org/packages/80/29/547038ffa4e8e4d9e82f7dfc6d152f75fcdc0af146913f0ba03875211f03/coverage-7.10.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1217a54cfd79be20512a67ca81c7da3f2163f51bbfd188aab91054df012154f5", size = 215902, upload-time = "2025-07-27T14:12:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8a/7aaa8fbfaed900147987a424e112af2e7790e1ac9cd92601e5bd4e1ba60a/coverage-7.10.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:51f30da7a52c009667e02f125737229d7d8044ad84b79db454308033a7808ab2", size = 257230, upload-time = "2025-07-27T14:13:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1d/c252b5ffac44294e23a0d79dd5acf51749b39795ccc898faeabf7bee903f/coverage-7.10.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ed3718c757c82d920f1c94089066225ca2ad7f00bb904cb72b1c39ebdd906ccb", size = 259194, upload-time = "2025-07-27T14:13:03.247Z" }, + { url = "https://files.pythonhosted.org/packages/16/ad/6c8d9f83d08f3bac2e7507534d0c48d1a4f52c18e6f94919d364edbdfa8f/coverage-7.10.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc452481e124a819ced0c25412ea2e144269ef2f2534b862d9f6a9dae4bda17b", size = 261316, upload-time = "2025-07-27T14:13:04.957Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4e/f9bbf3a36c061e2e0e0f78369c006d66416561a33d2bee63345aee8ee65e/coverage-7.10.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9d6f494c307e5cb9b1e052ec1a471060f1dea092c8116e642e7a23e79d9388ea", size = 258794, upload-time = "2025-07-27T14:13:06.715Z" }, + 
{ url = "https://files.pythonhosted.org/packages/87/82/e600bbe78eb2cb0541751d03cef9314bcd0897e8eea156219c39b685f869/coverage-7.10.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fc0e46d86905ddd16b85991f1f4919028092b4e511689bbdaff0876bd8aab3dd", size = 256869, upload-time = "2025-07-27T14:13:08.933Z" }, + { url = "https://files.pythonhosted.org/packages/ce/5d/2fc9a9236c5268f68ac011d97cd3a5ad16cc420535369bedbda659fdd9b7/coverage-7.10.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80b9ccd82e30038b61fc9a692a8dc4801504689651b281ed9109f10cc9fe8b4d", size = 257765, upload-time = "2025-07-27T14:13:10.778Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/b4e00b2bd48a2dc8e1c7d2aea7455f40af2e36484ab2ef06deb85883e9fe/coverage-7.10.1-cp314-cp314t-win32.whl", hash = "sha256:e58991a2b213417285ec866d3cd32db17a6a88061a985dbb7e8e8f13af429c47", size = 218420, upload-time = "2025-07-27T14:13:12.882Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d21d05f33ea27ece327422240e69654b5932b0b29e7fbc40fbab3cf199bf/coverage-7.10.1-cp314-cp314t-win_amd64.whl", hash = "sha256:e88dd71e4ecbc49d9d57d064117462c43f40a21a1383507811cf834a4a620651", size = 219536, upload-time = "2025-07-27T14:13:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/a6/68/7fea94b141281ed8be3d1d5c4319a97f2befc3e487ce33657fc64db2c45e/coverage-7.10.1-cp314-cp314t-win_arm64.whl", hash = "sha256:1aadfb06a30c62c2eb82322171fe1f7c288c80ca4156d46af0ca039052814bab", size = 217190, upload-time = "2025-07-27T14:13:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/0f/64/922899cff2c0fd3496be83fa8b81230f5a8d82a2ad30f98370b133c2c83b/coverage-7.10.1-py3-none-any.whl", hash = "sha256:fa2a258aa6bf188eb9a8948f7102a83da7c430a0dce918dbd8b60ef8fcb772d7", size = 206597, upload-time = "2025-07-27T14:13:37.221Z" }, ] [package.optional-dependencies] @@ -1808,20 +1844,20 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.35.0" +version = "1.36.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/c9/4509bfca6bb43220ce7f863c9f791e0d5001c2ec2b5867d48586008b3d96/opentelemetry_api-1.35.0.tar.gz", hash = "sha256:a111b959bcfa5b4d7dffc2fbd6a241aa72dd78dd8e79b5b1662bda896c5d2ffe", size = 64778, upload-time = "2025-07-11T12:23:28.804Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/d2/c782c88b8afbf961d6972428821c302bd1e9e7bc361352172f0ca31296e2/opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0", size = 64780, upload-time = "2025-07-29T15:12:06.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/5a/3f8d078dbf55d18442f6a2ecedf6786d81d7245844b2b20ce2b8ad6f0307/opentelemetry_api-1.35.0-py3-none-any.whl", hash = "sha256:c4ea7e258a244858daf18474625e9cc0149b8ee354f37843415771a40c25ee06", size = 65566, upload-time = "2025-07-11T12:23:07.944Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ee/6b08dde0a022c463b88f55ae81149584b125a42183407dc1045c486cc870/opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c", size = 65564, upload-time = "2025-07-29T15:11:47.998Z" }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.56b0" +version = "0.57b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1829,9 +1865,9 @@ dependencies = [ { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/14/964e90f524655aed5c699190dad8dd9a05ed0f5fa334b4b33532237c2b51/opentelemetry_instrumentation-0.56b0.tar.gz", hash = "sha256:d2dbb3021188ca0ec8c5606349ee9a2919239627e8341d4d37f1d21ec3291d11", size = 28551, upload-time = "2025-07-11T12:26:19.305Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/37/cf17cf28f945a3aca5a038cfbb45ee01317d4f7f3a0e5209920883fe9b08/opentelemetry_instrumentation-0.57b0.tar.gz", hash = "sha256:f2a30135ba77cdea2b0e1df272f4163c154e978f57214795d72f40befd4fcf05", size = 30807, upload-time = "2025-07-29T15:42:44.746Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/aa/2328f27200b8e51640d4d7ff5343ba6a81ab7d2650a9f574db016aae4adf/opentelemetry_instrumentation-0.56b0-py3-none-any.whl", hash = "sha256:948967f7c8f5bdc6e43512ba74c9ae14acb48eb72a35b61afe8db9909f743be3", size = 31105, upload-time = "2025-07-11T12:25:22.788Z" }, + { url = "https://files.pythonhosted.org/packages/d0/6f/f20cd1542959f43fb26a5bf9bb18cd81a1ea0700e8870c8f369bd07f5c65/opentelemetry_instrumentation-0.57b0-py3-none-any.whl", hash = "sha256:9109280f44882e07cec2850db28210b90600ae9110b42824d196de357cbddf7e", size = 32460, upload-time = "2025-07-29T15:41:40.883Z" }, ] [[package]] @@ -1852,29 +1888,29 @@ wheels = [ [[package]] name = "opentelemetry-sdk" -version = "1.35.0" +version = "1.36.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/9a/cf/1eb2ed2ce55e0a9aa95b3007f26f55c7943aeef0a783bb006bdd92b3299e/opentelemetry_sdk-1.35.0.tar.gz", hash = "sha256:2a400b415ab68aaa6f04e8a6a9f6552908fb3090ae2ff78d6ae0c597ac581954", size = 160871, upload-time = "2025-07-11T12:23:39.566Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/85/8567a966b85a2d3f971c4d42f781c305b2b91c043724fa08fd37d158e9dc/opentelemetry_sdk-1.36.0.tar.gz", hash = "sha256:19c8c81599f51b71670661ff7495c905d8fdf6976e41622d5245b791b06fa581", size = 162557, upload-time = "2025-07-29T15:12:16.76Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/4f/8e32b757ef3b660511b638ab52d1ed9259b666bdeeceba51a082ce3aea95/opentelemetry_sdk-1.35.0-py3-none-any.whl", hash = "sha256:223d9e5f5678518f4842311bb73966e0b6db5d1e0b74e35074c052cd2487f800", size = 119379, upload-time = "2025-07-11T12:23:24.521Z" }, + { url = "https://files.pythonhosted.org/packages/0b/59/7bed362ad1137ba5886dac8439e84cd2df6d087be7c09574ece47ae9b22c/opentelemetry_sdk-1.36.0-py3-none-any.whl", hash = "sha256:19fe048b42e98c5c1ffe85b569b7073576ad4ce0bcb6e9b4c6a39e890a6c45fb", size = 119995, upload-time = "2025-07-29T15:12:03.181Z" }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.56b0" +version = "0.57b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/8e/214fa817f63b9f068519463d8ab46afd5d03b98930c39394a37ae3e741d0/opentelemetry_semantic_conventions-0.56b0.tar.gz", hash = "sha256:c114c2eacc8ff6d3908cb328c811eaf64e6d68623840be9224dc829c4fd6c2ea", size = 124221, upload-time = "2025-07-11T12:23:40.71Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7e/31/67dfa252ee88476a29200b0255bda8dfc2cf07b56ad66dc9a6221f7dc787/opentelemetry_semantic_conventions-0.57b0.tar.gz", hash = "sha256:609a4a79c7891b4620d64c7aac6898f872d790d75f22019913a660756f27ff32", size = 124225, upload-time = "2025-07-29T15:12:17.873Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/3f/e80c1b017066a9d999efffe88d1cce66116dcf5cb7f80c41040a83b6e03b/opentelemetry_semantic_conventions-0.56b0-py3-none-any.whl", hash = "sha256:df44492868fd6b482511cc43a942e7194be64e94945f572db24df2e279a001a2", size = 201625, upload-time = "2025-07-11T12:23:25.63Z" }, + { url = "https://files.pythonhosted.org/packages/05/75/7d591371c6c39c73de5ce5da5a2cc7b72d1d1cd3f8f4638f553c01c37b11/opentelemetry_semantic_conventions-0.57b0-py3-none-any.whl", hash = "sha256:757f7e76293294f124c827e514c2a3144f191ef175b069ce8d1211e1e38e9e78", size = 201627, upload-time = "2025-07-29T15:12:04.174Z" }, ] [[package]] @@ -2558,71 +2594,50 @@ wheels = [ [[package]] name = "regex" -version = "2024.11.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91", size = 482674, upload-time = "2024-11-06T20:08:57.575Z" }, - { url = "https://files.pythonhosted.org/packages/15/51/9f35d12da8434b489c7b7bffc205c474a0a9432a889457026e9bc06a297a/regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0", size = 287684, upload-time = 
"2024-11-06T20:08:59.787Z" }, - { url = "https://files.pythonhosted.org/packages/bd/18/b731f5510d1b8fb63c6b6d3484bfa9a59b84cc578ac8b5172970e05ae07c/regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e", size = 284589, upload-time = "2024-11-06T20:09:01.896Z" }, - { url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde", size = 782511, upload-time = "2024-11-06T20:09:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e", size = 821149, upload-time = "2024-11-06T20:09:06.237Z" }, - { url = "https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2", size = 809707, upload-time = "2024-11-06T20:09:07.715Z" }, - { url = "https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf", size = 781702, upload-time = "2024-11-06T20:09:10.101Z" }, - { url = "https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c", size 
= 771976, upload-time = "2024-11-06T20:09:11.566Z" }, - { url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86", size = 697397, upload-time = "2024-11-06T20:09:13.119Z" }, - { url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67", size = 768726, upload-time = "2024-11-06T20:09:14.85Z" }, - { url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d", size = 775098, upload-time = "2024-11-06T20:09:16.504Z" }, - { url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2", size = 839325, upload-time = "2024-11-06T20:09:18.698Z" }, - { url = "https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008", size = 843277, upload-time = "2024-11-06T20:09:21.725Z" }, - { url = "https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62", size = 773197, upload-time = "2024-11-06T20:09:24.092Z" }, - 
{ url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e", size = 261714, upload-time = "2024-11-06T20:09:26.36Z" }, - { url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519", size = 274042, upload-time = "2024-11-06T20:09:28.762Z" }, - { url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638", size = 482669, upload-time = "2024-11-06T20:09:31.064Z" }, - { url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7", size = 287684, upload-time = "2024-11-06T20:09:32.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20", size = 284589, upload-time = "2024-11-06T20:09:35.504Z" }, - { url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121, upload-time = "2024-11-06T20:09:37.701Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275, upload-time = "2024-11-06T20:09:40.371Z" }, - { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257, upload-time = "2024-11-06T20:09:43.059Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727, upload-time = "2024-11-06T20:09:48.19Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667, upload-time = "2024-11-06T20:09:49.828Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963, upload-time = "2024-11-06T20:09:51.819Z" }, - { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700, upload-time = "2024-11-06T20:09:53.982Z" }, - { url 
= "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592, upload-time = "2024-11-06T20:09:56.222Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929, upload-time = "2024-11-06T20:09:58.642Z" }, - { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213, upload-time = "2024-11-06T20:10:00.867Z" }, - { url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9", size = 261734, upload-time = "2024-11-06T20:10:03.361Z" }, - { url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60", size = 274052, upload-time = "2024-11-06T20:10:05.179Z" }, - { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, - { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, - { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, - { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, - { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, - { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, - { url = 
"https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, - { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525, upload-time = "2024-11-06T20:10:45.19Z" }, - { url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324, upload-time = "2024-11-06T20:10:47.177Z" }, - { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617, upload-time = "2024-11-06T20:10:49.312Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023, upload-time = "2024-11-06T20:10:51.102Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072, upload-time = "2024-11-06T20:10:52.926Z" }, - { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130, upload-time = "2024-11-06T20:10:54.828Z" }, - { url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857, upload-time = "2024-11-06T20:10:56.634Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006, upload-time = "2024-11-06T20:10:59.369Z" }, - { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650, upload-time = "2024-11-06T20:11:02.042Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545, upload-time = "2024-11-06T20:11:03.933Z" }, - { url 
= "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045, upload-time = "2024-11-06T20:11:06.497Z" }, - { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182, upload-time = "2024-11-06T20:11:09.06Z" }, - { url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733, upload-time = "2024-11-06T20:11:11.256Z" }, - { url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122, upload-time = "2024-11-06T20:11:13.161Z" }, - { url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545, upload-time = "2024-11-06T20:11:15Z" }, +version = "2025.7.29" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/53/3f4f8350331c585b3e6656974c186d9b375a32489b67b0b515c4eddf3da0/regex-2025.7.29-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35639491014a0e9dbb1a40b19977301038d886aaca11b53dd4e2f7c963e8727a", size = 489341, upload-time = "2025-07-29T18:48:35.568Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/ca/0d29e7650e8b4707d9afc8835955035246a3bc10d0125ba059f52d9143ea/regex-2025.7.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffca9a2d6beaa72c2f8daac44b69c8cd0e23e2d7e48b66326bd450efe78fd36f", size = 293046, upload-time = "2025-07-29T18:48:39.425Z" }, + { url = "https://files.pythonhosted.org/packages/66/f6/7f58400b8c6a737257e24ee2dedef13681ae8fe6abf777a5d26abd119157/regex-2025.7.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e98f8de85942984c79ed216123224a9c526a05e1f3e59561258e598e437bab4c", size = 290096, upload-time = "2025-07-29T18:48:40.777Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b9/f099d5060e0262ef885bd0c77d809923c18db7d4fb0c1096673b638f5ee8/regex-2025.7.29-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:afe3a8979ad224ed6ad89e919d95732601858a74ac82f9cf12bed134b9cd0cba", size = 794546, upload-time = "2025-07-29T18:48:42.255Z" }, + { url = "https://files.pythonhosted.org/packages/b5/b6/d029b4cfcb1771276bb1840017fe19fe6149f5b231a3f79141d7caf5abcf/regex-2025.7.29-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c5af1f445525470e2fd37f6a964348504e4b1f351d7923f759f0e1d821ce305d", size = 790435, upload-time = "2025-07-29T18:48:43.735Z" }, + { url = "https://files.pythonhosted.org/packages/48/be/88e867a2ca2ccef31ee8b35952419be27ebd5afb1338ad9188b524f0c6f1/regex-2025.7.29-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:11beacf9fd16094778372b84e0162d0bc65292ab9886c43792df3903fc6719d5", size = 782475, upload-time = "2025-07-29T18:48:45.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/0e/e03855bdd0200cfb1448a8155f7fc0ca9597b2e10fdcf067783d2a6927a4/regex-2025.7.29-cp310-cp310-win32.whl", hash = "sha256:618a1855271a4f5959ac3761d8c667d42a58af3dedaab9db75ad2241ea21a6c1", size = 268728, upload-time = "2025-07-29T18:48:46.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/c7/c360c095d65ee93b6f9ccb66948666660941dcb89840e463c9bc6a1b6095/regex-2025.7.29-cp310-cp310-win_amd64.whl", hash = "sha256:0415cb192ae7e0335908dc88f7430e0ebe07bc74395b5e7f7bf455e1967696b3", size = 280418, upload-time = "2025-07-29T18:48:48.496Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/b01dd1598dbe863cd2b98643424184646d037bdfa6e3ae254af4cbafda3f/regex-2025.7.29-cp310-cp310-win_arm64.whl", hash = "sha256:da62a150ab653f902bad866facbffa4f47a43ff16bc5768ee40da9b3a52efbdd", size = 272885, upload-time = "2025-07-29T18:48:50.315Z" }, + { url = "https://files.pythonhosted.org/packages/dd/63/975c4989b97b2a757495ebe5c52d82970a5ef88fcdc5f4d95cfac369e20d/regex-2025.7.29-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:747fea7f98761ed25dbbffa10f3def9385b48e49badfc5e97fad6e3f4f2caf5f", size = 489347, upload-time = "2025-07-29T18:48:51.851Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/51e28ec89cdcfa2165be30a29123cd46c169b4ccfe3a778fc6221032ae7a/regex-2025.7.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5714cc58c6bfaff1204f592c52b6531c90a27bf2a70e296a863bae18c92ebd0c", size = 293052, upload-time = "2025-07-29T18:48:53.176Z" }, + { url = "https://files.pythonhosted.org/packages/c2/eb/c029b72e3ae82c794aa65c26a5caa997341128ce1023aaafee946739298f/regex-2025.7.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf3ac6f5f9e280b7ae6da10bdabc7fc9c102d1bf9e47eb8d92db4c73b78842f9", size = 290097, upload-time = "2025-07-29T18:48:54.646Z" }, + { url = "https://files.pythonhosted.org/packages/17/87/7373079eb1e2f7b973e9c5435224e5bc8a90ae7d812a9eae93f99d59ea13/regex-2025.7.29-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3603c722d36d9ed013918a4b1687db6caa08fcaafb4ba3b296c9fc8bd31a53c9", size = 803690, upload-time = "2025-07-29T18:48:55.874Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/7a/4bcb450fb04aa4dea495e6c574ba5f7c306e04a17d0a47d80cdcf273f667/regex-2025.7.29-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:529880c105ae9a1230ff6d1130207e4f3b7e64d71c487f242464695673231bea", size = 792224, upload-time = "2025-07-29T18:48:57.425Z" }, + { url = "https://files.pythonhosted.org/packages/2e/18/b13983ee37f7571413660df445bbb6851f6d3a5f7b4998461893ee147c45/regex-2025.7.29-cp311-cp311-win32.whl", hash = "sha256:612765d6a7e39e6a43751e9f4412334414027f31273cd742284b2ddbba75dbd3", size = 268740, upload-time = "2025-07-29T18:48:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/40/33/92f11c75965627bb93dc24990e1345b4021d60ef0cfc5acf261d4882d687/regex-2025.7.29-cp311-cp311-win_amd64.whl", hash = "sha256:fd4a6a80788661ad09db376828833b0fc26359655e4e77be7539fcbe82241bec", size = 280435, upload-time = "2025-07-29T18:49:00.369Z" }, + { url = "https://files.pythonhosted.org/packages/a1/34/e4a14d793fe1e853afa5ffcdeb97d3556c1f5e3429d5b980164404f4c9ca/regex-2025.7.29-cp311-cp311-win_arm64.whl", hash = "sha256:a5aaafafb0a1fec9258dcd87b4b12d3a9c6078daaa74524a2cc0e74691075585", size = 272885, upload-time = "2025-07-29T18:49:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/e1/0a/571b277e81ed74af6ffc5f93bf62f202ba21438727c20806fc31a8e87530/regex-2025.7.29-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df26c13221124138ac6944d7d895c12673b09499a9d650c81790b025a0b1bb37", size = 490335, upload-time = "2025-07-29T18:49:04.273Z" }, + { url = "https://files.pythonhosted.org/packages/77/93/70e71743dda71a2100d0ddcde1d48f27cc19726cd789940e126b5661a862/regex-2025.7.29-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1efb631d67f5ed0a37c7102425e4ae6e7c60acc561a92aa9f983360568ba17e", size = 293734, upload-time = "2025-07-29T18:49:06.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/55/e57b02df5d37f551dce447899f600428b9cb1e7a57479e22227e16e1ecba/regex-2025.7.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c7ca42a898610d64bee82854085810b006bae647508e6ca44b58a6866b94932", size = 290268, upload-time = "2025-07-29T18:49:07.599Z" }, + { url = "https://files.pythonhosted.org/packages/3d/17/fa18558ceb768851a4e7bb930f7cf73c99ec23564a57295e70a38701d343/regex-2025.7.29-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81c3dbddee0de40bc5db9c093e97f12fec1cfc48ddc8be61699bd28e67cd477f", size = 804510, upload-time = "2025-07-29T18:49:08.918Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0a/b6150fec18920a324233360d3aaca074b32b01acae475f5a16450e15b831/regex-2025.7.29-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:72ded9020430d97bbc68a87e602b9f05f037c3a978d3ada2124af5a960b01721", size = 794272, upload-time = "2025-07-29T18:49:10.362Z" }, + { url = "https://files.pythonhosted.org/packages/52/b9/b69a16a8fbdc7c6ae0616bea3166c814c9bcfd8671589379329cba129790/regex-2025.7.29-cp312-cp312-win32.whl", hash = "sha256:1538bfae71d42f31232e36d4d45c5594d3cc6515b0a49897331367946f0fb32a", size = 269105, upload-time = "2025-07-29T18:49:12.099Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d4/ef00edfff55867ec95ff9f8af085c28e590c2c83379f63f0b126ec8795d1/regex-2025.7.29-cp312-cp312-win_amd64.whl", hash = "sha256:9d72d33903a3e2d93acaa9e24d11cf3688f1c20515e4f8ec1ea881eea24b92e6", size = 279788, upload-time = "2025-07-29T18:49:13.578Z" }, + { url = "https://files.pythonhosted.org/packages/33/61/6e652fe1fe164028b5a60d3b6c57cb05193515ab7453361d6bdf1c3957e8/regex-2025.7.29-cp312-cp312-win_arm64.whl", hash = "sha256:d0c5de6962e7d062a3c2e41347cfe6c2a26b0731ba2da3500884519eaab7ac08", size = 272990, upload-time = "2025-07-29T18:49:15.061Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/67/c81234a9e900cb9b62c9fe549e9f56a2f19718323cc826f77f472653deeb/regex-2025.7.29-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b22b2cc3402996c730f1dfd240be95108e8897192f82b8a01bcffcfeafaf0476", size = 490122, upload-time = "2025-07-29T18:49:16.358Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f2/41dd213a58e8d4a3b0db7a598602de7cbfb465f14139040ffb6710b7a0b1/regex-2025.7.29-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bc0d5d1c45ad2880afec2891716616f1bcb84ebfbd70767086e81656a219f70b", size = 293621, upload-time = "2025-07-29T18:49:17.784Z" }, + { url = "https://files.pythonhosted.org/packages/a9/55/942db711ae7f1a19686994468ceef654a35440ec77beab2f706fe5d72631/regex-2025.7.29-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c82c7ca3b6589573b48250ca59b0d17ad43884491a0c5c6b9ef9f868f68a0aa7", size = 290210, upload-time = "2025-07-29T18:49:19.396Z" }, + { url = "https://files.pythonhosted.org/packages/1b/25/c07c7a7a8bd4b2351139742de46704ddbcfe83e0ff03f68443819c2885d7/regex-2025.7.29-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34a885ee2db12adfe3a96faa231fb61a0731ba74c90f5265cb1cb78a0d53463f", size = 804528, upload-time = "2025-07-29T18:49:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b1/8b5ff8e6b27e539d390287e8ab08f5a04deda5c8da6639aeda11a2c2e2b4/regex-2025.7.29-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bd1b8732ce1fcf6b119a36edfdcd4fbb49e82ee86fe73b963a706c3ea19edc42", size = 794347, upload-time = "2025-07-29T18:49:22.767Z" }, + { url = "https://files.pythonhosted.org/packages/0f/20/44b4bf1cc0e460b889e0ac2b04faa618447f737d2dc804fb4bc2fc8a1aa3/regex-2025.7.29-cp313-cp313-win32.whl", hash = "sha256:534fbaa53bb9f8b5951a5a87efee9ef10cab1a282f60c3711f24a84fff7faa97", size = 269087, upload-time = "2025-07-29T18:49:24.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/ca/4a615ed8a17046eef18a65f05d7e7d27e5ad1c6a472dfafddf1e6369c9ea/regex-2025.7.29-cp313-cp313-win_amd64.whl", hash = "sha256:136bcfb36b751d51eafe7f21458a5d35be3d568f9c70f9c0934005ee96d19253", size = 279764, upload-time = "2025-07-29T18:49:26.178Z" }, + { url = "https://files.pythonhosted.org/packages/a7/59/3225b28555f1f56545f18e9ce913aa11875bef960a7f5641b7f86056a2b9/regex-2025.7.29-cp313-cp313-win_arm64.whl", hash = "sha256:b6fad25e9189187ac9e81cb3cdb7dd73b8912cde8a56301aa49c803252b93ef5", size = 272985, upload-time = "2025-07-29T18:49:27.932Z" }, + { url = "https://files.pythonhosted.org/packages/46/85/95db52d187d1d94a6f712dad8317b88a953b8e6aae949e64ba4a56f6f97e/regex-2025.7.29-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:571ecd5970615bb3b3257d1fd23e76889977123fc0f525c166d8607680ffff28", size = 490156, upload-time = "2025-07-29T18:49:29.546Z" }, + { url = "https://files.pythonhosted.org/packages/56/e4/74a9162c588a62a50aafa302d0a354f5007c079d01dde0ae0f23cee72c73/regex-2025.7.29-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:3cf360a5b44bde2c32097c6303fbf11136e04ce7912b5368b7b04c84f52a939b", size = 293534, upload-time = "2025-07-29T18:49:31.219Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8c/4a7853fecb771ad80c0c01bb9e6991c5bfd36e50dd21025a1d3b6d6fd479/regex-2025.7.29-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e211412e1466d654806f10597695e70e562899be3a883cad3326803c8da39ee", size = 290327, upload-time = "2025-07-29T18:49:32.533Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9f/c6831493334a46285a9842da754ba2644fb543a354cccccc667f8a2fb53b/regex-2025.7.29-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0f8e976785376ff0ab67176d0cbf42c8a964663f10306e6620c3424c88120a2", size = 803973, upload-time = "2025-07-29T18:49:33.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/d6/d165df45ac08572ed7ade0ee15a127724b964147008f52a97006a4a1456b/regex-2025.7.29-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fc6f92c568e6dad6041e850bddbb7b6f9fed0d2d36e91e8313d0f0abb95ddcda", size = 793880, upload-time = "2025-07-29T18:49:35.651Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ec/bd2e90e33c539bda7c5b608937a55e58dcc9585ba39c3083f784af97a8d6/regex-2025.7.29-cp314-cp314-win32.whl", hash = "sha256:a91781c833c0f03f42821bc349de4197fd411ef9a4dd513b72abf74d3afb8634", size = 274477, upload-time = "2025-07-29T18:49:37.554Z" }, + { url = "https://files.pythonhosted.org/packages/52/b1/9eb1af06611ebbd399910630960b41c8e23c4f5804aa4be9f5e27aef3186/regex-2025.7.29-cp314-cp314-win_amd64.whl", hash = "sha256:5743ae64c22b6f7672a699260fef86ec84baf8f6ee21be1484f9cca880ba85ba", size = 283030, upload-time = "2025-07-29T18:49:38.88Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2f/7ac07ba3252b91fec0095b64d8084611bdd36207a6d1833f831a50bebc9c/regex-2025.7.29-cp314-cp314-win_arm64.whl", hash = "sha256:03c0eab5d3310968f19721930014b9735d3a61dbe719b04cfa57d0571fbb64ac", size = 276079, upload-time = "2025-07-29T18:49:40.265Z" }, ] [[package]] @@ -2642,16 +2657,15 @@ wheels = [ [[package]] name = "rich" -version = "14.0.0" +version = "14.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, ] [[package]] @@ -2791,27 +2805,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/ce/8d7dbedede481245b489b769d27e2934730791a9a82765cb94566c6e6abd/ruff-0.12.4.tar.gz", hash = "sha256:13efa16df6c6eeb7d0f091abae50f58e9522f3843edb40d56ad52a5a4a4b6873", size = 5131435, upload-time = "2025-07-17T17:27:19.138Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/9f/517bc5f61bad205b7f36684ffa5415c013862dee02f55f38a217bdbe7aa4/ruff-0.12.4-py3-none-linux_armv6l.whl", hash = "sha256:cb0d261dac457ab939aeb247e804125a5d521b21adf27e721895b0d3f83a0d0a", size = 10188824, upload-time = "2025-07-17T17:26:31.412Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/83/691baae5a11fbbde91df01c565c650fd17b0eabed259e8b7563de17c6529/ruff-0.12.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:55c0f4ca9769408d9b9bac530c30d3e66490bd2beb2d3dae3e4128a1f05c7442", size = 10884521, upload-time = "2025-07-17T17:26:35.084Z" }, - { url = "https://files.pythonhosted.org/packages/d6/8d/756d780ff4076e6dd035d058fa220345f8c458391f7edfb1c10731eedc75/ruff-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a8224cc3722c9ad9044da7f89c4c1ec452aef2cfe3904365025dd2f51daeae0e", size = 10277653, upload-time = "2025-07-17T17:26:37.897Z" }, - { url = "https://files.pythonhosted.org/packages/8d/97/8eeee0f48ece153206dce730fc9e0e0ca54fd7f261bb3d99c0a4343a1892/ruff-0.12.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9949d01d64fa3672449a51ddb5d7548b33e130240ad418884ee6efa7a229586", size = 10485993, upload-time = "2025-07-17T17:26:40.68Z" }, - { url = "https://files.pythonhosted.org/packages/49/b8/22a43d23a1f68df9b88f952616c8508ea6ce4ed4f15353b8168c48b2d7e7/ruff-0.12.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:be0593c69df9ad1465e8a2d10e3defd111fdb62dcd5be23ae2c06da77e8fcffb", size = 10022824, upload-time = "2025-07-17T17:26:43.564Z" }, - { url = "https://files.pythonhosted.org/packages/cd/70/37c234c220366993e8cffcbd6cadbf332bfc848cbd6f45b02bade17e0149/ruff-0.12.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7dea966bcb55d4ecc4cc3270bccb6f87a337326c9dcd3c07d5b97000dbff41c", size = 11524414, upload-time = "2025-07-17T17:26:46.219Z" }, - { url = "https://files.pythonhosted.org/packages/14/77/c30f9964f481b5e0e29dd6a1fae1f769ac3fd468eb76fdd5661936edd262/ruff-0.12.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afcfa3ab5ab5dd0e1c39bf286d829e042a15e966b3726eea79528e2e24d8371a", size = 12419216, upload-time = "2025-07-17T17:26:48.883Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/79/af7fe0a4202dce4ef62c5e33fecbed07f0178f5b4dd9c0d2fcff5ab4a47c/ruff-0.12.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c057ce464b1413c926cdb203a0f858cd52f3e73dcb3270a3318d1630f6395bb3", size = 11976756, upload-time = "2025-07-17T17:26:51.754Z" }, - { url = "https://files.pythonhosted.org/packages/09/d1/33fb1fc00e20a939c305dbe2f80df7c28ba9193f7a85470b982815a2dc6a/ruff-0.12.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e64b90d1122dc2713330350626b10d60818930819623abbb56535c6466cce045", size = 11020019, upload-time = "2025-07-17T17:26:54.265Z" }, - { url = "https://files.pythonhosted.org/packages/64/f4/e3cd7f7bda646526f09693e2e02bd83d85fff8a8222c52cf9681c0d30843/ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abc48f3d9667fdc74022380b5c745873499ff827393a636f7a59da1515e7c57", size = 11277890, upload-time = "2025-07-17T17:26:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/5e/d0/69a85fb8b94501ff1a4f95b7591505e8983f38823da6941eb5b6badb1e3a/ruff-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b2449dc0c138d877d629bea151bee8c0ae3b8e9c43f5fcaafcd0c0d0726b184", size = 10348539, upload-time = "2025-07-17T17:26:59.381Z" }, - { url = "https://files.pythonhosted.org/packages/16/a0/91372d1cb1678f7d42d4893b88c252b01ff1dffcad09ae0c51aa2542275f/ruff-0.12.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:56e45bb11f625db55f9b70477062e6a1a04d53628eda7784dce6e0f55fd549eb", size = 10009579, upload-time = "2025-07-17T17:27:02.462Z" }, - { url = "https://files.pythonhosted.org/packages/23/1b/c4a833e3114d2cc0f677e58f1df6c3b20f62328dbfa710b87a1636a5e8eb/ruff-0.12.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:478fccdb82ca148a98a9ff43658944f7ab5ec41c3c49d77cd99d44da019371a1", size = 10942982, upload-time = "2025-07-17T17:27:05.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/ce/ce85e445cf0a5dd8842f2f0c6f0018eedb164a92bdf3eda51984ffd4d989/ruff-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0fc426bec2e4e5f4c4f182b9d2ce6a75c85ba9bcdbe5c6f2a74fcb8df437df4b", size = 11343331, upload-time = "2025-07-17T17:27:08.652Z" }, - { url = "https://files.pythonhosted.org/packages/35/cf/441b7fc58368455233cfb5b77206c849b6dfb48b23de532adcc2e50ccc06/ruff-0.12.4-py3-none-win32.whl", hash = "sha256:4de27977827893cdfb1211d42d84bc180fceb7b72471104671c59be37041cf93", size = 10267904, upload-time = "2025-07-17T17:27:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7e/20af4a0df5e1299e7368d5ea4350412226afb03d95507faae94c80f00afd/ruff-0.12.4-py3-none-win_amd64.whl", hash = "sha256:fe0b9e9eb23736b453143d72d2ceca5db323963330d5b7859d60d101147d461a", size = 11209038, upload-time = "2025-07-17T17:27:14.417Z" }, - { url = "https://files.pythonhosted.org/packages/11/02/8857d0dfb8f44ef299a5dfd898f673edefb71e3b533b3b9d2db4c832dd13/ruff-0.12.4-py3-none-win_arm64.whl", hash = "sha256:0618ec4442a83ab545e5b71202a5c0ed7791e8471435b94e655b570a5031a98e", size = 10469336, upload-time = "2025-07-17T17:27:16.913Z" }, +version = "0.12.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/cd/01015eb5034605fd98d829c5839ec2c6b4582b479707f7c1c2af861e8258/ruff-0.12.5.tar.gz", hash = "sha256:b209db6102b66f13625940b7f8c7d0f18e20039bb7f6101fbdac935c9612057e", size = 5170722, upload-time = "2025-07-24T13:26:37.456Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/de/ad2f68f0798ff15dd8c0bcc2889558970d9a685b3249565a937cd820ad34/ruff-0.12.5-py3-none-linux_armv6l.whl", hash = "sha256:1de2c887e9dec6cb31fcb9948299de5b2db38144e66403b9660c9548a67abd92", size = 11819133, upload-time = "2025-07-24T13:25:56.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/fc/c6b65cd0e7fbe60f17e7ad619dca796aa49fbca34bb9bea5f8faf1ec2643/ruff-0.12.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d1ab65e7d8152f519e7dea4de892317c9da7a108da1c56b6a3c1d5e7cf4c5e9a", size = 12501114, upload-time = "2025-07-24T13:25:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/c5/de/c6bec1dce5ead9f9e6a946ea15e8d698c35f19edc508289d70a577921b30/ruff-0.12.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:962775ed5b27c7aa3fdc0d8f4d4433deae7659ef99ea20f783d666e77338b8cf", size = 11716873, upload-time = "2025-07-24T13:26:01.496Z" }, + { url = "https://files.pythonhosted.org/packages/a1/16/cf372d2ebe91e4eb5b82a2275c3acfa879e0566a7ac94d331ea37b765ac8/ruff-0.12.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b4cae449597e7195a49eb1cdca89fd9fbb16140c7579899e87f4c85bf82f73", size = 11958829, upload-time = "2025-07-24T13:26:03.721Z" }, + { url = "https://files.pythonhosted.org/packages/25/bf/cd07e8f6a3a6ec746c62556b4c4b79eeb9b0328b362bb8431b7b8afd3856/ruff-0.12.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b13489c3dc50de5e2d40110c0cce371e00186b880842e245186ca862bf9a1ac", size = 11626619, upload-time = "2025-07-24T13:26:06.118Z" }, + { url = "https://files.pythonhosted.org/packages/d8/c9/c2ccb3b8cbb5661ffda6925f81a13edbb786e623876141b04919d1128370/ruff-0.12.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1504fea81461cf4841778b3ef0a078757602a3b3ea4b008feb1308cb3f23e08", size = 13221894, upload-time = "2025-07-24T13:26:08.292Z" }, + { url = "https://files.pythonhosted.org/packages/6b/58/68a5be2c8e5590ecdad922b2bcd5583af19ba648f7648f95c51c3c1eca81/ruff-0.12.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c7da4129016ae26c32dfcbd5b671fe652b5ab7fc40095d80dcff78175e7eddd4", size = 14163909, upload-time = "2025-07-24T13:26:10.474Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/d1/ef6b19622009ba8386fdb792c0743f709cf917b0b2f1400589cbe4739a33/ruff-0.12.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca972c80f7ebcfd8af75a0f18b17c42d9f1ef203d163669150453f50ca98ab7b", size = 13583652, upload-time = "2025-07-24T13:26:13.381Z" }, + { url = "https://files.pythonhosted.org/packages/62/e3/1c98c566fe6809a0c83751d825a03727f242cdbe0d142c9e292725585521/ruff-0.12.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbf9f25dfb501f4237ae7501d6364b76a01341c6f1b2cd6764fe449124bb2a", size = 12700451, upload-time = "2025-07-24T13:26:15.488Z" }, + { url = "https://files.pythonhosted.org/packages/24/ff/96058f6506aac0fbc0d0fc0d60b0d0bd746240a0594657a2d94ad28033ba/ruff-0.12.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c47dea6ae39421851685141ba9734767f960113d51e83fd7bb9958d5be8763a", size = 12937465, upload-time = "2025-07-24T13:26:17.808Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d3/68bc5e7ab96c94b3589d1789f2dd6dd4b27b263310019529ac9be1e8f31b/ruff-0.12.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5076aa0e61e30f848846f0265c873c249d4b558105b221be1828f9f79903dc5", size = 11771136, upload-time = "2025-07-24T13:26:20.422Z" }, + { url = "https://files.pythonhosted.org/packages/52/75/7356af30a14584981cabfefcf6106dea98cec9a7af4acb5daaf4b114845f/ruff-0.12.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a5a4c7830dadd3d8c39b1cc85386e2c1e62344f20766be6f173c22fb5f72f293", size = 11601644, upload-time = "2025-07-24T13:26:22.928Z" }, + { url = "https://files.pythonhosted.org/packages/c2/67/91c71d27205871737cae11025ee2b098f512104e26ffd8656fd93d0ada0a/ruff-0.12.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:46699f73c2b5b137b9dc0fc1a190b43e35b008b398c6066ea1350cce6326adcb", size = 12478068, upload-time = "2025-07-24T13:26:26.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/04/b6b00383cf2f48e8e78e14eb258942fdf2a9bf0287fbf5cdd398b749193a/ruff-0.12.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a655a0a0d396f0f072faafc18ebd59adde8ca85fb848dc1b0d9f024b9c4d3bb", size = 12991537, upload-time = "2025-07-24T13:26:28.533Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b9/053d6445dc7544fb6594785056d8ece61daae7214859ada4a152ad56b6e0/ruff-0.12.5-py3-none-win32.whl", hash = "sha256:dfeb2627c459b0b78ca2bbdc38dd11cc9a0a88bf91db982058b26ce41714ffa9", size = 11751575, upload-time = "2025-07-24T13:26:30.835Z" }, + { url = "https://files.pythonhosted.org/packages/bc/0f/ab16e8259493137598b9149734fec2e06fdeda9837e6f634f5c4e35916da/ruff-0.12.5-py3-none-win_amd64.whl", hash = "sha256:ae0d90cf5f49466c954991b9d8b953bd093c32c27608e409ae3564c63c5306a5", size = 12882273, upload-time = "2025-07-24T13:26:32.929Z" }, + { url = "https://files.pythonhosted.org/packages/00/db/c376b0661c24cf770cb8815268190668ec1330eba8374a126ceef8c72d55/ruff-0.12.5-py3-none-win_arm64.whl", hash = "sha256:48cdbfc633de2c5c37d9f090ba3b352d1576b0015bfc3bc98eaf230275b7e805", size = 11951564, upload-time = "2025-07-24T13:26:34.994Z" }, ] [[package]] @@ -3052,47 +3066,47 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.41" +version = "2.0.42" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'WIN32' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'aarch64' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'amd64' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'ppc64le' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'win32' and sys_platform == 'darwin') or 
(python_full_version < '3.14' and platform_machine == 'x86_64' and sys_platform == 'darwin') or (python_full_version < '3.14' and platform_machine == 'AMD64' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'WIN32' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'amd64' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'ppc64le' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'win32' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'x86_64' and sys_platform == 'linux') or (python_full_version < '3.14' and platform_machine == 'AMD64' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'WIN32' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'aarch64' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'amd64' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'ppc64le' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'win32' and sys_platform == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64' and sys_platform == 'win32')" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e9/12/d7c445b1940276a828efce7331cb0cb09d6e5f049651db22f4ebb0922b77/sqlalchemy-2.0.41-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b", size = 2117967, upload-time = "2025-05-14T17:48:15.841Z" }, - { url = "https://files.pythonhosted.org/packages/6f/b8/cb90f23157e28946b27eb01ef401af80a1fab7553762e87df51507eaed61/sqlalchemy-2.0.41-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5", size = 2107583, upload-time = "2025-05-14T17:48:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/9e/c2/eef84283a1c8164a207d898e063edf193d36a24fb6a5bb3ce0634b92a1e8/sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747", size = 3186025, upload-time = "2025-05-14T17:51:51.226Z" }, - { url = "https://files.pythonhosted.org/packages/bd/72/49d52bd3c5e63a1d458fd6d289a1523a8015adedbddf2c07408ff556e772/sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30", size = 3186259, upload-time = "2025-05-14T17:55:22.526Z" }, - { url = "https://files.pythonhosted.org/packages/4f/9e/e3ffc37d29a3679a50b6bbbba94b115f90e565a2b4545abb17924b94c52d/sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29", size = 3126803, upload-time = "2025-05-14T17:51:53.277Z" }, - { url = "https://files.pythonhosted.org/packages/8a/76/56b21e363f6039978ae0b72690237b38383e4657281285a09456f313dd77/sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11", size = 3148566, upload-time = "2025-05-14T17:55:24.398Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/92/11b8e1b69bf191bc69e300a99badbbb5f2f1102f2b08b39d9eee2e21f565/sqlalchemy-2.0.41-cp310-cp310-win32.whl", hash = "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda", size = 2086696, upload-time = "2025-05-14T17:55:59.136Z" }, - { url = "https://files.pythonhosted.org/packages/5c/88/2d706c9cc4502654860f4576cd54f7db70487b66c3b619ba98e0be1a4642/sqlalchemy-2.0.41-cp310-cp310-win_amd64.whl", hash = "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08", size = 2110200, upload-time = "2025-05-14T17:56:00.757Z" }, - { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" }, - { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" }, - { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" }, - { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = "2025-05-14T17:55:26.928Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" }, - { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" }, - { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" }, - { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" }, - { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" }, - { url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" }, - { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" }, - { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" }, - { url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" }, - { url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" }, - { url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" }, - { url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" }, - { url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" }, - { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/5a/03/a0af991e3a43174d6b83fca4fb399745abceddd1171bdabae48ce877ff47/sqlalchemy-2.0.42.tar.gz", hash = "sha256:160bedd8a5c28765bd5be4dec2d881e109e33b34922e50a3b881a7681773ac5f", size = 9749972, upload-time = "2025-07-29T12:48:09.323Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/12/33ff43214c2c6cc87499b402fe419869d2980a08101c991daae31345e901/sqlalchemy-2.0.42-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:172b244753e034d91a826f80a9a70f4cbac690641207f2217f8404c261473efe", size = 2130469, upload-time = "2025-07-29T13:25:15.215Z" }, + { url = "https://files.pythonhosted.org/packages/63/c4/4d2f2c21ddde9a2c7f7b258b202d6af0bac9fc5abfca5de367461c86d766/sqlalchemy-2.0.42-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be28f88abd74af8519a4542185ee80ca914933ca65cdfa99504d82af0e4210df", size = 2120393, upload-time = "2025-07-29T13:25:16.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/0d/5ff2f2dfbac10e4a9ade1942f8985ffc4bd8f157926b1f8aed553dfe3b88/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98b344859d282fde388047f1710860bb23f4098f705491e06b8ab52a48aafea9", size = 3206173, upload-time = "2025-07-29T13:29:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/59/71493fe74bd76a773ae8fa0c50bfc2ccac1cbf7cfa4f9843ad92897e6dcf/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97978d223b11f1d161390a96f28c49a13ce48fdd2fed7683167c39bdb1b8aa09", size = 3206910, upload-time = "2025-07-29T13:24:50.58Z" }, + { url = "https://files.pythonhosted.org/packages/a9/51/01b1d85bbb492a36b25df54a070a0f887052e9b190dff71263a09f48576b/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e35b9b000c59fcac2867ab3a79fc368a6caca8706741beab3b799d47005b3407", size = 3145479, upload-time = "2025-07-29T13:29:02.3Z" }, + { url = "https://files.pythonhosted.org/packages/fa/78/10834f010e2a3df689f6d1888ea6ea0074ff10184e6a550b8ed7f9189a89/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bc7347ad7a7b1c78b94177f2d57263113bb950e62c59b96ed839b131ea4234e1", size = 3169605, upload-time = "2025-07-29T13:24:52.135Z" }, + { url = "https://files.pythonhosted.org/packages/0c/75/e6fdd66d237582c8488dd1dfa90899f6502822fbd866363ab70e8ac4a2ce/sqlalchemy-2.0.42-cp310-cp310-win32.whl", hash = "sha256:739e58879b20a179156b63aa21f05ccacfd3e28e08e9c2b630ff55cd7177c4f1", size = 2098759, upload-time = "2025-07-29T13:23:55.809Z" }, + { url = "https://files.pythonhosted.org/packages/a5/a8/366db192641c2c2d1ea8977e7c77b65a0d16a7858907bb76ea68b9dd37af/sqlalchemy-2.0.42-cp310-cp310-win_amd64.whl", hash = "sha256:1aef304ada61b81f1955196f584b9e72b798ed525a7c0b46e09e98397393297b", size = 2122423, upload-time = "2025-07-29T13:23:56.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/3c/7bfd65f3c2046e2fb4475b21fa0b9d7995f8c08bfa0948df7a4d2d0de869/sqlalchemy-2.0.42-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c34100c0b7ea31fbc113c124bcf93a53094f8951c7bf39c45f39d327bad6d1e7", size = 2133779, upload-time = "2025-07-29T13:25:18.446Z" }, + { url = "https://files.pythonhosted.org/packages/66/17/19be542fe9dd64a766090e90e789e86bdaa608affda6b3c1e118a25a2509/sqlalchemy-2.0.42-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad59dbe4d1252448c19d171dfba14c74e7950b46dc49d015722a4a06bfdab2b0", size = 2123843, upload-time = "2025-07-29T13:25:19.749Z" }, + { url = "https://files.pythonhosted.org/packages/14/fc/83e45fc25f0acf1c26962ebff45b4c77e5570abb7c1a425a54b00bcfa9c7/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9187498c2149919753a7fd51766ea9c8eecdec7da47c1b955fa8090bc642eaa", size = 3294824, upload-time = "2025-07-29T13:29:03.879Z" }, + { url = "https://files.pythonhosted.org/packages/b9/81/421efc09837104cd1a267d68b470e5b7b6792c2963b8096ca1e060ba0975/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f092cf83ebcafba23a247f5e03f99f5436e3ef026d01c8213b5eca48ad6efa9", size = 3294662, upload-time = "2025-07-29T13:24:53.715Z" }, + { url = "https://files.pythonhosted.org/packages/2f/ba/55406e09d32ed5e5f9e8aaec5ef70c4f20b4ae25b9fa9784f4afaa28e7c3/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc6afee7e66fdba4f5a68610b487c1f754fccdc53894a9567785932dbb6a265e", size = 3229413, upload-time = "2025-07-29T13:29:05.638Z" }, + { url = "https://files.pythonhosted.org/packages/d4/c4/df596777fce27bde2d1a4a2f5a7ddea997c0c6d4b5246aafba966b421cc0/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:260ca1d2e5910f1f1ad3fe0113f8fab28657cee2542cb48c2f342ed90046e8ec", size = 3255563, upload-time = "2025-07-29T13:24:55.17Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/ed/b9c4a939b314400f43f972c9eb0091da59d8466ef9c51d0fd5b449edc495/sqlalchemy-2.0.42-cp311-cp311-win32.whl", hash = "sha256:2eb539fd83185a85e5fcd6b19214e1c734ab0351d81505b0f987705ba0a1e231", size = 2098513, upload-time = "2025-07-29T13:23:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/91/72/55b0c34e39feb81991aa3c974d85074c356239ac1170dfb81a474b4c23b3/sqlalchemy-2.0.42-cp311-cp311-win_amd64.whl", hash = "sha256:9193fa484bf00dcc1804aecbb4f528f1123c04bad6a08d7710c909750fa76aeb", size = 2123380, upload-time = "2025-07-29T13:24:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/61/66/ac31a9821fc70a7376321fb2c70fdd7eadbc06dadf66ee216a22a41d6058/sqlalchemy-2.0.42-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:09637a0872689d3eb71c41e249c6f422e3e18bbd05b4cd258193cfc7a9a50da2", size = 2132203, upload-time = "2025-07-29T13:29:19.291Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ba/fd943172e017f955d7a8b3a94695265b7114efe4854feaa01f057e8f5293/sqlalchemy-2.0.42-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3cb3ec67cc08bea54e06b569398ae21623534a7b1b23c258883a7c696ae10df", size = 2120373, upload-time = "2025-07-29T13:29:21.049Z" }, + { url = "https://files.pythonhosted.org/packages/ea/a2/b5f7d233d063ffadf7e9fff3898b42657ba154a5bec95a96f44cba7f818b/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87e6a5ef6f9d8daeb2ce5918bf5fddecc11cae6a7d7a671fcc4616c47635e01", size = 3317685, upload-time = "2025-07-29T13:26:40.837Z" }, + { url = "https://files.pythonhosted.org/packages/86/00/fcd8daab13a9119d41f3e485a101c29f5d2085bda459154ba354c616bf4e/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b718011a9d66c0d2f78e1997755cd965f3414563b31867475e9bc6efdc2281d", size = 3326967, upload-time = "2025-07-29T13:22:31.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/85/e622a273d648d39d6771157961956991a6d760e323e273d15e9704c30ccc/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:16d9b544873fe6486dddbb859501a07d89f77c61d29060bb87d0faf7519b6a4d", size = 3255331, upload-time = "2025-07-29T13:26:42.579Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a0/2c2338b592c7b0a61feffd005378c084b4c01fabaf1ed5f655ab7bd446f0/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21bfdf57abf72fa89b97dd74d3187caa3172a78c125f2144764a73970810c4ee", size = 3291791, upload-time = "2025-07-29T13:22:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/41/19/b8a2907972a78285fdce4c880ecaab3c5067eb726882ca6347f7a4bf64f6/sqlalchemy-2.0.42-cp312-cp312-win32.whl", hash = "sha256:78b46555b730a24901ceb4cb901c6b45c9407f8875209ed3c5d6bcd0390a6ed1", size = 2096180, upload-time = "2025-07-29T13:16:08.952Z" }, + { url = "https://files.pythonhosted.org/packages/48/1f/67a78f3dfd08a2ed1c7be820fe7775944f5126080b5027cc859084f8e223/sqlalchemy-2.0.42-cp312-cp312-win_amd64.whl", hash = "sha256:4c94447a016f36c4da80072e6c6964713b0af3c8019e9c4daadf21f61b81ab53", size = 2123533, upload-time = "2025-07-29T13:16:11.705Z" }, + { url = "https://files.pythonhosted.org/packages/e9/7e/25d8c28b86730c9fb0e09156f601d7a96d1c634043bf8ba36513eb78887b/sqlalchemy-2.0.42-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941804f55c7d507334da38133268e3f6e5b0340d584ba0f277dd884197f4ae8c", size = 2127905, upload-time = "2025-07-29T13:29:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/e5/a1/9d8c93434d1d983880d976400fcb7895a79576bd94dca61c3b7b90b1ed0d/sqlalchemy-2.0.42-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d3d06a968a760ce2aa6a5889fefcbdd53ca935735e0768e1db046ec08cbf01", size = 2115726, upload-time = "2025-07-29T13:29:23.496Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/cc/d33646fcc24c87cc4e30a03556b611a4e7bcfa69a4c935bffb923e3c89f4/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf10396a8a700a0f38ccd220d940be529c8f64435c5d5b29375acab9267a6c9", size = 3246007, upload-time = "2025-07-29T13:26:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/67/08/4e6c533d4c7f5e7c4cbb6fe8a2c4e813202a40f05700d4009a44ec6e236d/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cae6c2b05326d7c2c7c0519f323f90e0fb9e8afa783c6a05bb9ee92a90d0f04", size = 3250919, upload-time = "2025-07-29T13:22:33.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/82/f680e9a636d217aece1b9a8030d18ad2b59b5e216e0c94e03ad86b344af3/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f50f7b20677b23cfb35b6afcd8372b2feb348a38e3033f6447ee0704540be894", size = 3180546, upload-time = "2025-07-29T13:26:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a2/8c8f6325f153894afa3775584c429cc936353fb1db26eddb60a549d0ff4b/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d88a1c0d66d24e229e3938e1ef16ebdbd2bf4ced93af6eff55225f7465cf350", size = 3216683, upload-time = "2025-07-29T13:22:34.977Z" }, + { url = "https://files.pythonhosted.org/packages/39/44/3a451d7fa4482a8ffdf364e803ddc2cfcafc1c4635fb366f169ecc2c3b11/sqlalchemy-2.0.42-cp313-cp313-win32.whl", hash = "sha256:45c842c94c9ad546c72225a0c0d1ae8ef3f7c212484be3d429715a062970e87f", size = 2093990, upload-time = "2025-07-29T13:16:13.036Z" }, + { url = "https://files.pythonhosted.org/packages/4b/9e/9bce34f67aea0251c8ac104f7bdb2229d58fb2e86a4ad8807999c4bee34b/sqlalchemy-2.0.42-cp313-cp313-win_amd64.whl", hash = "sha256:eb9905f7f1e49fd57a7ed6269bc567fcbbdac9feadff20ad6bd7707266a91577", size = 2120473, upload-time = "2025-07-29T13:16:14.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/55/ba2546ab09a6adebc521bf3974440dc1d8c06ed342cceb30ed62a8858835/sqlalchemy-2.0.42-py3-none-any.whl", hash = "sha256:defcdff7e661f0043daa381832af65d616e060ddb54d3fe4476f51df7eaa1835", size = 1922072, upload-time = "2025-07-29T13:09:17.061Z" }, ] [[package]] @@ -3296,27 +3310,28 @@ wheels = [ [[package]] name = "uv" -version = "0.8.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/44/b53ae1f6b08724ddf3ec7125ddb6a381bd64ccdd696e8d87a1db3502aa10/uv-0.8.2.tar.gz", hash = "sha256:1a2c6d332a4c38f7489f08829aea19cd1e276df7f2c6e51ae64ed92f8574cd68", size = 3412893, upload-time = "2025-07-22T20:36:33.108Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/40/03c5cd3c9fa0fb53dcce1a39a6a9da6d81f29057cf9c4b9bb850dc58e2fb/uv-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:a89c9a471fbb436063e79afa919b2fb27462900f0f3781f776d8fd0b874acd56", size = 17875572, upload-time = "2025-07-22T20:35:33.672Z" }, - { url = "https://files.pythonhosted.org/packages/22/13/1d97c67fe666112c4327d6eec8bf39c244931c08848d4c95be0a80017f19/uv-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a3b147064f69455b4558263228b3fdb053c3d550f25d41b049c4d34f1f77d74c", size = 17948871, upload-time = "2025-07-22T20:35:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/72/ec/0dd7b14f92de906afa3adde0f31e05150d081f1aadce9eb77689e3adc4ca/uv-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ebface4a113c493d953554460429731d44ede2427ba97e606955daadcc6e7ddc", size = 16660631, upload-time = "2025-07-22T20:35:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/fc/bf/4896cde38c29cbca42d1d0f73d80e15e20826968817150323a34c8b23436/uv-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:6fb07dd58a2cb79640109c0604aeec57d1062fad89114c0fda2f9dbe3de3c0bb", size = 17208209, upload-time = "2025-07-22T20:35:45.355Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/91/f03b95ee6bb8c7bc4d6596664235992d1931d6e6b1b018acda6aeab69ea2/uv-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6af1b0f0f5f1416e94a6b098a595f360303a0024b21cf563d4e6139e6dd72640", size = 17570752, upload-time = "2025-07-22T20:35:48.711Z" }, - { url = "https://files.pythonhosted.org/packages/00/44/3c905c0bce2113a664432c50b7a605eea9f271d126333b2a6c9ec5105ef3/uv-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bddf1ceceaddbc3f2cf2ebdad3213482d6dab3d1b452ddfecd35468e3b2f0e6", size = 18207389, upload-time = "2025-07-22T20:35:52.415Z" }, - { url = "https://files.pythonhosted.org/packages/67/b9/8a384dc22db96f54642889fa609192f79cd58447755d90b1163a9ba5d812/uv-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bffe304fec46d6c264c3b34d58b3358764b7cc17af13bd421e1cd1300b706f93", size = 19633291, upload-time = "2025-07-22T20:35:56.015Z" }, - { url = "https://files.pythonhosted.org/packages/46/c6/fd7855f0aba4a07a5b7a08b95cd6b9d264b534a3bc5095a4acb55aca1d46/uv-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d53b79f8304e98ee082336fc4c204a7892d15f78799ec2d59ceb09b0b82e45d", size = 19381757, upload-time = "2025-07-22T20:36:00.071Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d7/4752b2f6a9aaad324483f433e659b2fe36996015f348fadf172e5056b94e/uv-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f618b18b19fef09b087f2e637b2138f570a3c41beb3de4bbbb905a8b994a22a", size = 18671374, upload-time = "2025-07-22T20:36:03.672Z" }, - { url = "https://files.pythonhosted.org/packages/59/65/f979657cc773f17d0c1ec4a1d17dc1b0673ab484d00833aa5514982faf63/uv-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f2b1eb4bf8b8197683e057ea9a8f0eb63b682ad20bb232b58529abac73a5ea", size = 18666053, upload-time = "2025-07-22T20:36:07.059Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/8d/6ff7188911b671e3eedf87ea2ce4f1e39bbbaf27caa74eef92cd9824051d/uv-0.8.2-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8d3ae329606ba586d317e9a06dca213619afb407bcc584cf6cff2a9b84cf25a2", size = 17462711, upload-time = "2025-07-22T20:36:10.29Z" }, - { url = "https://files.pythonhosted.org/packages/b4/07/4ce00ce186f2c02ca8708a5532102ffa9e0f87a0346521a4db83a04a56b4/uv-0.8.2-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:040ceacad98b85f9ca5ab8c8220270e6a60b2136c4889b334dbfcd13812f895f", size = 17534469, upload-time = "2025-07-22T20:36:13.439Z" }, - { url = "https://files.pythonhosted.org/packages/69/ad/bf37f8bea961278ae5719f23a0998dcae17c431a77d3fa9e0d3a9256a2b2/uv-0.8.2-py3-none-musllinux_1_1_i686.whl", hash = "sha256:9f15bcfd21ca66ec93b77e6ff612798dc75d54260e2ec52f07fe897e91f07367", size = 17787669, upload-time = "2025-07-22T20:36:17.053Z" }, - { url = "https://files.pythonhosted.org/packages/03/49/3b7e1c926bcdf1325aba9647cf1831c55ae84d7d2319a74f2d9ad88535fb/uv-0.8.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9dea45737afec83739189834648b07595ce07f4c201021cc5545ee1759dfe25d", size = 18759496, upload-time = "2025-07-22T20:36:20.371Z" }, - { url = "https://files.pythonhosted.org/packages/d4/74/6b4e52593d1f469250e89ee85964011e9b84b2fc25e15e9353800f36d5ab/uv-0.8.2-py3-none-win32.whl", hash = "sha256:eb37db94c9295bfec77ff65fbc56b9962665d3d5bff0989dcb440c650351ee15", size = 17753486, upload-time = "2025-07-22T20:36:24.074Z" }, - { url = "https://files.pythonhosted.org/packages/79/c8/1d9510c5aeda8c14e6c088f1720e2dd818dc72ed37a0ee40d3d3137aabcf/uv-0.8.2-py3-none-win_amd64.whl", hash = "sha256:0fcaab1172c6fae036a9f16460a71812f7a427b3d3779f99457c2d537a3fc250", size = 19503370, upload-time = "2025-07-22T20:36:27.505Z" }, - { url = "https://files.pythonhosted.org/packages/00/38/8907e8fc94e3c040759180e81d30414734cbee6e575dae7ce9dc9cb1e0fc/uv-0.8.2-py3-none-win_arm64.whl", hash = 
"sha256:af35c0fe23907fc0518832243b561f623a48a058a75ab552204f87960793321b", size = 18145751, upload-time = "2025-07-22T20:36:30.826Z" }, +version = "0.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/e3/2c3cb3e992fa1bf9af590bb37983f13e3ae67155820a09a98945664f71f3/uv-0.8.3.tar.gz", hash = "sha256:2ccaae4c749126c99f6404d67a0ae1eae29cbafb05603d09094a775061fdf4e5", size = 3415565, upload-time = "2025-07-24T21:14:34.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/ab/7b881bb236b9c5f6d99a98adf0c4d1e7c4f0cf4b49051d6d24eb82f19c10/uv-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:ae7efe91dcfc24126fa91e0fb69a1daf6c0e494a781ba192bb0cc62d7ab623ee", size = 17912668, upload-time = "2025-07-24T21:13:50.682Z" }, + { url = "https://files.pythonhosted.org/packages/fa/9b/64d2ed7388ce88971ffb93d45e74465c95bb885bff40c93f5037b7250930/uv-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:966ec7d7f57521fef0fee685d71e183c9cafb358ddcfe27519dfeaf40550f247", size = 17947557, upload-time = "2025-07-24T21:13:54.59Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ba/8ceec5d6a1adf6b827db557077d8059e573a84c3708a70433d22a0470fab/uv-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3f904f574dc2d7aa1d96ddf2483480ecd121dc9d060108cadd8bff100b754b64", size = 16638472, upload-time = "2025-07-24T21:13:57.57Z" }, + { url = "https://files.pythonhosted.org/packages/a3/76/6d2eb90936603756c4a71f9cf5de8d9214fa4d11dcb5a89117389acecd5e/uv-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:8b16f1bddfdf8f7470924ab34a7b55e4c372d5340c7c1e47e7fc84a743dc541f", size = 17221472, upload-time = "2025-07-24T21:14:00.158Z" }, + { url = "https://files.pythonhosted.org/packages/5b/bf/c3e1cc9604b114dfb49a3a40a230b5410fc97776c149ca73bb524990f9ba/uv-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:526f2c3bd6f311ce31f6f7b6b7d818b191f41e76bed3aaab671b716220c02d8f", size = 17607299, upload-time = "2025-07-24T21:14:02.226Z" }, + { url = "https://files.pythonhosted.org/packages/53/16/819f876f5ca2f8989c19d9b65b7d794d60e6cca0d13187bbc8c8b5532b52/uv-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76de331a07e5ae9b6490e70a9439a072b91b3167a5684510af10c2752c4ece9a", size = 18218124, upload-time = "2025-07-24T21:14:04.809Z" }, + { url = "https://files.pythonhosted.org/packages/61/a8/1df852a9153fec0c713358a50cfd7a21a4e17b5ed5704a390c0f3da448ab/uv-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:989898caeb6e972979543b57547d1c28ab8af81ff8fc15921fd354c17d432749", size = 19638846, upload-time = "2025-07-24T21:14:07.074Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/adeedaa009d8d919107c52afb58689d5e9db578b07f8dea5e15e4c738d52/uv-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce7981f4fbeecf93dc5cf0a5a7915e84956fd99ad3ac977c048fe0cfdb1a17e", size = 19384261, upload-time = "2025-07-24T21:14:09.425Z" }, + { url = "https://files.pythonhosted.org/packages/8d/87/b3981f499e2b13c5ef0022fd7809f0fccbecd41282ae4f6a0e3fd5fa1430/uv-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8486f7576d15cc73509f93f47b3190f44701ea36839906369301b58c8604d5db", size = 18673722, upload-time = "2025-07-24T21:14:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/5e/62/0d1ba1c666c5492d3716d8d3fba425f65ed2acc6707544c3cbbd381f6cbe/uv-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1eb7c896fc0d80ed534748aaf46697b6ebc8ce401f1c51666ce0b9923c3db9a", size = 18658829, upload-time = "2025-07-24T21:14:13.798Z" }, + { url = "https://files.pythonhosted.org/packages/cc/ae/11d09be3c74ca4896d55701ebbca7fe7a32db0502cf9f4c57e20bf77bfc4/uv-0.8.3-py3-none-manylinux_2_28_aarch64.whl", hash = 
"sha256:1121ad1c9389b865d029385031d3fd7d90d343c92a2149a4d4aa20bf469cb27f", size = 17460029, upload-time = "2025-07-24T21:14:15.993Z" }, + { url = "https://files.pythonhosted.org/packages/22/47/b67296c62381b8369f082a33d9fdcb7c579ad9922bcce7b09cd4af935dfa/uv-0.8.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5313ee776ad65731ffa8ac585246f987d3a2bf72e6153c12add1fff22ad6e500", size = 18398665, upload-time = "2025-07-24T21:14:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/01/5f/23990de5487085ca86e12f99d0a8f8410419442ffd35c42838675df5549b/uv-0.8.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:daa6e0d657a94f20e962d4a03d833ef7af5c8e51b7c8a2d92ba6cf64a4c07ac1", size = 17560408, upload-time = "2025-07-24T21:14:20.609Z" }, + { url = "https://files.pythonhosted.org/packages/89/42/1a8ce79d2ce7268e52690cd0f1b6c3e6c8d748a68d42de206e37219e9627/uv-0.8.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ad13453ab0a1dfa64a221aac8f52199efdcaa52c97134fffd7bcebed794a6f4b", size = 17758504, upload-time = "2025-07-24T21:14:23.086Z" }, + { url = "https://files.pythonhosted.org/packages/6b/39/ae94e06ac00cb5002e636af0e48c5180fab5b50a463dc96386875ea511ea/uv-0.8.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:5843cc43bafad05cc710d8e31bd347ee37202462a63d32c30746e9df48cfbda2", size = 18741736, upload-time = "2025-07-24T21:14:25.329Z" }, + { url = "https://files.pythonhosted.org/packages/18/e0/a2fe9cc5f7b8815cbf97cb1bf64abb71fcb65f25ca7a5a8cdd4c2e23af97/uv-0.8.3-py3-none-win32.whl", hash = "sha256:17bcdb0615e37cc5f985f7d7546f755ac6343c1dc8bbe876c892437f14f8f904", size = 17723422, upload-time = "2025-07-24T21:14:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c3/da508ec0f6883f1c269a0a477bb6447c81d5383fe3ad5d5ea3d45469fd30/uv-0.8.3-py3-none-win_amd64.whl", hash = "sha256:2e311c029bff2ca07c6ddf877ccc5935cabb78e09b94b53a849542665b6a6fa1", size = 19531666, upload-time = "2025-07-24T21:14:30.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/8d/c0354e416697b4baa7ceaad0e423639b6683d1f8299355e390a64809f7bf/uv-0.8.3-py3-none-win_arm64.whl", hash = "sha256:391c97577048a40fd8c85b370055df6420f26e81df7fa906f0e0ce1aa2af3527", size = 18161557, upload-time = "2025-07-24T21:14:32.482Z" }, ] [[package]] From 897013c4e68910ccef5dae8afbd03be1813417c5 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 29 Jul 2025 16:18:50 -0700 Subject: [PATCH 03/18] ExecutionContext -> WorkflowContext --- .../agent_framework_workflow/core/_edge.py | 4 +-- .../agent_framework_workflow/core/_runner.py | 8 +++--- .../agent_framework_workflow/core/executor.py | 27 +++++++++++-------- .../agent_framework_workflow/core/workflow.py | 24 ++++++++--------- ...ecution_context.py => workflow_context.py} | 8 +++--- 5 files changed, 38 insertions(+), 33 deletions(-) rename python/packages/workflow/agent_framework_workflow/core/{execution_context.py => workflow_context.py} (95%) diff --git a/python/packages/workflow/agent_framework_workflow/core/_edge.py b/python/packages/workflow/agent_framework_workflow/core/_edge.py index c0d3a63a73..b82eaeed72 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/core/_edge.py @@ -5,8 +5,8 @@ from typing import Any, ClassVar from ._shared_state import SharedState -from .execution_context import ExecutionContext from .executor import Executor, ExecutorContext +from .workflow_context import WorkflowContext class Edge: @@ -54,7 +54,7 @@ def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: raise ValueError(f"Invalid edge ID format: {edge_id}") return ids[0], ids[1] - async def send_message(self, data: Any, shared_state: SharedState, ctx: ExecutionContext) -> None: + async def send_message(self, data: Any, shared_state: SharedState, ctx: WorkflowContext) -> None: """Send a message along this edge.""" if not self._edge_group_ids and self._should_route(data): await 
self.target.execute(data, ExecutorContext(self.target.id, shared_state, ctx)) diff --git a/python/packages/workflow/agent_framework_workflow/core/_runner.py b/python/packages/workflow/agent_framework_workflow/core/_runner.py index c6442cbeda..fdd3480fae 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/core/_runner.py @@ -9,8 +9,8 @@ from ._edge import Edge from ._shared_state import SharedState from .events import WorkflowEvent -from .execution_context import ExecutionContext from .executor import Executor +from .workflow_context import WorkflowContext logger = logging.getLogger(__name__) @@ -22,7 +22,7 @@ def __init__( self, edges: list[Edge], shared_state: SharedState, - ctx: ExecutionContext, + ctx: WorkflowContext, max_iterations: int = 100, ): self._edge_map = self._parse_edges(edges) @@ -35,8 +35,8 @@ def __init__( } @property - def execution_context(self) -> ExecutionContext: - """Get the execution context.""" + def workflow_context(self) -> WorkflowContext: + """Get the workflow context.""" return self._ctx async def run_until_convergence(self) -> AsyncIterable[WorkflowEvent]: diff --git a/python/packages/workflow/agent_framework_workflow/core/executor.py b/python/packages/workflow/agent_framework_workflow/core/executor.py index 60558d44cc..4723e66e12 100644 --- a/python/packages/workflow/agent_framework_workflow/core/executor.py +++ b/python/packages/workflow/agent_framework_workflow/core/executor.py @@ -7,7 +7,7 @@ from ._shared_state import SharedState from ._typing_utils import is_instance_of from .events import ExecutorCompleteEvent, ExecutorInvokeEvent, WorkflowEvent -from .execution_context import ExecutionContext, NoopExecutionContext +from .workflow_context import NoopWorkflowContext, WorkflowContext T = TypeVar("T") @@ -15,23 +15,23 @@ class ExecutorContext: """Context for executing an executor. 
- This class provides a way to execute an executor with a specific context. - It is used to manage the execution of tasks in a workflow. + This class is used to provide a way for executors to interact with the workflow + context and shared state, while preventing direct access to the workflow context. """ - def __init__(self, executor_id: str, shared_state: SharedState, execution_context: ExecutionContext): - """Initialize the executor context with the given execution context.""" - self._execution_context = execution_context + def __init__(self, executor_id: str, shared_state: SharedState, workflow_context: WorkflowContext): + """Initialize the executor context with the given workflow context.""" + self._workflow_context = workflow_context self._executor_id = executor_id self._shared_state = shared_state async def send_message(self, message: Any) -> None: - """Send a message to the execution context.""" - await self._execution_context.send_message(self._executor_id, message) + """Send a message to the workflow context.""" + await self._workflow_context.send_message(self._executor_id, message) async def add_event(self, event: WorkflowEvent) -> None: - """Add an event to the execution context.""" - await self._execution_context.add_event(event) + """Add an event to the workflow context.""" + await self._workflow_context.add_event(event) async def get_shared_state(self, key: str) -> Any: """Get a value from the shared state.""" @@ -41,13 +41,18 @@ async def set_shared_state(self, key: str, value: Any) -> None: """Set a value in the shared state.""" await self._shared_state.set(key, value) + @property + def shared_state(self) -> SharedState: + """Get the shared state.""" + return self._shared_state + class NoopExecutorContext(ExecutorContext): """A no-operation executor context that does nothing.""" def __init__(self): """Initialize the noop executor context.""" - super().__init__(executor_id="", shared_state=SharedState(), execution_context=NoopExecutionContext()) + 
super().__init__(executor_id="", shared_state=SharedState(), workflow_context=NoopWorkflowContext()) class Executor(Generic[T], ABC): diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow.py b/python/packages/workflow/agent_framework_workflow/core/workflow.py index 165a400389..3152781627 100644 --- a/python/packages/workflow/agent_framework_workflow/core/workflow.py +++ b/python/packages/workflow/agent_framework_workflow/core/workflow.py @@ -8,8 +8,8 @@ from ._runner import Runner from ._shared_state import SharedState from .events import WorkflowEvent -from .execution_context import ExecutionContext, InProcExecutionContext from .executor import Executor, ExecutorContext +from .workflow_context import InProcWorkflowContext, WorkflowContext class Workflow: @@ -23,20 +23,20 @@ def __init__( self, edges: list[Edge], start_executor: Executor[Any] | str, - execution_context: ExecutionContext, + workflow_context: WorkflowContext, ): """Initialize the workflow with a list of edges. Args: edges: A list of directed edges representing the connections between nodes in the workflow. start_executor: The starting executor for the workflow, which can be an Executor instance or its ID. - execution_context: The ExecutionContext instance to be used during workflow execution. + workflow_context: The WorkflowContext instance to be used during workflow execution. 
""" self._edges = edges self._start_executor = start_executor self._shared_state = SharedState() - self._runner = Runner(self._edges, self._shared_state, execution_context) + self._runner = Runner(self._edges, self._shared_state, workflow_context) async def run_stream( self, @@ -60,7 +60,7 @@ async def run_stream( ExecutorContext( executor.id, self._shared_state, - self._runner.execution_context, + self._runner.workflow_context, ), ) async for event in self._runner.run_until_convergence(): @@ -84,7 +84,7 @@ def __init__(self): """Initialize the WorkflowBuilder with an empty list of edges and no starting executor.""" self._edges: list[Edge] = [] self._start_executor: Executor[Any] | str | None = None - self._execution_context: ExecutionContext | None = None + self._workflow_context: WorkflowContext | None = None def add_edge( self, @@ -182,13 +182,13 @@ def set_start_executor(self, executor: Executor[Any] | str) -> "Self": self._start_executor = executor return self - def set_execution_context(self, execution_context: ExecutionContext) -> "Self": - """Set the execution context for the workflow. + def set_workflow_context(self, workflow_context: WorkflowContext) -> "Self": + """Set the workflow context for the workflow. Args: - execution_context: The ExecutionContext instance to be used during workflow execution. + workflow_context: The WorkflowContext instance to be used during workflow execution. 
""" - self._execution_context = execution_context + self._workflow_context = workflow_context return self def build(self) -> Workflow: @@ -200,6 +200,6 @@ def build(self) -> Workflow: if not self._start_executor: raise ValueError("Starting executor must be set before building the workflow.") - execution_context = self._execution_context or InProcExecutionContext() + workflow_context = self._workflow_context or InProcWorkflowContext() - return Workflow(self._edges, self._start_executor, execution_context) + return Workflow(self._edges, self._start_executor, workflow_context) diff --git a/python/packages/workflow/agent_framework_workflow/core/execution_context.py b/python/packages/workflow/agent_framework_workflow/core/workflow_context.py similarity index 95% rename from python/packages/workflow/agent_framework_workflow/core/execution_context.py rename to python/packages/workflow/agent_framework_workflow/core/workflow_context.py index 0d7d2b643f..40ff814a96 100644 --- a/python/packages/workflow/agent_framework_workflow/core/execution_context.py +++ b/python/packages/workflow/agent_framework_workflow/core/workflow_context.py @@ -7,8 +7,8 @@ @runtime_checkable -class ExecutionContext(Protocol): - """Protocol for execution context used by executors.""" +class WorkflowContext(Protocol): + """Protocol for workflow context used by executors.""" async def send_message(self, source_id: str, message: Any) -> None: """Send a message from the executor to the context. @@ -60,7 +60,7 @@ async def has_events(self) -> bool: ... 
-class InProcExecutionContext(ExecutionContext): +class InProcWorkflowContext(WorkflowContext): """In-process execution context for testing purposes.""" def __init__(self): @@ -101,7 +101,7 @@ async def has_events(self) -> bool: return bool(self._events) -class NoopExecutionContext(ExecutionContext): +class NoopWorkflowContext(WorkflowContext): """A no-operation execution context that does nothing.""" async def send_message(self, source_id: str, message: Any) -> None: From 45473fe46295bd390c1cfeafc79baf89a0416ecb Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 29 Jul 2025 16:23:12 -0700 Subject: [PATCH 04/18] Address comments 1 --- .../packages/workflow/agent_framework_workflow/__init__.py | 1 - .../workflow/agent_framework_workflow/core/_edge.py | 6 +++--- .../getting_started/workflow/step_03_simple_group_chat.py | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index b9cbf88c43..5829611ebb 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -33,7 +33,6 @@ "ExecutorInvokeEvent", "HumanInTheLoopEvent", "WorkflowBuilder", - "WorkflowBuilder", "WorkflowCompletedEvent", "WorkflowEvent", "WorkflowStartedEvent", diff --git a/python/packages/workflow/agent_framework_workflow/core/_edge.py b/python/packages/workflow/agent_framework_workflow/core/_edge.py index b82eaeed72..df9f2ec2c9 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/core/_edge.py @@ -76,9 +76,9 @@ async def send_message(self, data: Any, shared_state: SharedState, ctx: Workflow await held_shared_state.get_within_hold(edge_id) for edge_id in self._edge_group_ids ] + [data] # Remove the data from the shared state after retrieving it - await asyncio.gather(*[ - 
held_shared_state.delete_within_hold(edge_id) for edge_id in self._edge_group_ids - ]) + await asyncio.gather( + *(held_shared_state.delete_within_hold(edge_id) for edge_id in self._edge_group_ids) + ) if messages: await self.target.execute(messages, ExecutorContext(self.target.id, shared_state, ctx)) diff --git a/python/samples/getting_started/workflow/step_03_simple_group_chat.py b/python/samples/getting_started/workflow/step_03_simple_group_chat.py index 47601edb11..0ad09aa0ef 100644 --- a/python/samples/getting_started/workflow/step_03_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_03_simple_group_chat.py @@ -29,7 +29,7 @@ class GroupChatMessage: @dataclass class AgentSelectionDecision(GroupChatMessage): - """A data class to hold the decision made by the HILDe executor.""" + """A data class to hold the decision made by the manager executor.""" selection: str From 6ce7b98c3b678ea5810e42c7b16ccad7fb1dae1b Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 29 Jul 2025 16:29:11 -0700 Subject: [PATCH 05/18] Fix mypy --- .../workflow/agent_framework_workflow/core/workflow.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow.py b/python/packages/workflow/agent_framework_workflow/core/workflow.py index 3152781627..7351178a32 100644 --- a/python/packages/workflow/agent_framework_workflow/core/workflow.py +++ b/python/packages/workflow/agent_framework_workflow/core/workflow.py @@ -1,8 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
+import sys from collections.abc import AsyncIterable, Callable from enum import Enum -from typing import Any, Self +from typing import Any from ._edge import Edge from ._runner import Runner @@ -11,6 +12,11 @@ from .executor import Executor, ExecutorContext from .workflow_context import InProcWorkflowContext, WorkflowContext +if sys.version_info >= (3, 11): + from typing import Self # pragma: no cover +else: + from typing_extensions import Self # pragma: no cover + class Workflow: """A class representing a workflow that can be executed. From da6a1bb93869b8fc939f6a46d8de6aeebbda0e24 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Wed, 30 Jul 2025 15:02:57 -0700 Subject: [PATCH 06/18] flatting folder structure, and rename contexts --- .../main/agent_framework/workflow/__init__.py | 2 +- .../agent_framework/workflow/__init__.pyi | 6 +- .../agent_framework_workflow/__init__.py | 11 ++-- .../{core => }/_edge.py | 11 ++-- .../{core/events.py => _events.py} | 0 .../{core/executor.py => _executor.py} | 63 +++---------------- .../{core => }/_runner.py | 25 ++------ ...workflow_context.py => _runner_context.py} | 41 +++--------- .../{core => }/_shared_state.py | 0 .../{core => }/_typing_utils.py | 0 .../{core/workflow.py => _workflow.py} | 49 ++++++++++----- .../_workflow_context.py | 42 +++++++++++++ .../agent_framework_workflow/core/__init__.py | 0 .../workflow/step_01_simple_executor.py | 31 --------- ... => step_01_simple_workflow_sequential.py} | 8 +-- ...y => step_02_simple_workflow_condition.py} | 9 ++- ...oop.py => step_03_simple_workflow_loop.py} | 31 ++++----- ...p_chat.py => step_04_simple_group_chat.py} | 12 ++-- ... 
=> step_05_simple_group_chat_with_hil.py} | 21 +++---- 19 files changed, 138 insertions(+), 224 deletions(-) rename python/packages/workflow/agent_framework_workflow/{core => }/_edge.py (93%) rename python/packages/workflow/agent_framework_workflow/{core/events.py => _events.py} (100%) rename python/packages/workflow/agent_framework_workflow/{core/executor.py => _executor.py} (50%) rename python/packages/workflow/agent_framework_workflow/{core => }/_runner.py (79%) rename python/packages/workflow/agent_framework_workflow/{core/workflow_context.py => _runner_context.py} (72%) rename python/packages/workflow/agent_framework_workflow/{core => }/_shared_state.py (100%) rename python/packages/workflow/agent_framework_workflow/{core => }/_typing_utils.py (100%) rename python/packages/workflow/agent_framework_workflow/{core/workflow.py => _workflow.py} (79%) create mode 100644 python/packages/workflow/agent_framework_workflow/_workflow_context.py delete mode 100644 python/packages/workflow/agent_framework_workflow/core/__init__.py delete mode 100644 python/samples/getting_started/workflow/step_01_simple_executor.py rename python/samples/getting_started/workflow/{step_02_simple_workflow_sequential.py => step_01_simple_workflow_sequential.py} (89%) rename python/samples/getting_started/workflow/{step_02a_simple_workflow_condition.py => step_02_simple_workflow_condition.py} (91%) rename python/samples/getting_started/workflow/{step_02b_simple_workflow_loop.py => step_03_simple_workflow_loop.py} (82%) rename python/samples/getting_started/workflow/{step_03_simple_group_chat.py => step_04_simple_group_chat.py} (94%) rename python/samples/getting_started/workflow/{step_04_simple_group_chat_with_hil.py => step_05_simple_group_chat_with_hil.py} (93%) diff --git a/python/packages/main/agent_framework/workflow/__init__.py b/python/packages/main/agent_framework/workflow/__init__.py index 3590050f91..b726a2f0b2 100644 --- a/python/packages/main/agent_framework/workflow/__init__.py 
+++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -7,7 +7,7 @@ PACKAGE_EXTRA = "workflow" _IMPORTS = [ "Executor", - "ExecutorContext", + "WorkflowContext", "__version__", "events", "WorkflowBuilder", diff --git a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi index 18119932ee..a30b0c9bcb 100644 --- a/python/packages/main/agent_framework/workflow/__init__.pyi +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -5,16 +5,15 @@ from agent_framework_workflow import ( AgentRunStreamingEvent, Executor, ExecutorCompleteEvent, - ExecutorContext, ExecutorEvent, ExecutorInvokeEvent, HumanInTheLoopEvent, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, WorkflowEvent, WorkflowStartedEvent, __version__, - events, output_message_types, ) @@ -23,15 +22,14 @@ __all__ = [ "AgentRunStreamingEvent", "Executor", "ExecutorCompleteEvent", - "ExecutorContext", "ExecutorEvent", "ExecutorInvokeEvent", "HumanInTheLoopEvent", "WorkflowBuilder", "WorkflowCompletedEvent", + "WorkflowContext", "WorkflowEvent", "WorkflowStartedEvent", "__version__", - "events", "output_message_types", ] diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index 5829611ebb..4335a6f13a 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -2,8 +2,7 @@ import importlib.metadata -from .core import events -from .core.events import ( +from ._events import ( AgentRunEvent, AgentRunStreamingEvent, ExecutorCompleteEvent, @@ -14,8 +13,9 @@ WorkflowEvent, WorkflowStartedEvent, ) -from .core.executor import Executor, ExecutorContext, output_message_types -from .core.workflow import WorkflowBuilder +from ._executor import Executor, output_message_types +from ._workflow import WorkflowBuilder +from ._workflow_context import 
WorkflowContext try: __version__ = importlib.metadata.version(__name__) @@ -28,15 +28,14 @@ "AgentRunStreamingEvent", "Executor", "ExecutorCompleteEvent", - "ExecutorContext", "ExecutorEvent", "ExecutorInvokeEvent", "HumanInTheLoopEvent", "WorkflowBuilder", "WorkflowCompletedEvent", + "WorkflowContext", "WorkflowEvent", "WorkflowStartedEvent", "__version__", - "events", "output_message_types", ] diff --git a/python/packages/workflow/agent_framework_workflow/core/_edge.py b/python/packages/workflow/agent_framework_workflow/_edge.py similarity index 93% rename from python/packages/workflow/agent_framework_workflow/core/_edge.py rename to python/packages/workflow/agent_framework_workflow/_edge.py index df9f2ec2c9..861fd0f9bd 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/_edge.py @@ -4,9 +4,10 @@ from collections.abc import Callable from typing import Any, ClassVar +from ._executor import Executor +from ._runner_context import RunnerContext from ._shared_state import SharedState -from .executor import Executor, ExecutorContext -from .workflow_context import WorkflowContext +from ._workflow_context import WorkflowContext class Edge: @@ -54,10 +55,10 @@ def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: raise ValueError(f"Invalid edge ID format: {edge_id}") return ids[0], ids[1] - async def send_message(self, data: Any, shared_state: SharedState, ctx: WorkflowContext) -> None: + async def send_message(self, data: Any, shared_state: SharedState, ctx: RunnerContext) -> None: """Send a message along this edge.""" if not self._edge_group_ids and self._should_route(data): - await self.target.execute(data, ExecutorContext(self.target.id, shared_state, ctx)) + await self.target.execute(data, WorkflowContext(self.target.id, shared_state, ctx)) elif self._edge_group_ids: # Logic: # 1. 
If not all edges in the edge group have data in the shared state, @@ -81,7 +82,7 @@ async def send_message(self, data: Any, shared_state: SharedState, ctx: Workflow ) if messages: - await self.target.execute(messages, ExecutorContext(self.target.id, shared_state, ctx)) + await self.target.execute(messages, WorkflowContext(self.target.id, shared_state, ctx)) def _should_route(self, data: Any) -> bool: """Determine if message should be routed through this edge.""" diff --git a/python/packages/workflow/agent_framework_workflow/core/events.py b/python/packages/workflow/agent_framework_workflow/_events.py similarity index 100% rename from python/packages/workflow/agent_framework_workflow/core/events.py rename to python/packages/workflow/agent_framework_workflow/_events.py diff --git a/python/packages/workflow/agent_framework_workflow/core/executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py similarity index 50% rename from python/packages/workflow/agent_framework_workflow/core/executor.py rename to python/packages/workflow/agent_framework_workflow/_executor.py index 4723e66e12..02496b22aa 100644 --- a/python/packages/workflow/agent_framework_workflow/core/executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -4,57 +4,13 @@ from abc import ABC, abstractmethod from typing import Any, Generic, TypeVar, get_args -from ._shared_state import SharedState +from ._events import ExecutorCompleteEvent, ExecutorInvokeEvent from ._typing_utils import is_instance_of -from .events import ExecutorCompleteEvent, ExecutorInvokeEvent, WorkflowEvent -from .workflow_context import NoopWorkflowContext, WorkflowContext +from ._workflow_context import WorkflowContext T = TypeVar("T") -class ExecutorContext: - """Context for executing an executor. - - This class is used to provide a way for executors to interact with the workflow - context and shared state, while preventing direct access to the workflow context. 
- """ - - def __init__(self, executor_id: str, shared_state: SharedState, workflow_context: WorkflowContext): - """Initialize the executor context with the given workflow context.""" - self._workflow_context = workflow_context - self._executor_id = executor_id - self._shared_state = shared_state - - async def send_message(self, message: Any) -> None: - """Send a message to the workflow context.""" - await self._workflow_context.send_message(self._executor_id, message) - - async def add_event(self, event: WorkflowEvent) -> None: - """Add an event to the workflow context.""" - await self._workflow_context.add_event(event) - - async def get_shared_state(self, key: str) -> Any: - """Get a value from the shared state.""" - return await self._shared_state.get(key) - - async def set_shared_state(self, key: str, value: Any) -> None: - """Set a value in the shared state.""" - await self._shared_state.set(key, value) - - @property - def shared_state(self) -> SharedState: - """Get the shared state.""" - return self._shared_state - - -class NoopExecutorContext(ExecutorContext): - """A no-operation executor context that does nothing.""" - - def __init__(self): - """Initialize the noop executor context.""" - super().__init__(executor_id="", shared_state=SharedState(), workflow_context=NoopWorkflowContext()) - - class Executor(Generic[T], ABC): """An abstract base class for executing tasks in a workflow. @@ -72,7 +28,7 @@ def __init__(self, id: str | None = None): self._input_type = args[0] @abstractmethod - async def _execute(self, data: T, ctx: ExecutorContext) -> Any: + async def _execute(self, data: T, ctx: WorkflowContext) -> None: """Execute the task using the registered handlers. 
Args: @@ -81,21 +37,16 @@ async def _execute(self, data: T, ctx: ExecutorContext) -> Any: """ raise NotImplementedError("Subclasses must implement this method.") - async def execute(self, data: T, ctx: ExecutorContext | None = None) -> Any: + async def execute(self, data: T, ctx: WorkflowContext) -> None: """Execute a task with the given data and context. Args: data: The data of type T to be processed. ctx: The execution context containing additional information. """ - if ctx is None: - ctx = NoopExecutorContext() - - await ctx.add_event(ExecutorInvokeEvent(executor_id=self._id, data=data)) - result = await self._execute(data, ctx) - await ctx.add_event(ExecutorCompleteEvent(executor_id=self._id, data=result)) - - return result + await ctx.add_event(ExecutorInvokeEvent(executor_id=self._id)) + await self._execute(data, ctx) + await ctx.add_event(ExecutorCompleteEvent(executor_id=self._id)) @property def id(self) -> str: diff --git a/python/packages/workflow/agent_framework_workflow/core/_runner.py b/python/packages/workflow/agent_framework_workflow/_runner.py similarity index 79% rename from python/packages/workflow/agent_framework_workflow/core/_runner.py rename to python/packages/workflow/agent_framework_workflow/_runner.py index fdd3480fae..d8e5b01de7 100644 --- a/python/packages/workflow/agent_framework_workflow/core/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/_runner.py @@ -7,10 +7,9 @@ from typing import Any from ._edge import Edge +from ._events import WorkflowEvent +from ._runner_context import RunnerContext from ._shared_state import SharedState -from .events import WorkflowEvent -from .executor import Executor -from .workflow_context import WorkflowContext logger = logging.getLogger(__name__) @@ -22,7 +21,7 @@ def __init__( self, edges: list[Edge], shared_state: SharedState, - ctx: WorkflowContext, + ctx: RunnerContext, max_iterations: int = 100, ): self._edge_map = self._parse_edges(edges) @@ -30,12 +29,9 @@ def __init__( 
self._iteration = 0 self._max_iterations = max_iterations self._shared_state = shared_state - self._executors = {edge.source_id: edge.source for edge in edges} | { - edge.target_id: edge.target for edge in edges - } @property - def workflow_context(self) -> WorkflowContext: + def context(self) -> RunnerContext: """Get the workflow context.""" return self._ctx @@ -99,16 +95,3 @@ def _parse_edges(self, edges: list[Edge]) -> dict[str, list[Edge]]: for edge in edges: parsed[edge.source_id].append(edge) return parsed - - def get_executor_by_id(self, executor_id: str) -> Executor[Any]: - """Get an executor by its ID. - - Args: - executor_id: The ID of the executor to retrieve. - - Returns: - The Executor instance corresponding to the given ID. - """ - if executor_id not in self._executors: - raise ValueError(f"Executor with ID {executor_id} not found.") - return self._executors[executor_id] diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow_context.py b/python/packages/workflow/agent_framework_workflow/_runner_context.py similarity index 72% rename from python/packages/workflow/agent_framework_workflow/core/workflow_context.py rename to python/packages/workflow/agent_framework_workflow/_runner_context.py index 40ff814a96..1def7b1b5e 100644 --- a/python/packages/workflow/agent_framework_workflow/core/workflow_context.py +++ b/python/packages/workflow/agent_framework_workflow/_runner_context.py @@ -1,14 +1,17 @@ # Copyright (c) Microsoft. All rights reserved. 
+import logging from collections import defaultdict from typing import Any, Protocol, runtime_checkable -from .events import WorkflowEvent +from ._events import WorkflowEvent + +logger = logging.getLogger(__name__) @runtime_checkable -class WorkflowContext(Protocol): - """Protocol for workflow context used by executors.""" +class RunnerContext(Protocol): + """Protocol for the execution context used by the runner.""" async def send_message(self, source_id: str, message: Any) -> None: """Send a message from the executor to the context. @@ -60,8 +63,8 @@ async def has_events(self) -> bool: ... -class InProcWorkflowContext(WorkflowContext): - """In-process execution context for testing purposes.""" +class InProcRunnerContext(RunnerContext): + """In-process execution context for local execution of workflows.""" def __init__(self): """Initialize the in-process execution context.""" @@ -99,31 +102,3 @@ async def drain_events(self) -> list[WorkflowEvent]: async def has_events(self) -> bool: """Check if there are any events in the context.""" return bool(self._events) - - -class NoopWorkflowContext(WorkflowContext): - """A no-operation execution context that does nothing.""" - - async def send_message(self, source_id: str, message: Any) -> None: - """Override to do nothing.""" - pass - - async def drain_messages(self) -> dict[str, list[Any]]: - """Override to return an empty dictionary.""" - return {} - - async def has_messages(self) -> bool: - """Override to always return False.""" - return False - - async def add_event(self, event: WorkflowEvent) -> None: - """Override to do nothing.""" - pass - - async def drain_events(self) -> list[WorkflowEvent]: - """Override to return an empty list.""" - return [] - - async def has_events(self) -> bool: - """Override to always return False.""" - return False diff --git a/python/packages/workflow/agent_framework_workflow/core/_shared_state.py b/python/packages/workflow/agent_framework_workflow/_shared_state.py similarity index 100% 
rename from python/packages/workflow/agent_framework_workflow/core/_shared_state.py rename to python/packages/workflow/agent_framework_workflow/_shared_state.py diff --git a/python/packages/workflow/agent_framework_workflow/core/_typing_utils.py b/python/packages/workflow/agent_framework_workflow/_typing_utils.py similarity index 100% rename from python/packages/workflow/agent_framework_workflow/core/_typing_utils.py rename to python/packages/workflow/agent_framework_workflow/_typing_utils.py diff --git a/python/packages/workflow/agent_framework_workflow/core/workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py similarity index 79% rename from python/packages/workflow/agent_framework_workflow/core/workflow.py rename to python/packages/workflow/agent_framework_workflow/_workflow.py index 7351178a32..98ca51cd6b 100644 --- a/python/packages/workflow/agent_framework_workflow/core/workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -6,11 +6,12 @@ from typing import Any from ._edge import Edge +from ._events import WorkflowEvent +from ._executor import Executor from ._runner import Runner +from ._runner_context import InProcRunnerContext, RunnerContext from ._shared_state import SharedState -from .events import WorkflowEvent -from .executor import Executor, ExecutorContext -from .workflow_context import InProcWorkflowContext, WorkflowContext +from ._workflow_context import WorkflowContext if sys.version_info >= (3, 11): from typing import Self # pragma: no cover @@ -29,20 +30,23 @@ def __init__( self, edges: list[Edge], start_executor: Executor[Any] | str, - workflow_context: WorkflowContext, + runner_context: RunnerContext, ): """Initialize the workflow with a list of edges. Args: edges: A list of directed edges representing the connections between nodes in the workflow. start_executor: The starting executor for the workflow, which can be an Executor instance or its ID. 
- workflow_context: The WorkflowContext instance to be used during workflow execution. + runner_context: The RunnerContext instance to be used during workflow execution. """ self._edges = edges self._start_executor = start_executor + self._executors = {edge.source_id: edge.source for edge in edges} | { + edge.target_id: edge.target for edge in edges + } self._shared_state = SharedState() - self._runner = Runner(self._edges, self._shared_state, workflow_context) + self._runner = Runner(self._edges, self._shared_state, runner_context) async def run_stream( self, @@ -59,19 +63,32 @@ async def run_stream( executor = self._start_executor if isinstance(executor, str): - executor = self._runner.get_executor_by_id(executor) + executor = self._get_executor_by_id(executor) await executor.execute( message, - ExecutorContext( + WorkflowContext( executor.id, self._shared_state, - self._runner.workflow_context, + self._runner.context, ), ) async for event in self._runner.run_until_convergence(): yield event + def _get_executor_by_id(self, executor_id: str) -> Executor[Any]: + """Get an executor by its ID. + + Args: + executor_id: The ID of the executor to retrieve. + + Returns: + The Executor instance corresponding to the given ID. 
+ """ + if executor_id not in self._executors: + raise ValueError(f"Executor with ID {executor_id} not found.") + return self._executors[executor_id] + class Activation(Enum): """Enum to represent the activation condition for edges in a fan in.""" @@ -90,7 +107,7 @@ def __init__(self): """Initialize the WorkflowBuilder with an empty list of edges and no starting executor.""" self._edges: list[Edge] = [] self._start_executor: Executor[Any] | str | None = None - self._workflow_context: WorkflowContext | None = None + self._runner_context: RunnerContext | None = None def add_edge( self, @@ -188,13 +205,13 @@ def set_start_executor(self, executor: Executor[Any] | str) -> "Self": self._start_executor = executor return self - def set_workflow_context(self, workflow_context: WorkflowContext) -> "Self": - """Set the workflow context for the workflow. + def set_runner_context(self, runner_context: RunnerContext) -> "Self": + """Set the runner context for the workflow. Args: - workflow_context: The WorkflowContext instance to be used during workflow execution. + runner_context: The RunnerContext instance to be used during workflow execution. 
""" - self._workflow_context = workflow_context + self._runner_context = runner_context return self def build(self) -> Workflow: @@ -206,6 +223,6 @@ def build(self) -> Workflow: if not self._start_executor: raise ValueError("Starting executor must be set before building the workflow.") - workflow_context = self._workflow_context or InProcWorkflowContext() + runner_context = self._runner_context or InProcRunnerContext() - return Workflow(self._edges, self._start_executor, workflow_context) + return Workflow(self._edges, self._start_executor, runner_context) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow_context.py b/python/packages/workflow/agent_framework_workflow/_workflow_context.py new file mode 100644 index 0000000000..4dfbaaef9d --- /dev/null +++ b/python/packages/workflow/agent_framework_workflow/_workflow_context.py @@ -0,0 +1,42 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any + +from ._events import WorkflowEvent +from ._runner_context import RunnerContext +from ._shared_state import SharedState + + +class WorkflowContext: + """Context for executors in a workflow. + + This class is used to provide a way for executors to interact with the workflow + context and shared state, while preventing direct access to the runtime context. 
+ """ + + def __init__(self, executor_id: str, shared_state: SharedState, runner_context: RunnerContext): + """Initialize the executor context with the given workflow context.""" + self._runner_context = runner_context + self._executor_id = executor_id + self._shared_state = shared_state + + async def send_message(self, message: Any) -> None: + """Send a message to the workflow context.""" + await self._runner_context.send_message(self._executor_id, message) + + async def add_event(self, event: WorkflowEvent) -> None: + """Add an event to the workflow context.""" + await self._runner_context.add_event(event) + + async def get_shared_state(self, key: str) -> Any: + """Get a value from the shared state.""" + return await self._shared_state.get(key) + + async def set_shared_state(self, key: str, value: Any) -> None: + """Set a value in the shared state.""" + await self._shared_state.set(key, value) + + @property + def shared_state(self) -> SharedState: + """Get the shared state.""" + return self._shared_state diff --git a/python/packages/workflow/agent_framework_workflow/core/__init__.py b/python/packages/workflow/agent_framework_workflow/core/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python/samples/getting_started/workflow/step_01_simple_executor.py b/python/samples/getting_started/workflow/step_01_simple_executor.py deleted file mode 100644 index 59dd39a88b..0000000000 --- a/python/samples/getting_started/workflow/step_01_simple_executor.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import sys - -from agent_framework.workflow import Executor, ExecutorContext - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - - -class SimpleExecutor(Executor[str]): - """A simple executor that processes string messages.""" - - @override - async def _execute(self, data: str, ctx: ExecutorContext) -> str: - """Execute the task by converting the input string to uppercase.""" - return data.upper() - - -async def main(): - """Main function to run the SimpleExecutor.""" - executor = SimpleExecutor() - result = await executor.execute("hello world") - print(result) # Output: HELLO WORLD - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py similarity index 89% rename from python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py rename to python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py index a6025098be..cb8deeb58f 100644 --- a/python/samples/getting_started/workflow/step_02_simple_workflow_sequential.py +++ b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py @@ -5,9 +5,9 @@ from agent_framework.workflow import ( Executor, - ExecutorContext, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, output_message_types, ) @@ -22,12 +22,11 @@ class UpperCaseExecutor(Executor[str]): """An executor that converts text to uppercase.""" @override - async def _execute(self, data: str, ctx: ExecutorContext) -> str: + async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by converting the input string to uppercase.""" result = data.upper() await ctx.send_message(result) - return result @output_message_types(str) @@ -35,13 +34,12 @@ class ReverseTextExecutor(Executor[str]): """An 
executor that reverses text.""" @override - async def _execute(self, data: str, ctx: ExecutorContext) -> str: + async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by reversing the input string.""" result = data[::-1] await ctx.send_message(result) await ctx.add_event(WorkflowCompletedEvent(result)) - return result async def main(): diff --git a/python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py similarity index 91% rename from python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py rename to python/samples/getting_started/workflow/step_02_simple_workflow_condition.py index cff7c4b487..7113c2b7df 100644 --- a/python/samples/getting_started/workflow/step_02a_simple_workflow_condition.py +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py @@ -5,9 +5,9 @@ from agent_framework.workflow import ( Executor, - ExecutorContext, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, output_message_types, ) @@ -27,12 +27,11 @@ def __init__(self, spam_keywords: list[str], id: str | None = None): self._spam_keywords = spam_keywords @override - async def _execute(self, data: str, ctx: ExecutorContext) -> bool: + async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Determine if the input string is spam.""" result = any(keyword in data.lower() for keyword in self._spam_keywords) await ctx.send_message(result) - return result @output_message_types() @@ -40,7 +39,7 @@ class RespondToMessageExecutor(Executor[bool]): """An executor that responds to a message based on spam detection.""" @override - async def _execute(self, data: bool, ctx: ExecutorContext) -> None: + async def _execute(self, data: bool, ctx: WorkflowContext) -> None: """Respond with a message based on whether the input is spam.""" if data is True: raise RuntimeError("Input is spam, cannot respond.") @@ 
-56,7 +55,7 @@ class RemoveSpamExecutor(Executor[bool]): """An executor that removes spam messages.""" @override - async def _execute(self, data: bool, ctx: ExecutorContext) -> None: + async def _execute(self, data: bool, ctx: WorkflowContext) -> None: """Remove the spam message.""" if data is False: raise RuntimeError("Input is not spam, cannot remove.") diff --git a/python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py similarity index 82% rename from python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py rename to python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index 2266ea2c68..d2d66dec5e 100644 --- a/python/samples/getting_started/workflow/step_02b_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -7,9 +7,9 @@ from agent_framework.workflow import ( Executor, ExecutorCompleteEvent, - ExecutorContext, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, output_message_types, ) @@ -43,34 +43,28 @@ def __init__(self, bound: tuple[int, int], id: str | None = None): self._upper = bound[1] @override - async def _execute(self, data: NumberSignal, ctx: ExecutorContext) -> int: + async def _execute(self, data: NumberSignal, ctx: WorkflowContext) -> None: """Execute the task by guessing a number.""" if data == NumberSignal.INIT: self._guess = (self._lower + self._upper) // 2 await ctx.send_message(self._guess) - return self._guess - - if data == NumberSignal.MATCHED: + elif data == NumberSignal.MATCHED: # The previous guess was correct. await ctx.add_event(WorkflowCompletedEvent(f"Guessed the number: {self._guess}")) - return self._guess - - if data == NumberSignal.ABOVE: + elif data == NumberSignal.ABOVE: # The previous guess was too low. # Update the lower bound to the previous guess. # Generate a new number that is between the new bounds. 
self._lower = self._guess + 1 self._guess = (self._lower + self._upper) // 2 await ctx.send_message(self._guess) - return self._guess - - # The previous guess was too high. - # Update the upper bound to the previous guess. - # Generate a new number that is between the new bounds. - self._upper = self._guess - 1 - self._guess = (self._lower + self._upper) // 2 - await ctx.send_message(self._guess) - return self._guess + else: + # The previous guess was too high. + # Update the upper bound to the previous guess. + # Generate a new number that is between the new bounds. + self._upper = self._guess - 1 + self._guess = (self._lower + self._upper) // 2 + await ctx.send_message(self._guess) @output_message_types(NumberSignal) @@ -83,7 +77,7 @@ def __init__(self, target: int, id: str | None = None): self._target = target @override - async def _execute(self, data: int, ctx: ExecutorContext) -> NumberSignal: + async def _execute(self, data: int, ctx: WorkflowContext) -> None: """Judge the guessed number.""" if data == self._target: result = NumberSignal.MATCHED @@ -93,7 +87,6 @@ async def _execute(self, data: int, ctx: ExecutorContext) -> NumberSignal: result = NumberSignal.BELOW await ctx.send_message(result) - return result async def main(): diff --git a/python/samples/getting_started/workflow/step_03_simple_group_chat.py b/python/samples/getting_started/workflow/step_04_simple_group_chat.py similarity index 94% rename from python/samples/getting_started/workflow/step_03_simple_group_chat.py rename to python/samples/getting_started/workflow/step_04_simple_group_chat.py index 0ad09aa0ef..7d58a37117 100644 --- a/python/samples/getting_started/workflow/step_03_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat.py @@ -8,9 +8,9 @@ from agent_framework.workflow import ( AgentRunEvent, Executor, - ExecutorContext, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, output_message_types, ) @@ -47,13 +47,13 @@ def __init__(self, 
members: list[str], max_round: int, id: str | None = None): self._chat_history: list[ChatMessage] = [] @override - async def _execute(self, data: list[ChatMessage], ctx: ExecutorContext) -> AgentSelectionDecision | None: + async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" self._chat_history.extend(data) if self._should_terminate(): await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) - return None + return self._current_round += 1 selection_decision = AgentSelectionDecision( @@ -62,8 +62,6 @@ async def _execute(self, data: list[ChatMessage], ctx: ExecutorContext) -> Agent ) await ctx.send_message(selection_decision) - return selection_decision - def _should_terminate(self) -> bool: """Determine if the group chat should terminate based on the current round.""" return self._current_round >= self._max_round @@ -78,7 +76,7 @@ class FakeAgentExecutor(Executor[AgentSelectionDecision]): """An executor that simulates a group chat agent A.""" @override - async def _execute(self, data: AgentSelectionDecision, ctx: ExecutorContext) -> list[ChatMessage]: + async def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> None: """Simulate a response.""" response = ChatResponse( messages=[ @@ -93,8 +91,6 @@ async def _execute(self, data: AgentSelectionDecision, ctx: ExecutorContext) -> await ctx.add_event(AgentRunEvent(self.id, data=response)) await ctx.send_message(response.messages) - return response.messages - async def main(): executor_a = FakeAgentExecutor(id="executor_a") diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py similarity index 93% rename from python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py rename to 
python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index 4be653b33d..e414cb79cf 100644 --- a/python/samples/getting_started/workflow/step_04_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -8,10 +8,10 @@ from agent_framework.workflow import ( AgentRunEvent, Executor, - ExecutorContext, HumanInTheLoopEvent, WorkflowBuilder, WorkflowCompletedEvent, + WorkflowContext, output_message_types, ) @@ -47,22 +47,18 @@ def __init__(self, members: list[str], id: str | None = None): self._chat_history: list[ChatMessage] = [] @override - async def _execute( - self, - data: list[ChatMessage], - ctx: ExecutorContext, - ) -> AgentSelectionDecision | list[ChatMessage] | None: + async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" self._chat_history.extend(data) if self._should_terminate(): await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) - return None + return if self._should_request_hil(): # Request human intervention if the last message was from the assistant await ctx.send_message(self._chat_history) - return self._chat_history + return self._current_round += 1 selection_decision = AgentSelectionDecision( @@ -71,8 +67,6 @@ async def _execute( ) await ctx.send_message(selection_decision) - return selection_decision - def _should_terminate(self) -> bool: """Determine if the group chat should terminate based on the last message.""" if len(self._chat_history) == 0: @@ -105,18 +99,17 @@ def __init__(self, id: str | None = None): self._is_waiting_for_human_input = False @override - async def _execute(self, data: list[ChatMessage], ctx: ExecutorContext) -> list[ChatMessage] | None: + async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: """Simulate a human-in-the-loop response.""" if not self._is_waiting_for_human_input: # If 
it's not waiting but received a message, it means it should prompt for human input. self._is_waiting_for_human_input = True await ctx.add_event(HumanInTheLoopEvent(executor_id=self.id)) - return None + return self._is_waiting_for_human_input = False # If it is waiting, it means the human has provided input. It should return the messages. await ctx.send_message(data) - return data @output_message_types(list[ChatMessage]) @@ -124,7 +117,7 @@ class FakeAgentExecutor(Executor[AgentSelectionDecision]): """An executor that simulates a group chat agent A.""" @override - async def _execute(self, data: AgentSelectionDecision, ctx: ExecutorContext) -> None: + async def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> None: """Simulate a response.""" response = ChatResponse( messages=[ From 0cf85359fca99df1fd2d9d918112572fd705d62c Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Wed, 30 Jul 2025 16:28:36 -0700 Subject: [PATCH 07/18] Remove add_loop --- .../agent_framework_workflow/_workflow.py | 18 ------ .../step_01_simple_workflow_sequential.py | 18 +++++- .../step_02_simple_workflow_condition.py | 57 ++++++++++++++----- .../workflow/step_03_simple_workflow_loop.py | 13 ++++- .../workflow/step_04_simple_group_chat.py | 19 ++++++- .../step_05_simple_group_chat_with_hil.py | 23 ++++++-- 6 files changed, 106 insertions(+), 42 deletions(-) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 98ca51cd6b..90eddfc1fe 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -165,24 +165,6 @@ def add_fan_in_edges( return self - def add_loop( - self, - source: Executor[Any], - target: Executor[Any], - condition: Callable[[Any], bool] | None = None, - ) -> "Self": - """Add a loop edge from a source executor to a target executor. - - Args: - source: The source executor of the loop edge. 
- target: The target executor of the loop edge. - condition: An optional condition function that be applied on only the source -> target edge. - """ - self._edges.append(Edge(source, target, condition)) - self._edges.append(Edge(target, source)) - - return self - def add_chain( self, executors: list[Executor[Any]], diff --git a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py index cb8deeb58f..80ddaa83f6 100644 --- a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py +++ b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py @@ -17,6 +17,13 @@ from typing_extensions import override # pragma: no cover +""" +The following sample demonstrates a basic workflow with two executors +that process a string in sequence. The first executor converts the +input string to uppercase, and the second executor reverses the string. +""" + + @output_message_types(str) class UpperCaseExecutor(Executor[str]): """An executor that converts text to uppercase.""" @@ -26,6 +33,7 @@ async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by converting the input string to uppercase.""" result = data.upper() + # Send the result to the next executor in the workflow. await ctx.send_message(result) @@ -38,15 +46,17 @@ async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by reversing the input string.""" result = data[::-1] - await ctx.send_message(result) + # Send the result with a workflow completion event. await ctx.add_event(WorkflowCompletedEvent(result)) async def main(): """Main function to run the workflow.""" - upper_case_executor = UpperCaseExecutor() - reverse_text_executor = ReverseTextExecutor() + # Step 1: Create the executors. 
+ upper_case_executor = UpperCaseExecutor(id="upper_case_executor") + reverse_text_executor = ReverseTextExecutor(id="reverse_text_executor") + # Step 2: Build the workflow with the defined edges. workflow = ( WorkflowBuilder() .add_edge(upper_case_executor, reverse_text_executor) @@ -54,10 +64,12 @@ async def main(): .build() ) + # Step 3: Run the workflow with an initial message. completion_event = None async for event in workflow.run_stream("hello world"): print(f"Event: {event}") if isinstance(event, WorkflowCompletedEvent): + # The WorkflowCompletedEvent contains the final result. completion_event = event if completion_event: diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py index 7113c2b7df..1fe65ab816 100644 --- a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py @@ -2,6 +2,7 @@ import asyncio import sys +from dataclasses import dataclass from agent_framework.workflow import ( Executor, @@ -16,8 +17,23 @@ else: from typing_extensions import override # pragma: no cover +""" +The following sample demonstrates a basic workflow with two executors +that detect spam messages and respond accordingly. The first executor +checks if the input string is spam, and depending on the result, the +workflow takes different paths. 
+""" -@output_message_types(bool) + +@dataclass +class EmailMessage: + """A data class to hold the email message content.""" + + content: str + is_spam: bool = False + + +@output_message_types(EmailMessage) class DetectSpamExecutor(Executor[str]): """An executor that determines if a message is spam.""" @@ -31,36 +47,38 @@ async def _execute(self, data: str, ctx: WorkflowContext) -> None: """Determine if the input string is spam.""" result = any(keyword in data.lower() for keyword in self._spam_keywords) - await ctx.send_message(result) + await ctx.send_message(EmailMessage(content=data, is_spam=result)) @output_message_types() -class RespondToMessageExecutor(Executor[bool]): +class RespondToMessageExecutor(Executor[EmailMessage]): """An executor that responds to a message based on spam detection.""" @override - async def _execute(self, data: bool, ctx: WorkflowContext) -> None: + async def _execute(self, data: EmailMessage, ctx: WorkflowContext) -> None: """Respond with a message based on whether the input is spam.""" - if data is True: + if data.is_spam: raise RuntimeError("Input is spam, cannot respond.") # Simulate processing delay + print(f"Responding to message: {data.content}") await asyncio.sleep(1) await ctx.add_event(WorkflowCompletedEvent("Message processed successfully.")) @output_message_types() -class RemoveSpamExecutor(Executor[bool]): +class RemoveSpamExecutor(Executor[EmailMessage]): """An executor that removes spam messages.""" @override - async def _execute(self, data: bool, ctx: WorkflowContext) -> None: + async def _execute(self, data: EmailMessage, ctx: WorkflowContext) -> None: """Remove the spam message.""" - if data is False: + if data.is_spam is False: raise RuntimeError("Input is not spam, cannot remove.") # Simulate processing delay + print(f"Removing spam message: {data.content}") await asyncio.sleep(1) await ctx.add_event(WorkflowCompletedEvent("Spam message removed.")) @@ -68,19 +86,32 @@ async def _execute(self, data: bool, ctx: 
WorkflowContext) -> None: async def main(): """Main function to run the workflow.""" + # Keyword based spam detection spam_keywords = ["spam", "advertisement", "offer"] - detect_spam_executor = DetectSpamExecutor(spam_keywords) - respond_to_message_executor = RespondToMessageExecutor() - remove_spam_executor = RemoveSpamExecutor() + # Step 1: Create the executors. + detect_spam_executor = DetectSpamExecutor(spam_keywords, id="detect_spam_executor") + respond_to_message_executor = RespondToMessageExecutor(id="respond_to_message_executor") + remove_spam_executor = RemoveSpamExecutor(id="remove_spam_executor") + + # Step 2: Build the workflow with the defined edges with conditions. workflow = ( WorkflowBuilder() .set_start_executor(detect_spam_executor) - .add_edge(detect_spam_executor, respond_to_message_executor, condition=lambda x: x is False) - .add_edge(detect_spam_executor, remove_spam_executor, condition=lambda x: x is True) + .add_edge( + detect_spam_executor, + respond_to_message_executor, + condition=lambda x: x.is_spam is False, + ) + .add_edge( + detect_spam_executor, + remove_spam_executor, + condition=lambda x: x.is_spam is True, + ) .build() ) + # Step 3: Run the workflow with an input message. async for event in workflow.run_stream("This is a spam."): print(f"Event: {event}") diff --git a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index d2d66dec5e..ead6294c37 100644 --- a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -18,6 +18,12 @@ else: from typing_extensions import override # pragma: no cover +""" +The following sample demonstrates a basic workflow with two executors +where one executor guesses a number and the other executor judges the +guess iteratively. 
+""" + class NumberSignal(Enum): """Enum to represent number signals for the workflow.""" @@ -91,16 +97,21 @@ async def _execute(self, data: int, ctx: WorkflowContext) -> None: async def main(): """Main function to run the workflow.""" + # Step 1: Create the executors. guess_number_executor = GuessNumberExecutor((1, 100)) judge_executor = JudgeExecutor(30) + # Step 2: Build the workflow with the defined edges. + # This time we are creating a loop in the workflow. workflow = ( WorkflowBuilder() - .add_loop(guess_number_executor, judge_executor) + .add_edge(guess_number_executor, judge_executor) + .add_edge(judge_executor, guess_number_executor) .set_start_executor(guess_number_executor) .build() ) + # Step 3: Run the workflow and print the events. iterations = 0 async for event in workflow.run_stream(NumberSignal.INIT): if isinstance(event, ExecutorCompleteEvent) and event.executor_id == guess_number_executor.id: diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat.py b/python/samples/getting_started/workflow/step_04_simple_group_chat.py index 7d58a37117..d5816dab22 100644 --- a/python/samples/getting_started/workflow/step_04_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat.py @@ -19,6 +19,11 @@ else: from typing_extensions import override # pragma: no cover +""" +The following sample demonstrates a basic workflow that simulates +a round-robin group chat. +""" + @dataclass class GroupChatMessage: @@ -93,6 +98,8 @@ async def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> async def main(): + """Main function to run the group chat workflow.""" + # Step 1: Create the executors. 
executor_a = FakeAgentExecutor(id="executor_a") executor_b = FakeAgentExecutor(id="executor_b") executor_c = FakeAgentExecutor(id="executor_c") @@ -108,15 +115,21 @@ async def main(): # GroupChatManager -> executor_b -> GroupChatManager # GroupChatManager -> executor_c -> GroupChatManager + # Step 2: Build the workflow with the defined edges. + # This time we are creating edges and loops with conditions. workflow = ( WorkflowBuilder() .set_start_executor(group_chat_manager) - .add_loop(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) - .add_loop(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) - .add_loop(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) + .add_edge(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) + .add_edge(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) + .add_edge(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) + .add_edge(executor_a, group_chat_manager) + .add_edge(executor_b, group_chat_manager) + .add_edge(executor_c, group_chat_manager) .build() ) + # Step 3: Run the workflow with an initial message. completion_event = None async for event in workflow.run_stream([ChatMessage(ChatRole.USER, text="Start group chat")]): if isinstance(event, AgentRunEvent): diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index e414cb79cf..b05d6fae20 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -20,6 +20,11 @@ else: from typing_extensions import override # pragma: no cover +""" +The following sample demonstrates a basic workflow that simulates +a round-robin group chat with a Human-in-the-Loop (HIL) executor. 
+""" + @dataclass class GroupChatMessage: @@ -133,6 +138,8 @@ async def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> async def main(): + """Main function to run the group chat workflow.""" + # Step 1: Create the executors. executor_a = FakeAgentExecutor(id="executor_a") executor_b = FakeAgentExecutor(id="executor_b") executor_c = FakeAgentExecutor(id="executor_c") @@ -149,16 +156,24 @@ async def main(): # CriticGroupChatManagerWithHIL -> executor_b <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor # CriticGroupChatManagerWithHIL -> executor_c <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor + # Step 2: Build the workflow with the defined edges. workflow = ( WorkflowBuilder() .set_start_executor(group_chat_manager) - .add_loop(group_chat_manager, hil_executor) - .add_loop(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) - .add_loop(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) - .add_loop(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) + .add_edge(group_chat_manager, hil_executor) + .add_edge(hil_executor, group_chat_manager) + .add_edge(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) + .add_edge(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) + .add_edge(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) + .add_edge(executor_a, group_chat_manager) + .add_edge(executor_b, group_chat_manager) + .add_edge(executor_c, group_chat_manager) .build() ) + # Step 3: Run the workflow with an initial message. + # Here we are capturing the human-in-the-loop event and allowing the user to provide input. + # Once the user provides input, we will provide it back to the workflow to continue the execution. 
completion_event: WorkflowCompletedEvent | None = None human_in_the_loop_event: HumanInTheLoopEvent | None = None user_input = "Start group chat" From 00ac111e8c7f0ef7449e122f0392cd8a1eecaa79 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Wed, 30 Jul 2025 17:23:51 -0700 Subject: [PATCH 08/18] Add map reduce sample, remove Activation conditions --- .gitignore | 16 + .../agent_framework_workflow/_workflow.py | 34 +- python/pyproject.toml | 1 + .../workflow/resources/long_text.txt | 199 +++++++++++ .../workflow/step_06_map_reduce.py | 317 ++++++++++++++++++ python/uv.lock | 13 +- 6 files changed, 558 insertions(+), 22 deletions(-) create mode 100644 python/samples/getting_started/workflow/resources/long_text.txt create mode 100644 python/samples/getting_started/workflow/step_06_map_reduce.py diff --git a/.gitignore b/.gitignore index 372dbbeb94..0846cdd699 100644 --- a/.gitignore +++ b/.gitignore @@ -179,3 +179,19 @@ cython_debug/ .vs/ **/.user/** + +# Temporary files +*.~tmp +*.~bak +*.~swp +*.~swo + +# Temporary directories +*tmp/ +*temp/ +*.tmp/ +*.temp/ +tmp*/ +temp*/ +.tmp/ +.temp/ diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 90eddfc1fe..8c1f81fb47 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -1,8 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
import sys -from collections.abc import AsyncIterable, Callable -from enum import Enum +from collections.abc import AsyncIterable, Callable, Sequence from typing import Any from ._edge import Edge @@ -90,13 +89,6 @@ def _get_executor_by_id(self, executor_id: str) -> Executor[Any]: return self._executors[executor_id] -class Activation(Enum): - """Enum to represent the activation condition for edges in a fan in.""" - - WhenAll = "when_all" - WhenAny = "when_any" - - class WorkflowBuilder: """A builder class for constructing workflows. @@ -127,7 +119,7 @@ def add_edge( self._edges.append(Edge(source, target, condition)) return self - def add_fan_out_edges(self, source: Executor[Any], targets: list[Executor[Any]]) -> "Self": + def add_fan_out_edges(self, source: Executor[Any], targets: Sequence[Executor[Any]]) -> "Self": """Add multiple edges to the workflow. Args: @@ -140,26 +132,26 @@ def add_fan_out_edges(self, source: Executor[Any], targets: list[Executor[Any]]) def add_fan_in_edges( self, - sources: list[Executor[Any]], + sources: Sequence[Executor[Any]], target: Executor[Any], - activation: Activation = Activation.WhenAll, ) -> "Self": """Add multiple edges from sources to a single target executor. + The edges will be grouped together for synchronized processing, meaning + the target executor will only be executed once all source executors have completed. + Args: sources: A list of source executors for the edges. target: The target executor for the edges. - activation: The activation condition for the edges, either WhenAll or WhenAny. """ edges = [Edge(source, target) for source in sources] - if activation == Activation.WhenAll: - # Set the edge groups for the edges to ensure they are processed together. 
- for i, edge in enumerate(edges): - group_ids: list[str] = [] - group_ids.extend([e.id for e in edges[0:i]]) - group_ids.extend([e.id for e in edges[i + 1 :]]) - edge.set_edge_group(group_ids) + # Set the edge groups for the edges to ensure they are processed together. + for i, edge in enumerate(edges): + group_ids: list[str] = [] + group_ids.extend([e.id for e in edges[0:i]]) + group_ids.extend([e.id for e in edges[i + 1 :]]) + edge.set_edge_group(group_ids) self._edges.extend(edges) @@ -167,7 +159,7 @@ def add_fan_in_edges( def add_chain( self, - executors: list[Executor[Any]], + executors: Sequence[Executor[Any]], ) -> "Self": """Add a chain of executors to the workflow. diff --git a/python/pyproject.toml b/python/pyproject.toml index 3860577006..d929fb26eb 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -44,6 +44,7 @@ dev = [ "diskcache", "redis", "sphinx-autobuild", + "aiofiles>=24.1.0", ] [tool.uv] diff --git a/python/samples/getting_started/workflow/resources/long_text.txt b/python/samples/getting_started/workflow/resources/long_text.txt new file mode 100644 index 0000000000..ffba0e7d1a --- /dev/null +++ b/python/samples/getting_started/workflow/resources/long_text.txt @@ -0,0 +1,199 @@ +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. 
Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. 
In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. + +Lorem ipsum dolor sit amet consectetur adipiscing elit. Quisque faucibus ex sapien vitae pellentesque sem placerat. In id cursus mi pretium tellus duis convallis. Tempus leo eu aenean sed diam urna tempor. Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Iaculis massa nisl malesuada lacinia integer nunc posuere. Ut hendrerit semper vel class aptent taciti sociosqu. Ad litora torquent per conubia nostra inceptos himenaeos. \ No newline at end of file diff --git a/python/samples/getting_started/workflow/step_06_map_reduce.py b/python/samples/getting_started/workflow/step_06_map_reduce.py new file mode 100644 index 0000000000..df433340d8 --- /dev/null +++ b/python/samples/getting_started/workflow/step_06_map_reduce.py @@ -0,0 +1,317 @@ +# Copyright (c) Microsoft. 
All rights reserved. + +import ast +import asyncio +import os +import sys +from collections import defaultdict +from dataclasses import dataclass + +import aiofiles +from agent_framework.workflow import ( + Executor, + WorkflowBuilder, + WorkflowCompletedEvent, + WorkflowContext, + output_message_types, +) + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + + +""" +The following sample demonstrates a basic map reduce workflow that +processes a large text file by splitting it into smaller chunks, +mapping each word to a count, shuffling the results, and reducing them +to a final count per word. + +Intermediate results are stored in a temporary directory, and the +final results are written to a file in the same directory. +""" + +# Define the temporary directory for storing intermediate results +DIR = os.path.dirname(__file__) +TEMP_DIR = os.path.join(DIR, "tmp") +# Ensure the temporary directory exists +os.makedirs(TEMP_DIR, exist_ok=True) + +# Define a key for the shared state to store the data to be processed +SHARED_STATE_DATA_KEY = "data_to_be_processed" + + +@dataclass +class SplitCompleted: + """A data class to hold the completed state of the SplitExecutor.""" + + map_executor_id: str + + +@output_message_types(list[str]) +class SplitDataExecutor(Executor[str]): + """An executor that splits data into smaller chunks based on the number of nodes available.""" + + def __init__(self, map_executor_ids: list[str], id: str | None = None): + """Initialize the executor with the number of nodes.""" + super().__init__(id) + self._map_executor_ids = map_executor_ids + + @override + async def _execute(self, data: str, ctx: WorkflowContext) -> None: + """Execute the task by splitting the data into chunks. + + Args: + data: A string containing the text to be processed. + ctx: The execution context containing the shared state and other information. 
+ """ + # Process data into a list of words and remove empty lines/words. + word_list = self._preprocess(data) + + # Store the data to be processed state for later use. + await ctx.set_shared_state(SHARED_STATE_DATA_KEY, word_list) + + # Split the word_list into chunks that are represented by the start and end indices. + # The start and end indices tuples will be stored in the shared state. + map_executor_count = len(self._map_executor_ids) + chunk_size = len(word_list) // map_executor_count # Assuming map_executor_count is not 0. + + async def _process_chunk(i: int) -> None: + """Process each chunk and send a message to the executor.""" + start_index = i * chunk_size + end_index = start_index + chunk_size if i < map_executor_count - 1 else len(word_list) + + # The start and end indices are stored in the shared state for the MapExecutor. + # This allows the MapExecutor to know which part of the data it should process. + await ctx.set_shared_state(self._map_executor_ids[i], (start_index, end_index)) + await ctx.send_message(SplitCompleted(self._map_executor_ids[i])) + + tasks = [asyncio.create_task(_process_chunk(i)) for i in range(map_executor_count)] + await asyncio.gather(*tasks) + + def _preprocess(self, data: str) -> list[str]: + """Preprocess the input data and return a list of words. + + Args: + data: The input data to be processed. + + Returns: + A list of words extracted from the input data. 
+ """ + line_list = [line.strip() for line in data.splitlines() if line.strip()] + return [word for line in line_list for word in line.split() if word] + + +@dataclass +class MapCompleted: + """A data class to hold the completed state of the MapExecutor.""" + + file_path: str + + +@output_message_types(MapCompleted) +class MapExecutor(Executor[SplitCompleted]): + """An executor that applies a function to each item in the data and save the result to a file.""" + + @override + async def _execute(self, data: SplitCompleted, ctx: WorkflowContext) -> None: + """Execute the task by applying a function to each item and same result to a file. + + Args: + data: An instance of SplitCompleted signaling the map step can be started. + ctx: The execution context containing the shared state and other information. + """ + # Retrieve the data to be processed from the shared state.# Define a key for the shared state to store the data to be processed + data_to_be_processed: list[str] = await ctx.get_shared_state(SHARED_STATE_DATA_KEY) + chunk_start, chunk_end = await ctx.get_shared_state(self.id) + + results = [(item, 1) for item in data_to_be_processed[chunk_start:chunk_end]] + + file_path = os.path.join(TEMP_DIR, f"map_results_{self.id}.txt") + async with aiofiles.open(file_path, "w") as f: + await f.writelines([f"{item}: {count}\n" for item, count in results]) + + await ctx.send_message(MapCompleted(file_path)) + + +@dataclass +class ShuffleCompleted: + """A data class to hold the completed state of the ShuffleExecutor.""" + + file_path: str + reducer_id: str + + +@output_message_types(ShuffleCompleted) +class ShuffleExecutor(Executor[list[MapCompleted]]): + """An executor that redistributes results from the map step to the reduce step.""" + + def __init__(self, reducer_ids: list[str], id: str | None = None): + """Initialize the executor with the number of nodes.""" + super().__init__(id) + self._reducer_ids = reducer_ids + + @override + async def _execute(self, data: 
list[MapCompleted], ctx: WorkflowContext) -> None: + """Execute the task by aggregating the results. + + Args: + data: A list of MapCompleted instances containing the file paths of the map results. + ctx: The execution context containing the shared state and other information. + """ + chunks = await self._preprocess(data) + + async def _process_chunk(chunk: list[tuple[str, list[int]]], index: int) -> None: + """Process each chunk and save it to a file.""" + file_path = os.path.join(TEMP_DIR, f"shuffle_results_{index}.txt") + async with aiofiles.open(file_path, "w") as f: + await f.writelines([f"{key}: {value}\n" for key, value in chunk]) + await ctx.send_message(ShuffleCompleted(file_path, self._reducer_ids[index])) + + tasks = [asyncio.create_task(_process_chunk(chunk, i)) for i, chunk in enumerate(chunks)] + await asyncio.gather(*tasks) + + async def _preprocess(self, data: list[MapCompleted]) -> list[list[tuple[str, list[int]]]]: + """Preprocess the input data and return a list of data to be processed by the reduce executors. + + Args: + data: A list of MapCompleted instances containing the file paths of the map results. + + Returns: + A list of lists, where each inner list contains tuples of (key, value) pairs to be processed + by the reduce executors. 
+ """ + map_results: list[tuple[str, int]] = [] + for result in data: + async with aiofiles.open(result.file_path, "r") as f: + map_results.extend([ + (line.strip().split(": ")[0], int(line.strip().split(": ")[1])) for line in await f.readlines() + ]) + + # Group values by the first element + intermediate_results: defaultdict[str, list[int]] = defaultdict(list[int]) + for item in map_results: + key = item[0] + value = item[1] + intermediate_results[key].append(value) + + # Convert defaultdict to a list + aggregated_results = [(key, values) for key, values in intermediate_results.items()] + + # Sort by the first element + aggregated_results.sort(key=lambda x: x[0]) + + # Split the intermediate results into chunks for the reduce executors + reduce_executor_count = len(self._reducer_ids) + chunk_size = len(aggregated_results) // reduce_executor_count + remaining = len(aggregated_results) % reduce_executor_count + + chunks = [ + aggregated_results[i : i + chunk_size] for i in range(0, len(aggregated_results) - remaining, chunk_size) + ] + # Append the remaining items to the last chunk + if remaining > 0: + chunks[-1].extend(aggregated_results[-remaining:]) + + return chunks + + +@dataclass +class ReduceCompleted: + """A data class to hold the completed state of the ReduceExecutor.""" + + file_path: str + + +@output_message_types(ReduceCompleted) +class ReduceExecutor(Executor[ShuffleCompleted]): + """An executor that reduces the results from the ShuffleExecutor.""" + + @override + async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext) -> None: + """Execute the task by reducing the results. + + Args: + data: An instance of ShuffleCompleted containing the file path of the shuffle results. + ctx: The execution context containing the shared state and other information. + """ + if data.reducer_id != self.id: + # If the reducer ID does not match, skip processing. 
+ return + + # Read the intermediate results from the file + async with aiofiles.open(data.file_path, "r") as f: + lines = await f.readlines() + + # Aggregate the results + reduced_results: dict[str, int] = defaultdict(int) + for line in lines: + key, value = line.split(": ") + reduced_results[key] = sum(ast.literal_eval(value)) + + # Write the reduced results to a file + file_path = os.path.join(TEMP_DIR, f"reduced_results_{self.id}.txt") + async with aiofiles.open(file_path, "w") as f: + await f.writelines([f"{key}: {value}\n" for key, value in reduced_results.items()]) + + await ctx.send_message(ReduceCompleted(file_path)) + + +class CompletionExecutor(Executor[list[ReduceCompleted]]): + """An executor that completes the workflow by aggregating the results from the ReduceExecutors.""" + + @override + async def _execute(self, data: list[ReduceCompleted], ctx: WorkflowContext) -> None: + """Execute the task by aggregating the results. + + Args: + data: A list of ReduceCompleted instances containing the file paths of the reduced results. + ctx: The execution context containing the shared state and other information. + """ + await ctx.add_event(WorkflowCompletedEvent(data=[result.file_path for result in data])) + + +async def main(): + """Main function to run the workflow.""" + # Step 1: Create the executors. + map_executors = [MapExecutor(id=f"map_executor_{i}") for i in range(3)] + split_data_executor = SplitDataExecutor( + [map_executor.id for map_executor in map_executors], + id="split_data_executor", + ) + reduce_executors = [ReduceExecutor(id=f"reduce_executor_{i}") for i in range(4)] + shuffle_executor = ShuffleExecutor( + [reduce_executor.id for reduce_executor in reduce_executors], + id="shuffle_executor", + ) + completion_executor = CompletionExecutor(id="completion_executor") + + # Step 2: Build the workflow. 
+ workflow = ( + WorkflowBuilder() + .set_start_executor(split_data_executor) + .add_fan_out_edges(split_data_executor, map_executors) + .add_fan_in_edges(map_executors, shuffle_executor) + .add_fan_out_edges(shuffle_executor, reduce_executors) + .add_fan_in_edges(reduce_executors, completion_executor) + .build() + ) + + # Step 3: Open the text file and read its content. + async with aiofiles.open(os.path.join(DIR, "resources", "long_text.txt"), "r") as f: + raw_text = await f.read() + + # Step 4: Run the workflow with the raw text as input. + completion_event = None + async for event in workflow.run_stream(raw_text): + print(f"Event: {event}") + if isinstance(event, WorkflowCompletedEvent): + completion_event = event + + if completion_event: + print(f"Completion Event: {completion_event}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/uv.lock b/python/uv.lock index 809576fe9a..a2f256960f 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -121,6 +121,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "aiofiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "autodoc-pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "diskcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "markdown-it-py", extra = ["linkify"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -163,6 +164,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "aiofiles", specifier = ">=24.1.0" }, { name = "autodoc-pydantic", specifier = "~=2.2" }, { name = "diskcache" }, { name = "markdown-it-py", extras = ["linkify"] }, @@ -205,6 +207,15 @@ dependencies = [ [package.metadata] requires-dist = [{ name = "agent-framework", editable = "packages/main" }] +[[package]] +name = "aiofiles" +version = "24.1.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -880,7 +891,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ From b1efe9d473688b7596d2d816ea42c64866f356e7 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Thu, 31 Jul 2025 17:42:30 -0700 Subject: [PATCH 09/18] Add AgentExecutor and allow multiple handlers per executor --- .../main/agent_framework/workflow/__init__.py | 5 +- .../agent_framework/workflow/__init__.pyi | 10 +- .../agent_framework_workflow/__init__.py | 7 +- .../agent_framework_workflow/_edge.py | 39 ++- 
.../agent_framework_workflow/_events.py | 30 +- .../agent_framework_workflow/_executor.py | 258 +++++++++++++++--- .../agent_framework_workflow/_runner.py | 20 +- .../_runner_context.py | 26 +- .../agent_framework_workflow/_workflow.py | 29 +- .../_workflow_context.py | 30 +- .../step_01_simple_workflow_sequential.py | 33 +-- .../step_02_simple_workflow_condition.py | 69 +++-- .../workflow/step_03_simple_workflow_loop.py | 32 +-- .../workflow/step_04_simple_group_chat.py | 151 +++++----- .../step_05_simple_group_chat_with_hil.py | 150 +++++----- .../workflow/step_06_map_reduce.py | 67 +++-- 16 files changed, 593 insertions(+), 363 deletions(-) diff --git a/python/packages/main/agent_framework/workflow/__init__.py b/python/packages/main/agent_framework/workflow/__init__.py index b726a2f0b2..44a25c155f 100644 --- a/python/packages/main/agent_framework/workflow/__init__.py +++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -11,7 +11,6 @@ "__version__", "events", "WorkflowBuilder", - "output_message_types", "ExecutorCompleteEvent", "ExecutorEvent", "ExecutorInvokeEvent", @@ -21,6 +20,10 @@ "WorkflowStartedEvent", "AgentRunEvent", "AgentRunStreamingEvent", + "message_handler", + "AgentExecutor", + "AgentExecutorRequest", + "AgentExecutorResponse", ] diff --git a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi index a30b0c9bcb..58ba1b8f75 100644 --- a/python/packages/main/agent_framework/workflow/__init__.pyi +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -1,6 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
from agent_framework_workflow import ( + AgentExecutor, + AgentExecutorRequest, + AgentExecutorResponse, AgentRunEvent, AgentRunStreamingEvent, Executor, @@ -14,10 +17,13 @@ from agent_framework_workflow import ( WorkflowEvent, WorkflowStartedEvent, __version__, - output_message_types, + message_handler, ) __all__ = [ + "AgentExecutor", + "AgentExecutorRequest", + "AgentExecutorResponse", "AgentRunEvent", "AgentRunStreamingEvent", "Executor", @@ -31,5 +37,5 @@ __all__ = [ "WorkflowEvent", "WorkflowStartedEvent", "__version__", - "output_message_types", + "message_handler", ] diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index 4335a6f13a..d1c2c0b6f0 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -13,7 +13,7 @@ WorkflowEvent, WorkflowStartedEvent, ) -from ._executor import Executor, output_message_types +from ._executor import AgentExecutor, AgentExecutorRequest, AgentExecutorResponse, Executor, message_handler from ._workflow import WorkflowBuilder from ._workflow_context import WorkflowContext @@ -24,6 +24,9 @@ __all__ = [ + "AgentExecutor", + "AgentExecutorRequest", + "AgentExecutorResponse", "AgentRunEvent", "AgentRunStreamingEvent", "Executor", @@ -37,5 +40,5 @@ "WorkflowEvent", "WorkflowStartedEvent", "__version__", - "output_message_types", + "message_handler", ] diff --git a/python/packages/workflow/agent_framework_workflow/_edge.py b/python/packages/workflow/agent_framework_workflow/_edge.py index 861fd0f9bd..a31eec29fa 100644 --- a/python/packages/workflow/agent_framework_workflow/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/_edge.py @@ -5,7 +5,7 @@ from typing import Any, ClassVar from ._executor import Executor -from ._runner_context import RunnerContext +from ._runner_context import Message, RunnerContext from ._shared_state import SharedState 
from ._workflow_context import WorkflowContext @@ -17,8 +17,8 @@ class Edge: def __init__( self, - source: Executor[Any], - target: Executor[Any], + source: Executor, + target: Executor, condition: Callable[[Any], bool] | None = None, ): """Initialize the edge with a source and target node.""" @@ -55,40 +55,49 @@ def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: raise ValueError(f"Invalid edge ID format: {edge_id}") return ids[0], ids[1] - async def send_message(self, data: Any, shared_state: SharedState, ctx: RunnerContext) -> None: + def can_handle(self, message_data: Any) -> bool: + """Check if the edge can handle the given data.""" + if not self._edge_group_ids: + return self.target.can_handle(message_data) + + # If the edge is part of an edge group, the target should expect a list of the data type. + return self.target.can_handle([message_data]) + + async def send_message(self, message: Message, shared_state: SharedState, ctx: RunnerContext) -> None: """Send a message along this edge.""" - if not self._edge_group_ids and self._should_route(data): - await self.target.execute(data, WorkflowContext(self.target.id, shared_state, ctx)) + if not self.can_handle(message.data): + raise RuntimeError(f"Edge {self.id} cannot handle data of type {type(message.data)}.") + + if not self._edge_group_ids and self._should_route(message.data): + await self.target.execute(message.data, WorkflowContext(self.target.id, shared_state, ctx)) elif self._edge_group_ids: # Logic: # 1. If not all edges in the edge group have data in the shared state, # add the data to the shared state. # 2. If all edges in the edge group have data in the shared state, # copy the data to a list and send it to the target executor. 
- messages = [] + message_list: list[Message] = [] async with shared_state.hold() as held_shared_state: has_data = await asyncio.gather( *(held_shared_state.has_within_hold(edge_id) for edge_id in self._edge_group_ids) ) if not all(has_data): - await held_shared_state.set_within_hold(self.id, data) + await held_shared_state.set_within_hold(self.id, message) else: - messages = [ + message_list = [ await held_shared_state.get_within_hold(edge_id) for edge_id in self._edge_group_ids - ] + [data] + ] + [message] # Remove the data from the shared state after retrieving it await asyncio.gather( *(held_shared_state.delete_within_hold(edge_id) for edge_id in self._edge_group_ids) ) - if messages: - await self.target.execute(messages, WorkflowContext(self.target.id, shared_state, ctx)) + if message_list: + data_list = [msg.data for msg in message_list] + await self.target.execute(data_list, WorkflowContext(self.target.id, shared_state, ctx)) def _should_route(self, data: Any) -> bool: """Determine if message should be routed through this edge.""" - if not self.target.can_handle(data): - return False - if self._condition is None: return True diff --git a/python/packages/workflow/agent_framework_workflow/_events.py b/python/packages/workflow/agent_framework_workflow/_events.py index eb5c1f18f4..977f8ad295 100644 --- a/python/packages/workflow/agent_framework_workflow/_events.py +++ b/python/packages/workflow/agent_framework_workflow/_events.py @@ -2,7 +2,7 @@ from typing import Any -from agent_framework import ChatResponse, ChatResponseUpdate +from agent_framework import AgentRunResponse, AgentRunResponseUpdate class WorkflowEvent: @@ -29,6 +29,30 @@ class WorkflowCompletedEvent(WorkflowEvent): ... 
+class WorkflowWarningEvent(WorkflowEvent): + """Event triggered when a warning occurs in the workflow.""" + + def __init__(self, data: str): + """Initialize the workflow warning event with optional data and warning message.""" + super().__init__(data) + + def __repr__(self): + """Return a string representation of the workflow warning event.""" + return f"{self.__class__.__name__}(message={self.data})" + + +class WorkflowErrorEvent(WorkflowEvent): + """Event triggered when an error occurs in the workflow.""" + + def __init__(self, data: Exception): + """Initialize the workflow error event with optional data and error message.""" + super().__init__(data) + + def __repr__(self): + """Return a string representation of the workflow error event.""" + return f"{self.__class__.__name__}(exception={self.data})" + + class ExecutorEvent(WorkflowEvent): """Base class for executor events.""" @@ -74,7 +98,7 @@ def __repr__(self): class AgentRunStreamingEvent(ExecutorEvent): """Event triggered when an agent is streaming messages.""" - def __init__(self, executor_id: str, data: ChatResponseUpdate | None = None): + def __init__(self, executor_id: str, data: AgentRunResponseUpdate | None = None): """Initialize the agent streaming event.""" super().__init__(executor_id, data) @@ -86,7 +110,7 @@ def __repr__(self): class AgentRunEvent(ExecutorEvent): """Event triggered when an agent run is completed.""" - def __init__(self, executor_id: str, data: ChatResponse | None = None): + def __init__(self, executor_id: str, data: AgentRunResponse | None = None): """Initialize the agent run event.""" super().__init__(executor_id, data) diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py index 02496b22aa..d30df7c25e 100644 --- a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -1,78 +1,254 @@ # Copyright (c) Microsoft. 
All rights reserved. +import functools +import inspect import uuid -from abc import ABC, abstractmethod -from typing import Any, Generic, TypeVar, get_args +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any, TypeVar, overload -from ._events import ExecutorCompleteEvent, ExecutorInvokeEvent +from agent_framework import AgentRunResponse, AgentRunResponseUpdate, AgentThread, AIAgent, ChatMessage + +from ._events import AgentRunEvent, AgentRunStreamingEvent, ExecutorCompleteEvent, ExecutorInvokeEvent from ._typing_utils import is_instance_of from ._workflow_context import WorkflowContext -T = TypeVar("T") - +# region: Executor -class Executor(Generic[T], ABC): - """An abstract base class for executing tasks in a workflow. - Args: - T: The type of the task to be executed. - """ +class Executor: + """An executor is a component that processes messages in a workflow.""" def __init__(self, id: str | None = None): """Initialize the executor with a unique identifier.""" self._id = id or str(uuid.uuid4()) - args = get_args(self.__orig_bases__[0]) # type: ignore - if len(args) != 1: - raise ValueError(f"Executor must be parameterized with a single type, got {args}") - self._input_type = args[0] + self._message_handlers: dict[type, Callable[[Any, WorkflowContext], Any]] = {} + self._discover_handlers() + + if not self._message_handlers: + raise ValueError( + f"Executor {self.__class__.__name__} has no message handlers defined. " + "Please define at least one message handler using the @message_handler decorator." + ) - @abstractmethod - async def _execute(self, data: T, ctx: WorkflowContext) -> None: - """Execute the task using the registered handlers. + async def execute( + self, + message: Any, + context: WorkflowContext, + ) -> None: + """Execute the executor with a given message and context. Args: - data: The data of type T to be processed. - ctx: The execution context containing additional information. 
+ message: The message to be processed by the executor. + context: The workflow context in which the executor operates. + + Returns: + An awaitable that resolves to the result of the execution. """ - raise NotImplementedError("Subclasses must implement this method.") + handler: Callable[[Any, WorkflowContext], Any] | None = None + for message_type in self._message_handlers: + if is_instance_of(message, message_type): + handler = self._message_handlers[message_type] + break - async def execute(self, data: T, ctx: WorkflowContext) -> None: - """Execute a task with the given data and context. + if handler is None: + raise RuntimeError(f"Executor {self.__class__.__name__} cannot handle message of type {type(message)}.") - Args: - data: The data of type T to be processed. - ctx: The execution context containing additional information. - """ - await ctx.add_event(ExecutorInvokeEvent(executor_id=self._id)) - await self._execute(data, ctx) - await ctx.add_event(ExecutorCompleteEvent(executor_id=self._id)) + await context.add_event(ExecutorInvokeEvent(self.id)) + await handler(message, context) + await context.add_event(ExecutorCompleteEvent(self.id)) @property def id(self) -> str: """Get the unique identifier of the executor.""" return self._id - def can_handle(self, data: Any) -> bool: - """Determine if the executor can handle the given data. 
+ def _discover_handlers(self) -> None: + """Discover message handlers in the executor class.""" + for attr_name in dir(self): + attr = getattr(self, attr_name) + if callable(attr) and hasattr(attr, "_handler_spec"): + handler_spec = attr._handler_spec # type: ignore + if self._message_handlers.get(handler_spec["message_type"]) is not None: + raise ValueError( + f"Duplicate message handler for type {handler_spec['message_type']} " + f"in {self.__class__.__name__}" + ) + self._message_handlers[handler_spec["message_type"]] = attr + + def can_handle(self, message: Any) -> bool: + """Check if the executor can handle a given message type. Args: - data: The data to check. + message: The message to check. Returns: - bool: True if the executor can handle the data, False otherwise. + True if the executor can handle the message type, False otherwise. """ - return is_instance_of(data, self._input_type) + return any(is_instance_of(message, message_type) for message_type in self._message_handlers) + + +# endregion: Executor + +# region: Message Handler Decorator + + +ExecutorT = TypeVar("ExecutorT", bound="Executor") + + +@overload +def message_handler( + func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], +) -> Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]: ... + + +@overload +def message_handler( + func: None = None, + *, + output_types: list[type] | None = None, +) -> Callable[ + [Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]], + Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], +]: ... + + +def message_handler( + func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]] | None = None, + *, + output_types: list[type] | None = None, +) -> ( + Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]] + | Callable[ + [Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]], + Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], + ] +): + """Decorator to register a message handler for an executor. 
+ + Args: + func: The function to decorate. Can be None when using with parameters. + output_types: Optional list of message types this handler can emit. + Returns: + The decorated function with handler metadata. -TExecutor = TypeVar("TExecutor", bound=Executor[Any]) + Example: + @message_handler + async def handle_string(self, message: str, ctx: WorkflowContext) -> None: + ... + @message_handler(output_types=[str, int]) + async def handle_data(self, message: dict, ctx: WorkflowContext) -> None: + ... + """ + + def decorator( + func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], + ) -> Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]: + # Extract the message type from a message handler function. + sig = inspect.signature(func) + params = list(sig.parameters.values()) + + if len(params) != 3: # self, message, ctx + raise ValueError(f"Message handler must have exactly 3 parameters, got {len(params)}") + + message_type = params[1].annotation + if message_type is inspect.Parameter.empty: + raise ValueError("Message handler's second parameter must have a type annotation") + + @functools.wraps(func) + async def wrapper(self: ExecutorT, message: Any, ctx: WorkflowContext) -> Any: + """Wrapper function to call the message handler.""" + return await func(self, message, ctx) + + wrapper._handler_spec = { # type: ignore + "name": func.__name__, + "message_type": message_type, + "output_types": output_types or [], + } + + return wrapper + + if func is None: + return decorator + return decorator(func) + + +# endregion: Message Handler Decorator + +# region: Agent Executor -def output_message_types(*output_types: type): - """Decorator to specify the output types of an executor.""" - def decorator(cls: type[TExecutor]) -> type[TExecutor]: - cls._declare_output_types = output_types # type: ignore - return cls +@dataclass +class AgentExecutorRequest: + """A request to an agent executor. 
- return decorator + Attributes: + messages: A list of chat messages to be processed by the agent. + should_respond: A flag indicating whether the agent should respond to the messages. + If False, the messages will be saved to the executor's cache but not sent to the agent. + """ + + messages: list[ChatMessage] + should_respond: bool = True + + +@dataclass +class AgentExecutorResponse: + """A response from an agent executor. + + Attributes: + executor_id: The ID of the executor that generated the response. + response: The agent run response containing the messages generated by the agent. + """ + + executor_id: str + agent_run_response: AgentRunResponse + + +class AgentExecutor(Executor): + """An executor that wraps an agent for handling messages.""" + + def __init__( + self, + agent: AIAgent, + agent_thread: AgentThread | None = None, + streaming: bool = False, + id: str | None = None, + ): + """Initialize the executor with a unique identifier.""" + super().__init__(id) + self._agent = agent + self._agent_thread = agent_thread or self._agent.get_new_thread() + self._streaming = streaming + self._cache: list[ChatMessage] = [] + + @message_handler(output_types=[AgentExecutorResponse]) + async def run(self, request: AgentExecutorRequest, ctx: WorkflowContext) -> None: + """Run the agent executor with the given request.""" + self._cache.extend(request.messages) + + if request.should_respond: + if self._streaming: + updates: list[AgentRunResponseUpdate] = [] + async for update in self._agent.run_streaming( + self._cache, + thread=self._agent_thread, + ): + updates.append(update) + await ctx.add_event(AgentRunStreamingEvent(self.id, update)) + response = AgentRunResponse.from_agent_run_response_updates(updates) + else: + response = await self._agent.run( + self._cache, + thread=self._agent_thread, + ) + await ctx.add_event(AgentRunEvent(self.id, response)) + + await ctx.send_message(AgentExecutorResponse(self.id, response)) + self._cache.clear() + + +# endregion: Agent 
Executor diff --git a/python/packages/workflow/agent_framework_workflow/_runner.py b/python/packages/workflow/agent_framework_workflow/_runner.py index d8e5b01de7..0ddf7cfa2d 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/_runner.py @@ -4,11 +4,10 @@ import logging from collections import defaultdict from collections.abc import AsyncIterable -from typing import Any from ._edge import Edge -from ._events import WorkflowEvent -from ._runner_context import RunnerContext +from ._events import WorkflowEvent, WorkflowWarningEvent +from ._runner_context import Message, RunnerContext from ._shared_state import SharedState logger = logging.getLogger(__name__) @@ -54,7 +53,7 @@ async def run_until_convergence(self) -> AsyncIterable[WorkflowEvent]: async def _run_iteration(self): """Run a superstep of the workflow execution.""" - async def _deliver_messages(source_executor_id: str, messages: list[Any]) -> None: + async def _deliver_messages(source_executor_id: str, messages: list[Message]) -> None: """Deliver messages to the executors. Outer loop to concurrently deliver messages from all sources to their targets. @@ -62,13 +61,24 @@ async def _deliver_messages(source_executor_id: str, messages: list[Any]) -> Non async def _deliver_messages_inner( edge: Edge, - messages: list[Any], + messages: list[Message], ) -> None: """Deliver messages to a specific target executor. Inner loop to deliver messages to a specific target executor. """ for message in messages: + if message.target_id is not None and message.target_id != edge.target_id: + continue + + if not edge.can_handle(message.data): + warning_msg = ( + f"Edge {edge.id} cannot handle message with data type {type(message.data)}. Skipping." 
+ ) + logger.warning(warning_msg) + await self._ctx.add_event(WorkflowWarningEvent(warning_msg)) + continue + await edge.send_message(message, self._shared_state, self._ctx) associated_edges = self._edge_map.get(source_executor_id, []) diff --git a/python/packages/workflow/agent_framework_workflow/_runner_context.py b/python/packages/workflow/agent_framework_workflow/_runner_context.py index 1def7b1b5e..43152fa592 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner_context.py +++ b/python/packages/workflow/agent_framework_workflow/_runner_context.py @@ -2,18 +2,30 @@ import logging from collections import defaultdict -from typing import Any, Protocol, runtime_checkable +from dataclasses import dataclass +from typing import Any, Protocol, TypeVar, runtime_checkable from ._events import WorkflowEvent logger = logging.getLogger(__name__) +T = TypeVar("T") + + +@dataclass +class Message: + """A class representing a message in the workflow.""" + + data: Any + source_id: str + target_id: str | None = None + @runtime_checkable class RunnerContext(Protocol): """Protocol for the execution context used by the runner.""" - async def send_message(self, source_id: str, message: Any) -> None: + async def send_message(self, message: Message) -> None: """Send a message from the executor to the context. Args: @@ -22,7 +34,7 @@ async def send_message(self, source_id: str, message: Any) -> None: """ ... - async def drain_messages(self) -> dict[str, list[Any]]: + async def drain_messages(self) -> dict[str, list[Message]]: """Drain all messages from the context. 
Returns: @@ -68,14 +80,14 @@ class InProcRunnerContext(RunnerContext): def __init__(self): """Initialize the in-process execution context.""" - self._messages: defaultdict[str, list[Any]] = defaultdict(list) + self._messages: defaultdict[str, list[Message]] = defaultdict(list) self._events: list[WorkflowEvent] = [] - async def send_message(self, source_id: str, message: Any) -> None: + async def send_message(self, message: Message) -> None: """Send a message from the executor to the context.""" - self._messages[source_id].append(message) + self._messages[message.source_id].append(message) - async def drain_messages(self) -> dict[str, list[Any]]: + async def drain_messages(self) -> dict[str, list[Message]]: """Drain all messages from the context.""" messages = dict(self._messages) self._messages.clear() diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 8c1f81fb47..52ec0cd6e9 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -2,7 +2,7 @@ import sys from collections.abc import AsyncIterable, Callable, Sequence -from typing import Any +from typing import Any, Generic, TypeVar from ._edge import Edge from ._events import WorkflowEvent @@ -18,6 +18,9 @@ from typing_extensions import Self # pragma: no cover +TIn = TypeVar("TIn") + + class Workflow: """A class representing a workflow that can be executed. @@ -28,7 +31,7 @@ class Workflow: def __init__( self, edges: list[Edge], - start_executor: Executor[Any] | str, + start_executor: Executor | str, runner_context: RunnerContext, ): """Initialize the workflow with a list of edges. 
@@ -50,7 +53,7 @@ def __init__( async def run_stream( self, message: Any, - executor: Executor[Any] | str | None = None, + executor: Executor | str | None = None, ) -> AsyncIterable[WorkflowEvent]: """Send a message to the starting executor of the workflow. @@ -75,7 +78,7 @@ async def run_stream( async for event in self._runner.run_until_convergence(): yield event - def _get_executor_by_id(self, executor_id: str) -> Executor[Any]: + def _get_executor_by_id(self, executor_id: str) -> Executor: """Get an executor by its ID. Args: @@ -89,7 +92,7 @@ def _get_executor_by_id(self, executor_id: str) -> Executor[Any]: return self._executors[executor_id] -class WorkflowBuilder: +class WorkflowBuilder(Generic[TIn]): """A builder class for constructing workflows. This class provides methods to add edges and set the starting executor for the workflow. @@ -98,13 +101,13 @@ class WorkflowBuilder: def __init__(self): """Initialize the WorkflowBuilder with an empty list of edges and no starting executor.""" self._edges: list[Edge] = [] - self._start_executor: Executor[Any] | str | None = None + self._start_executor: Executor | str | None = None self._runner_context: RunnerContext | None = None def add_edge( self, - source: Executor[Any], - target: Executor[Any], + source: Executor, + target: Executor, condition: Callable[[Any], bool] | None = None, ) -> "Self": """Add a directed edge between two executors. @@ -119,7 +122,7 @@ def add_edge( self._edges.append(Edge(source, target, condition)) return self - def add_fan_out_edges(self, source: Executor[Any], targets: Sequence[Executor[Any]]) -> "Self": + def add_fan_out_edges(self, source: Executor, targets: Sequence[Executor]) -> "Self": """Add multiple edges to the workflow. 
Args: @@ -132,8 +135,8 @@ def add_fan_out_edges(self, source: Executor[Any], targets: Sequence[Executor[An def add_fan_in_edges( self, - sources: Sequence[Executor[Any]], - target: Executor[Any], + sources: Sequence[Executor], + target: Executor, ) -> "Self": """Add multiple edges from sources to a single target executor. @@ -159,7 +162,7 @@ def add_fan_in_edges( def add_chain( self, - executors: Sequence[Executor[Any]], + executors: Sequence[Executor], ) -> "Self": """Add a chain of executors to the workflow. @@ -170,7 +173,7 @@ def add_chain( self.add_edge(executors[i], executors[i + 1]) return self - def set_start_executor(self, executor: Executor[Any] | str) -> "Self": + def set_start_executor(self, executor: Executor | str) -> "Self": """Set the starting executor for the workflow. Args: diff --git a/python/packages/workflow/agent_framework_workflow/_workflow_context.py b/python/packages/workflow/agent_framework_workflow/_workflow_context.py index 4dfbaaef9d..ff6a3f5faa 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow_context.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow_context.py @@ -3,7 +3,7 @@ from typing import Any from ._events import WorkflowEvent -from ._runner_context import RunnerContext +from ._runner_context import Message, RunnerContext from ._shared_state import SharedState @@ -14,15 +14,33 @@ class WorkflowContext: context and shared state, while preventing direct access to the runtime context. """ - def __init__(self, executor_id: str, shared_state: SharedState, runner_context: RunnerContext): - """Initialize the executor context with the given workflow context.""" - self._runner_context = runner_context + def __init__( + self, + executor_id: str, + shared_state: SharedState, + runner_context: RunnerContext, + ): + """Initialize the executor context with the given workflow context. + + Args: + executor_id: The unique identifier of the executor that this context belongs to. 
+ source_executor_id: The unique identifier of the source executor that generated this context. + shared_state: The shared state for the workflow. + runner_context: The runner context that provides methods to send messages and events. + """ self._executor_id = executor_id + self._runner_context = runner_context self._shared_state = shared_state - async def send_message(self, message: Any) -> None: + async def send_message(self, message: Any, target_id: str | None = None) -> None: """Send a message to the workflow context.""" - await self._runner_context.send_message(self._executor_id, message) + await self._runner_context.send_message( + Message( + data=message, + source_id=self._executor_id, + target_id=target_id, + ) + ) async def add_event(self, event: WorkflowEvent) -> None: """Add an event to the workflow context.""" diff --git a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py index 80ddaa83f6..a28b6bba12 100644 --- a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py +++ b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py @@ -1,21 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio -import sys - -from agent_framework.workflow import ( - Executor, - WorkflowBuilder, - WorkflowCompletedEvent, - WorkflowContext, - output_message_types, -) - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, message_handler """ The following sample demonstrates a basic workflow with two executors @@ -24,27 +11,25 @@ """ -@output_message_types(str) -class UpperCaseExecutor(Executor[str]): +class UpperCaseExecutor(Executor): """An executor that converts text to uppercase.""" - @override - async def _execute(self, data: str, ctx: WorkflowContext) -> None: + @message_handler(output_types=[str]) + async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by converting the input string to uppercase.""" - result = data.upper() + result = text.upper() # Send the result to the next executor in the workflow. await ctx.send_message(result) -@output_message_types(str) -class ReverseTextExecutor(Executor[str]): +class ReverseTextExecutor(Executor): """An executor that reverses text.""" - @override - async def _execute(self, data: str, ctx: WorkflowContext) -> None: + @message_handler + async def reverse_text(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by reversing the input string.""" - result = data[::-1] + result = text[::-1] # Send the result with a workflow completion event. 
await ctx.add_event(WorkflowCompletedEvent(result)) diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py index 1fe65ab816..9dbf053a24 100644 --- a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -import sys from dataclasses import dataclass from agent_framework.workflow import ( @@ -9,14 +8,9 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - output_message_types, + message_handler, ) -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - """ The following sample demonstrates a basic workflow with two executors that detect spam messages and respond accordingly. The first executor @@ -26,15 +20,14 @@ @dataclass -class EmailMessage: +class SpamDetectorResponse: """A data class to hold the email message content.""" - content: str + email: str is_spam: bool = False -@output_message_types(EmailMessage) -class DetectSpamExecutor(Executor[str]): +class SpamDetector(Executor): """An executor that determines if a message is spam.""" def __init__(self, spam_keywords: list[str], id: str | None = None): @@ -42,43 +35,49 @@ def __init__(self, spam_keywords: list[str], id: str | None = None): super().__init__(id=id) self._spam_keywords = spam_keywords - @override - async def _execute(self, data: str, ctx: WorkflowContext) -> None: + @message_handler(output_types=[SpamDetectorResponse]) + async def handle_email(self, email: str, ctx: WorkflowContext) -> None: """Determine if the input string is spam.""" - result = any(keyword in data.lower() for keyword in self._spam_keywords) + result = any(keyword in email.lower() for keyword in self._spam_keywords) - await 
ctx.send_message(EmailMessage(content=data, is_spam=result)) + await ctx.send_message(SpamDetectorResponse(email=email, is_spam=result)) -@output_message_types() -class RespondToMessageExecutor(Executor[EmailMessage]): +class SendResponse(Executor): """An executor that responds to a message based on spam detection.""" - @override - async def _execute(self, data: EmailMessage, ctx: WorkflowContext) -> None: + @message_handler + async def handle_detector_response( + self, + spam_detector_response: SpamDetectorResponse, + ctx: WorkflowContext, + ) -> None: """Respond with a message based on whether the input is spam.""" - if data.is_spam: + if spam_detector_response.is_spam: raise RuntimeError("Input is spam, cannot respond.") # Simulate processing delay - print(f"Responding to message: {data.content}") + print(f"Responding to message: {spam_detector_response.email}") await asyncio.sleep(1) await ctx.add_event(WorkflowCompletedEvent("Message processed successfully.")) -@output_message_types() -class RemoveSpamExecutor(Executor[EmailMessage]): +class RemoveSpam(Executor): """An executor that removes spam messages.""" - @override - async def _execute(self, data: EmailMessage, ctx: WorkflowContext) -> None: + @message_handler + async def handle_detector_response( + self, + spam_detector_response: SpamDetectorResponse, + ctx: WorkflowContext, + ) -> None: """Remove the spam message.""" - if data.is_spam is False: + if spam_detector_response.is_spam is False: raise RuntimeError("Input is not spam, cannot remove.") # Simulate processing delay - print(f"Removing spam message: {data.content}") + print(f"Removing spam message: {spam_detector_response.email}") await asyncio.sleep(1) await ctx.add_event(WorkflowCompletedEvent("Spam message removed.")) @@ -90,22 +89,22 @@ async def main(): spam_keywords = ["spam", "advertisement", "offer"] # Step 1: Create the executors. 
- detect_spam_executor = DetectSpamExecutor(spam_keywords, id="detect_spam_executor") - respond_to_message_executor = RespondToMessageExecutor(id="respond_to_message_executor") - remove_spam_executor = RemoveSpamExecutor(id="remove_spam_executor") + spam_detector = SpamDetector(spam_keywords, id="spam_detector") + send_response = SendResponse(id="send_response") + remove_spam = RemoveSpam(id="remove_spam") # Step 2: Build the workflow with the defined edges with conditions. workflow = ( WorkflowBuilder() - .set_start_executor(detect_spam_executor) + .set_start_executor(spam_detector) .add_edge( - detect_spam_executor, - respond_to_message_executor, + spam_detector, + send_response, condition=lambda x: x.is_spam is False, ) .add_edge( - detect_spam_executor, - remove_spam_executor, + spam_detector, + remove_spam, condition=lambda x: x.is_spam is True, ) .build() diff --git a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index ead6294c37..86abae8a08 100644 --- a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio -import sys from enum import Enum from agent_framework.workflow import ( @@ -10,14 +9,9 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - output_message_types, + message_handler, ) -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - """ The following sample demonstrates a basic workflow with two executors where one executor guesses a number and the other executor judges the @@ -38,8 +32,7 @@ class NumberSignal(Enum): INIT = "init" -@output_message_types(int) -class GuessNumberExecutor(Executor[NumberSignal]): +class GuessNumberExecutor(Executor): """An executor that guesses a number.""" def __init__(self, bound: tuple[int, int], id: str | None = None): @@ -48,16 +41,16 @@ def __init__(self, bound: tuple[int, int], id: str | None = None): self._lower = bound[0] self._upper = bound[1] - @override - async def _execute(self, data: NumberSignal, ctx: WorkflowContext) -> None: + @message_handler(output_types=[int]) + async def guess_number(self, feedback: NumberSignal, ctx: WorkflowContext) -> None: """Execute the task by guessing a number.""" - if data == NumberSignal.INIT: + if feedback == NumberSignal.INIT: self._guess = (self._lower + self._upper) // 2 await ctx.send_message(self._guess) - elif data == NumberSignal.MATCHED: + elif feedback == NumberSignal.MATCHED: # The previous guess was correct. await ctx.add_event(WorkflowCompletedEvent(f"Guessed the number: {self._guess}")) - elif data == NumberSignal.ABOVE: + elif feedback == NumberSignal.ABOVE: # The previous guess was too low. # Update the lower bound to the previous guess. # Generate a new number that is between the new bounds. 
@@ -73,8 +66,7 @@ async def _execute(self, data: NumberSignal, ctx: WorkflowContext) -> None: await ctx.send_message(self._guess) -@output_message_types(NumberSignal) -class JudgeExecutor(Executor[int]): +class JudgeExecutor(Executor): """An executor that judges the guessed number.""" def __init__(self, target: int, id: str | None = None): @@ -82,12 +74,12 @@ def __init__(self, target: int, id: str | None = None): super().__init__(id=id) self._target = target - @override - async def _execute(self, data: int, ctx: WorkflowContext) -> None: + @message_handler(output_types=[NumberSignal]) + async def judge(self, number: int, ctx: WorkflowContext) -> None: """Judge the guessed number.""" - if data == self._target: + if number == self._target: result = NumberSignal.MATCHED - elif data < self._target: + elif number < self._target: result = NumberSignal.ABOVE else: result = NumberSignal.BELOW diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat.py b/python/samples/getting_started/workflow/step_04_simple_group_chat.py index d5816dab22..36453a9d83 100644 --- a/python/samples/getting_started/workflow/step_04_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat.py @@ -1,46 +1,28 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio -import sys -from dataclasses import dataclass -from agent_framework import ChatMessage, ChatResponse, ChatRole +from agent_framework import ChatClientAgent, ChatMessage, ChatRole +from agent_framework.azure import AzureChatClient from agent_framework.workflow import ( + AgentExecutor, + AgentExecutorRequest, + AgentExecutorResponse, AgentRunEvent, Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - output_message_types, + message_handler, ) -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - """ The following sample demonstrates a basic workflow that simulates a round-robin group chat. """ -@dataclass -class GroupChatMessage: - """A data class to hold the messages in a group chat.""" - - messages: list[ChatMessage] - - -@dataclass -class AgentSelectionDecision(GroupChatMessage): - """A data class to hold the decision made by the manager executor.""" - - selection: str - - -@output_message_types(AgentSelectionDecision) -class RoundRobinGroupChatManager(Executor[list[ChatMessage]]): +class RoundRobinGroupChatManager(Executor): """An executor that manages a round-robin group chat.""" def __init__(self, members: list[str], max_round: int, id: str | None = None): @@ -49,23 +31,48 @@ def __init__(self, members: list[str], max_round: int, id: str | None = None): self._members = members self._max_round = max_round self._current_round = 0 - self._chat_history: list[ChatMessage] = [] - @override - async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: + @message_handler(output_types=[AgentExecutorRequest]) + async def start(self, task: str, ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" - self._chat_history.extend(data) + initial_message = ChatMessage(ChatRole.USER, text=task) + + # Send the initial message to the members + await 
asyncio.gather(*[ + ctx.send_message( + AgentExecutorRequest(messages=[initial_message], should_respond=False), + target_id=member_id, + ) + for member_id in self._members + ]) + + # Invoke the first member to start the round-robin chat + await ctx.send_message( + AgentExecutorRequest(messages=[], should_respond=True), + target_id=self._get_next_member(), + ) + @message_handler(output_types=[AgentExecutorRequest]) + async def handle_agent_response(self, response: AgentExecutorResponse, ctx: WorkflowContext) -> None: + """Execute the task by sending messages to the next executor in the round-robin sequence.""" + # Send the response to the other members + await asyncio.gather(*[ + ctx.send_message( + AgentExecutorRequest(messages=response.agent_run_response.messages, should_respond=False), + target_id=member_id, + ) + for member_id in self._members + if member_id != response.executor_id + ]) + + # Check for termination condition if self._should_terminate(): - await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) + await ctx.add_event(WorkflowCompletedEvent(data=response)) return - self._current_round += 1 - selection_decision = AgentSelectionDecision( - messages=self._chat_history, - selection=self._get_next_member(), - ) - await ctx.send_message(selection_decision) + # Request the next member to respond + selection = self._get_next_member() + await ctx.send_message(AgentExecutorRequest(messages=[], should_respond=True), target_id=selection) def _should_terminate(self) -> bool: """Determine if the group chat should terminate based on the current round.""" @@ -73,65 +80,59 @@ def _should_terminate(self) -> bool: def _get_next_member(self) -> str: """Get the next member in the round-robin sequence.""" - return self._members[(self._current_round - 1) % len(self._members)] - - -@output_message_types(list[ChatMessage]) -class FakeAgentExecutor(Executor[AgentSelectionDecision]): - """An executor that simulates a group chat agent A.""" - - @override - async 
def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> None: - """Simulate a response.""" - response = ChatResponse( - messages=[ - ChatMessage( - ChatRole.ASSISTANT, - text=f"{self.id} received request. Current message size: {len(data.messages)}", - author_name=f"{self.id}", - ) - ] - ) + next_member = self._members[self._current_round % len(self._members)] + self._current_round += 1 - await ctx.add_event(AgentRunEvent(self.id, data=response)) - await ctx.send_message(response.messages) + return next_member async def main(): """Main function to run the group chat workflow.""" + # Step 1: Create the executors. - executor_a = FakeAgentExecutor(id="executor_a") - executor_b = FakeAgentExecutor(id="executor_b") - executor_c = FakeAgentExecutor(id="executor_c") + chat_client = AzureChatClient() + writer = AgentExecutor( + ChatClientAgent( + chat_client, + instructions=( + "You are an excellent content writer. You create new content and edit contents based on the feedback." + ), + ), + id="writer", + ) + reviewer = AgentExecutor( + ChatClientAgent( + chat_client, + instructions=( + "You are an excellent content reviewer. You review the content and provide feedback to the writer." + ), + ), + id="reviewer", + ) group_chat_manager = RoundRobinGroupChatManager( - members=[executor_a.id, executor_b.id, executor_c.id], - max_round=3, + members=[writer.id, reviewer.id], + # max_rounds is odd, so that the writer gets the last round + max_round=5, id="group_chat_manager", ) - # The workflow graph: - # - # GroupChatManager -> executor_a -> GroupChatManager - # GroupChatManager -> executor_b -> GroupChatManager - # GroupChatManager -> executor_c -> GroupChatManager # Step 2: Build the workflow with the defined edges. - # This time we are creating edges and loops with conditions. 
workflow = ( WorkflowBuilder() .set_start_executor(group_chat_manager) - .add_edge(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) - .add_edge(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) - .add_edge(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) - .add_edge(executor_a, group_chat_manager) - .add_edge(executor_b, group_chat_manager) - .add_edge(executor_c, group_chat_manager) + .add_edge(group_chat_manager, writer) + .add_edge(group_chat_manager, reviewer) + .add_edge(writer, group_chat_manager) + .add_edge(reviewer, group_chat_manager) .build() ) # Step 3: Run the workflow with an initial message. completion_event = None - async for event in workflow.run_stream([ChatMessage(ChatRole.USER, text="Start group chat")]): + async for event in workflow.run_stream( + "Create a slogan for a new electric SUV that is affordable and fun to drive." + ): if isinstance(event, AgentRunEvent): print(f"{event}") diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index b05d6fae20..277cb98214 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -4,21 +4,24 @@ import sys from dataclasses import dataclass -from agent_framework import ChatMessage, ChatResponse, ChatRole +from agent_framework import ChatClientAgent, ChatMessage, ChatRole +from agent_framework.azure import AzureChatClient from agent_framework.workflow import ( - AgentRunEvent, + AgentExecutor, + AgentExecutorRequest, + AgentExecutorResponse, Executor, HumanInTheLoopEvent, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - output_message_types, + message_handler, ) if sys.version_info >= (3, 12): - from typing import override # pragma: no cover + pass # pragma: no cover 
else: - from typing_extensions import override # pragma: no cover + pass # pragma: no cover """ The following sample demonstrates a basic workflow that simulates @@ -40,8 +43,7 @@ class AgentSelectionDecision(GroupChatMessage): selection: str -@output_message_types(AgentSelectionDecision, list[ChatMessage]) -class CriticGroupChatManagerWithHIL(Executor[list[ChatMessage]]): +class CriticGroupChatManagerWithHIL(Executor): """An executor that manages a round-robin group chat.""" def __init__(self, members: list[str], id: str | None = None): @@ -51,26 +53,53 @@ def __init__(self, members: list[str], id: str | None = None): self._current_round = 0 self._chat_history: list[ChatMessage] = [] - @override - async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: + @message_handler(output_types=[AgentExecutorRequest]) + async def start(self, task: str, ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" - self._chat_history.extend(data) + initial_message = ChatMessage(ChatRole.USER, text=task) - if self._should_terminate(): - await ctx.add_event(WorkflowCompletedEvent(data=self._chat_history)) - return + # Send the initial message to the members + await asyncio.gather(*[ + ctx.send_message( + AgentExecutorRequest(messages=[initial_message], should_respond=False), + target_id=member_id, + ) + for member_id in self._members + ]) + + # Invoke the first member to start the round-robin chat + await ctx.send_message( + AgentExecutorRequest(messages=[], should_respond=True), + target_id=self._get_next_member(), + ) + + # Update the cache with the initial message + self._chat_history.append(initial_message) - if self._should_request_hil(): - # Request human intervention if the last message was from the assistant - await ctx.send_message(self._chat_history) + @message_handler(output_types=[AgentExecutorRequest]) + async def handle_agent_response(self, response: AgentExecutorResponse, ctx: 
WorkflowContext) -> None: + """Execute the task by sending messages to the next executor in the round-robin sequence.""" + # Update the chat history with the response + self._chat_history.extend(response.agent_run_response.messages) + + # Send the response to the other members + await asyncio.gather(*[ + ctx.send_message( + AgentExecutorRequest(messages=response.agent_run_response.messages, should_respond=False), + target_id=member_id, + ) + for member_id in self._members + if member_id != response.executor_id + ]) + + # Check for termination condition + if self._should_terminate(): + await ctx.add_event(WorkflowCompletedEvent(data=response)) return - self._current_round += 1 - selection_decision = AgentSelectionDecision( - messages=self._chat_history, - selection=self._get_next_member(), - ) - await ctx.send_message(selection_decision) + # Request the next member to respond + selection = self._get_next_member() + await ctx.send_message(AgentExecutorRequest(messages=[], should_respond=True), target_id=selection) def _should_terminate(self) -> bool: """Determine if the group chat should terminate based on the last message.""" @@ -78,7 +107,7 @@ def _should_terminate(self) -> bool: return False last_message = self._chat_history[-1] - return bool(last_message.role == ChatRole.USER and "stop" in last_message.text.lower()) + return bool(last_message.role == ChatRole.USER and "approve" in last_message.text.lower()) def _should_request_hil(self) -> bool: """Determine if the group chat should request HIL based on the last message.""" @@ -93,68 +122,33 @@ def _get_next_member(self) -> str: return self._members[(self._current_round - 1) % len(self._members)] -@output_message_types(list[ChatMessage]) -class HumanInTheLoopExecutor(Executor[list[ChatMessage]]): - """An executor that simulates a human-in-the-loop decision-making process.""" - - def __init__(self, id: str | None = None): - """Initialize the executor with a unique identifier.""" - super().__init__(id) - - 
self._is_waiting_for_human_input = False - - @override - async def _execute(self, data: list[ChatMessage], ctx: WorkflowContext) -> None: - """Simulate a human-in-the-loop response.""" - if not self._is_waiting_for_human_input: - # If it's not waiting but received a message, it means it should prompt for human input. - self._is_waiting_for_human_input = True - await ctx.add_event(HumanInTheLoopEvent(executor_id=self.id)) - return - - self._is_waiting_for_human_input = False - # If it is waiting, it means the human has provided input. It should return the messages. - await ctx.send_message(data) - - -@output_message_types(list[ChatMessage]) -class FakeAgentExecutor(Executor[AgentSelectionDecision]): - """An executor that simulates a group chat agent A.""" - - @override - async def _execute(self, data: AgentSelectionDecision, ctx: WorkflowContext) -> None: - """Simulate a response.""" - response = ChatResponse( - messages=[ - ChatMessage( - ChatRole.ASSISTANT, - text=f"{self.id} received request. Current message size: {len(data.messages)}", - author_name=f"{self.id}", - ) - ] - ) - await ctx.add_event(AgentRunEvent(self.id, data=response)) - await ctx.send_message(response.messages) - - async def main(): """Main function to run the group chat workflow.""" # Step 1: Create the executors. - executor_a = FakeAgentExecutor(id="executor_a") - executor_b = FakeAgentExecutor(id="executor_b") - executor_c = FakeAgentExecutor(id="executor_c") - - hil_executor = HumanInTheLoopExecutor(id="hil_executor") + chat_client = AzureChatClient() + writer = AgentExecutor( + ChatClientAgent( + chat_client, + instructions=( + "You are an excellent content writer. You create new content and edit contents based on the feedback." + ), + ), + id="writer", + ) + reviewer = AgentExecutor( + ChatClientAgent( + chat_client, + instructions=( + "You are an excellent content reviewer. You review the content and provide feedback to the writer." 
+ ), + ), + id="reviewer", + ) group_chat_manager = CriticGroupChatManagerWithHIL( - members=[executor_a.id, executor_b.id, executor_c.id], + members=[writer.id, reviewer.id], id="group_chat_manager", ) - # The workflow graph: - # - # CriticGroupChatManagerWithHIL -> executor_a <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor - # CriticGroupChatManagerWithHIL -> executor_b <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor - # CriticGroupChatManagerWithHIL -> executor_c <-> CriticGroupChatManagerWithHIL <-> HumanInTheLoopExecutor # Step 2: Build the workflow with the defined edges. workflow = ( diff --git a/python/samples/getting_started/workflow/step_06_map_reduce.py b/python/samples/getting_started/workflow/step_06_map_reduce.py index df433340d8..19cfc6488d 100644 --- a/python/samples/getting_started/workflow/step_06_map_reduce.py +++ b/python/samples/getting_started/workflow/step_06_map_reduce.py @@ -13,13 +13,13 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - output_message_types, + message_handler, ) if sys.version_info >= (3, 12): - from typing import override # pragma: no cover + pass # pragma: no cover else: - from typing_extensions import override # pragma: no cover + pass # pragma: no cover """ @@ -42,15 +42,13 @@ SHARED_STATE_DATA_KEY = "data_to_be_processed" -@dataclass class SplitCompleted: - """A data class to hold the completed state of the SplitExecutor.""" + """A class to signal the completion of the Split executor.""" - map_executor_id: str + ... 
-@output_message_types(list[str]) -class SplitDataExecutor(Executor[str]): +class Split(Executor): """An executor that splits data into smaller chunks based on the number of nodes available.""" def __init__(self, map_executor_ids: list[str], id: str | None = None): @@ -58,8 +56,8 @@ def __init__(self, map_executor_ids: list[str], id: str | None = None): super().__init__(id) self._map_executor_ids = map_executor_ids - @override - async def _execute(self, data: str, ctx: WorkflowContext) -> None: + @message_handler(output_types=[SplitCompleted]) + async def split(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by splitting the data into chunks. Args: @@ -85,7 +83,7 @@ async def _process_chunk(i: int) -> None: # The start and end indices are stored in the shared state for the MapExecutor. # This allows the MapExecutor to know which part of the data it should process. await ctx.set_shared_state(self._map_executor_ids[i], (start_index, end_index)) - await ctx.send_message(SplitCompleted(self._map_executor_ids[i])) + await ctx.send_message(SplitCompleted(), self._map_executor_ids[i]) tasks = [asyncio.create_task(_process_chunk(i)) for i in range(map_executor_count)] await asyncio.gather(*tasks) @@ -110,12 +108,11 @@ class MapCompleted: file_path: str -@output_message_types(MapCompleted) -class MapExecutor(Executor[SplitCompleted]): +class Map(Executor): """An executor that applies a function to each item in the data and save the result to a file.""" - @override - async def _execute(self, data: SplitCompleted, ctx: WorkflowContext) -> None: + @message_handler(output_types=[MapCompleted]) + async def map(self, _: SplitCompleted, ctx: WorkflowContext) -> None: """Execute the task by applying a function to each item and same result to a file. 
Args: @@ -143,8 +140,7 @@ class ShuffleCompleted: reducer_id: str -@output_message_types(ShuffleCompleted) -class ShuffleExecutor(Executor[list[MapCompleted]]): +class Shuffle(Executor): """An executor that redistributes results from the map step to the reduce step.""" def __init__(self, reducer_ids: list[str], id: str | None = None): @@ -152,8 +148,8 @@ def __init__(self, reducer_ids: list[str], id: str | None = None): super().__init__(id) self._reducer_ids = reducer_ids - @override - async def _execute(self, data: list[MapCompleted], ctx: WorkflowContext) -> None: + @message_handler(output_types=[ShuffleCompleted]) + async def shuffle(self, data: list[MapCompleted], ctx: WorkflowContext) -> None: """Execute the task by aggregating the results. Args: @@ -224,11 +220,10 @@ class ReduceCompleted: file_path: str -@output_message_types(ReduceCompleted) -class ReduceExecutor(Executor[ShuffleCompleted]): +class Reduce(Executor): """An executor that reduces the results from the ShuffleExecutor.""" - @override + @message_handler(output_types=[ReduceCompleted]) async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext) -> None: """Execute the task by reducing the results. @@ -258,11 +253,11 @@ async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext) -> None: await ctx.send_message(ReduceCompleted(file_path)) -class CompletionExecutor(Executor[list[ReduceCompleted]]): +class CompletionExecutor(Executor): """An executor that completes the workflow by aggregating the results from the ReduceExecutors.""" - @override - async def _execute(self, data: list[ReduceCompleted], ctx: WorkflowContext) -> None: + @message_handler + async def complete(self, data: list[ReduceCompleted], ctx: WorkflowContext) -> None: """Execute the task by aggregating the results. Args: @@ -275,14 +270,14 @@ async def _execute(self, data: list[ReduceCompleted], ctx: WorkflowContext) -> N async def main(): """Main function to run the workflow.""" # Step 1: Create the executors. 
- map_executors = [MapExecutor(id=f"map_executor_{i}") for i in range(3)] - split_data_executor = SplitDataExecutor( - [map_executor.id for map_executor in map_executors], + map_operations = [Map(id=f"map_executor_{i}") for i in range(3)] + split_operation = Split( + [map_operation.id for map_operation in map_operations], id="split_data_executor", ) - reduce_executors = [ReduceExecutor(id=f"reduce_executor_{i}") for i in range(4)] - shuffle_executor = ShuffleExecutor( - [reduce_executor.id for reduce_executor in reduce_executors], + reduce_operations = [Reduce(id=f"reduce_executor_{i}") for i in range(4)] + shuffle_operation = Shuffle( + [reduce_operation.id for reduce_operation in reduce_operations], id="shuffle_executor", ) completion_executor = CompletionExecutor(id="completion_executor") @@ -290,11 +285,11 @@ async def main(): # Step 2: Build the workflow. workflow = ( WorkflowBuilder() - .set_start_executor(split_data_executor) - .add_fan_out_edges(split_data_executor, map_executors) - .add_fan_in_edges(map_executors, shuffle_executor) - .add_fan_out_edges(shuffle_executor, reduce_executors) - .add_fan_in_edges(reduce_executors, completion_executor) + .set_start_executor(split_operation) + .add_fan_out_edges(split_operation, map_operations) + .add_fan_in_edges(map_operations, shuffle_operation) + .add_fan_out_edges(shuffle_operation, reduce_operations) + .add_fan_in_edges(reduce_operations, completion_executor) .build() ) From ffaf4f045ab16a6e0e20d0e952a7163f92be4972 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 4 Aug 2025 09:06:45 -0700 Subject: [PATCH 10/18] Minor improvement --- python/packages/workflow/agent_framework_workflow/_executor.py | 3 ++- python/packages/workflow/agent_framework_workflow/_workflow.py | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py index d30df7c25e..6d58c67e2c 100644 --- 
a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -214,12 +214,13 @@ class AgentExecutor(Executor): def __init__( self, agent: AIAgent, + *, agent_thread: AgentThread | None = None, streaming: bool = False, id: str | None = None, ): """Initialize the executor with a unique identifier.""" - super().__init__(id) + super().__init__(id or agent.id) self._agent = agent self._agent_thread = agent_thread or self._agent.get_new_thread() self._streaming = streaming diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 52ec0cd6e9..caa54cf475 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -125,6 +125,8 @@ def add_edge( def add_fan_out_edges(self, source: Executor, targets: Sequence[Executor]) -> "Self": """Add multiple edges to the workflow. + Messages from the source executor will be sent to all target executors. + Args: source: The source executor of the edges. targets: A list of target executors for the edges. 
From 821e317e6a5aa10fad2917a46ebd5c7e1fe124ed Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 4 Aug 2025 11:16:53 -0700 Subject: [PATCH 11/18] Add RequestInfoExecutor --- .../main/agent_framework/workflow/__init__.py | 4 +- .../agent_framework/workflow/__init__.pyi | 8 +- .../agent_framework_workflow/__init__.py | 18 +- .../agent_framework_workflow/_edge.py | 7 +- .../agent_framework_workflow/_events.py | 39 ++++- .../agent_framework_workflow/_executor.py | 74 +++++++- .../agent_framework_workflow/_runner.py | 7 +- .../agent_framework_workflow/_workflow.py | 36 +++- .../_workflow_context.py | 27 ++- .../step_05_simple_group_chat_with_hil.py | 161 ++++++++++-------- 10 files changed, 278 insertions(+), 103 deletions(-) diff --git a/python/packages/main/agent_framework/workflow/__init__.py b/python/packages/main/agent_framework/workflow/__init__.py index 44a25c155f..ed9b4cdb87 100644 --- a/python/packages/main/agent_framework/workflow/__init__.py +++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -14,7 +14,7 @@ "ExecutorCompleteEvent", "ExecutorEvent", "ExecutorInvokeEvent", - "HumanInTheLoopEvent", + "RequestInfoEvent", "WorkflowCompletedEvent", "WorkflowEvent", "WorkflowStartedEvent", @@ -24,6 +24,8 @@ "AgentExecutor", "AgentExecutorRequest", "AgentExecutorResponse", + "RequestInfoExecutor", + "RequestInfoMessage", ] diff --git a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi index 58ba1b8f75..ff0d555137 100644 --- a/python/packages/main/agent_framework/workflow/__init__.pyi +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -10,7 +10,9 @@ from agent_framework_workflow import ( ExecutorCompleteEvent, ExecutorEvent, ExecutorInvokeEvent, - HumanInTheLoopEvent, + RequestInfoEvent, + RequestInfoExecutor, + RequestInfoMessage, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, @@ -30,7 +32,9 @@ __all__ = [ "ExecutorCompleteEvent", "ExecutorEvent", 
"ExecutorInvokeEvent", - "HumanInTheLoopEvent", + "RequestInfoEvent", + "RequestInfoExecutor", + "RequestInfoMessage", "WorkflowBuilder", "WorkflowCompletedEvent", "WorkflowContext", diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index d1c2c0b6f0..ac53726125 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -8,12 +8,20 @@ ExecutorCompleteEvent, ExecutorEvent, ExecutorInvokeEvent, - HumanInTheLoopEvent, + RequestInfoEvent, WorkflowCompletedEvent, WorkflowEvent, WorkflowStartedEvent, ) -from ._executor import AgentExecutor, AgentExecutorRequest, AgentExecutorResponse, Executor, message_handler +from ._executor import ( + AgentExecutor, + AgentExecutorRequest, + AgentExecutorResponse, + Executor, + RequestInfoExecutor, + RequestInfoMessage, + message_handler, +) from ._workflow import WorkflowBuilder from ._workflow_context import WorkflowContext @@ -33,7 +41,11 @@ "ExecutorCompleteEvent", "ExecutorEvent", "ExecutorInvokeEvent", - "HumanInTheLoopEvent", + "RequestInfoEvent", + "RequestInfoEvent", + "RequestInfoExecutor", + "RequestInfoExecutor", + "RequestInfoMessage", "WorkflowBuilder", "WorkflowCompletedEvent", "WorkflowContext", diff --git a/python/packages/workflow/agent_framework_workflow/_edge.py b/python/packages/workflow/agent_framework_workflow/_edge.py index a31eec29fa..0a963cc0a3 100644 --- a/python/packages/workflow/agent_framework_workflow/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/_edge.py @@ -69,7 +69,9 @@ async def send_message(self, message: Message, shared_state: SharedState, ctx: R raise RuntimeError(f"Edge {self.id} cannot handle data of type {type(message.data)}.") if not self._edge_group_ids and self._should_route(message.data): - await self.target.execute(message.data, WorkflowContext(self.target.id, shared_state, ctx)) + await 
self.target.execute( + message.data, WorkflowContext(self.target.id, [self.source.id], shared_state, ctx) + ) elif self._edge_group_ids: # Logic: # 1. If not all edges in the edge group have data in the shared state, @@ -94,7 +96,8 @@ async def send_message(self, message: Message, shared_state: SharedState, ctx: R if message_list: data_list = [msg.data for msg in message_list] - await self.target.execute(data_list, WorkflowContext(self.target.id, shared_state, ctx)) + source_ids = [msg.source_id for msg in message_list] + await self.target.execute(data_list, WorkflowContext(self.target.id, source_ids, shared_state, ctx)) def _should_route(self, data: Any) -> bool: """Determine if message should be routed through this edge.""" diff --git a/python/packages/workflow/agent_framework_workflow/_events.py b/python/packages/workflow/agent_framework_workflow/_events.py index 977f8ad295..c24440a5f5 100644 --- a/python/packages/workflow/agent_framework_workflow/_events.py +++ b/python/packages/workflow/agent_framework_workflow/_events.py @@ -66,17 +66,38 @@ def __repr__(self): return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data})" -class HumanInTheLoopEvent(ExecutorEvent): - """Event triggered when human intervention is required in the workflow.""" - - def __init__(self, executor_id: str, data: Any | None = None, **kwargs: Any): - """Initialize the human-in-the-loop event with optional data.""" - super().__init__(executor_id, data) - self.kwargs = kwargs +class RequestInfoEvent(WorkflowEvent): + """Event triggered when a workflow executor requests external information.""" + + def __init__( + self, + request_id: str, + source_executor_id: str, + request_type: type, + request_data: Any, + ): + """Initialize the request info event. + + Args: + request_id: Unique identifier for the request. + source_executor_id: ID of the executor that made the request. + request_type: Type of the request (e.g., a specific data type). 
+ request_data: The data associated with the request. + """ + super().__init__(request_data) + self.request_id = request_id + self.source_executor_id = source_executor_id + self.request_type = request_type def __repr__(self): - """Return a string representation of the human-in-the-loop event.""" - return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data}, kwargs={self.kwargs})" + """Return a string representation of the request info event.""" + return ( + f"{self.__class__.__name__}(" + f"request_id={self.request_id}, " + f"source_executor_id={self.source_executor_id}, " + f"request_type={self.request_type.__name__}, " + f"data={self.data})" + ) class ExecutorInvokeEvent(ExecutorEvent): diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py index 6d58c67e2c..95379c0ff1 100644 --- a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -5,11 +5,11 @@ import uuid from collections.abc import Awaitable, Callable from dataclasses import dataclass -from typing import Any, TypeVar, overload +from typing import Any, ClassVar, TypeVar, overload from agent_framework import AgentRunResponse, AgentRunResponseUpdate, AgentThread, AIAgent, ChatMessage -from ._events import AgentRunEvent, AgentRunStreamingEvent, ExecutorCompleteEvent, ExecutorInvokeEvent +from ._events import AgentRunEvent, AgentRunStreamingEvent, ExecutorCompleteEvent, ExecutorInvokeEvent, RequestInfoEvent from ._typing_utils import is_instance_of from ._workflow_context import WorkflowContext @@ -209,7 +209,7 @@ class AgentExecutorResponse: class AgentExecutor(Executor): - """An executor that wraps an agent for handling messages.""" + """built-in executor that wraps an agent for handling messages.""" def __init__( self, @@ -253,3 +253,71 @@ async def run(self, request: AgentExecutorRequest, ctx: WorkflowContext) -> None # 
endregion: Agent Executor + + +# region: Request Info Executor + + +@dataclass +class RequestInfoMessage: + """Base class for all request messages in workflows. + + Any message that should be routed to the RequestInfoExecutor for external + handling must inherit from this class. This ensures type safety and makes + the request/response pattern explicit. + """ + + request_id: str = str(uuid.uuid4()) + + +class RequestInfoExecutor(Executor): + """Built-in executor that handles request/response patterns in workflows. + + This executor acts as a gateway for external information requests. When it receives + a request message, it saves the request details and emits a RequestInfoEvent. When + a response is provided externally, it emits the response as a message. + """ + + # Well-known ID for the request info executor + EXECUTOR_ID: ClassVar[str] = "request_info" + + def __init__(self): + """Initialize the RequestInfoExecutor with its well-known ID.""" + super().__init__(id=self.EXECUTOR_ID) + self._request_events: dict[str, RequestInfoEvent] = {} + + @message_handler + async def run(self, message: RequestInfoMessage, ctx: WorkflowContext) -> None: + """Run the RequestInfoExecutor with the given message.""" + source_executor_id = ctx.get_source_executor_id() + + event = RequestInfoEvent( + request_id=message.request_id, + source_executor_id=source_executor_id, + request_type=type(message), + request_data=message, + ) + self._request_events[message.request_id] = event + await ctx.add_event(event) + + async def handle_response( + self, + response_data: Any, + request_id: str, + ctx: WorkflowContext, + ) -> None: + """Handle a response to a request. + + Args: + request_id: The ID of the request to which this response corresponds. + response_data: The data returned in the response. + ctx: The workflow context for sending the response. 
+ """ + if request_id not in self._request_events: + raise ValueError(f"No request found with ID: {request_id}") + + event = self._request_events.pop(request_id) + await ctx.send_message(response_data, target_id=event.source_executor_id) + + +# endregion: Request Info Executor diff --git a/python/packages/workflow/agent_framework_workflow/_runner.py b/python/packages/workflow/agent_framework_workflow/_runner.py index 0ddf7cfa2d..28b74bc696 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/_runner.py @@ -6,7 +6,7 @@ from collections.abc import AsyncIterable from ._edge import Edge -from ._events import WorkflowEvent, WorkflowWarningEvent +from ._events import WorkflowEvent from ._runner_context import Message, RunnerContext from ._shared_state import SharedState @@ -72,11 +72,6 @@ async def _deliver_messages_inner( continue if not edge.can_handle(message.data): - warning_msg = ( - f"Edge {edge.id} cannot handle message with data type {type(message.data)}. Skipping." 
- ) - logger.warning(warning_msg) - await self._ctx.add_event(WorkflowWarningEvent(warning_msg)) continue await edge.send_message(message, self._shared_state, self._ctx) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index caa54cf475..17bb57ea4b 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -6,7 +6,7 @@ from ._edge import Edge from ._events import WorkflowEvent -from ._executor import Executor +from ._executor import Executor, RequestInfoExecutor from ._runner import Runner from ._runner_context import InProcRunnerContext, RunnerContext from ._shared_state import SharedState @@ -71,6 +71,11 @@ async def run_stream( message, WorkflowContext( executor.id, + [ + # Using the workflow class name as the source executor ID when + # delivering the first message to the starting executor + self.__class__.__name__ + ], self._shared_state, self._runner.context, ), @@ -78,6 +83,35 @@ async def run_stream( async for event in self._runner.run_until_convergence(): yield event + async def send_response(self, response: Any, request_id: str) -> AsyncIterable[WorkflowEvent]: + """Send a response back to the workflow. + + Args: + response: The response data to be sent. + request_id: The ID of the request that this response corresponds to. 
+ """ + request_info_executor = self._get_executor_by_id(RequestInfoExecutor.EXECUTOR_ID) + if not isinstance(request_info_executor, RequestInfoExecutor): + raise ValueError(f"Executor with ID {RequestInfoExecutor.EXECUTOR_ID} is not a RequestInfoExecutor.") + + await request_info_executor.handle_response( + response, + request_id, + WorkflowContext( + request_info_executor.id, + [ + # Using the workflow class name as the source executor ID when + # delivering the first message to the starting executor + self.__class__.__name__ + ], + self._shared_state, + self._runner.context, + ), + ) + + async for event in self._runner.run_until_convergence(): + yield event + def _get_executor_by_id(self, executor_id: str) -> Executor: """Get an executor by its ID. diff --git a/python/packages/workflow/agent_framework_workflow/_workflow_context.py b/python/packages/workflow/agent_framework_workflow/_workflow_context.py index ff6a3f5faa..b8df0f0bbd 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow_context.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow_context.py @@ -17,6 +17,7 @@ class WorkflowContext: def __init__( self, executor_id: str, + source_executor_ids: list[str], shared_state: SharedState, runner_context: RunnerContext, ): @@ -24,14 +25,20 @@ def __init__( Args: executor_id: The unique identifier of the executor that this context belongs to. - source_executor_id: The unique identifier of the source executor that generated this context. + source_executor_ids: The IDs of the source executors that sent messages to this executor. + This is a list to support fan_in scenarios where multiple sources send aggregated + messages to the same executor. shared_state: The shared state for the workflow. runner_context: The runner context that provides methods to send messages and events. 
""" self._executor_id = executor_id + self._source_executor_ids = source_executor_ids self._runner_context = runner_context self._shared_state = shared_state + if not self._source_executor_ids: + raise ValueError("source_executor_ids cannot be empty. At least one source executor ID is required.") + async def send_message(self, message: Any, target_id: str | None = None) -> None: """Send a message to the workflow context.""" await self._runner_context.send_message( @@ -54,6 +61,24 @@ async def set_shared_state(self, key: str, value: Any) -> None: """Set a value in the shared state.""" await self._shared_state.set(key, value) + def get_source_executor_id(self) -> str: + """Get the ID of the source executor that sent the message to this executor. + + Raises: + RuntimeError: If there are multiple source executors, this method raises an error. + """ + if len(self._source_executor_ids) > 1: + raise RuntimeError( + "Cannot get source executor ID when there are multiple source executors. " + "Access the full list via the source_executor_ids property instead." + ) + return self._source_executor_ids[0] + + @property + def source_executor_ids(self) -> list[str]: + """Get the IDs of the source executors that sent messages to this executor.""" + return self._source_executor_ids + @property def shared_state(self) -> SharedState: """Get the shared state.""" diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index 277cb98214..d7ea89e088 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -1,8 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio -import sys -from dataclasses import dataclass from agent_framework import ChatClientAgent, ChatMessage, ChatRole from agent_framework.azure import AzureChatClient @@ -11,39 +9,22 @@ AgentExecutorRequest, AgentExecutorResponse, Executor, - HumanInTheLoopEvent, + RequestInfoEvent, + RequestInfoExecutor, + RequestInfoMessage, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, message_handler, ) -if sys.version_info >= (3, 12): - pass # pragma: no cover -else: - pass # pragma: no cover - """ The following sample demonstrates a basic workflow that simulates a round-robin group chat with a Human-in-the-Loop (HIL) executor. """ -@dataclass -class GroupChatMessage: - """A data class to hold the messages in a group chat.""" - - messages: list[ChatMessage] - - -@dataclass -class AgentSelectionDecision(GroupChatMessage): - """A data class to hold the decision made by the Human-in-the-Loop executor.""" - - selection: str - - -class CriticGroupChatManagerWithHIL(Executor): +class CriticGroupChatManager(Executor): """An executor that manages a round-robin group chat.""" def __init__(self, members: list[str], id: str | None = None): @@ -55,17 +36,11 @@ def __init__(self, members: list[str], id: str | None = None): @message_handler(output_types=[AgentExecutorRequest]) async def start(self, task: str, ctx: WorkflowContext) -> None: - """Execute the task by sending messages to the next executor in the round-robin sequence.""" + """Handler that starts the group chat with an initial task.""" initial_message = ChatMessage(ChatRole.USER, text=task) # Send the initial message to the members - await asyncio.gather(*[ - ctx.send_message( - AgentExecutorRequest(messages=[initial_message], should_respond=False), - target_id=member_id, - ) - for member_id in self._members - ]) + await self._broadcast_message([initial_message], ctx) # Invoke the first member to start the round-robin chat await ctx.send_message( @@ -76,21 +51,19 @@ async def start(self, task: str, ctx: 
WorkflowContext) -> None: # Update the cache with the initial message self._chat_history.append(initial_message) - @message_handler(output_types=[AgentExecutorRequest]) + @message_handler(output_types=[AgentExecutorRequest, RequestInfoMessage]) async def handle_agent_response(self, response: AgentExecutorResponse, ctx: WorkflowContext) -> None: - """Execute the task by sending messages to the next executor in the round-robin sequence.""" + """Handler that processes the response from the agent.""" # Update the chat history with the response self._chat_history.extend(response.agent_run_response.messages) # Send the response to the other members - await asyncio.gather(*[ - ctx.send_message( - AgentExecutorRequest(messages=response.agent_run_response.messages, should_respond=False), - target_id=member_id, - ) - for member_id in self._members - if member_id != response.executor_id - ]) + await self._broadcast_message(response.agent_run_response.messages, ctx, exclude_id=response.executor_id) + + # Check if we need to request additional information + if self._should_request_info(): + await ctx.send_message(RequestInfoMessage()) + return # Check for termination condition if self._should_terminate(): @@ -101,6 +74,40 @@ async def handle_agent_response(self, response: AgentExecutorResponse, ctx: Work selection = self._get_next_member() await ctx.send_message(AgentExecutorRequest(messages=[], should_respond=True), target_id=selection) + @message_handler(output_types=[AgentExecutorRequest]) + async def handle_request_response(self, response: list[ChatMessage], ctx: WorkflowContext) -> None: + """Handler that processes the response from the RequestInfoExecutor.""" + # Update the chat history with the response + self._chat_history.extend(response) + + # Send the response to the other members + await self._broadcast_message(response, ctx) + + # Check for termination condition + if self._should_terminate(): + await ctx.add_event(WorkflowCompletedEvent(data=response)) + return + + 
# Request the next member to respond + selection = self._get_next_member() + await ctx.send_message(AgentExecutorRequest(messages=[], should_respond=True), target_id=selection) + + async def _broadcast_message( + self, + messages: list[ChatMessage], + ctx: WorkflowContext, + exclude_id: str | None = None, + ) -> None: + """Broadcast messages to all members.""" + await asyncio.gather(*[ + ctx.send_message( + AgentExecutorRequest(messages=messages, should_respond=False), + target_id=member_id, + ) + for member_id in self._members + if member_id != exclude_id + ]) + def _should_terminate(self) -> bool: """Determine if the group chat should terminate based on the last message.""" if len(self._chat_history) == 0: @@ -109,7 +116,7 @@ def _should_terminate(self) -> bool: last_message = self._chat_history[-1] return bool(last_message.role == ChatRole.USER and "approve" in last_message.text.lower()) - def _should_request_hil(self) -> bool: + def _should_request_info(self) -> bool: """Determine if the group chat should request HIL based on the last message.""" if len(self._chat_history) == 0: return True @@ -119,82 +126,86 @@ def _should_request_hil(self) -> bool: def _get_next_member(self) -> str: """Get the next member in the round-robin sequence.""" - return self._members[(self._current_round - 1) % len(self._members)] + next_member = self._members[self._current_round % len(self._members)] + self._current_round += 1 + + return next_member async def main(): """Main function to run the group chat workflow.""" # Step 1: Create the executors. - chat_client = AzureChatClient() writer = AgentExecutor( ChatClientAgent( - chat_client, + AzureChatClient(), instructions=( "You are an excellent content writer. You create new content and edit contents based on the feedback." ), + name="Writer", + id="Writer", ), - id="writer", ) reviewer = AgentExecutor( ChatClientAgent( - chat_client, + AzureChatClient(), instructions=( - "You are an excellent content reviewer. 
You review the content and provide feedback to the writer." + "You are an excellent content reviewer. You review the content and provide feedback to the writer. " + "You do not address user requests. Only provide feedback to the writer." ), + name="Reviewer", + id="Reviewer", ), - id="reviewer", ) - group_chat_manager = CriticGroupChatManagerWithHIL( - members=[writer.id, reviewer.id], - id="group_chat_manager", - ) + group_chat_manager = CriticGroupChatManager(members=[writer.id, reviewer.id], id="GroupChatManager") + + request_info_executor = RequestInfoExecutor() # Step 2: Build the workflow with the defined edges. workflow = ( WorkflowBuilder() .set_start_executor(group_chat_manager) - .add_edge(group_chat_manager, hil_executor) - .add_edge(hil_executor, group_chat_manager) - .add_edge(group_chat_manager, executor_a, condition=lambda x: x.selection == executor_a.id) - .add_edge(group_chat_manager, executor_b, condition=lambda x: x.selection == executor_b.id) - .add_edge(group_chat_manager, executor_c, condition=lambda x: x.selection == executor_c.id) - .add_edge(executor_a, group_chat_manager) - .add_edge(executor_b, group_chat_manager) - .add_edge(executor_c, group_chat_manager) + .add_edge(group_chat_manager, request_info_executor) + .add_edge(request_info_executor, group_chat_manager) + .add_edge(group_chat_manager, writer) + .add_edge(group_chat_manager, reviewer) + .add_edge(writer, group_chat_manager) + .add_edge(reviewer, group_chat_manager) .build() ) # Step 3: Run the workflow with an initial message. - # Here we are capturing the human-in-the-loop event and allowing the user to provide input. + # Here we are capturing the RequestInfoEvent event and allowing the user to provide input. # Once the user provides input, we will provide it back to the workflow to continue the execution. 
completion_event: WorkflowCompletedEvent | None = None - human_in_the_loop_event: HumanInTheLoopEvent | None = None - user_input = "Start group chat" + request_info_event: RequestInfoEvent | None = None + user_input = "" while True: - # Depending on whether we have a human-in-the-loop event, we either + # Depending on whether we have a RequestInfoEvent event, we either # run the workflow normally or send the message to the HIL executor. - if not human_in_the_loop_event: - response = workflow.run_stream([ChatMessage(ChatRole.USER, text=user_input)]) - else: + if not request_info_event: response = workflow.run_stream( + "Create a slogan for a new electric SUV that is affordable and fun to drive." + ) + else: + response = workflow.send_response( [ChatMessage(ChatRole.USER, text=user_input)], - executor=human_in_the_loop_event.executor_id, + request_info_event.request_id, ) - human_in_the_loop_event = None + request_info_event = None async for event in response: - print(f"{event}") + print(event) if isinstance(event, WorkflowCompletedEvent): completion_event = event - elif isinstance(event, HumanInTheLoopEvent): - human_in_the_loop_event = event + elif isinstance(event, RequestInfoEvent): + request_info_event = event # Prompt for user input if we are waiting for human intervention - if human_in_the_loop_event: - user_input = input("Human intervention required. Type 'stop' to end the loop or any message to continue: ") + if request_info_event: + user_input = input("Human feedback required. 
Please provide your input (type 'approve' to end): ") elif completion_event: break From daadd32da2c05e94c4cadf03ab2455a07e7ba2ca Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 4 Aug 2025 15:17:22 -0700 Subject: [PATCH 12/18] Add unit tests part 1 --- python/packages/workflow/tests/test_edge.py | 47 ++++++++ .../packages/workflow/tests/test_executor.py | 102 ++++++++++++++++++ python/packages/workflow/tests/test_runner.py | 85 +++++++++++++++ 3 files changed, 234 insertions(+) create mode 100644 python/packages/workflow/tests/test_edge.py create mode 100644 python/packages/workflow/tests/test_executor.py create mode 100644 python/packages/workflow/tests/test_runner.py diff --git a/python/packages/workflow/tests/test_edge.py b/python/packages/workflow/tests/test_edge.py new file mode 100644 index 0000000000..19251230bd --- /dev/null +++ b/python/packages/workflow/tests/test_edge.py @@ -0,0 +1,47 @@ +# Copyright (c) Microsoft. All rights reserved. + +from dataclasses import dataclass +from typing import Any + +from agent_framework.workflow import Executor, WorkflowContext, message_handler + +from agent_framework_workflow._edge import Edge + + +@dataclass +class MockMessage: + """A mock message for testing purposes.""" + + data: Any + + +class MockExecutor(Executor): + """A mock executor for testing purposes.""" + + @message_handler + async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: + """A mock handler that does nothing.""" + pass + + +def test_create_edge(): + """Test creating an edge with a source and target executor.""" + source = MockExecutor(id="source_executor") + target = MockExecutor(id="target_executor") + + edge = Edge(source=source, target=target) + + assert edge.source_id == "source_executor" + assert edge.target_id == "target_executor" + assert edge.id == f"{edge.source_id}{Edge.ID_SEPARATOR}{edge.target_id}" + assert (edge.source_id, edge.target_id) == Edge.source_and_target_from_id(edge.id) + + +def 
test_edge_can_handle(): + """Test creating an edge with a source and target executor.""" + source = MockExecutor(id="source_executor") + target = MockExecutor(id="target_executor") + + edge = Edge(source=source, target=target) + + assert edge.can_handle(MockMessage(data="test")) diff --git a/python/packages/workflow/tests/test_executor.py b/python/packages/workflow/tests/test_executor.py new file mode 100644 index 0000000000..06028fa326 --- /dev/null +++ b/python/packages/workflow/tests/test_executor.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest +from agent_framework.workflow import Executor, WorkflowContext, message_handler + + +def test_executor_without_handlers(): + """Test that an executor without handlers raises an error when trying to run.""" + + class MockExecutorWithoutHandlers(Executor): + """A mock executor that does not implement any handlers.""" + + pass + + with pytest.raises(ValueError): + MockExecutorWithoutHandlers() + + +def test_executor_handler_without_annotations(): + """Test that an executor with one handler without annotations raises an error when trying to run.""" + + with pytest.raises(ValueError): + + class MockExecutorWithOneHandlerWithoutAnnotations(Executor): # type: ignore + """A mock executor with one handler that does not implement any annotations.""" + + @message_handler + async def handle(self, message, ctx) -> None: # type: ignore + """A mock handler that does not implement any annotations.""" + pass + + +def test_executor_invalid_handler_signature(): + """Test that an executor with an invalid handler signature raises an error when trying to run.""" + + with pytest.raises(ValueError): + + class MockExecutorWithInvalidHandlerSignature(Executor): # type: ignore + """A mock executor with an invalid handler signature.""" + + @message_handler # type: ignore + async def handle(self, message, other, ctx) -> None: # type: ignore + """A mock handler with an invalid signature.""" + pass + + +def 
test_executor_with_valid_handlers(): + """Test that an executor with valid handlers can be instantiated and run.""" + + class MockExecutorWithValidHandlers(Executor): # type: ignore + """A mock executor with valid handlers.""" + + @message_handler + async def handle_text(self, text: str, ctx: WorkflowContext) -> None: # type: ignore + """A mock handler with a valid signature.""" + pass + + @message_handler + async def handle_number(self, number: int, ctx: WorkflowContext) -> None: # type: ignore + """Another mock handler with a valid signature.""" + pass + + executor = MockExecutorWithValidHandlers() + assert executor.id is not None + assert len(executor._message_handlers) == 2 # type: ignore + assert executor.can_handle("text") is True + assert executor.can_handle(42) is True + assert executor.can_handle(3.14) is False + + +def test_executor_handlers_with_output_types(): + """Test that an executor with handlers that specify output types can be instantiated and run.""" + + class MockExecutorWithOutputTypes(Executor): # type: ignore + """A mock executor with handlers that specify output types.""" + + @message_handler(output_types=[str]) + async def handle_string(self, text: str, ctx: WorkflowContext) -> None: # type: ignore + """A mock handler that outputs a string.""" + pass + + @message_handler(output_types=[int]) + async def handle_integer(self, number: int, ctx: WorkflowContext) -> None: # type: ignore + """A mock handler that outputs an integer.""" + pass + + executor = MockExecutorWithOutputTypes() + assert len(executor._message_handlers) == 2 # type: ignore + + string_handler = executor._message_handlers[str] # type: ignore + assert string_handler is not None + assert string_handler._handler_spec is not None # type: ignore + assert string_handler._handler_spec["name"] == "handle_string" # type: ignore + assert string_handler._handler_spec["message_type"] is str # type: ignore + assert string_handler._handler_spec["output_types"] == [str] # type: ignore + + 
int_handler = executor._message_handlers[int] # type: ignore + assert int_handler is not None + assert int_handler._handler_spec is not None # type: ignore + assert int_handler._handler_spec["name"] == "handle_integer" # type: ignore + assert int_handler._handler_spec["message_type"] is int # type: ignore + assert int_handler._handler_spec["output_types"] == [int] # type: ignore diff --git a/python/packages/workflow/tests/test_runner.py b/python/packages/workflow/tests/test_runner.py new file mode 100644 index 0000000000..ef72872b0c --- /dev/null +++ b/python/packages/workflow/tests/test_runner.py @@ -0,0 +1,85 @@ +# Copyright (c) Microsoft. All rights reserved. + +from dataclasses import dataclass + +from agent_framework.workflow import Executor, WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, message_handler + +from agent_framework_workflow._edge import Edge +from agent_framework_workflow._runner import Runner +from agent_framework_workflow._runner_context import InProcRunnerContext, RunnerContext +from agent_framework_workflow._shared_state import SharedState + + +@dataclass +class MockMessage: + """A mock message for testing purposes.""" + + data: int + + +class MockExecutor(Executor): + """A mock executor for testing purposes.""" + + @message_handler(output_types=[MockMessage]) + async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: + """A mock handler that does nothing.""" + if message.data < 10: + await ctx.send_message(MockMessage(data=message.data + 1)) + else: + await ctx.add_event(WorkflowCompletedEvent(data=message.data)) + + +def test_create_runner(): + """Test creating a runner with edges and shared state.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + # Create a loop + edges = [ + Edge(source=executor_a, target=executor_b), + Edge(source=executor_b, target=executor_a), + ] + + runner = Runner(edges, shared_state=SharedState(), ctx=InProcRunnerContext()) + + assert 
runner.context is not None and isinstance(runner.context, RunnerContext) + + +async def test_runner_run_until_convergence(): + """Test running the runner with a simple workflow.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + # Create a loop + edges = [ + Edge(source=executor_a, target=executor_b), + Edge(source=executor_b, target=executor_a), + ] + + shared_state = SharedState() + ctx = InProcRunnerContext() + + runner = Runner(edges, shared_state, ctx) + async for event in runner.run_until_convergence(): + assert isinstance(event, WorkflowEvent) + if isinstance(event, WorkflowCompletedEvent): + assert event.data == 10 + + +async def test_runner_run_until_convergence_not_completed(): + """Test running the runner with a simple workflow.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + # Create a loop + edges = [ + Edge(source=executor_a, target=executor_b), + Edge(source=executor_b, target=executor_a), + ] + + shared_state = SharedState() + ctx = InProcRunnerContext() + + runner = Runner(edges, shared_state, ctx, max_iterations=5) + async for event in runner.run_until_convergence(): + assert not isinstance(event, WorkflowCompletedEvent) From 2c232027888281509cc4a75c3d9f73a46e1b60c0 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Mon, 4 Aug 2025 15:27:59 -0700 Subject: [PATCH 13/18] Address comments 2 --- .../agent_framework_workflow/_workflow.py | 20 ++----------------- 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 17bb57ea4b..db4ee60564 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -53,17 +53,13 @@ def __init__( async def run_stream( self, message: Any, - executor: Executor | str | None = None, ) -> 
AsyncIterable[WorkflowEvent]: """Send a message to the starting executor of the workflow. Args: message: The message to be sent to the starting executor. - executor: The executor to which the message should be sent. If None, the starting executor is used. """ - if not executor: - executor = self._start_executor - + executor = self._start_executor if isinstance(executor, str): executor = self._get_executor_by_id(executor) @@ -136,7 +132,6 @@ def __init__(self): """Initialize the WorkflowBuilder with an empty list of edges and no starting executor.""" self._edges: list[Edge] = [] self._start_executor: Executor | str | None = None - self._runner_context: RunnerContext | None = None def add_edge( self, @@ -218,15 +213,6 @@ def set_start_executor(self, executor: Executor | str) -> "Self": self._start_executor = executor return self - def set_runner_context(self, runner_context: RunnerContext) -> "Self": - """Set the runner context for the workflow. - - Args: - runner_context: The RunnerContext instance to be used during workflow execution. - """ - self._runner_context = runner_context - return self - def build(self) -> Workflow: """Build and return the constructed workflow. 
@@ -236,6 +222,4 @@ def build(self) -> Workflow: if not self._start_executor: raise ValueError("Starting executor must be set before building the workflow.") - runner_context = self._runner_context or InProcRunnerContext() - - return Workflow(self._edges, self._start_executor, runner_context) + return Workflow(self._edges, self._start_executor, InProcRunnerContext()) From 54d16d0ef5e7396bb5f93ac4a7405a6283156137 Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Tue, 5 Aug 2025 15:05:21 +0900 Subject: [PATCH 14/18] Pre-commit update --- .../agent_framework_workflow/__init__.py | 14 + .../agent_framework_workflow/_validation.py | 487 ++++++++++++++++ .../agent_framework_workflow/_workflow.py | 10 + .../workflow/tests/test_validation.py | 546 ++++++++++++++++++ 4 files changed, 1057 insertions(+) create mode 100644 python/packages/workflow/agent_framework_workflow/_validation.py create mode 100644 python/packages/workflow/tests/test_validation.py diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index ac53726125..dc7e329dac 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -22,6 +22,14 @@ RequestInfoMessage, message_handler, ) +from ._validation import ( + EdgeDuplicationError, + GraphConnectivityError, + TypeCompatibilityError, + ValidationTypeEnum, + WorkflowValidationError, + validate_workflow_graph, +) from ._workflow import WorkflowBuilder from ._workflow_context import WorkflowContext @@ -37,20 +45,26 @@ "AgentExecutorResponse", "AgentRunEvent", "AgentRunStreamingEvent", + "EdgeDuplicationError", "Executor", "ExecutorCompleteEvent", "ExecutorEvent", "ExecutorInvokeEvent", + "GraphConnectivityError", "RequestInfoEvent", "RequestInfoEvent", "RequestInfoExecutor", "RequestInfoExecutor", "RequestInfoMessage", + "TypeCompatibilityError", + "ValidationTypeEnum", "WorkflowBuilder", 
"WorkflowCompletedEvent", "WorkflowContext", "WorkflowEvent", "WorkflowStartedEvent", + "WorkflowValidationError", "__version__", "message_handler", + "validate_workflow_graph", ] diff --git a/python/packages/workflow/agent_framework_workflow/_validation.py b/python/packages/workflow/agent_framework_workflow/_validation.py new file mode 100644 index 0000000000..82739104dd --- /dev/null +++ b/python/packages/workflow/agent_framework_workflow/_validation.py @@ -0,0 +1,487 @@ +# Copyright (c) Microsoft. All rights reserved. + +import inspect +import logging +from collections import defaultdict +from enum import Enum +from typing import Any, Union, get_args, get_origin + +from ._edge import Edge +from ._executor import Executor + +logger = logging.getLogger(__name__) + + +# region Enums and Base Classes +class ValidationTypeEnum(Enum): + """Enumeration of workflow validation types.""" + + EDGE_DUPLICATION = "EDGE_DUPLICATION" + TYPE_COMPATIBILITY = "TYPE_COMPATIBILITY" + GRAPH_CONNECTIVITY = "GRAPH_CONNECTIVITY" + + +class WorkflowValidationError(Exception): + """Base exception for workflow validation errors.""" + + def __init__(self, message: str, validation_type: ValidationTypeEnum): + super().__init__(message) + self.message = message + self.validation_type = validation_type + + def __str__(self) -> str: + return f"[{self.validation_type.value}] {self.message}" + + +class EdgeDuplicationError(WorkflowValidationError): + """Exception raised when duplicate edges are detected in the workflow.""" + + def __init__(self, edge_id: str): + super().__init__( + message=f"Duplicate edge detected: {edge_id}. 
Each edge in the workflow must be unique.", + validation_type=ValidationTypeEnum.EDGE_DUPLICATION, + ) + self.edge_id = edge_id + + +class TypeCompatibilityError(WorkflowValidationError): + """Exception raised when type incompatibility is detected between connected executors.""" + + def __init__( + self, + source_executor_id: str, + target_executor_id: str, + source_types: list[type[Any]], + target_types: list[type[Any]], + ): + # Use a placeholder for incompatible types - will be computed in WorkflowGraphValidator + super().__init__( + message=f"Type incompatibility between executors '{source_executor_id}' -> '{target_executor_id}'. " + f"Source executor outputs types {[str(t) for t in source_types]} but target executor " + f"can only handle types {[str(t) for t in target_types]}.", + validation_type=ValidationTypeEnum.TYPE_COMPATIBILITY, + ) + self.source_executor_id = source_executor_id + self.target_executor_id = target_executor_id + self.source_types = source_types + self.target_types = target_types + + +class GraphConnectivityError(WorkflowValidationError): + """Exception raised when graph connectivity issues are detected.""" + + def __init__(self, message: str): + super().__init__(message, validation_type=ValidationTypeEnum.GRAPH_CONNECTIVITY) + + +# endregion + + +# region Workflow Graph Validator +class WorkflowGraphValidator: + """Validator for workflow graphs. + + This validator performs multiple validation checks: + 1. Edge duplication validation + 2. Type compatibility validation between connected executors + 3. Graph connectivity validation + """ + + def __init__(self): + self._edges: list[Edge] = [] + self._executors: dict[str, Executor] = {} + + # region Core Validation Methods + def validate_workflow(self, edges: list[Edge], start_executor: Executor | str) -> None: + """Validate the entire workflow graph. 
+ + Args: + edges: list of edges in the workflow + start_executor: The starting executor (can be instance or ID) + + Raises: + WorkflowValidationError: If any validation fails + """ + self._edges = edges + self._executors = self._build_executor_map(edges) + + # Validate that start_executor exists in the graph + # It should because we check for it in the WorkflowBuilder + # but we do it here for completeness. + start_executor_id = start_executor.id if isinstance(start_executor, Executor) else start_executor + if start_executor_id not in self._executors: + raise GraphConnectivityError(f"Start executor '{start_executor_id}' is not present in the workflow graph") + + # Run all checks + self._validate_edge_duplication() + self._validate_type_compatibility() + self._validate_graph_connectivity(start_executor_id) + self._validate_self_loops() + self._validate_handler_ambiguity() + self._validate_dead_ends() + self._validate_cycles() + + def _build_executor_map(self, edges: list[Edge]) -> dict[str, Executor]: + """Build a map of executor IDs to executor instances.""" + executors: dict[str, Executor] = {} + for edge in edges: + executors[edge.source_id] = edge.source + executors[edge.target_id] = edge.target + return executors + + # endregion + + # region Edge and Type Validation + def _validate_edge_duplication(self) -> None: + """Validate that there are no duplicate edges in the workflow. + + Raises: + EdgeDuplicationError: If duplicate edges are found + """ + seen_edge_ids: set[str] = set() + + for edge in self._edges: + edge_id = edge.id + if edge_id in seen_edge_ids: + raise EdgeDuplicationError(edge_id) + seen_edge_ids.add(edge_id) + + def _validate_type_compatibility(self) -> None: + """Validate type compatibility between connected executors. + + This checks that the output types of source executors are compatible + with the input types expected by target executors. 
+ + Raises: + TypeCompatibilityError: If type incompatibility is detected + """ + for edge in self._edges: + source_executor = edge.source + target_executor = edge.target + + # Get output types from source executor + source_output_types = self._get_executor_output_types(source_executor) + + # Get input types from target executor + target_input_types = self._get_executor_input_types(target_executor) + + # If either executor has no type information, log warning and skip validation + # This allows for dynamic typing scenarios but warns about reduced validation coverage + if not source_output_types or not target_input_types: + if not source_output_types: + logger.warning( + f"Executor '{source_executor.id}' has no output type annotations. " + f"Type compatibility validation will be skipped for edges from this executor. " + f"Consider adding output_types to @message_handler decorators for better validation." + ) + if not target_input_types: + logger.warning( + f"Executor '{target_executor.id}' has no input type annotations. " + f"Type compatibility validation will be skipped for edges to this executor. " + f"Consider adding type annotations to message handler parameters for better validation." + ) + continue + + # Check if any source output type is compatible with any target input type + compatible = False + compatible_pairs: list[tuple[type[Any], type[Any]]] = [] + + for source_type in source_output_types: + for target_type in target_input_types: + if self._is_type_compatible(source_type, target_type): + compatible = True + compatible_pairs.append((source_type, target_type)) + + # Log successful type compatibility for debugging + if compatible: + logger.debug( + f"Type compatibility validated for edge '{source_executor.id}' -> '{target_executor.id}'. 
" + f"Compatible type pairs: {[(str(s), str(t)) for s, t in compatible_pairs]}" + ) + + if not compatible: + # Enhanced error with more detailed information + raise TypeCompatibilityError( + source_executor.id, + target_executor.id, + source_output_types, + target_input_types, + ) + + def _get_executor_output_types(self, executor: Executor) -> list[type[Any]]: + """Extract output types from an executor's message handlers. + + Args: + executor: The executor to analyze + + Returns: + list of types that this executor can output + """ + output_types: list[type[Any]] = [] + + for attr_name in dir(executor): + attr = getattr(executor, attr_name) + if callable(attr) and hasattr(attr, "_handler_spec"): + handler_spec = attr._handler_spec # type: ignore + handler_output_types = handler_spec.get("output_types", []) + output_types.extend(handler_output_types) + + return output_types + + def _get_executor_input_types(self, executor: Executor) -> list[type[Any]]: + """Extract input types from an executor's message handlers. + + Args: + executor: The executor to analyze + + Returns: + list of types that this executor can handle as input + """ + input_types: list[type[Any]] = [] + + # Access the private _message_handlers attribute to get input types + if hasattr(executor, "_message_handlers"): + input_types.extend(executor._message_handlers.keys()) # type: ignore + + return input_types + + # endregion + + # region Graph Connectivity Validation + def _validate_graph_connectivity(self, start_executor_id: str) -> None: + """Validate graph connectivity and detect potential issues. 
+ + This performs several checks: + - Detects unreachable executors from the start node + - Detects isolated executors (no incoming or outgoing edges) + - Warns about potential infinite loops + + Args: + start_executor_id: The ID of the starting executor + + Raises: + GraphConnectivityError: If connectivity issues are detected + """ + # Build adjacency list for the graph + graph: dict[str, list[str]] = defaultdict(list) + all_executors = set(self._executors.keys()) + + for edge in self._edges: + graph[edge.source_id].append(edge.target_id) + + # Find reachable nodes from start + reachable = self._find_reachable_nodes(graph, start_executor_id) + + # Check for unreachable executors + unreachable = all_executors - reachable + if unreachable: + raise GraphConnectivityError( + f"The following executors are unreachable from the start executor '{start_executor_id}': " + f"{sorted(unreachable)}. This may indicate a disconnected workflow graph." + ) + + # Check for isolated executors (no edges) + isolated_executors: list[str] = [] + for executor_id in all_executors: + has_incoming = any(edge.target_id == executor_id for edge in self._edges) + has_outgoing = any(edge.source_id == executor_id for edge in self._edges) + + if not has_incoming and not has_outgoing and executor_id != start_executor_id: + isolated_executors.append(executor_id) + + if isolated_executors: + raise GraphConnectivityError( + f"The following executors are isolated (no incoming or outgoing edges): " + f"{sorted(isolated_executors)}. Isolated executors will never be executed." + ) + + def _find_reachable_nodes(self, graph: dict[str, list[str]], start: str) -> set[str]: + """Find all nodes reachable from the start node using DFS. 
+ + Args: + graph: Adjacency list representation of the graph + start: Starting node ID + + Returns: + Set of reachable node IDs + """ + visited: set[str] = set() + stack = [start] + + while stack: + node = stack.pop() + if node not in visited: + visited.add(node) + stack.extend(graph[node]) + + return visited + + # endregion + + # region Additional Validation Scenarios + def _validate_self_loops(self) -> None: + """Detect and log self-loops (edges from executor to itself). + + Self-loops might indicate recursive processing which could be intentional + but should be highlighted for review. + """ + self_loops = [edge for edge in self._edges if edge.source_id == edge.target_id] + + for edge in self_loops: + logger.warning( + f"Self-loop detected: Executor '{edge.source_id}' connects to itself. " + f"This may cause infinite recursion if not properly handled with conditions." + ) + + def _validate_handler_ambiguity(self) -> None: + """Check for potential ambiguity in message handlers. + + Warns when executors have multiple handlers that could handle the same type, + which might lead to unexpected behavior. + """ + for executor_id, executor in self._executors.items(): + input_types = self._get_executor_input_types(executor) + + # Check for duplicate input types + seen_types: set[type[Any]] = set() + duplicate_types: set[type[Any]] = set() + + for input_type in input_types: + if input_type in seen_types: + duplicate_types.add(input_type) + seen_types.add(input_type) + + if duplicate_types: + logger.warning( + f"Executor '{executor_id}' has multiple handlers for the same input types: " + f"{[str(t) for t in duplicate_types]}. This may lead to ambiguous message routing." + ) + + def _validate_dead_ends(self) -> None: + """Identify executors that have no outgoing edges (potential dead ends). + + These might be intentional final nodes or could indicate missing connections. 
+ """ + executors_with_outgoing = {edge.source_id for edge in self._edges} + all_executor_ids = set(self._executors.keys()) + dead_ends = all_executor_ids - executors_with_outgoing + + if dead_ends: + logger.info( + f"Dead-end executors detected (no outgoing edges): {sorted(dead_ends)}. " + f"Verify these are intended as final nodes in the workflow." + ) + + def _validate_cycles(self) -> None: + """Detect cycles in the workflow graph. + + Cycles might be intentional for iterative processing but should be flagged + for review to ensure proper termination conditions exist. + """ + # Build adjacency list + graph: dict[str, list[str]] = defaultdict(list) + for edge in self._edges: + graph[edge.source_id].append(edge.target_id) + + # Use DFS to detect cycles + white = set(self._executors.keys()) # Unvisited + gray: set[str] = set() # Currently being processed + black: set[str] = set() # Completely processed + + def has_cycle(node: str) -> bool: + if node in gray: # Back edge found - cycle detected + return True + if node in black: # Already processed + return False + + # Mark as being processed + white.discard(node) + gray.add(node) + + # Visit neighbors + for neighbor in graph[node]: + if has_cycle(neighbor): + return True + + # Mark as completely processed + gray.discard(node) + black.add(node) + return False + + # Check for cycles starting from any unvisited node + cycle_detected = False + while white and not cycle_detected: + start_node = next(iter(white)) + if has_cycle(start_node): + cycle_detected = True + + if cycle_detected: + logger.warning( + "Cycle detected in the workflow graph. " + "Ensure proper termination conditions exist to prevent infinite loops." 
+ ) + + # endregion + + # region Type Compatibility Utilities + @staticmethod + def _is_type_compatible(source_type: type[Any], target_type: type[Any]) -> bool: + """Check if source_type is compatible with target_type.""" + # Handle Any type + if source_type is Any or target_type is Any: + return True + + # Handle exact match + if source_type == target_type: + return True + + # Handle inheritance + try: + if inspect.isclass(source_type) and inspect.isclass(target_type): + return issubclass(source_type, target_type) + except TypeError: + # Handle generic types that can't be used with issubclass + pass + + # Handle Union types + source_origin = get_origin(source_type) + target_origin = get_origin(target_type) + + if target_origin is Union: + target_args = get_args(target_type) + return any(WorkflowGraphValidator._is_type_compatible(source_type, arg) for arg in target_args) + + if source_origin is Union: + source_args = get_args(source_type) + return all(WorkflowGraphValidator._is_type_compatible(arg, target_type) for arg in source_args) + + # Handle generic types + if source_origin is not None and target_origin is not None and source_origin == target_origin: + source_args = get_args(source_type) + target_args = get_args(target_type) + if len(source_args) == len(target_args): + return all( + WorkflowGraphValidator._is_type_compatible(s_arg, t_arg) + for s_arg, t_arg in zip(source_args, target_args, strict=True) + ) + + return False + + # endregion + + +# endregion + + +def validate_workflow_graph(edges: list[Edge], start_executor: Executor | str) -> None: + """Convenience function to validate a workflow graph. 
+ + Args: + edges: list of edges in the workflow + start_executor: The starting executor (can be instance or ID) + + Raises: + WorkflowValidationError: If any validation fails + """ + validator = WorkflowGraphValidator() + validator.validate_workflow(edges, start_executor) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index db4ee60564..4b35a31777 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -10,6 +10,7 @@ from ._runner import Runner from ._runner_context import InProcRunnerContext, RunnerContext from ._shared_state import SharedState +from ._validation import validate_workflow_graph from ._workflow_context import WorkflowContext if sys.version_info >= (3, 11): @@ -216,10 +217,19 @@ def set_start_executor(self, executor: Executor | str) -> "Self": def build(self) -> Workflow: """Build and return the constructed workflow. + This method performs validation before building the workflow. + Returns: A Workflow instance with the defined edges and starting executor. + + Raises: + ValueError: If starting executor is not set. + WorkflowValidationError: If workflow validation fails (includes EdgeDuplicationError, + TypeCompatibilityError, and GraphConnectivityError subclasses). """ if not self._start_executor: raise ValueError("Starting executor must be set before building the workflow.") + validate_workflow_graph(self._edges, self._start_executor) + return Workflow(self._edges, self._start_executor, InProcRunnerContext()) diff --git a/python/packages/workflow/tests/test_validation.py b/python/packages/workflow/tests/test_validation.py new file mode 100644 index 0000000000..82dad0f2f9 --- /dev/null +++ b/python/packages/workflow/tests/test_validation.py @@ -0,0 +1,546 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +from typing import Any + +import pytest + +from agent_framework_workflow import ( + EdgeDuplicationError, + Executor, + GraphConnectivityError, + TypeCompatibilityError, + ValidationTypeEnum, + WorkflowBuilder, + WorkflowContext, + WorkflowValidationError, + message_handler, + validate_workflow_graph, +) +from agent_framework_workflow._edge import Edge + + +class StringExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_string(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message(message.upper()) + + +class IntExecutor(Executor): + @message_handler(output_types=[int]) + async def handle_int(self, message: int, ctx: WorkflowContext) -> None: + await ctx.send_message(message * 2) + + +class AnyExecutor(Executor): + @message_handler + async def handle_any(self, message: Any, ctx: WorkflowContext) -> None: + await ctx.send_message(f"Processed: {message}") + + +class NoOutputTypesExecutor(Executor): + @message_handler + async def handle_message(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message("processed") + + +class MultiTypeExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_string(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message(f"String: {message}") + + @message_handler(output_types=[int]) + async def handle_int(self, message: int, ctx: WorkflowContext) -> None: + await ctx.send_message(f"Int: {message}") + + +def test_valid_workflow_passes_validation(): + executor1 = StringExecutor(id="string_executor") + executor2 = StringExecutor(id="string_executor_2") + + # Create a valid workflow + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) + .set_start_executor(executor1) + .build() # This should not raise any exceptions + ) + + assert workflow is not None + + +def test_edge_duplication_validation_fails(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + + with 
pytest.raises(EdgeDuplicationError) as exc_info: + WorkflowBuilder().add_edge(executor1, executor2).add_edge(executor1, executor2).set_start_executor( + executor1 + ).build() + + assert "executor1->executor2" in str(exc_info.value) + assert exc_info.value.validation_type == ValidationTypeEnum.EDGE_DUPLICATION + + +def test_type_compatibility_validation_fails(): + string_executor = StringExecutor(id="string_executor") + int_executor = IntExecutor(id="int_executor") + + with pytest.raises(TypeCompatibilityError) as exc_info: + WorkflowBuilder().add_edge(string_executor, int_executor).set_start_executor(string_executor).build() + + error = exc_info.value + assert error.source_executor_id == "string_executor" + assert error.target_executor_id == "int_executor" + assert error.validation_type == ValidationTypeEnum.TYPE_COMPATIBILITY + + +def test_type_compatibility_with_any_type_passes(): + string_executor = StringExecutor(id="string_executor") + any_executor = AnyExecutor(id="any_executor") + + # This should not raise an exception + workflow = WorkflowBuilder().add_edge(string_executor, any_executor).set_start_executor(string_executor).build() + + assert workflow is not None + + +def test_type_compatibility_with_no_output_types(): + no_output_executor = NoOutputTypesExecutor(id="no_output") + string_executor = StringExecutor(id="string_executor") + + # This should pass validation since no output types are specified + workflow = ( + WorkflowBuilder().add_edge(no_output_executor, string_executor).set_start_executor(no_output_executor).build() + ) + + assert workflow is not None + + +def test_multi_type_executor_compatibility(): + string_executor = StringExecutor(id="string_executor") + multi_type_executor = MultiTypeExecutor(id="multi_type") + + # String executor outputs strings, multi-type can handle strings + workflow = ( + WorkflowBuilder().add_edge(string_executor, multi_type_executor).set_start_executor(string_executor).build() + ) + + assert workflow is not None + + 
+def test_graph_connectivity_unreachable_executors(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") # This will be unreachable + + with pytest.raises(GraphConnectivityError) as exc_info: + WorkflowBuilder().add_edge(executor1, executor2).add_edge(executor3, executor2).set_start_executor( + executor1 + ).build() + + assert "unreachable" in str(exc_info.value).lower() + assert "executor3" in str(exc_info.value) + assert exc_info.value.validation_type == ValidationTypeEnum.GRAPH_CONNECTIVITY + + +def test_graph_connectivity_isolated_executors(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") # This will be isolated + + # Create edges that include an isolated executor (self-loop that's not connected to main graph) + edges = [Edge(executor1, executor2), Edge(executor3, executor3)] # Self-loop to include in graph + + with pytest.raises(GraphConnectivityError) as exc_info: + validate_workflow_graph(edges, executor1) + + assert "unreachable" in str(exc_info.value).lower() + assert "executor3" in str(exc_info.value) + + +def test_start_executor_not_in_graph(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") # Not in graph + + with pytest.raises(GraphConnectivityError) as exc_info: + WorkflowBuilder().add_edge(executor1, executor2).set_start_executor(executor3).build() + + assert "not present in the workflow graph" in str(exc_info.value) + + +def test_missing_start_executor(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + + with pytest.raises(ValueError) as exc_info: + WorkflowBuilder().add_edge(executor1, executor2).build() + + assert "Starting executor must be set" in str(exc_info.value) + + +def test_workflow_validation_error_base_class(): + error = 
WorkflowValidationError("Test message", ValidationTypeEnum.EDGE_DUPLICATION) + assert str(error) == "[EDGE_DUPLICATION] Test message" + assert error.message == "Test message" + assert error.validation_type == ValidationTypeEnum.EDGE_DUPLICATION + + +def test_complex_workflow_validation(): + # Create a workflow with multiple paths + executor1 = StringExecutor(id="executor1") + executor2 = MultiTypeExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") + executor4 = AnyExecutor(id="executor4") + + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) # str -> MultiType (compatible) + .add_edge(executor2, executor3) # MultiType -> str (compatible) + .add_edge(executor2, executor4) # MultiType -> Any (compatible) + .add_edge(executor3, executor4) # str -> Any (compatible) + .set_start_executor(executor1) + .build() + ) + + assert workflow is not None + + +def test_type_compatibility_inheritance(): + class BaseExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_base(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message("base") + + class DerivedExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_derived(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message("derived") + + base_executor = BaseExecutor(id="base") + derived_executor = DerivedExecutor(id="derived") + + # This should pass since both handle str + workflow = WorkflowBuilder().add_edge(base_executor, derived_executor).set_start_executor(base_executor).build() + + assert workflow is not None + + +def test_direct_validation_function(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + edges = [Edge(executor1, executor2)] + + # This should not raise any exceptions + validate_workflow_graph(edges, executor1) + + # Test with invalid start executor + executor3 = StringExecutor(id="executor3") + with pytest.raises(GraphConnectivityError): + 
validate_workflow_graph(edges, executor3) + + +def test_fan_out_validation(): + source = StringExecutor(id="source") + target1 = StringExecutor(id="target1") + target2 = AnyExecutor(id="target2") + + workflow = WorkflowBuilder().add_fan_out_edges(source, [target1, target2]).set_start_executor(source).build() + + assert workflow is not None + + +def test_fan_in_validation(): + start_executor = StringExecutor(id="start") + source1 = StringExecutor(id="source1") + source2 = StringExecutor(id="source2") + target = StringExecutor(id="target") + + # Create a proper fan-in by having a start executor that connects to both sources + workflow = ( + WorkflowBuilder() + .add_edge(start_executor, source1) # Start connects to source1 + .add_edge(start_executor, source2) # Start connects to source2 + .add_fan_in_edges([source1, source2], target) # Both sources fan-in to target + .set_start_executor(start_executor) + .build() + ) + + assert workflow is not None + + +def test_chain_validation(): + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = AnyExecutor(id="executor3") + + workflow = WorkflowBuilder().add_chain([executor1, executor2, executor3]).set_start_executor(executor1).build() + + assert workflow is not None + + +def test_logging_for_missing_output_types(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + # Create executor without output types + no_output_executor = NoOutputTypesExecutor(id="no_output") + string_executor = StringExecutor(id="string_executor") + + # This should trigger a warning log + workflow = ( + WorkflowBuilder().add_edge(no_output_executor, string_executor).set_start_executor(no_output_executor).build() + ) + + assert workflow is not None + assert "has no output type annotations" in caplog.text + assert "Consider adding output_types to @message_handler decorators" in caplog.text + + +def test_logging_for_missing_input_types(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + 
class NoInputTypesExecutor(Executor): + # Handler without type annotation for input parameter + async def handle_message(self, message: Any, ctx: WorkflowContext) -> None: + await ctx.send_message("processed") + + def _discover_handlers(self) -> None: + # Override to manually register handler without type info + self._message_handlers[str] = self.handle_message + + string_executor = StringExecutor(id="string_executor") + no_input_executor = NoInputTypesExecutor(id="no_input") + + # This should pass since NoInputTypesExecutor has no proper input types + workflow = ( + WorkflowBuilder().add_edge(string_executor, no_input_executor).set_start_executor(string_executor).build() + ) + + assert workflow is not None + + +def test_self_loop_detection_warning(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + executor = StringExecutor(id="self_loop_executor") + + # Create a self-loop + workflow = WorkflowBuilder().add_edge(executor, executor).set_start_executor(executor).build() + + assert workflow is not None + assert "Self-loop detected" in caplog.text + assert "may cause infinite recursion" in caplog.text + + +def test_handler_validation_basic(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + # Test basic handler validation - ensure the validation code runs without errors + start_executor = StringExecutor(id="start") + target_executor = StringExecutor(id="target") + + workflow = WorkflowBuilder().add_edge(start_executor, target_executor).set_start_executor(start_executor).build() + + assert workflow is not None + # Just ensure the validation runs without errors + + +def test_dead_end_detection(caplog: Any) -> None: + caplog.set_level(logging.INFO) + + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") # This will be a dead end + + workflow = WorkflowBuilder().add_edge(executor1, executor2).set_start_executor(executor1).build() + + assert workflow is not None + assert "Dead-end executors detected" in caplog.text + 
assert "executor2" in caplog.text + assert "Verify these are intended as final nodes" in caplog.text + + +def test_cycle_detection_warning(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") + + # Create a cycle: executor1 -> executor2 -> executor3 -> executor1 + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) + .add_edge(executor2, executor3) + .add_edge(executor3, executor1) + .set_start_executor(executor1) + .build() + ) + + assert workflow is not None + assert "Cycle detected in the workflow graph" in caplog.text + assert "Ensure proper termination conditions exist" in caplog.text + + +def test_successful_type_compatibility_logging(caplog: Any) -> None: + caplog.set_level(logging.DEBUG) + + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + + workflow = WorkflowBuilder().add_edge(executor1, executor2).set_start_executor(executor1).build() + + assert workflow is not None + assert "Type compatibility validated for edge" in caplog.text + assert "Compatible type pairs" in caplog.text + + +def test_complex_cycle_detection(caplog: Any) -> None: + caplog.set_level(logging.WARNING) + + # Create a more complex graph with multiple cycles + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") + executor4 = StringExecutor(id="executor4") + + # Create multiple paths and cycles + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) + .add_edge(executor2, executor3) + .add_edge(executor3, executor4) + .add_edge(executor4, executor2) # Creates cycle: executor2 -> executor3 -> executor4 -> executor2 + .set_start_executor(executor1) + .build() + ) + + assert workflow is not None + assert "Cycle detected in the workflow graph" in caplog.text + + +def test_no_cycles_in_simple_chain(caplog: 
Any) -> None: + caplog.set_level(logging.WARNING) + + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + executor3 = StringExecutor(id="executor3") + + # Simple chain without cycles + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) + .add_edge(executor2, executor3) + .set_start_executor(executor1) + .build() + ) + + assert workflow is not None + # Should not log cycle detection + assert "Cycle detected" not in caplog.text + + +def test_multiple_dead_ends_detection(caplog: Any) -> None: + caplog.set_level(logging.INFO) + + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") # Dead end + executor3 = StringExecutor(id="executor3") # Dead end + + workflow = ( + WorkflowBuilder() + .add_edge(executor1, executor2) + .add_edge(executor1, executor3) + .set_start_executor(executor1) + .build() + ) + + assert workflow is not None + assert "Dead-end executors detected" in caplog.text + assert "executor2" in caplog.text and "executor3" in caplog.text + + +def test_single_executor_workflow(caplog: Any) -> None: + caplog.set_level(logging.INFO) + + # Test workflow with minimal structure + executor1 = StringExecutor(id="executor1") + executor2 = StringExecutor(id="executor2") + + # Create a simple two-executor workflow to avoid graph validation issues + workflow = WorkflowBuilder().add_edge(executor1, executor2).set_start_executor(executor1).build() + + assert workflow is not None + # Should detect executor2 as dead end + assert "Dead-end executors detected" in caplog.text + + +def test_enhanced_type_compatibility_error_details(): + string_executor = StringExecutor(id="string_executor") + int_executor = IntExecutor(id="int_executor") + + with pytest.raises(TypeCompatibilityError) as exc_info: + WorkflowBuilder().add_edge(string_executor, int_executor).set_start_executor(string_executor).build() + + error = exc_info.value + # Verify enhanced error contains detailed type information + 
assert "Source executor outputs types" in str(error) + assert "target executor can only handle types" in str(error) + assert error.source_types is not None + assert error.target_types is not None + + +def test_union_type_compatibility_validation() -> None: + class UnionOutputExecutor(Executor): + @message_handler(output_types=[str, int]) + async def handle_message(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message("output") + + class UnionInputExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_message(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message("processed") + + union_output = UnionOutputExecutor(id="union_output") + union_input = UnionInputExecutor(id="union_input") + + # This should pass validation due to type compatibility (str) + workflow = WorkflowBuilder().add_edge(union_output, union_input).set_start_executor(union_output).build() + + assert workflow is not None + + +def test_generic_type_compatibility() -> None: + class ListOutputExecutor(Executor): + @message_handler(output_types=[list[str]]) + async def handle_message(self, message: str, ctx: WorkflowContext) -> None: + await ctx.send_message(["output"]) + + class ListInputExecutor(Executor): + @message_handler(output_types=[str]) + async def handle_message(self, message: list[str], ctx: WorkflowContext) -> None: + await ctx.send_message("processed") + + list_output = ListOutputExecutor(id="list_output") + list_input = ListInputExecutor(id="list_input") + + # This should pass validation for generic type compatibility + workflow = WorkflowBuilder().add_edge(list_output, list_input).set_start_executor(list_output).build() + + assert workflow is not None + + +def test_validation_enum_usage() -> None: + # Test that all validation types use the enum correctly + edge_error = EdgeDuplicationError("test->test") + assert edge_error.validation_type == ValidationTypeEnum.EDGE_DUPLICATION + + type_error = 
TypeCompatibilityError("source", "target", [str], [int]) + assert type_error.validation_type == ValidationTypeEnum.TYPE_COMPATIBILITY + + graph_error = GraphConnectivityError("test message") + assert graph_error.validation_type == ValidationTypeEnum.GRAPH_CONNECTIVITY + + # Test enum string representation + assert str(ValidationTypeEnum.EDGE_DUPLICATION) == "ValidationTypeEnum.EDGE_DUPLICATION" + assert ValidationTypeEnum.EDGE_DUPLICATION.value == "EDGE_DUPLICATION" From 966e4931588b655902b284dc8867419903c6d9b7 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 5 Aug 2025 12:11:59 -0700 Subject: [PATCH 15/18] Add run method and more unit tests --- .../main/agent_framework/workflow/__init__.py | 4 +- .../agent_framework/workflow/__init__.pyi | 8 +- .../agent_framework_workflow/__init__.py | 8 +- .../agent_framework_workflow/_edge.py | 4 + .../agent_framework_workflow/_events.py | 2 +- .../agent_framework_workflow/_executor.py | 10 +- .../agent_framework_workflow/_runner.py | 38 ++- .../agent_framework_workflow/_validation.py | 12 +- .../agent_framework_workflow/_workflow.py | 144 +++++++-- python/packages/workflow/tests/test_runner.py | 68 ++++- .../workflow/tests/test_validation.py | 11 +- .../packages/workflow/tests/test_workflow.py | 275 ++++++++++++++++++ .../workflow/tests/test_workflow_builder.py | 65 +++++ ...a_simple_workflow_sequential_non_stream.py | 58 ++++ .../workflow/step_03_simple_workflow_loop.py | 4 +- .../step_05_simple_group_chat_with_hil.py | 11 +- 16 files changed, 653 insertions(+), 69 deletions(-) create mode 100644 python/packages/workflow/tests/test_workflow.py create mode 100644 python/packages/workflow/tests/test_workflow_builder.py create mode 100644 python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py diff --git a/python/packages/main/agent_framework/workflow/__init__.py b/python/packages/main/agent_framework/workflow/__init__.py index ed9b4cdb87..34da99edf9 100644 --- 
a/python/packages/main/agent_framework/workflow/__init__.py +++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -11,7 +11,7 @@ "__version__", "events", "WorkflowBuilder", - "ExecutorCompleteEvent", + "ExecutorCompletedEvent", "ExecutorEvent", "ExecutorInvokeEvent", "RequestInfoEvent", @@ -26,6 +26,8 @@ "AgentExecutorResponse", "RequestInfoExecutor", "RequestInfoMessage", + "WorkflowRunResult", + "Workflow", ] diff --git a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi index ff0d555137..1119864f7c 100644 --- a/python/packages/main/agent_framework/workflow/__init__.pyi +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -7,16 +7,18 @@ from agent_framework_workflow import ( AgentRunEvent, AgentRunStreamingEvent, Executor, - ExecutorCompleteEvent, + ExecutorCompletedEvent, ExecutorEvent, ExecutorInvokeEvent, RequestInfoEvent, RequestInfoExecutor, RequestInfoMessage, + Workflow, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, + WorkflowRunResult, WorkflowStartedEvent, __version__, message_handler, @@ -29,16 +31,18 @@ __all__ = [ "AgentRunEvent", "AgentRunStreamingEvent", "Executor", - "ExecutorCompleteEvent", + "ExecutorCompletedEvent", "ExecutorEvent", "ExecutorInvokeEvent", "RequestInfoEvent", "RequestInfoExecutor", "RequestInfoMessage", + "Workflow", "WorkflowBuilder", "WorkflowCompletedEvent", "WorkflowContext", "WorkflowEvent", + "WorkflowRunResult", "WorkflowStartedEvent", "__version__", "message_handler", diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index dc7e329dac..4fb1263f14 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -5,7 +5,7 @@ from ._events import ( AgentRunEvent, AgentRunStreamingEvent, - ExecutorCompleteEvent, + ExecutorCompletedEvent, 
ExecutorEvent, ExecutorInvokeEvent, RequestInfoEvent, @@ -30,7 +30,7 @@ WorkflowValidationError, validate_workflow_graph, ) -from ._workflow import WorkflowBuilder +from ._workflow import Workflow, WorkflowBuilder, WorkflowRunResult from ._workflow_context import WorkflowContext try: @@ -47,7 +47,7 @@ "AgentRunStreamingEvent", "EdgeDuplicationError", "Executor", - "ExecutorCompleteEvent", + "ExecutorCompletedEvent", "ExecutorEvent", "ExecutorInvokeEvent", "GraphConnectivityError", @@ -58,10 +58,12 @@ "RequestInfoMessage", "TypeCompatibilityError", "ValidationTypeEnum", + "Workflow", "WorkflowBuilder", "WorkflowCompletedEvent", "WorkflowContext", "WorkflowEvent", + "WorkflowRunResult", "WorkflowStartedEvent", "WorkflowValidationError", "__version__", diff --git a/python/packages/workflow/agent_framework_workflow/_edge.py b/python/packages/workflow/agent_framework_workflow/_edge.py index 0a963cc0a3..80deb4da27 100644 --- a/python/packages/workflow/agent_framework_workflow/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/_edge.py @@ -45,6 +45,10 @@ def id(self) -> str: """Get the unique ID of the edge.""" return f"{self.source_id}{self.ID_SEPARATOR}{self.target_id}" + def has_edge_group(self) -> bool: + """Check if the edge is part of an edge group.""" + return bool(self._edge_group_ids) + @classmethod def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: """Extract the source and target IDs from the edge ID.""" diff --git a/python/packages/workflow/agent_framework_workflow/_events.py b/python/packages/workflow/agent_framework_workflow/_events.py index c24440a5f5..bc56c707d5 100644 --- a/python/packages/workflow/agent_framework_workflow/_events.py +++ b/python/packages/workflow/agent_framework_workflow/_events.py @@ -108,7 +108,7 @@ def __repr__(self): return f"{self.__class__.__name__}(executor_id={self.executor_id})" -class ExecutorCompleteEvent(ExecutorEvent): +class ExecutorCompletedEvent(ExecutorEvent): """Event triggered when an 
executor handler is completed.""" def __repr__(self): diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py index 95379c0ff1..deb029d0b4 100644 --- a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -9,7 +9,13 @@ from agent_framework import AgentRunResponse, AgentRunResponseUpdate, AgentThread, AIAgent, ChatMessage -from ._events import AgentRunEvent, AgentRunStreamingEvent, ExecutorCompleteEvent, ExecutorInvokeEvent, RequestInfoEvent +from ._events import ( + AgentRunEvent, + AgentRunStreamingEvent, + ExecutorCompletedEvent, + ExecutorInvokeEvent, + RequestInfoEvent, +) from ._typing_utils import is_instance_of from ._workflow_context import WorkflowContext @@ -57,7 +63,7 @@ async def execute( await context.add_event(ExecutorInvokeEvent(self.id)) await handler(message, context) - await context.add_event(ExecutorCompleteEvent(self.id)) + await context.add_event(ExecutorCompletedEvent(self.id)) @property def id(self) -> str: diff --git a/python/packages/workflow/agent_framework_workflow/_runner.py b/python/packages/workflow/agent_framework_workflow/_runner.py index 28b74bc696..dabd3f56e0 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/_runner.py @@ -12,6 +12,8 @@ logger = logging.getLogger(__name__) +DEFAULT_MAX_ITERATIONS = 100 + class Runner: """A class to run a workflow in Pregel supersteps.""" @@ -21,13 +23,14 @@ def __init__( edges: list[Edge], shared_state: SharedState, ctx: RunnerContext, - max_iterations: int = 100, + max_iterations: int = DEFAULT_MAX_ITERATIONS, ): self._edge_map = self._parse_edges(edges) self._ctx = ctx self._iteration = 0 self._max_iterations = max_iterations self._shared_state = shared_state + self._is_running = False @property def context(self) -> RunnerContext: @@ -36,19 +39,26 @@ def 
context(self) -> RunnerContext: async def run_until_convergence(self) -> AsyncIterable[WorkflowEvent]: """Run the workflow until no more messages are sent.""" - while self._iteration < self._max_iterations: - await self._run_iteration() - self._iteration += 1 - - if await self._ctx.has_events(): - events = await self._ctx.drain_events() - for event in events: - yield event - - if not await self._ctx.has_messages(): - break - - self._iteration = 0 + try: + if self._is_running: + raise RuntimeError("Runner is already running.") + self._is_running = True + while self._iteration < self._max_iterations: + await self._run_iteration() + self._iteration += 1 + + if await self._ctx.has_events(): + events = await self._ctx.drain_events() + for event in events: + yield event + + if not await self._ctx.has_messages(): + break + else: + raise RuntimeError(f"Runner did not converge after {self._max_iterations} iterations.") + finally: + self._is_running = False + self._iteration = 0 async def _run_iteration(self): """Run a superstep of the workflow execution.""" diff --git a/python/packages/workflow/agent_framework_workflow/_validation.py b/python/packages/workflow/agent_framework_workflow/_validation.py index 82739104dd..6a1715e511 100644 --- a/python/packages/workflow/agent_framework_workflow/_validation.py +++ b/python/packages/workflow/agent_framework_workflow/_validation.py @@ -188,9 +188,15 @@ def _validate_type_compatibility(self) -> None: for source_type in source_output_types: for target_type in target_input_types: - if self._is_type_compatible(source_type, target_type): - compatible = True - compatible_pairs.append((source_type, target_type)) + if edge.has_edge_group(): + # If the edge is part of an edge group, the target expects a list of data types + if self._is_type_compatible(list[source_type], target_type): + compatible = True + compatible_pairs.append((list[source_type], target_type)) + else: + if self._is_type_compatible(source_type, target_type): + compatible = 
True + compatible_pairs.append((source_type, target_type)) # Log successful type compatibility for debugging if compatible: diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 4b35a31777..c7d67375a5 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -1,13 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. +import asyncio import sys from collections.abc import AsyncIterable, Callable, Sequence -from typing import Any, Generic, TypeVar +from typing import Any from ._edge import Edge -from ._events import WorkflowEvent +from ._events import RequestInfoEvent, WorkflowCompletedEvent, WorkflowEvent from ._executor import Executor, RequestInfoExecutor -from ._runner import Runner +from ._runner import DEFAULT_MAX_ITERATIONS, Runner from ._runner_context import InProcRunnerContext, RunnerContext from ._shared_state import SharedState from ._validation import validate_workflow_graph @@ -19,7 +20,32 @@ from typing_extensions import Self # pragma: no cover -TIn = TypeVar("TIn") +class WorkflowRunResult(list[WorkflowEvent]): + """A list of events generated during the workflow execution in non-streaming mode.""" + + def get_completed_event(self) -> WorkflowCompletedEvent | None: + """Get the completed event from the workflow run result. + + Returns: + A completed WorkflowEvent instance if the workflow has a completed event, otherwise None. + + Raises: + ValueError: If there are multiple completed events in the workflow run result. 
+ """ + completed_events = [event for event in self if isinstance(event, WorkflowCompletedEvent)] + if not completed_events: + return None + if len(completed_events) > 1: + raise ValueError("Multiple completed events found.") + return completed_events[0] + + def get_request_info_events(self) -> list[RequestInfoEvent]: + """Get all request info events from the workflow run result. + + Returns: + A list of RequestInfoEvent instances found in the workflow run result. + """ + return [event for event in self if isinstance(event, RequestInfoEvent)] class Workflow: @@ -34,6 +60,7 @@ def __init__( edges: list[Edge], start_executor: Executor | str, runner_context: RunnerContext, + max_iterations: int, ): """Initialize the workflow with a list of edges. @@ -41,6 +68,7 @@ def __init__( edges: A list of directed edges representing the connections between nodes in the workflow. start_executor: The starting executor for the workflow, which can be an Executor instance or its ID. runner_context: The RunnerContext instance to be used during workflow execution. + max_iterations: The maximum number of iterations the workflow will run for convergence. """ self._edges = edges self._start_executor = start_executor @@ -49,13 +77,31 @@ def __init__( } self._shared_state = SharedState() - self._runner = Runner(self._edges, self._shared_state, runner_context) + self._runner = Runner(self._edges, self._shared_state, runner_context, max_iterations=max_iterations) - async def run_stream( - self, - message: Any, - ) -> AsyncIterable[WorkflowEvent]: - """Send a message to the starting executor of the workflow. + @property + def edges(self) -> list[Edge]: + """Get the list of edges in the workflow.""" + return self._edges + + @property + def start_executor(self) -> Executor: + """Get the starting executor of the workflow. + + Returns: + The starting executor, which can be an Executor instance or its ID. 
+ """ + if isinstance(self._start_executor, str): + return self._get_executor_by_id(self._start_executor) + return self._start_executor + + @property + def executors(self) -> list[Executor]: + """Get the list of executors in the workflow.""" + return list(self._executors.values()) + + async def run_stream(self, message: Any) -> AsyncIterable[WorkflowEvent]: + """Send a message to the starting executor of the workflow and stream the events generated by the workflow. Args: message: The message to be sent to the starting executor. @@ -80,35 +126,63 @@ async def run_stream( async for event in self._runner.run_until_convergence(): yield event - async def send_response(self, response: Any, request_id: str) -> AsyncIterable[WorkflowEvent]: - """Send a response back to the workflow. + async def send_responses_stream(self, responses: dict[str, Any]) -> AsyncIterable[WorkflowEvent]: + """Send responses back to the workflow and stream the events generated by the workflow. Args: - response: The response data to be sent. - request_id: The ID of the request that this response corresponds to. + responses: The responses to be sent back to the workflow, where keys are request IDs + and values are the corresponding response data. 
""" request_info_executor = self._get_executor_by_id(RequestInfoExecutor.EXECUTOR_ID) if not isinstance(request_info_executor, RequestInfoExecutor): raise ValueError(f"Executor with ID {RequestInfoExecutor.EXECUTOR_ID} is not a RequestInfoExecutor.") - await request_info_executor.handle_response( - response, - request_id, - WorkflowContext( - request_info_executor.id, - [ - # Using the workflow class name as the source executor ID when - # delivering the first message to the starting executor - self.__class__.__name__ - ], - self._shared_state, - self._runner.context, - ), - ) + async def _handle_response(response: Any, request_id: str) -> None: + """Handle the response from the RequestInfoExecutor.""" + await request_info_executor.handle_response( + response, + request_id, + WorkflowContext( + request_info_executor.id, + [ + # Using the workflow class name as the source executor ID when + # delivering the first message to the starting executor + self.__class__.__name__ + ], + self._shared_state, + self._runner.context, + ), + ) + + await asyncio.gather(*[_handle_response(response, request_id) for request_id, response in responses.items()]) async for event in self._runner.run_until_convergence(): yield event + async def run(self, message: Any) -> WorkflowRunResult: + """Run the workflow with the given message. + + Args: + message: The message to be processed by the workflow. + + Returns: + A WorkflowRunResult instance containing a list of events generated during the workflow execution. + """ + events = [event async for event in self.run_stream(message)] + return WorkflowRunResult(events) + + async def send_responses(self, responses: dict[str, Any]) -> WorkflowRunResult: + """Send responses back to the workflow. + + Args: + responses: A dictionary where keys are request IDs and values are the corresponding response data. + + Returns: + A WorkflowRunResult instance containing a list of events generated during the workflow execution. 
+ """ + events = [event async for event in self.send_responses_stream(responses)] + return WorkflowRunResult(events) + def _get_executor_by_id(self, executor_id: str) -> Executor: """Get an executor by its ID. @@ -123,7 +197,7 @@ def _get_executor_by_id(self, executor_id: str) -> Executor: return self._executors[executor_id] -class WorkflowBuilder(Generic[TIn]): +class WorkflowBuilder: """A builder class for constructing workflows. This class provides methods to add edges and set the starting executor for the workflow. @@ -133,6 +207,7 @@ def __init__(self): """Initialize the WorkflowBuilder with an empty list of edges and no starting executor.""" self._edges: list[Edge] = [] self._start_executor: Executor | str | None = None + self._max_iterations: int = DEFAULT_MAX_ITERATIONS def add_edge( self, @@ -214,6 +289,15 @@ def set_start_executor(self, executor: Executor | str) -> "Self": self._start_executor = executor return self + def set_max_iterations(self, max_iterations: int) -> "Self": + """Set the maximum number of iterations for the workflow. + + Args: + max_iterations: The maximum number of iterations the workflow will run for convergence. + """ + self._max_iterations = max_iterations + return self + def build(self) -> Workflow: """Build and return the constructed workflow. @@ -232,4 +316,4 @@ def build(self) -> Workflow: validate_workflow_graph(self._edges, self._start_executor) - return Workflow(self._edges, self._start_executor, InProcRunnerContext()) + return Workflow(self._edges, self._start_executor, InProcRunnerContext(), self._max_iterations) diff --git a/python/packages/workflow/tests/test_runner.py b/python/packages/workflow/tests/test_runner.py index ef72872b0c..2a50421f3d 100644 --- a/python/packages/workflow/tests/test_runner.py +++ b/python/packages/workflow/tests/test_runner.py @@ -1,7 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
+import asyncio from dataclasses import dataclass +import pytest from agent_framework.workflow import Executor, WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, message_handler from agent_framework_workflow._edge import Edge @@ -22,7 +24,6 @@ class MockExecutor(Executor): @message_handler(output_types=[MockMessage]) async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: - """A mock handler that does nothing.""" if message.data < 10: await ctx.send_message(MockMessage(data=message.data + 1)) else: @@ -60,10 +61,23 @@ async def test_runner_run_until_convergence(): ctx = InProcRunnerContext() runner = Runner(edges, shared_state, ctx) + + result: int | None = None + await executor_a.execute( + MockMessage(data=0), + WorkflowContext( + executor_id=executor_a.id, + source_executor_ids=["START"], + shared_state=shared_state, + runner_context=ctx, + ), + ) async for event in runner.run_until_convergence(): assert isinstance(event, WorkflowEvent) if isinstance(event, WorkflowCompletedEvent): - assert event.data == 10 + result = event.data + + assert result is not None and result == 10 async def test_runner_run_until_convergence_not_completed(): @@ -81,5 +95,51 @@ async def test_runner_run_until_convergence_not_completed(): ctx = InProcRunnerContext() runner = Runner(edges, shared_state, ctx, max_iterations=5) - async for event in runner.run_until_convergence(): - assert not isinstance(event, WorkflowCompletedEvent) + + await executor_a.execute( + MockMessage(data=0), + WorkflowContext( + executor_id=executor_a.id, + source_executor_ids=["START"], + shared_state=shared_state, + runner_context=ctx, + ), + ) + with pytest.raises(RuntimeError, match="Runner did not converge after 5 iterations."): + async for event in runner.run_until_convergence(): + assert not isinstance(event, WorkflowCompletedEvent) + + +async def test_runner_already_running(): + """Test that running the runner while it is already running raises an error.""" + executor_a = 
MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + # Create a loop + edges = [ + Edge(source=executor_a, target=executor_b), + Edge(source=executor_b, target=executor_a), + ] + + shared_state = SharedState() + ctx = InProcRunnerContext() + + runner = Runner(edges, shared_state, ctx) + + await executor_a.execute( + MockMessage(data=0), + WorkflowContext( + executor_id=executor_a.id, + source_executor_ids=["START"], + shared_state=shared_state, + runner_context=ctx, + ), + ) + + with pytest.raises(RuntimeError, match="Runner is already running."): + + async def _run(): + async for _ in runner.run_until_convergence(): + pass + + await asyncio.gather(_run(), _run()) diff --git a/python/packages/workflow/tests/test_validation.py b/python/packages/workflow/tests/test_validation.py index 82dad0f2f9..d1b4962c9c 100644 --- a/python/packages/workflow/tests/test_validation.py +++ b/python/packages/workflow/tests/test_validation.py @@ -26,6 +26,15 @@ async def handle_string(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message(message.upper()) +class StringAggregator(Executor): + """A mock executor that aggregates results from multiple executors.""" + + @message_handler(output_types=[str]) + async def mock_handler(self, messages: list[str], ctx: WorkflowContext) -> None: + # This mock joins the incoming messages into a single aggregated string + await ctx.send_message("Aggregated: " + ", ".join(messages)) + + class IntExecutor(Executor): @message_handler(output_types=[int]) async def handle_int(self, message: int, ctx: WorkflowContext) -> None: @@ -255,7 +264,7 @@ def test_fan_in_validation(): start_executor = StringExecutor(id="start") source1 = StringExecutor(id="source1") source2 = StringExecutor(id="source2") - target = StringExecutor(id="target") + target = StringAggregator(id="target") # Create a proper fan-in by having a start executor that connects to both sources workflow = ( diff --git a/python/packages/workflow/tests/test_workflow.py
b/python/packages/workflow/tests/test_workflow.py new file mode 100644 index 0000000000..155bc9ff92 --- /dev/null +++ b/python/packages/workflow/tests/test_workflow.py @@ -0,0 +1,275 @@ +# Copyright (c) Microsoft. All rights reserved. + +from dataclasses import dataclass + +import pytest +from agent_framework.workflow import ( + Executor, + RequestInfoEvent, + RequestInfoExecutor, + RequestInfoMessage, + WorkflowBuilder, + WorkflowCompletedEvent, + WorkflowContext, + WorkflowEvent, + message_handler, +) + + +@dataclass +class MockMessage: + """A mock message for testing purposes.""" + + data: int + + +class MockExecutor(Executor): + """A mock executor for testing purposes.""" + + def __init__(self, id: str, limit: int = 10): + """Initialize the mock executor with a limit.""" + super().__init__(id=id) + self.limit = limit + + @message_handler(output_types=[MockMessage]) + async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: + if message.data < self.limit: + await ctx.send_message(MockMessage(data=message.data + 1)) + else: + await ctx.add_event(WorkflowCompletedEvent(data=message.data)) + + +class MockAggregator(Executor): + """A mock executor that aggregates results from multiple executors.""" + + @message_handler + async def mock_handler(self, messages: list[MockMessage], ctx: WorkflowContext) -> None: + # This mock simply returns the data incremented by 1 + await ctx.add_event(WorkflowCompletedEvent(data=sum(msg.data for msg in messages))) + + +@dataclass +class ApprovalMessage: + """A mock message for approval requests.""" + + approved: bool + + +class MockExecutorRequestApproval(Executor): + """A mock executor that simulates a request for approval.""" + + @message_handler(output_types=[RequestInfoMessage]) + async def mock_handler_a(self, message: MockMessage, ctx: WorkflowContext) -> None: + """A mock handler that requests approval.""" + await ctx.set_shared_state(self.id, message.data) + await 
ctx.send_message(RequestInfoMessage()) + + @message_handler(output_types=[MockMessage]) + async def mock_handler_b(self, message: ApprovalMessage, ctx: WorkflowContext) -> None: + """A mock handler that processes the approval response.""" + data = await ctx.get_shared_state(self.id) + if message.approved: + await ctx.add_event(WorkflowCompletedEvent(data=data)) + else: + await ctx.send_message(MockMessage(data=data)) + + +async def test_workflow_run_stream(): + """Test the workflow run stream.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .build() + ) + + result: int | None = None + async for event in workflow.run_stream(MockMessage(data=0)): + assert isinstance(event, WorkflowEvent) + if isinstance(event, WorkflowCompletedEvent): + result = event.data + + assert result is not None and result == 10 + + +async def test_workflow_run_stream_not_completed(): + """Test the workflow run stream.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .set_max_iterations(5) + .build() + ) + + with pytest.raises(RuntimeError): + async for _ in workflow.run_stream(MockMessage(data=0)): + pass + + +async def test_workflow_run(): + """Test the workflow run.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .build() + ) + + events = await workflow.run(MockMessage(data=0)) + completed_event = events.get_completed_event() + assert isinstance(completed_event, WorkflowCompletedEvent) + assert completed_event.data == 10 + + +async 
def test_workflow_run_not_completed(): + """Test the workflow run.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .set_max_iterations(5) + .build() + ) + + with pytest.raises(RuntimeError): + await workflow.run(MockMessage(data=0)) + + +async def test_workflow_send_responses_stream(): + """Test the workflow run with approval.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutorRequestApproval(id="executor_b") + request_info_executor = RequestInfoExecutor() + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .add_edge(executor_b, request_info_executor) + .add_edge(request_info_executor, executor_b) + .build() + ) + + request_info_event: RequestInfoEvent | None = None + async for event in workflow.run_stream(MockMessage(data=0)): + if isinstance(event, RequestInfoEvent): + request_info_event = event + + assert request_info_event is not None + result: int | None = None + async for event in workflow.send_responses_stream({request_info_event.request_id: ApprovalMessage(approved=True)}): + if isinstance(event, WorkflowCompletedEvent): + result = event.data + + assert result is not None and result == 1 # The data should be incremented by 1 from the initial message + + +async def test_workflow_send_responses(): + """Test the workflow run with approval.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutorRequestApproval(id="executor_b") + request_info_executor = RequestInfoExecutor() + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_edge(executor_b, executor_a) + .add_edge(executor_b, request_info_executor) + .add_edge(request_info_executor, executor_b) + .build() + ) + + events = await 
workflow.run(MockMessage(data=0)) + request_info_events = events.get_request_info_events() + + assert len(request_info_events) == 1 + + result = await workflow.send_responses({request_info_events[0].request_id: ApprovalMessage(approved=True)}) + + completed_event = result.get_completed_event() + assert isinstance(completed_event, WorkflowCompletedEvent) + assert completed_event.data == 1 # The data should be incremented by 1 from the initial message + + +async def test_fan_out(): + """Test a fan-out workflow.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b", limit=1) + executor_c = MockExecutor(id="executor_c", limit=2) # This executor will not complete the workflow + + workflow = ( + WorkflowBuilder().set_start_executor(executor_a).add_fan_out_edges(executor_a, [executor_b, executor_c]).build() + ) + + events = await workflow.run(MockMessage(data=0)) + + # Each executor will emit two events: ExecutorInvokeEvent and ExecutorCompletedEvent + # executor_b will also emit a WorkflowCompletedEvent + assert len(events) == 7 + + completed_event = events.get_completed_event() + assert completed_event is not None and completed_event.data == 1 + + +async def test_fan_out_multiple_completed_events(): + """Test a fan-out workflow with multiple completed events.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b", limit=1) + executor_c = MockExecutor(id="executor_c", limit=1) + + workflow = ( + WorkflowBuilder().set_start_executor(executor_a).add_fan_out_edges(executor_a, [executor_b, executor_c]).build() + ) + + events = await workflow.run(MockMessage(data=0)) + + # Each executor will emit two events: ExecutorInvokeEvent and ExecutorCompletedEvent + # executor_a and executor_b will also emit a WorkflowCompletedEvent + assert len(events) == 8 + + with pytest.raises(ValueError): + events.get_completed_event() + + +async def test_fan_in(): + """Test a fan-in workflow.""" + executor_a = 
MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + executor_c = MockExecutor(id="executor_c") + aggregator = MockAggregator(id="aggregator") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_fan_out_edges(executor_a, [executor_b, executor_c]) + .add_fan_in_edges([executor_b, executor_c], aggregator) + .build() + ) + + events = await workflow.run(MockMessage(data=0)) + + # Each executor will emit two events: ExecutorInvokeEvent and ExecutorCompletedEvent + # aggregator will also emit a WorkflowCompletedEvent + assert len(events) == 9 + + completed_event = events.get_completed_event() + assert completed_event is not None and completed_event.data == 4 diff --git a/python/packages/workflow/tests/test_workflow_builder.py b/python/packages/workflow/tests/test_workflow_builder.py new file mode 100644 index 0000000000..06d80a9c97 --- /dev/null +++ b/python/packages/workflow/tests/test_workflow_builder.py @@ -0,0 +1,65 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from dataclasses import dataclass +from typing import Any + +import pytest +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowContext, message_handler + + +@dataclass +class MockMessage: + """A mock message for testing purposes.""" + + data: Any + + +class MockExecutor(Executor): + """A mock executor for testing purposes.""" + + @message_handler(output_types=[MockMessage]) + async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: + """A mock handler that does nothing.""" + pass + + +class MockAggregator(Executor): + """A mock executor that aggregates results from multiple executors.""" + + @message_handler(output_types=[MockMessage]) + async def mock_handler(self, messages: list[MockMessage], ctx: WorkflowContext) -> None: + # This mock accepts the aggregated messages but performs no action + pass + + +def test_workflow_builder_without_start_executor_throws(): + """Test creating a workflow builder without a start executor.""" + + builder = WorkflowBuilder() + with pytest.raises(ValueError): + builder.build() + + +def test_workflow_builder_fluent_api(): + """Test the fluent API of the workflow builder.""" + executor_a = MockExecutor(id="executor_a") + executor_b = MockExecutor(id="executor_b") + executor_c = MockExecutor(id="executor_c") + executor_d = MockExecutor(id="executor_d") + executor_e = MockAggregator(id="executor_e") + executor_f = MockExecutor(id="executor_f") + + workflow = ( + WorkflowBuilder() + .set_start_executor(executor_a) + .add_edge(executor_a, executor_b) + .add_fan_out_edges(executor_b, [executor_c, executor_d]) + .add_fan_in_edges([executor_c, executor_d], executor_e) + .add_chain([executor_e, executor_f]) + .set_max_iterations(5) + .build() + ) + + assert len(workflow.edges) == 6 + assert workflow.start_executor.id == executor_a.id + assert len(workflow.executors) == 6 diff --git a/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py
b/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py new file mode 100644 index 0000000000..c38f443df8 --- /dev/null +++ b/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py @@ -0,0 +1,58 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, message_handler + +""" +The following sample demonstrates a basic workflow with two executors +that process a string in sequence. The first executor converts the +input string to uppercase, and the second executor reverses the string. +""" + + +class UpperCaseExecutor(Executor): + """An executor that converts text to uppercase.""" + + @message_handler(output_types=[str]) + async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: + """Execute the task by converting the input string to uppercase.""" + result = text.upper() + + # Send the result to the next executor in the workflow. + await ctx.send_message(result) + + +class ReverseTextExecutor(Executor): + """An executor that reverses text.""" + + @message_handler + async def reverse_text(self, text: str, ctx: WorkflowContext) -> None: + """Execute the task by reversing the input string.""" + result = text[::-1] + + # Send the result with a workflow completion event. + await ctx.add_event(WorkflowCompletedEvent(result)) + + +async def main(): + """Main function to run the workflow.""" + # Step 1: Create the executors. + upper_case_executor = UpperCaseExecutor(id="upper_case_executor") + reverse_text_executor = ReverseTextExecutor(id="reverse_text_executor") + + # Step 2: Build the workflow with the defined edges. + workflow = ( + WorkflowBuilder() + .add_edge(upper_case_executor, reverse_text_executor) + .set_start_executor(upper_case_executor) + .build() + ) + + # Step 3: Run the workflow with an initial message. 
+ events = await workflow.run("hello world") + print(events.get_completed_event()) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index 86abae8a08..82e01ac300 100644 --- a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -5,7 +5,7 @@ from agent_framework.workflow import ( Executor, - ExecutorCompleteEvent, + ExecutorCompletedEvent, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, @@ -106,7 +106,7 @@ async def main(): # Step 3: Run the workflow and print the events. iterations = 0 async for event in workflow.run_stream(NumberSignal.INIT): - if isinstance(event, ExecutorCompleteEvent) and event.executor_id == guess_number_executor.id: + if isinstance(event, ExecutorCompletedEvent) and event.executor_id == guess_number_executor.id: iterations += 1 print(f"Event: {event}") diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index d7ea89e088..8ac4c9b4ff 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -185,17 +185,16 @@ async def main(): # Depending on whether we have a RequestInfoEvent event, we either # run the workflow normally or send the message to the HIL executor. if not request_info_event: - response = workflow.run_stream( + response_stream = workflow.run_stream( "Create a slogan for a new electric SUV that is affordable and fun to drive." 
) else: - response = workflow.send_response( - [ChatMessage(ChatRole.USER, text=user_input)], - request_info_event.request_id, - ) + response_stream = workflow.send_responses_stream({ + request_info_event.request_id: [ChatMessage(ChatRole.USER, text=user_input)] + }) request_info_event = None - async for event in response: + async for event in response_stream: print(event) if isinstance(event, WorkflowCompletedEvent): From cb1f7a8647abcd7053f40d32bd6ad8c6ba4110bf Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 5 Aug 2025 13:27:37 -0700 Subject: [PATCH 16/18] Add xml docs --- .../agent_framework_workflow/_edge.py | 44 ++++++++++++++--- .../agent_framework_workflow/_events.py | 26 +++++----- .../agent_framework_workflow/_executor.py | 24 ++++++--- .../agent_framework_workflow/_runner.py | 10 +++- .../_runner_context.py | 1 - .../agent_framework_workflow/_shared_state.py | 2 +- .../agent_framework_workflow/_typing_utils.py | 10 +++- .../agent_framework_workflow/_workflow.py | 49 +++++++++++++++---- .../_workflow_context.py | 8 ++- 9 files changed, 133 insertions(+), 41 deletions(-) diff --git a/python/packages/workflow/agent_framework_workflow/_edge.py b/python/packages/workflow/agent_framework_workflow/_edge.py index 80deb4da27..0d7ca8bb18 100644 --- a/python/packages/workflow/agent_framework_workflow/_edge.py +++ b/python/packages/workflow/agent_framework_workflow/_edge.py @@ -20,8 +20,16 @@ def __init__( source: Executor, target: Executor, condition: Callable[[Any], bool] | None = None, - ): - """Initialize the edge with a source and target node.""" + ) -> None: + """Initialize the edge with a source and target node. + + Args: + source (Executor): The source executor of the edge. + target (Executor): The target executor of the edge. + condition (Callable[[Any], bool], optional): A condition function that determines + if the edge can handle the data. If None, the edge can handle any data type. + Defaults to None. 
+ """ self.source = source self.target = target self._condition = condition @@ -51,7 +59,14 @@ def has_edge_group(self) -> bool: @classmethod def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: - """Extract the source and target IDs from the edge ID.""" + """Extract the source and target IDs from the edge ID. + + Args: + edge_id (str): The edge ID in the format "source_id->target_id". + + Returns: + tuple[str, str]: A tuple containing the source ID and target ID. + """ if cls.ID_SEPARATOR not in edge_id: raise ValueError(f"Invalid edge ID format: {edge_id}") ids = edge_id.split(cls.ID_SEPARATOR) @@ -60,7 +75,14 @@ def source_and_target_from_id(cls, edge_id: str) -> tuple[str, str]: return ids[0], ids[1] def can_handle(self, message_data: Any) -> bool: - """Check if the edge can handle the given data.""" + """Check if the edge can handle the given data. + + Args: + message_data (Any): The data to check. + + Returns: + bool: True if the edge can handle the data, False otherwise. + """ if not self._edge_group_ids: return self.target.can_handle(message_data) @@ -68,7 +90,13 @@ def can_handle(self, message_data: Any) -> bool: return self.target.can_handle([message_data]) async def send_message(self, message: Message, shared_state: SharedState, ctx: RunnerContext) -> None: - """Send a message along this edge.""" + """Send a message along this edge. + + Args: + message (Message): The message to send. + shared_state (SharedState): The shared state to use for holding data. + ctx (RunnerContext): The context for the runner. + """ if not self.can_handle(message.data): raise RuntimeError(f"Edge {self.id} cannot handle data of type {type(message.data)}.") @@ -111,7 +139,11 @@ def _should_route(self, data: Any) -> bool: return self._condition(data) def set_edge_group(self, edge_group_ids: list[str]) -> None: - """Set the edge group IDs for this edge.""" + """Set the edge group IDs for this edge. 
+ + Args: + edge_group_ids (list[str]): A list of edge IDs that belong to the same edge group. + """ # Validate that the edges in the edge group contain the same target executor as this edge # TODO(@taochen): An edge cannot be part of multiple edge groups. # TODO(@taochen): Can an edge have both a condition and an edge group? diff --git a/python/packages/workflow/agent_framework_workflow/_events.py b/python/packages/workflow/agent_framework_workflow/_events.py index bc56c707d5..223d7dddd1 100644 --- a/python/packages/workflow/agent_framework_workflow/_events.py +++ b/python/packages/workflow/agent_framework_workflow/_events.py @@ -53,19 +53,6 @@ def __repr__(self): return f"{self.__class__.__name__}(exception={self.data})" -class ExecutorEvent(WorkflowEvent): - """Base class for executor events.""" - - def __init__(self, executor_id: str, data: Any | None = None): - """Initialize the executor event with an executor ID and optional data.""" - super().__init__(data) - self.executor_id = executor_id - - def __repr__(self): - """Return a string representation of the executor event.""" - return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data})" - - class RequestInfoEvent(WorkflowEvent): """Event triggered when a workflow executor requests external information.""" @@ -100,6 +87,19 @@ def __repr__(self): ) +class ExecutorEvent(WorkflowEvent): + """Base class for executor events.""" + + def __init__(self, executor_id: str, data: Any | None = None): + """Initialize the executor event with an executor ID and optional data.""" + super().__init__(data) + self.executor_id = executor_id + + def __repr__(self): + """Return a string representation of the executor event.""" + return f"{self.__class__.__name__}(executor_id={self.executor_id}, data={self.data})" + + class ExecutorInvokeEvent(ExecutorEvent): """Event triggered when an executor handler is invoked.""" diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py 
b/python/packages/workflow/agent_framework_workflow/_executor.py index deb029d0b4..701abdae34 100644 --- a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -25,8 +25,12 @@ class Executor: """An executor is a component that processes messages in a workflow.""" - def __init__(self, id: str | None = None): - """Initialize the executor with a unique identifier.""" + def __init__(self, id: str | None = None) -> None: + """Initialize the executor with a unique identifier. + + Args: + id: A unique identifier for the executor. If None, a new UUID will be generated. + """ self._id = id or str(uuid.uuid4()) self._message_handlers: dict[type, Callable[[Any, WorkflowContext], Any]] = {} @@ -38,11 +42,7 @@ def __init__(self, id: str | None = None): "Please define at least one message handler using the @message_handler decorator." ) - async def execute( - self, - message: Any, - context: WorkflowContext, - ) -> None: + async def execute(self, message: Any, context: WorkflowContext) -> None: """Execute the executor with a given message and context. Args: @@ -225,7 +225,15 @@ def __init__( streaming: bool = False, id: str | None = None, ): - """Initialize the executor with a unique identifier.""" + """Initialize the executor with a unique identifier. + + Args: + agent: The agent to be wrapped by this executor. + agent_thread: The thread to use for running the agent. If None, a new thread will be created. + streaming: Whether to enable streaming for the agent. If enabled, the executor will emit + AgentRunStreamingEvent updates instead of a single AgentRunEvent. + id: A unique identifier for the executor. If None, a new UUID will be generated. 
+ """ super().__init__(id or agent.id) self._agent = agent self._agent_thread = agent_thread or self._agent.get_new_thread() diff --git a/python/packages/workflow/agent_framework_workflow/_runner.py b/python/packages/workflow/agent_framework_workflow/_runner.py index dabd3f56e0..dd53d8c4da 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner.py +++ b/python/packages/workflow/agent_framework_workflow/_runner.py @@ -24,7 +24,15 @@ def __init__( shared_state: SharedState, ctx: RunnerContext, max_iterations: int = DEFAULT_MAX_ITERATIONS, - ): + ) -> None: + """Initialize the runner with edges, shared state, and context. + + Args: + edges: The edges of the workflow. + shared_state: The shared state for the workflow. + ctx: The runner context for the workflow. + max_iterations: The maximum number of iterations to run. + """ self._edge_map = self._parse_edges(edges) self._ctx = ctx self._iteration = 0 diff --git a/python/packages/workflow/agent_framework_workflow/_runner_context.py b/python/packages/workflow/agent_framework_workflow/_runner_context.py index 43152fa592..bcb1e1ba34 100644 --- a/python/packages/workflow/agent_framework_workflow/_runner_context.py +++ b/python/packages/workflow/agent_framework_workflow/_runner_context.py @@ -29,7 +29,6 @@ async def send_message(self, message: Message) -> None: """Send a message from the executor to the context. Args: - source_id: The ID of the executor sending the message. message: The message to be sent. """ ... 
diff --git a/python/packages/workflow/agent_framework_workflow/_shared_state.py b/python/packages/workflow/agent_framework_workflow/_shared_state.py index 2c194db5e5..a43bcfc30b 100644 --- a/python/packages/workflow/agent_framework_workflow/_shared_state.py +++ b/python/packages/workflow/agent_framework_workflow/_shared_state.py @@ -8,7 +8,7 @@ class SharedState: """A class to manage shared state in a workflow.""" - def __init__(self): + def __init__(self) -> None: """Initialize the shared state.""" self._state: dict[str, Any] = {} self._shared_state_lock = asyncio.Lock() diff --git a/python/packages/workflow/agent_framework_workflow/_typing_utils.py b/python/packages/workflow/agent_framework_workflow/_typing_utils.py index b22e13786e..f8547d886e 100644 --- a/python/packages/workflow/agent_framework_workflow/_typing_utils.py +++ b/python/packages/workflow/agent_framework_workflow/_typing_utils.py @@ -4,7 +4,15 @@ def is_instance_of(data: Any, target_type: type) -> bool: - """Check if the data is an instance of the target type.""" + """Check if the data is an instance of the target type. + + Args: + data (Any): The data to check. + target_type (type): The type to check against. + + Returns: + bool: True if data is an instance of target_type, False otherwise. + """ origin = get_origin(target_type) args = get_args(target_type) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index c7d67375a5..9f46fcbd48 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -105,6 +105,9 @@ async def run_stream(self, message: Any) -> AsyncIterable[WorkflowEvent]: Args: message: The message to be sent to the starting executor. + + Yields: + WorkflowEvent: The events generated during the workflow execution. 
""" executor = self._start_executor if isinstance(executor, str): @@ -132,6 +135,9 @@ async def send_responses_stream(self, responses: dict[str, Any]) -> AsyncIterabl Args: responses: The responses to be sent back to the workflow, where keys are request IDs and values are the corresponding response data. + + Yields: + WorkflowEvent: The events generated during the workflow execution after sending the responses. """ request_info_executor = self._get_executor_by_id(RequestInfoExecutor.EXECUTOR_ID) if not isinstance(request_info_executor, RequestInfoExecutor): @@ -217,6 +223,8 @@ def add_edge( ) -> "Self": """Add a directed edge between two executors. + The output types of the source and the input types of the target must be compatible. + Args: source: The source executor of the edge. target: The target executor of the edge. @@ -230,6 +238,7 @@ def add_edge( def add_fan_out_edges(self, source: Executor, targets: Sequence[Executor]) -> "Self": """Add multiple edges to the workflow. + The output types of the source and the input types of the targets must be compatible. Messages from the source executor will be sent to all target executors. Args: @@ -240,16 +249,36 @@ def add_fan_out_edges(self, source: Executor, targets: Sequence[Executor]) -> "S self._edges.append(Edge(source, target)) return self - def add_fan_in_edges( - self, - sources: Sequence[Executor], - target: Executor, - ) -> "Self": + def add_fan_in_edges(self, sources: Sequence[Executor], target: Executor) -> "Self": """Add multiple edges from sources to a single target executor. The edges will be grouped together for synchronized processing, meaning the target executor will only be executed once all source executors have completed. + The target executor will receive a list of messages aggregated from all source executors. + Thus the input types of the target executor must be compatible with a list of the output + types of the source executors. 
For example: + + class Target(Executor): + @message_handler + async def handle_messages(self, messages: list[Message], ctx: WorkflowContext) -> None: + # Process the aggregated messages from all sources + + class Source(Executor): + @message_handler(output_types=[Message]) + async def handle_message(self, message: Message, ctx: WorkflowContext) -> None: + # Send a message to the target executor + await ctx.send_message(message) + + workflow = ( + WorkflowBuilder() + .add_fan_in_edges( + [Source(id="source1"), Source(id="source2")], + Target(id="target") + ) + .build() + ) + Args: sources: A list of source executors for the edges. target: The target executor for the edges. @@ -267,12 +296,14 @@ def add_fan_in_edges( return self - def add_chain( - self, - executors: Sequence[Executor], - ) -> "Self": + def add_chain(self, executors: Sequence[Executor]) -> "Self": """Add a chain of executors to the workflow. + The output of each executor in the chain will be sent to the next executor in the chain. + The input types of each executor must be compatible with the output types of the previous executor. + + Cycles in the chain are not allowed, meaning the chain cannot have two executors with the same ID. + Args: executors: A list of executors to be added to the chain. """ diff --git a/python/packages/workflow/agent_framework_workflow/_workflow_context.py b/python/packages/workflow/agent_framework_workflow/_workflow_context.py index b8df0f0bbd..ca24748543 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow_context.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow_context.py @@ -40,7 +40,13 @@ def __init__( raise ValueError("source_executor_ids cannot be empty. At least one source executor ID is required.") async def send_message(self, message: Any, target_id: str | None = None) -> None: - """Send a message to the workflow context.""" + """Send a message to the workflow context. + + Args: + message: The message to send. This can be any data type that the target executor can handle.
+ target_id: The ID of the target executor to send the message to. + If None, the message will be sent to all target executors. + """ await self._runner_context.send_message( Message( data=message, From 14e5a8f5b46fccf14ecdc756bd11a75898f5baa3 Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Tue, 5 Aug 2025 16:53:31 -0700 Subject: [PATCH 17/18] run_stream -> run_streaming --- .../workflow/agent_framework_workflow/_workflow.py | 8 ++++---- python/packages/workflow/tests/test_workflow.py | 14 ++++++++------ .../workflow/step_01_simple_workflow_sequential.py | 2 +- .../workflow/step_02_simple_workflow_condition.py | 2 +- .../workflow/step_03_simple_workflow_loop.py | 2 +- .../workflow/step_04_simple_group_chat.py | 2 +- .../workflow/step_05_simple_group_chat_with_hil.py | 4 ++-- .../getting_started/workflow/step_06_map_reduce.py | 2 +- 8 files changed, 19 insertions(+), 17 deletions(-) diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 9f46fcbd48..1aeb75cb6b 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -100,7 +100,7 @@ def executors(self) -> list[Executor]: """Get the list of executors in the workflow.""" return list(self._executors.values()) - async def run_stream(self, message: Any) -> AsyncIterable[WorkflowEvent]: + async def run_streaming(self, message: Any) -> AsyncIterable[WorkflowEvent]: """Send a message to the starting executor of the workflow and stream the events generated by the workflow. 
Args: @@ -129,7 +129,7 @@ async def run_stream(self, message: Any) -> AsyncIterable[WorkflowEvent]: async for event in self._runner.run_until_convergence(): yield event - async def send_responses_stream(self, responses: dict[str, Any]) -> AsyncIterable[WorkflowEvent]: + async def send_responses_streaming(self, responses: dict[str, Any]) -> AsyncIterable[WorkflowEvent]: """Send responses back to the workflow and stream the events generated by the workflow. Args: @@ -174,7 +174,7 @@ async def run(self, message: Any) -> WorkflowRunResult: Returns: A WorkflowRunResult instance containing a list of events generated during the workflow execution. """ - events = [event async for event in self.run_stream(message)] + events = [event async for event in self.run_streaming(message)] return WorkflowRunResult(events) async def send_responses(self, responses: dict[str, Any]) -> WorkflowRunResult: @@ -186,7 +186,7 @@ async def send_responses(self, responses: dict[str, Any]) -> WorkflowRunResult: Returns: A WorkflowRunResult instance containing a list of events generated during the workflow execution. 
""" - events = [event async for event in self.send_responses_stream(responses)] + events = [event async for event in self.send_responses_streaming(responses)] return WorkflowRunResult(events) def _get_executor_by_id(self, executor_id: str) -> Executor: diff --git a/python/packages/workflow/tests/test_workflow.py b/python/packages/workflow/tests/test_workflow.py index 155bc9ff92..52057e9556 100644 --- a/python/packages/workflow/tests/test_workflow.py +++ b/python/packages/workflow/tests/test_workflow.py @@ -74,7 +74,7 @@ async def mock_handler_b(self, message: ApprovalMessage, ctx: WorkflowContext) - await ctx.send_message(MockMessage(data=data)) -async def test_workflow_run_stream(): +async def test_workflow_run_streaming(): """Test the workflow run stream.""" executor_a = MockExecutor(id="executor_a") executor_b = MockExecutor(id="executor_b") @@ -88,7 +88,7 @@ async def test_workflow_run_stream(): ) result: int | None = None - async for event in workflow.run_stream(MockMessage(data=0)): + async for event in workflow.run_streaming(MockMessage(data=0)): assert isinstance(event, WorkflowEvent) if isinstance(event, WorkflowCompletedEvent): result = event.data @@ -111,7 +111,7 @@ async def test_workflow_run_stream_not_completed(): ) with pytest.raises(RuntimeError): - async for _ in workflow.run_stream(MockMessage(data=0)): + async for _ in workflow.run_streaming(MockMessage(data=0)): pass @@ -152,7 +152,7 @@ async def test_workflow_run_not_completed(): await workflow.run(MockMessage(data=0)) -async def test_workflow_send_responses_stream(): +async def test_workflow_send_responses_streaming(): """Test the workflow run with approval.""" executor_a = MockExecutor(id="executor_a") executor_b = MockExecutorRequestApproval(id="executor_b") @@ -169,13 +169,15 @@ async def test_workflow_send_responses_stream(): ) request_info_event: RequestInfoEvent | None = None - async for event in workflow.run_stream(MockMessage(data=0)): + async for event in 
workflow.run_streaming(MockMessage(data=0)): if isinstance(event, RequestInfoEvent): request_info_event = event assert request_info_event is not None result: int | None = None - async for event in workflow.send_responses_stream({request_info_event.request_id: ApprovalMessage(approved=True)}): + async for event in workflow.send_responses_streaming({ + request_info_event.request_id: ApprovalMessage(approved=True) + }): if isinstance(event, WorkflowCompletedEvent): result = event.data diff --git a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py index a28b6bba12..1bcaa03025 100644 --- a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py +++ b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py @@ -51,7 +51,7 @@ async def main(): # Step 3: Run the workflow with an initial message. completion_event = None - async for event in workflow.run_stream("hello world"): + async for event in workflow.run_streaming("hello world"): print(f"Event: {event}") if isinstance(event, WorkflowCompletedEvent): # The WorkflowCompletedEvent contains the final result. diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py index 9dbf053a24..a5bc693a81 100644 --- a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py @@ -111,7 +111,7 @@ async def main(): ) # Step 3: Run the workflow with an input message. 
- async for event in workflow.run_stream("This is a spam."): + async for event in workflow.run_streaming("This is a spam."): print(f"Event: {event}") diff --git a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index 82e01ac300..91ba6eef2e 100644 --- a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -105,7 +105,7 @@ async def main(): # Step 3: Run the workflow and print the events. iterations = 0 - async for event in workflow.run_stream(NumberSignal.INIT): + async for event in workflow.run_streaming(NumberSignal.INIT): if isinstance(event, ExecutorCompletedEvent) and event.executor_id == guess_number_executor.id: iterations += 1 print(f"Event: {event}") diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat.py b/python/samples/getting_started/workflow/step_04_simple_group_chat.py index 36453a9d83..ab85954985 100644 --- a/python/samples/getting_started/workflow/step_04_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat.py @@ -130,7 +130,7 @@ async def main(): # Step 3: Run the workflow with an initial message. completion_event = None - async for event in workflow.run_stream( + async for event in workflow.run_streaming( "Create a slogan for a new electric SUV that is affordable and fun to drive." 
): if isinstance(event, AgentRunEvent): diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index 8ac4c9b4ff..6615e38d60 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -185,11 +185,11 @@ async def main(): # Depending on whether we have a RequestInfoEvent event, we either # run the workflow normally or send the message to the HIL executor. if not request_info_event: - response_stream = workflow.run_stream( + response_stream = workflow.run_streaming( "Create a slogan for a new electric SUV that is affordable and fun to drive." ) else: - response_stream = workflow.send_responses_stream({ + response_stream = workflow.send_responses_streaming({ request_info_event.request_id: [ChatMessage(ChatRole.USER, text=user_input)] }) request_info_event = None diff --git a/python/samples/getting_started/workflow/step_06_map_reduce.py b/python/samples/getting_started/workflow/step_06_map_reduce.py index 19cfc6488d..a7ddb6325f 100644 --- a/python/samples/getting_started/workflow/step_06_map_reduce.py +++ b/python/samples/getting_started/workflow/step_06_map_reduce.py @@ -299,7 +299,7 @@ async def main(): # Step 4: Run the workflow with the raw text as input. 
completion_event = None - async for event in workflow.run_stream(raw_text): + async for event in workflow.run_streaming(raw_text): print(f"Event: {event}") if isinstance(event, WorkflowCompletedEvent): completion_event = event From d68065cb216fe8ae3205288a17db28518131bc9d Mon Sep 17 00:00:00 2001 From: Tao Chen Date: Wed, 6 Aug 2025 08:29:39 -0700 Subject: [PATCH 18/18] message_handler -> handler --- .../main/agent_framework/workflow/__init__.py | 2 +- .../agent_framework/workflow/__init__.pyi | 4 +- .../agent_framework_workflow/__init__.py | 4 +- .../agent_framework_workflow/_executor.py | 49 +++++++++---------- .../agent_framework_workflow/_validation.py | 8 +-- .../agent_framework_workflow/_workflow.py | 4 +- python/packages/workflow/tests/test_edge.py | 4 +- .../packages/workflow/tests/test_executor.py | 22 ++++----- python/packages/workflow/tests/test_runner.py | 4 +- .../workflow/tests/test_validation.py | 32 ++++++------ .../packages/workflow/tests/test_workflow.py | 10 ++-- .../workflow/tests/test_workflow_builder.py | 6 +-- .../step_01_simple_workflow_sequential.py | 6 +-- ...a_simple_workflow_sequential_non_stream.py | 6 +-- .../step_02_simple_workflow_condition.py | 8 +-- .../workflow/step_03_simple_workflow_loop.py | 6 +-- .../workflow/step_04_simple_group_chat.py | 6 +-- .../step_05_simple_group_chat_with_hil.py | 8 +-- .../workflow/step_06_map_reduce.py | 12 ++--- 19 files changed, 100 insertions(+), 101 deletions(-) diff --git a/python/packages/main/agent_framework/workflow/__init__.py b/python/packages/main/agent_framework/workflow/__init__.py index 34da99edf9..14669fecab 100644 --- a/python/packages/main/agent_framework/workflow/__init__.py +++ b/python/packages/main/agent_framework/workflow/__init__.py @@ -20,7 +20,7 @@ "WorkflowStartedEvent", "AgentRunEvent", "AgentRunStreamingEvent", - "message_handler", + "handler", "AgentExecutor", "AgentExecutorRequest", "AgentExecutorResponse", diff --git 
a/python/packages/main/agent_framework/workflow/__init__.pyi b/python/packages/main/agent_framework/workflow/__init__.pyi index 1119864f7c..30ea6fde9c 100644 --- a/python/packages/main/agent_framework/workflow/__init__.pyi +++ b/python/packages/main/agent_framework/workflow/__init__.pyi @@ -21,7 +21,7 @@ from agent_framework_workflow import ( WorkflowRunResult, WorkflowStartedEvent, __version__, - message_handler, + handler, ) __all__ = [ @@ -45,5 +45,5 @@ __all__ = [ "WorkflowRunResult", "WorkflowStartedEvent", "__version__", - "message_handler", + "handler", ] diff --git a/python/packages/workflow/agent_framework_workflow/__init__.py b/python/packages/workflow/agent_framework_workflow/__init__.py index 4fb1263f14..ee9404e07b 100644 --- a/python/packages/workflow/agent_framework_workflow/__init__.py +++ b/python/packages/workflow/agent_framework_workflow/__init__.py @@ -20,7 +20,7 @@ Executor, RequestInfoExecutor, RequestInfoMessage, - message_handler, + handler, ) from ._validation import ( EdgeDuplicationError, @@ -67,6 +67,6 @@ "WorkflowStartedEvent", "WorkflowValidationError", "__version__", - "message_handler", + "handler", "validate_workflow_graph", ] diff --git a/python/packages/workflow/agent_framework_workflow/_executor.py b/python/packages/workflow/agent_framework_workflow/_executor.py index 701abdae34..aa43859426 100644 --- a/python/packages/workflow/agent_framework_workflow/_executor.py +++ b/python/packages/workflow/agent_framework_workflow/_executor.py @@ -33,13 +33,13 @@ def __init__(self, id: str | None = None) -> None: """ self._id = id or str(uuid.uuid4()) - self._message_handlers: dict[type, Callable[[Any, WorkflowContext], Any]] = {} + self._handlers: dict[type, Callable[[Any, WorkflowContext], Any]] = {} self._discover_handlers() - if not self._message_handlers: + if not self._handlers: raise ValueError( - f"Executor {self.__class__.__name__} has no message handlers defined. 
" - "Please define at least one message handler using the @message_handler decorator." + f"Executor {self.__class__.__name__} has no handlers defined. " + "Please define at least one handler using the @handler decorator." ) async def execute(self, message: Any, context: WorkflowContext) -> None: @@ -53,9 +53,9 @@ async def execute(self, message: Any, context: WorkflowContext) -> None: An awaitable that resolves to the result of the execution. """ handler: Callable[[Any, WorkflowContext], Any] | None = None - for message_type in self._message_handlers: + for message_type in self._handlers: if is_instance_of(message, message_type): - handler = self._message_handlers[message_type] + handler = self._handlers[message_type] break if handler is None: @@ -76,12 +76,11 @@ def _discover_handlers(self) -> None: attr = getattr(self, attr_name) if callable(attr) and hasattr(attr, "_handler_spec"): handler_spec = attr._handler_spec # type: ignore - if self._message_handlers.get(handler_spec["message_type"]) is not None: + if self._handlers.get(handler_spec["message_type"]) is not None: raise ValueError( - f"Duplicate message handler for type {handler_spec['message_type']} " - f"in {self.__class__.__name__}" + f"Duplicate handler for type {handler_spec['message_type']} in {self.__class__.__name__}" ) - self._message_handlers[handler_spec["message_type"]] = attr + self._handlers[handler_spec["message_type"]] = attr def can_handle(self, message: Any) -> bool: """Check if the executor can handle a given message type. @@ -92,25 +91,25 @@ def can_handle(self, message: Any) -> bool: Returns: True if the executor can handle the message type, False otherwise. 
""" - return any(is_instance_of(message, message_type) for message_type in self._message_handlers) + return any(is_instance_of(message, message_type) for message_type in self._handlers) # endregion: Executor -# region: Message Handler Decorator +# region: Handler Decorator ExecutorT = TypeVar("ExecutorT", bound="Executor") @overload -def message_handler( +def handler( func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], ) -> Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]: ... @overload -def message_handler( +def handler( func: None = None, *, output_types: list[type] | None = None, @@ -120,7 +119,7 @@ def message_handler( ]: ... -def message_handler( +def handler( func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]] | None = None, *, output_types: list[type] | None = None, @@ -131,7 +130,7 @@ def message_handler( Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], ] ): - """Decorator to register a message handler for an executor. + """Decorator to register a handler for an executor. Args: func: The function to decorate. Can be None when using with parameters. @@ -141,11 +140,11 @@ def message_handler( The decorated function with handler metadata. Example: - @message_handler + @handler async def handle_string(self, message: str, ctx: WorkflowContext) -> None: ... - @message_handler(output_types=[str, int]) + @handler(output_types=[str, int]) async def handle_data(self, message: dict, ctx: WorkflowContext) -> None: ... """ @@ -153,20 +152,20 @@ async def handle_data(self, message: dict, ctx: WorkflowContext) -> None: def decorator( func: Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]], ) -> Callable[[ExecutorT, Any, WorkflowContext], Awaitable[Any]]: - # Extract the message type from a message handler function. + # Extract the message type from a handler function. 
sig = inspect.signature(func) params = list(sig.parameters.values()) if len(params) != 3: # self, message, ctx - raise ValueError(f"Message handler must have exactly 3 parameters, got {len(params)}") + raise ValueError(f"Handler must have exactly 3 parameters, got {len(params)}") message_type = params[1].annotation if message_type is inspect.Parameter.empty: - raise ValueError("Message handler's second parameter must have a type annotation") + raise ValueError("Handler's second parameter must have a type annotation") @functools.wraps(func) async def wrapper(self: ExecutorT, message: Any, ctx: WorkflowContext) -> Any: - """Wrapper function to call the message handler.""" + """Wrapper function to call the handler.""" return await func(self, message, ctx) wrapper._handler_spec = { # type: ignore @@ -182,7 +181,7 @@ async def wrapper(self: ExecutorT, message: Any, ctx: WorkflowContext) -> Any: return decorator(func) -# endregion: Message Handler Decorator +# endregion: Handler Decorator # region: Agent Executor @@ -240,7 +239,7 @@ def __init__( self._streaming = streaming self._cache: list[ChatMessage] = [] - @message_handler(output_types=[AgentExecutorResponse]) + @handler(output_types=[AgentExecutorResponse]) async def run(self, request: AgentExecutorRequest, ctx: WorkflowContext) -> None: """Run the agent executor with the given request.""" self._cache.extend(request.messages) @@ -300,7 +299,7 @@ def __init__(self): super().__init__(id=self.EXECUTOR_ID) self._request_events: dict[str, RequestInfoEvent] = {} - @message_handler + @handler async def run(self, message: RequestInfoMessage, ctx: WorkflowContext) -> None: """Run the RequestInfoExecutor with the given message.""" source_executor_id = ctx.get_source_executor_id() diff --git a/python/packages/workflow/agent_framework_workflow/_validation.py b/python/packages/workflow/agent_framework_workflow/_validation.py index 6a1715e511..0ce9cd2e76 100644 --- 
a/python/packages/workflow/agent_framework_workflow/_validation.py +++ b/python/packages/workflow/agent_framework_workflow/_validation.py @@ -172,7 +172,7 @@ def _validate_type_compatibility(self) -> None: logger.warning( f"Executor '{source_executor.id}' has no output type annotations. " f"Type compatibility validation will be skipped for edges from this executor. " - f"Consider adding output_types to @message_handler decorators for better validation." + f"Consider adding output_types to @handler decorators for better validation." ) if not target_input_types: logger.warning( @@ -245,9 +245,9 @@ def _get_executor_input_types(self, executor: Executor) -> list[type[Any]]: """ input_types: list[type[Any]] = [] - # Access the private _message_handlers attribute to get input types - if hasattr(executor, "_message_handlers"): - input_types.extend(executor._message_handlers.keys()) # type: ignore + # Access the private _handlers attribute to get input types + if hasattr(executor, "_handlers"): + input_types.extend(executor._handlers.keys()) # type: ignore return input_types diff --git a/python/packages/workflow/agent_framework_workflow/_workflow.py b/python/packages/workflow/agent_framework_workflow/_workflow.py index 1aeb75cb6b..84cc8178b5 100644 --- a/python/packages/workflow/agent_framework_workflow/_workflow.py +++ b/python/packages/workflow/agent_framework_workflow/_workflow.py @@ -260,12 +260,12 @@ def add_fan_in_edges(self, sources: Sequence[Executor], target: Executor) -> "Se types of the source executors. 
For example: class Target(Executor): - @message_handler + @handler def handle_messages(self, messages: list[Message]) -> None: # Process the aggregated messages from all sources class Source(Executor): - @message_handler(output_type=[Message]) + @handler(output_type=[Message]) def handle_message(self, message: Message) -> None: # Send a message to the target executor self.send_message(message) diff --git a/python/packages/workflow/tests/test_edge.py b/python/packages/workflow/tests/test_edge.py index 19251230bd..b1c41c4470 100644 --- a/python/packages/workflow/tests/test_edge.py +++ b/python/packages/workflow/tests/test_edge.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Any -from agent_framework.workflow import Executor, WorkflowContext, message_handler +from agent_framework.workflow import Executor, WorkflowContext, handler from agent_framework_workflow._edge import Edge @@ -18,7 +18,7 @@ class MockMessage: class MockExecutor(Executor): """A mock executor for testing purposes.""" - @message_handler + @handler async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: """A mock handler that does nothing.""" pass diff --git a/python/packages/workflow/tests/test_executor.py b/python/packages/workflow/tests/test_executor.py index 06028fa326..c4f8bb92e0 100644 --- a/python/packages/workflow/tests/test_executor.py +++ b/python/packages/workflow/tests/test_executor.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
import pytest -from agent_framework.workflow import Executor, WorkflowContext, message_handler +from agent_framework.workflow import Executor, WorkflowContext, handler def test_executor_without_handlers(): @@ -24,7 +24,7 @@ def test_executor_handler_without_annotations(): class MockExecutorWithOneHandlerWithoutAnnotations(Executor): # type: ignore """A mock executor with one handler that does not implement any annotations.""" - @message_handler + @handler async def handle(self, message, ctx) -> None: # type: ignore """A mock handler that does not implement any annotations.""" pass @@ -38,7 +38,7 @@ def test_executor_invalid_handler_signature(): class MockExecutorWithInvalidHandlerSignature(Executor): # type: ignore """A mock executor with an invalid handler signature.""" - @message_handler # type: ignore + @handler # type: ignore async def handle(self, message, other, ctx) -> None: # type: ignore """A mock handler with an invalid signature.""" pass @@ -50,19 +50,19 @@ def test_executor_with_valid_handlers(): class MockExecutorWithValidHandlers(Executor): # type: ignore """A mock executor with valid handlers.""" - @message_handler + @handler async def handle_text(self, text: str, ctx: WorkflowContext) -> None: # type: ignore """A mock handler with a valid signature.""" pass - @message_handler + @handler async def handle_number(self, number: int, ctx: WorkflowContext) -> None: # type: ignore """Another mock handler with a valid signature.""" pass executor = MockExecutorWithValidHandlers() assert executor.id is not None - assert len(executor._message_handlers) == 2 # type: ignore + assert len(executor._handlers) == 2 # type: ignore assert executor.can_handle("text") is True assert executor.can_handle(42) is True assert executor.can_handle(3.14) is False @@ -74,27 +74,27 @@ def test_executor_handlers_with_output_types(): class MockExecutorWithOutputTypes(Executor): # type: ignore """A mock executor with handlers that specify output types.""" - 
@message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_string(self, text: str, ctx: WorkflowContext) -> None: # type: ignore """A mock handler that outputs a string.""" pass - @message_handler(output_types=[int]) + @handler(output_types=[int]) async def handle_integer(self, number: int, ctx: WorkflowContext) -> None: # type: ignore """A mock handler that outputs an integer.""" pass executor = MockExecutorWithOutputTypes() - assert len(executor._message_handlers) == 2 # type: ignore + assert len(executor._handlers) == 2 # type: ignore - string_handler = executor._message_handlers[str] # type: ignore + string_handler = executor._handlers[str] # type: ignore assert string_handler is not None assert string_handler._handler_spec is not None # type: ignore assert string_handler._handler_spec["name"] == "handle_string" # type: ignore assert string_handler._handler_spec["message_type"] is str # type: ignore assert string_handler._handler_spec["output_types"] == [str] # type: ignore - int_handler = executor._message_handlers[int] # type: ignore + int_handler = executor._handlers[int] # type: ignore assert int_handler is not None assert int_handler._handler_spec is not None # type: ignore assert int_handler._handler_spec["name"] == "handle_integer" # type: ignore diff --git a/python/packages/workflow/tests/test_runner.py b/python/packages/workflow/tests/test_runner.py index 2a50421f3d..a4a1abb43c 100644 --- a/python/packages/workflow/tests/test_runner.py +++ b/python/packages/workflow/tests/test_runner.py @@ -4,7 +4,7 @@ from dataclasses import dataclass import pytest -from agent_framework.workflow import Executor, WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, message_handler +from agent_framework.workflow import Executor, WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, handler from agent_framework_workflow._edge import Edge from agent_framework_workflow._runner import Runner @@ -22,7 +22,7 @@ class MockMessage: class 
MockExecutor(Executor): """A mock executor for testing purposes.""" - @message_handler(output_types=[MockMessage]) + @handler(output_types=[MockMessage]) async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: if message.data < 10: await ctx.send_message(MockMessage(data=message.data + 1)) diff --git a/python/packages/workflow/tests/test_validation.py b/python/packages/workflow/tests/test_validation.py index d1b4962c9c..23a14a4490 100644 --- a/python/packages/workflow/tests/test_validation.py +++ b/python/packages/workflow/tests/test_validation.py @@ -14,14 +14,14 @@ WorkflowBuilder, WorkflowContext, WorkflowValidationError, - message_handler, + handler, validate_workflow_graph, ) from agent_framework_workflow._edge import Edge class StringExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_string(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message(message.upper()) @@ -29,36 +29,36 @@ async def handle_string(self, message: str, ctx: WorkflowContext) -> None: class StringAggregator(Executor): """A mock executor that aggregates results from multiple executors.""" - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def mock_handler(self, messages: list[str], ctx: WorkflowContext) -> None: # This mock simply returns the data incremented by 1 await ctx.send_message("Aggregated: " + ", ".join(messages)) class IntExecutor(Executor): - @message_handler(output_types=[int]) + @handler(output_types=[int]) async def handle_int(self, message: int, ctx: WorkflowContext) -> None: await ctx.send_message(message * 2) class AnyExecutor(Executor): - @message_handler + @handler async def handle_any(self, message: Any, ctx: WorkflowContext) -> None: await ctx.send_message(f"Processed: {message}") class NoOutputTypesExecutor(Executor): - @message_handler + @handler async def handle_message(self, message: str, ctx: WorkflowContext) -> None: await 
ctx.send_message("processed") class MultiTypeExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_string(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message(f"String: {message}") - @message_handler(output_types=[int]) + @handler(output_types=[int]) async def handle_int(self, message: int, ctx: WorkflowContext) -> None: await ctx.send_message(f"Int: {message}") @@ -218,12 +218,12 @@ def test_complex_workflow_validation(): def test_type_compatibility_inheritance(): class BaseExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_base(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message("base") class DerivedExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_derived(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message("derived") @@ -303,7 +303,7 @@ def test_logging_for_missing_output_types(caplog: Any) -> None: assert workflow is not None assert "has no output type annotations" in caplog.text - assert "Consider adding output_types to @message_handler decorators" in caplog.text + assert "Consider adding output_types to @handler decorators" in caplog.text def test_logging_for_missing_input_types(caplog: Any) -> None: @@ -316,7 +316,7 @@ async def handle_message(self, message: Any, ctx: WorkflowContext) -> None: def _discover_handlers(self) -> None: # Override to manually register handler without type info - self._message_handlers[str] = self.handle_message + self._handlers[str] = self.handle_message string_executor = StringExecutor(id="string_executor") no_input_executor = NoInputTypesExecutor(id="no_input") @@ -501,12 +501,12 @@ def test_enhanced_type_compatibility_error_details(): def test_union_type_compatibility_validation() -> None: class UnionOutputExecutor(Executor): - @message_handler(output_types=[str, int]) + @handler(output_types=[str, 
int]) async def handle_message(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message("output") class UnionInputExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_message(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message("processed") @@ -521,12 +521,12 @@ async def handle_message(self, message: str, ctx: WorkflowContext) -> None: def test_generic_type_compatibility() -> None: class ListOutputExecutor(Executor): - @message_handler(output_types=[list[str]]) + @handler(output_types=[list[str]]) async def handle_message(self, message: str, ctx: WorkflowContext) -> None: await ctx.send_message(["output"]) class ListInputExecutor(Executor): - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def handle_message(self, message: list[str], ctx: WorkflowContext) -> None: await ctx.send_message("processed") diff --git a/python/packages/workflow/tests/test_workflow.py b/python/packages/workflow/tests/test_workflow.py index 52057e9556..b5be8dc2ca 100644 --- a/python/packages/workflow/tests/test_workflow.py +++ b/python/packages/workflow/tests/test_workflow.py @@ -12,7 +12,7 @@ WorkflowCompletedEvent, WorkflowContext, WorkflowEvent, - message_handler, + handler, ) @@ -31,7 +31,7 @@ def __init__(self, id: str, limit: int = 10): super().__init__(id=id) self.limit = limit - @message_handler(output_types=[MockMessage]) + @handler(output_types=[MockMessage]) async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: if message.data < self.limit: await ctx.send_message(MockMessage(data=message.data + 1)) @@ -42,7 +42,7 @@ async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None class MockAggregator(Executor): """A mock executor that aggregates results from multiple executors.""" - @message_handler + @handler async def mock_handler(self, messages: list[MockMessage], ctx: WorkflowContext) -> None: # This mock simply 
returns the data incremented by 1 await ctx.add_event(WorkflowCompletedEvent(data=sum(msg.data for msg in messages))) @@ -58,13 +58,13 @@ class ApprovalMessage: class MockExecutorRequestApproval(Executor): """A mock executor that simulates a request for approval.""" - @message_handler(output_types=[RequestInfoMessage]) + @handler(output_types=[RequestInfoMessage]) async def mock_handler_a(self, message: MockMessage, ctx: WorkflowContext) -> None: """A mock handler that requests approval.""" await ctx.set_shared_state(self.id, message.data) await ctx.send_message(RequestInfoMessage()) - @message_handler(output_types=[MockMessage]) + @handler(output_types=[MockMessage]) async def mock_handler_b(self, message: ApprovalMessage, ctx: WorkflowContext) -> None: """A mock handler that processes the approval response.""" data = await ctx.get_shared_state(self.id) diff --git a/python/packages/workflow/tests/test_workflow_builder.py b/python/packages/workflow/tests/test_workflow_builder.py index 06d80a9c97..5135314485 100644 --- a/python/packages/workflow/tests/test_workflow_builder.py +++ b/python/packages/workflow/tests/test_workflow_builder.py @@ -4,7 +4,7 @@ from typing import Any import pytest -from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowContext, message_handler +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowContext, handler @dataclass @@ -17,7 +17,7 @@ class MockMessage: class MockExecutor(Executor): """A mock executor for testing purposes.""" - @message_handler(output_types=[MockMessage]) + @handler(output_types=[MockMessage]) async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None: """A mock handler that does nothing.""" pass @@ -26,7 +26,7 @@ async def mock_handler(self, message: MockMessage, ctx: WorkflowContext) -> None class MockAggregator(Executor): """A mock executor that aggregates results from multiple executors.""" - @message_handler(output_types=[MockMessage]) + 
@handler(output_types=[MockMessage]) async def mock_handler(self, messages: list[MockMessage], ctx: WorkflowContext) -> None: # This mock simply returns the data incremented by 1 pass diff --git a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py index 1bcaa03025..ff887598d3 100644 --- a/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py +++ b/python/samples/getting_started/workflow/step_01_simple_workflow_sequential.py @@ -2,7 +2,7 @@ import asyncio -from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, message_handler +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, handler """ The following sample demonstrates a basic workflow with two executors @@ -14,7 +14,7 @@ class UpperCaseExecutor(Executor): """An executor that converts text to uppercase.""" - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by converting the input string to uppercase.""" result = text.upper() @@ -26,7 +26,7 @@ async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: class ReverseTextExecutor(Executor): """An executor that reverses text.""" - @message_handler + @handler async def reverse_text(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by reversing the input string.""" result = text[::-1] diff --git a/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py b/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py index c38f443df8..d38569fcfc 100644 --- a/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py +++ b/python/samples/getting_started/workflow/step_01a_simple_workflow_sequential_non_stream.py 
@@ -2,7 +2,7 @@ import asyncio -from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, message_handler +from agent_framework.workflow import Executor, WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, handler """ The following sample demonstrates a basic workflow with two executors @@ -14,7 +14,7 @@ class UpperCaseExecutor(Executor): """An executor that converts text to uppercase.""" - @message_handler(output_types=[str]) + @handler(output_types=[str]) async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by converting the input string to uppercase.""" result = text.upper() @@ -26,7 +26,7 @@ async def to_upper_case(self, text: str, ctx: WorkflowContext) -> None: class ReverseTextExecutor(Executor): """An executor that reverses text.""" - @message_handler + @handler async def reverse_text(self, text: str, ctx: WorkflowContext) -> None: """Execute the task by reversing the input string.""" result = text[::-1] diff --git a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py index a5bc693a81..1d8625c02c 100644 --- a/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py +++ b/python/samples/getting_started/workflow/step_02_simple_workflow_condition.py @@ -8,7 +8,7 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - message_handler, + handler, ) """ @@ -35,7 +35,7 @@ def __init__(self, spam_keywords: list[str], id: str | None = None): super().__init__(id=id) self._spam_keywords = spam_keywords - @message_handler(output_types=[SpamDetectorResponse]) + @handler(output_types=[SpamDetectorResponse]) async def handle_email(self, email: str, ctx: WorkflowContext) -> None: """Determine if the input string is spam.""" result = any(keyword in email.lower() for keyword in self._spam_keywords) @@ -46,7 +46,7 @@ async def handle_email(self, email: str, 
ctx: WorkflowContext) -> None: class SendResponse(Executor): """An executor that responds to a message based on spam detection.""" - @message_handler + @handler async def handle_detector_response( self, spam_detector_response: SpamDetectorResponse, @@ -66,7 +66,7 @@ async def handle_detector_response( class RemoveSpam(Executor): """An executor that removes spam messages.""" - @message_handler + @handler async def handle_detector_response( self, spam_detector_response: SpamDetectorResponse, diff --git a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py index 91ba6eef2e..dea61e8fcc 100644 --- a/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py +++ b/python/samples/getting_started/workflow/step_03_simple_workflow_loop.py @@ -9,7 +9,7 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - message_handler, + handler, ) """ @@ -41,7 +41,7 @@ def __init__(self, bound: tuple[int, int], id: str | None = None): self._lower = bound[0] self._upper = bound[1] - @message_handler(output_types=[int]) + @handler(output_types=[int]) async def guess_number(self, feedback: NumberSignal, ctx: WorkflowContext) -> None: """Execute the task by guessing a number.""" if feedback == NumberSignal.INIT: @@ -74,7 +74,7 @@ def __init__(self, target: int, id: str | None = None): super().__init__(id=id) self._target = target - @message_handler(output_types=[NumberSignal]) + @handler(output_types=[NumberSignal]) async def judge(self, number: int, ctx: WorkflowContext) -> None: """Judge the guessed number.""" if number == self._target: diff --git a/python/samples/getting_started/workflow/step_04_simple_group_chat.py b/python/samples/getting_started/workflow/step_04_simple_group_chat.py index ab85954985..dd5ae88e3f 100644 --- a/python/samples/getting_started/workflow/step_04_simple_group_chat.py +++ b/python/samples/getting_started/workflow/step_04_simple_group_chat.py @@ 
-13,7 +13,7 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - message_handler, + handler, ) """ @@ -32,7 +32,7 @@ def __init__(self, members: list[str], max_round: int, id: str | None = None): self._max_round = max_round self._current_round = 0 - @message_handler(output_types=[AgentExecutorRequest]) + @handler(output_types=[AgentExecutorRequest]) async def start(self, task: str, ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" initial_message = ChatMessage(ChatRole.USER, text=task) @@ -52,7 +52,7 @@ async def start(self, task: str, ctx: WorkflowContext) -> None: target_id=self._get_next_member(), ) - @message_handler(output_types=[AgentExecutorRequest]) + @handler(output_types=[AgentExecutorRequest]) async def handle_agent_response(self, response: AgentExecutorResponse, ctx: WorkflowContext) -> None: """Execute the task by sending messages to the next executor in the round-robin sequence.""" # Send the response to the other members diff --git a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py index 6615e38d60..4f0040a249 100644 --- a/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py +++ b/python/samples/getting_started/workflow/step_05_simple_group_chat_with_hil.py @@ -15,7 +15,7 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - message_handler, + handler, ) """ @@ -34,7 +34,7 @@ def __init__(self, members: list[str], id: str | None = None): self._current_round = 0 self._chat_history: list[ChatMessage] = [] - @message_handler(output_types=[AgentExecutorRequest]) + @handler(output_types=[AgentExecutorRequest]) async def start(self, task: str, ctx: WorkflowContext) -> None: """Handler that starts the group chat with an initial task.""" initial_message = ChatMessage(ChatRole.USER, text=task) @@ -51,7 +51,7 @@ async def start(self, 
task: str, ctx: WorkflowContext) -> None: # Update the cache with the initial message self._chat_history.append(initial_message) - @message_handler(output_types=[AgentExecutorRequest, RequestInfoMessage]) + @handler(output_types=[AgentExecutorRequest, RequestInfoMessage]) async def handle_agent_response(self, response: AgentExecutorResponse, ctx: WorkflowContext) -> None: """Handler that processes the response from the agent.""" # Update the chat history with the response @@ -74,7 +74,7 @@ async def handle_agent_response(self, response: AgentExecutorResponse, ctx: Work selection = self._get_next_member() await ctx.send_message(AgentExecutorRequest(messages=[], should_respond=True), target_id=selection) - @message_handler(output_types=[AgentExecutorRequest]) + @handler(output_types=[AgentExecutorRequest]) async def handle_request_response(self, response: list[ChatMessage], ctx: WorkflowContext) -> None: """Handler that processes the response from the RequestInfoExecutor.""" # Update the chat history with the response diff --git a/python/samples/getting_started/workflow/step_06_map_reduce.py b/python/samples/getting_started/workflow/step_06_map_reduce.py index a7ddb6325f..8879bce982 100644 --- a/python/samples/getting_started/workflow/step_06_map_reduce.py +++ b/python/samples/getting_started/workflow/step_06_map_reduce.py @@ -13,7 +13,7 @@ WorkflowBuilder, WorkflowCompletedEvent, WorkflowContext, - message_handler, + handler, ) if sys.version_info >= (3, 12): @@ -56,7 +56,7 @@ def __init__(self, map_executor_ids: list[str], id: str | None = None): super().__init__(id) self._map_executor_ids = map_executor_ids - @message_handler(output_types=[SplitCompleted]) + @handler(output_types=[SplitCompleted]) async def split(self, data: str, ctx: WorkflowContext) -> None: """Execute the task by splitting the data into chunks. 
@@ -111,7 +111,7 @@ class MapCompleted: class Map(Executor): """An executor that applies a function to each item in the data and save the result to a file.""" - @message_handler(output_types=[MapCompleted]) + @handler(output_types=[MapCompleted]) async def map(self, _: SplitCompleted, ctx: WorkflowContext) -> None: """Execute the task by applying a function to each item and same result to a file. @@ -148,7 +148,7 @@ def __init__(self, reducer_ids: list[str], id: str | None = None): super().__init__(id) self._reducer_ids = reducer_ids - @message_handler(output_types=[ShuffleCompleted]) + @handler(output_types=[ShuffleCompleted]) async def shuffle(self, data: list[MapCompleted], ctx: WorkflowContext) -> None: """Execute the task by aggregating the results. @@ -223,7 +223,7 @@ class ReduceCompleted: class Reduce(Executor): """An executor that reduces the results from the ShuffleExecutor.""" - @message_handler(output_types=[ReduceCompleted]) + @handler(output_types=[ReduceCompleted]) async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext) -> None: """Execute the task by reducing the results. @@ -256,7 +256,7 @@ async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext) -> None: class CompletionExecutor(Executor): """An executor that completes the workflow by aggregating the results from the ReduceExecutors.""" - @message_handler + @handler async def complete(self, data: list[ReduceCompleted], ctx: WorkflowContext) -> None: """Execute the task by aggregating the results.