diff --git a/.gitignore b/.gitignore index d85db694..2fb783ef 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,7 @@ examples/local/* .logs scratch_pad.py .* +*.png +*.html +usecases/ +compare_gemini_outputs_v1.py diff --git a/flo_ai/examples/arium_linear_usage.py b/flo_ai/examples/arium_linear_usage.py new file mode 100644 index 00000000..b1fefabb --- /dev/null +++ b/flo_ai/examples/arium_linear_usage.py @@ -0,0 +1,113 @@ +import asyncio +from flo_ai.arium.builder import AriumBuilder +from flo_ai.models.agent import Agent +from flo_ai.llm.openai_llm import OpenAI + + +async def simple_example(): + """ + Simple example: 2 agents connected directly together + Agent 1 (Greeter) -> Agent 2 (Responder) + """ + llm = OpenAI(model='gpt-4o-mini', temperature=0.7) + + # Agent 1: Greeter + greeter = Agent( + name='greeter', + system_prompt='You are a friendly greeter. Say hello and introduce the topic to the next agent.', + llm=llm, + ) + + # Agent 2: Responder + responder = Agent( + name='responder', + system_prompt='You are a helpful responder. Provide a thoughtful response to what the greeter shared.', + llm=llm, + ) + + # Connect agents directly: greeter -> responder + result = await ( + AriumBuilder() + .add_agent(greeter) + .add_agent(responder) + .start_with(greeter) + .connect(greeter, responder) # Direct connection + .end_with(responder) + .build_and_run(["Hello, I'd like to learn about Python programming!"]) + ) + + print('Simple Example Result:') + print(result) + return result + + +async def main(): + """ + Example showing how to create 2 simple agents connected directly together + using the AriumBuilder. + """ + + # Create LLM instance + llm = OpenAI(model='gpt-4o-mini', temperature=0.7) + + # Create first agent - Content Analyst + content_analyst = Agent( + name='content_analyst', + system_prompt="""You are a content analyst. When you receive input, analyze it and provide: + 1. A brief summary of the content + 2. The main topics covered + 3. 
Any insights or observations + + Pass your analysis to the next agent for final processing.""", + llm=llm, + role='Content Analyst', + ) + + # Create second agent - Summary Generator + summary_generator = Agent( + name='summary_generator', + system_prompt="""You are a summary generator. You receive analysis from the content analyst. + Your job is to create a concise, well-structured final summary that includes: + 1. Key takeaways + 2. Actionable insights + 3. A clear conclusion + + Make your response clear and professional.""", + llm=llm, + role='Summary Generator', + ) + + # Create Arium workflow using AriumBuilder + print('Building Arium workflow...') + + result = await ( + AriumBuilder() + .add_agents([content_analyst, summary_generator]) + .start_with(content_analyst) + .connect(content_analyst, summary_generator) # Direct connection + .end_with(summary_generator) + .build_and_run( + [ + 'Machine learning is revolutionizing various industries. ' + 'From healthcare to finance, AI systems are being deployed ' + 'to automate processes, improve decision-making, and enhance ' + 'customer experiences. However, challenges remain around ' + 'data privacy, algorithmic bias, and the need for skilled ' + 'professionals to manage these systems effectively.' + ] + ) + ) + + print('\n' + '=' * 50) + print('ARIUM WORKFLOW RESULT:') + print('=' * 50) + print(result) + + +if __name__ == '__main__': + print('Running simple example first...') + asyncio.run(simple_example()) + + print('\n' + '=' * 80) + print('Now running detailed example...') + asyncio.run(main()) diff --git a/flo_ai/examples/example_graph_visualization.py b/flo_ai/examples/example_graph_visualization.py new file mode 100644 index 00000000..8c42bb93 --- /dev/null +++ b/flo_ai/examples/example_graph_visualization.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 +""" +Example script demonstrating the graph visualization feature in BaseArium. 
+This script creates a simple workflow with agents and tools, then generates a PNG visualization. +""" + +from flo_ai.arium.base import BaseArium +from flo_ai.models.agent import Agent +from flo_ai.tool.flo_tool import flo_tool +from flo_ai.llm.openai_llm import OpenAI +from typing import Literal + + +# Create a simple validation tool using the @flo_tool decorator +@flo_tool( + name='validation_tool', + description='Validates input data and returns validation status', + parameter_descriptions={ + 'data': 'The data to validate', + 'strict': 'Whether to use strict validation rules', + }, +) +async def validate_data(data: str, strict: bool = False) -> str: + """Validate the input data according to specified rules.""" + if not data: + return 'invalid: empty data' + + if strict and len(data) < 10: + return 'invalid: data too short for strict validation' + + return 'valid: data passed validation' + + +# Create mock agents for demonstration +def create_sample_agents(): + """Create sample agents for the demonstration""" + # Note: This would normally require a valid OpenAI API key + # For demonstration purposes, we'll use a mock LLM + try: + llm = OpenAI(model='gpt-4') + except Exception: + # If OpenAI is not available, create a mock LLM + class MockLLM: + def __init__(self, model): + self.model = model + + llm = MockLLM('gpt-4') + + input_processor = Agent( + name='input_processor', + system_prompt='Process incoming requests', + llm=llm, + role='Input Processor', + ) + + analyzer = Agent( + name='analyzer', + system_prompt='Analyze processed data', + llm=llm, + role='Data Analyzer', + ) + + decision_maker = Agent( + name='decision_maker', + system_prompt='Make decisions based on analysis', + llm=llm, + role='Decision Maker', + ) + + output_formatter = Agent( + name='output_formatter', + system_prompt='Format final output', + llm=llm, + role='Output Formatter', + ) + + return input_processor, analyzer, decision_maker, output_formatter + + +# Router function for 
demonstration +def analysis_router( + analysis_result: str, +) -> Literal['decision_maker', 'output_formatter']: + """Route based on analysis result""" + if 'complex' in analysis_result.lower(): + return 'decision_maker' + else: + return 'output_formatter' + + +def main(): + """Create a sample workflow and generate visualization""" + + # Create the BaseArium instance + arium = BaseArium() + + # Create sample agents and tools + input_processor, analyzer, decision_maker, output_formatter = create_sample_agents() + + # Get the tool from the decorated function + validation_tool = validate_data.tool + + # Add nodes to the arium + arium.add_nodes( + [input_processor, validation_tool, analyzer, decision_maker, output_formatter] + ) + + # Set up the workflow + # Start with input processor + arium.start_at(input_processor) + + # Input processor -> Validation tool + arium.add_edge('input_processor', ['validation_tool']) + + # Validation tool -> Analyzer + arium.add_edge('validation_tool', ['analyzer']) + + # Analyzer -> Decision maker OR Output formatter (with router) + arium.add_edge( + 'analyzer', ['decision_maker', 'output_formatter'], router=analysis_router + ) + + # Decision maker -> Output formatter + arium.add_edge('decision_maker', ['output_formatter']) + + # Output formatter -> End + arium.add_end_to(output_formatter) + + # Validate the graph + try: + arium.validate_graph() + print('✅ Graph validation successful!') + except ValueError as e: + print(f'❌ Graph validation failed: {e}') + return + + # Generate visualization + print('🎨 Generating graph visualization...') + + # Generate with default settings + arium.visualize_graph('workflow_graph.png') + + # Generate with custom settings + arium.visualize_graph( + output_path='custom_workflow_graph.png', + figsize=(14, 10), + node_size=4000, + font_size=12, + dpi=400, + ) + + print('✅ Graph visualization completed!') + print('📁 Check the following files:') + print(' - workflow_graph.png (default settings)') + print(' - 
custom_workflow_graph.png (custom settings)') + + +if __name__ == '__main__': + main() diff --git a/flo_ai/examples/yaml_agent_example.py b/flo_ai/examples/yaml_agent_example.py index e725c21b..d50e4c45 100644 --- a/flo_ai/examples/yaml_agent_example.py +++ b/flo_ai/examples/yaml_agent_example.py @@ -3,7 +3,6 @@ # Example YAML configuration yaml_config = """ apiVersion: flo/alpha-v1 -kind: FloAgent metadata: name: email-summary-flo version: 1.0.0 diff --git a/flo_ai/flo_ai/arium/README.md b/flo_ai/flo_ai/arium/README.md new file mode 100644 index 00000000..e0d4b06e --- /dev/null +++ b/flo_ai/flo_ai/arium/README.md @@ -0,0 +1,171 @@ +# Arium Builder Pattern + +The `AriumBuilder` provides a fluent, easy-to-use interface for building and running Arium workflows. It eliminates the need for manual graph construction and makes it simple to create complex multi-agent workflows. + +## Quick Start + +```python +from flo_ai.arium import AriumBuilder, create_arium + +# Simple workflow +result = await (AriumBuilder() + .add_agent(my_agent) + .add_tool(my_tool) + .start_with(my_agent) + .connect(my_agent, my_tool) + .end_with(my_tool) + .build_and_run(["Hello, world!"])) +``` + +## Key Features + +- **Fluent Interface**: Chain method calls for readable workflow construction +- **Automatic Compilation**: No need to manually call `compile()` +- **Default Memory**: Uses `MessageMemory` by default if none provided +- **Easy Connections**: Simple `connect()` method for linear workflows +- **Flexible Routing**: Full support for custom router functions +- **Visualization**: Built-in graph visualization support +- **Reusable Workflows**: Build once, run multiple times + +## API Reference + +### AriumBuilder Methods + +| Method | Description | +|--------|-------------| +| `with_memory(memory)` | Set custom memory for the workflow | +| `add_agent(agent)` | Add a single agent | +| `add_agents(agents)` | Add multiple agents | +| `add_tool(tool)` | Add a single tool | +| `add_tools(tools)` 
| Add multiple tools | +| `start_with(node)` | Set the starting node | +| `end_with(node)` | Add an ending node | +| `connect(from_node, to_node)` | Simple connection between nodes | +| `add_edge(from_node, to_nodes, router)` | Add edge with optional router | +| `build()` | Build the Arium instance | +| `build_and_run(inputs)` | Build and run in one step | +| `visualize(output_path, title)` | Generate workflow visualization | +| `reset()` | Reset builder to start fresh | + +### Convenience Functions + +- `create_arium()` - Returns a new `AriumBuilder` instance + +## Usage Patterns + +### 1. Linear Workflow + +```python +result = await (AriumBuilder() + .add_agent(agent1) + .add_tool(tool1) + .add_agent(agent2) + .start_with(agent1) + .connect(agent1, tool1) + .connect(tool1, agent2) + .end_with(agent2) + .build_and_run(inputs)) +``` + +### 2. Branching Workflow + +```python +def my_router(memory) -> Literal["path_a", "path_b"]: + # Your routing logic here + return "path_a" + +result = await (AriumBuilder() + .add_agent(classifier) + .add_tool(tool_a) + .add_tool(tool_b) + .add_agent(final_agent) + .start_with(classifier) + .add_edge(classifier, [tool_a, tool_b], my_router) + .connect(tool_a, final_agent) + .connect(tool_b, final_agent) + .end_with(final_agent) + .build_and_run(inputs)) +``` + +### 3. Build and Reuse + +```python +# Build once +arium = (AriumBuilder() + .add_agent(my_agent) + .start_with(my_agent) + .end_with(my_agent) + .build()) + +# Run multiple times +result1 = await arium.run(["Input 1"]) +result2 = await arium.run(["Input 2"]) +``` + +### 4. 
Complex Multi-Agent Workflow + +```python +arium = (AriumBuilder() + .add_agents([agent1, agent2, agent3]) + .add_tools([tool1, tool2]) + .with_memory(custom_memory) + .start_with(agent1) + .connect(agent1, tool1) + .connect(tool1, agent2) + .add_edge(agent2, [agent3, tool2], router_fn) + .connect(tool2, agent3) + .end_with(agent3) + .visualize("my_workflow.png", "My Workflow") + .build()) + +result = await arium.run(inputs) +``` + +## Migration from Manual Construction + +### Before (Manual) +```python +from flo_ai.arium import Arium +from flo_ai.arium.memory import MessageMemory + +# Manual construction +arium = Arium(MessageMemory()) +arium.add_nodes([agent1, tool1, agent2]) +arium.start_at(agent1) +arium.add_edge(agent1.name, [tool1.name]) +arium.add_edge(tool1.name, [agent2.name]) +arium.add_end_to(agent2) +arium.compile() + +result = await arium.run(inputs) +``` + +### After (Builder) +```python +from flo_ai.arium import AriumBuilder + +# Builder pattern +result = await (AriumBuilder() + .add_agent(agent1) + .add_tool(tool1) + .add_agent(agent2) + .start_with(agent1) + .connect(agent1, tool1) + .connect(tool1, agent2) + .end_with(agent2) + .build_and_run(inputs)) +``` + +## Error Handling + +The builder includes comprehensive validation: + +- Ensures at least one agent or tool is added +- Requires a start node to be specified +- Requires at least one end node +- Validates router function signatures +- Checks for orphaned nodes + +## Examples + +See `examples.py` for complete working examples of different workflow patterns. 
class Arium(BaseArium):
    """Executable Arium workflow graph.

    Extends :class:`BaseArium` with a memory store and an async run loop
    that walks the compiled graph from the start node to the end node,
    feeding each node's output back into memory.
    """

    def __init__(self, memory: BaseMemory):
        super().__init__()
        self.is_compiled = False
        # Fall back to an in-process message list when no memory is given.
        self.memory = memory if memory else MessageMemory()

    def compile(self):
        """Validate the graph structure and mark this Arium as runnable."""
        self.validate_graph()
        self.is_compiled = True

    async def run(self, inputs: List[str | ImageMessage]):
        """Run the compiled graph on *inputs* and return the final memory.

        Raises:
            ValueError: if the Arium was not compiled, or has no memory
                or no nodes configured.
        """
        if not self.is_compiled:
            raise ValueError('Arium is not compiled')

        if not self.memory:
            raise ValueError('Arium has no memory')

        if not self.nodes:
            raise ValueError('Arium has no nodes')

        return await self._execute_graph(inputs)

    async def _execute_graph(self, inputs: List[str | ImageMessage]):
        """Walk the graph from the start node until the end node is reached."""
        # Seed memory with the caller-supplied inputs. Plain loop instead
        # of a list comprehension: the adds are side effects, not values.
        for msg in inputs:
            self.memory.add(msg)

        current_node = self.nodes[self.start_node_name]
        current_edge = self.edges[self.start_node_name]

        logger.info(f'Executing graph from {current_node.name}')
        while current_node.name != self.end_node_name:
            if current_edge is None:
                # BUG FIX (was a TODO): a non-end node with no outgoing
                # edge previously crashed with an AttributeError on the
                # router call below; fail with a clear message instead.
                raise ValueError(
                    f'Node {current_node.name} has no outgoing edge and is not the end node'
                )

            # Execute the current node and record its output.
            result = await self._execute_node(current_node)
            self._add_to_memory(result)

            # Ask the edge's router which node to visit next.
            next_node_name = current_edge.router_fn(
                memory=self.memory,
                navigation_thresholds=current_edge.navigation_threshold,
            )

            # The end node has no outgoing edge and is absent from
            # self.edges; the loop condition handles termination.
            next_edge = (
                self.edges[next_node_name] if next_node_name in self.edges else None
            )

            # Advance to the next node/edge pair.
            current_node = self.nodes[next_node_name]
            current_edge = next_edge

        return self.memory.get()

    async def _execute_node(self, node: Agent | Tool | StartNode | EndNode):
        """Run a single node; start/end markers produce no output."""
        if isinstance(node, Agent):
            return await node.run(self.memory.get())
        elif isinstance(node, Tool):
            return await node.execute(self.memory.get())
        elif isinstance(node, (StartNode, EndNode)):
            return None

    def _add_to_memory(self, result: str):
        # Start/end nodes yield None, which must not be stored in memory.
        if result:
            self.memory.add(result)
| StartNode | EndNode): + start_node = StartNode() + if start_node.name in self.nodes: + raise ValueError(f'Start node {start_node.name} already exists') + self.nodes[start_node.name] = start_node + self.edges[start_node.name] = Edge( + router_fn=partial(default_router, to_node=node.name), to_nodes=[node.name] + ) + + def add_end_to(self, node: Agent | Tool | StartNode | EndNode): + end_node = EndNode() + if end_node.name in self.nodes: + raise ValueError(f'End node {end_node.name} already exists') + self.nodes[end_node.name] = end_node + self.edges[node.name] = Edge( + router_fn=partial(default_router, to_node=end_node.name), + to_nodes=[end_node.name], + ) + + def _check_router_return_type(self, router: Callable) -> Optional[List]: + try: + # Get the function signature + sig = inspect.signature(router) + return_annotation = sig.return_annotation + + # Check if there's no return annotation + if return_annotation == inspect.Signature.empty: + return None + + # Check if the return type is a Literal + origin = get_origin(return_annotation) + + # In Python 3.8+, Literal types have get_origin() return typing.Literal + if origin is Literal: + # Extract the literal values + literal_values = list(get_args(return_annotation)) + return literal_values + + return None + + except Exception as e: + logger.error(f'Error checking router return type: {e}') + return None + + def add_edge( + self, + from_node: str, + to_nodes: List[str] = None, + router: Optional[Callable] = None, + navigation_threshold: Dict[str, int] = {}, + ): + if router and not callable(router): + raise ValueError('Router must be a callable') + + if not to_nodes: + raise ValueError('To nodes must be provided') + + if not router and len(to_nodes) != 1: + raise ValueError( + 'Exactly one to node must be provided if router is not provided' + ) + + if from_node not in self.nodes: + raise ValueError(f'Node {from_node} not found') + + wrong_nodes = [ + wrong_to_node + for wrong_to_node in to_nodes + if wrong_to_node 
not in self.nodes + ] + if wrong_nodes: + raise ValueError(f'Nodes {wrong_nodes} not found') + + if router: + literal_values = self._check_router_return_type(router) + if literal_values is None: + raise ValueError('Router return type is not a Literal') + + invalid_literals = [val for val in literal_values if val not in to_nodes] + if invalid_literals: + raise ValueError( + f'Router return type includes literal values {invalid_literals} that are not in to_nodes {to_nodes}' + ) + + if set(literal_values) != set(to_nodes): + raise ValueError( + f'Router return type values {literal_values} do not match to_nodes {to_nodes}' + ) + + self.edges[from_node] = Edge( + router_fn=router + if router + else partial(default_router, to_node=to_nodes[0]), + to_nodes=to_nodes, + navigation_threshold=navigation_threshold, + ) + + def check_orphan_nodes(self) -> List[str]: + if not self.nodes: + return [] + + # Get all nodes with outgoing edges + nodes_with_outgoing = set(self.edges.keys()) + + # Get all nodes with incoming edges by examining router return types and to_nodes + nodes_with_incoming = set() + + for _, target in self.edges.items(): + nodes_with_incoming.update(target.to_nodes) + + # Find orphan nodes: nodes that have neither incoming nor outgoing edges + all_nodes = set(self.nodes.keys()) + connected_nodes = nodes_with_outgoing.union(nodes_with_incoming) + orphan_nodes = all_nodes - connected_nodes + + return list(orphan_nodes) + + def validate_graph(self) -> bool: + orphan_nodes = self.check_orphan_nodes() + + if orphan_nodes: + raise ValueError( + f'Orphan nodes found: {orphan_nodes}. These nodes have no incoming or outgoing edges.' + ) + + # Check for exactly 1 start node + start_nodes = [ + node for node in self.nodes.values() if isinstance(node, StartNode) + ] + if len(start_nodes) == 0: + raise ValueError( + f'Graph must have exactly 1 start node. 
Found 0 start nodes: {start_nodes}' + ) + elif len(start_nodes) > 1: + raise ValueError( + f'Graph must have exactly 1 start node. Found {len(start_nodes)} start nodes.' + ) + + # Check for at least 1 end node + end_nodes = [node for node in self.nodes.values() if isinstance(node, EndNode)] + if len(end_nodes) == 0: + raise ValueError('Graph must have at least 1 end node. Found 0 end nodes.') + + return True + + def visualize_graph( + self, + output_path: str = 'graph_visualization.png', + graph_title: str = 'Arium Graph Visualization', + figsize: tuple = (12, 8), + node_size: int = 3000, + font_size: int = 10, + dpi: int = 300, + ) -> None: + """ + Generate a graph visualization and save it as PNG. + + Args: + output_path: Path where the PNG file will be saved + figsize: Figure size as (width, height) in inches + node_size: Size of the nodes in the graph + font_size: Font size for node labels + dpi: Resolution of the saved image + """ + import matplotlib.pyplot as plt + import matplotlib.patches as patches + import networkx as nx + + if not self.nodes: + logger.error('No nodes to visualize') + return + + # Create directed graph + G = nx.DiGraph() + + # Add nodes with their types + for node_name, node in self.nodes.items(): + node_type = self._get_node_type(node) + G.add_node(node_name, node_type=node_type, node_obj=node) + + # Add edges + for from_node, edge in self.edges.items(): + for to_node in edge.to_nodes: + if to_node in self.nodes: + G.add_edge(from_node, to_node, edge_obj=edge) + + # Create matplotlib figure + fig, ax = plt.subplots(figsize=figsize, dpi=dpi) + + # Use hierarchical layout for better DAG visualization + try: + # Try different layouts for better DAG appearance + pos = nx.nx_agraph.graphviz_layout(G, prog='dot', args='-Grankdir=TB') + except Exception as e: + logger.error(f'Error in graphviz_layout: {e}') + try: + # Fallback to planar layout for DAG structure + pos = nx.planar_layout(G) + except Exception as e: + logger.error(f'Error in 
graphviz_layout: {e}') + # Final fallback to shell layout for better hierarchy + pos = nx.shell_layout(G) + + # Define colors for different node types + node_colors = { + 'start': '#90EE90', # Light green + 'end': '#FFB6C1', # Light pink + 'agent': '#87CEEB', # Sky blue + 'tool': '#DDA0DD', # Plum + } + + # Draw nodes with different colors based on type + for node_name, node_data in G.nodes(data=True): + node_type = node_data['node_type'] + color = node_colors.get(node_type, '#CCCCCC') + + nx.draw_networkx_nodes( + G, + pos, + nodelist=[node_name], + node_color=color, + node_size=node_size, + alpha=0.9, + ax=ax, + ) + + # Separate edges by router type for different styling + default_edges = [] + custom_edges = [] + + for edge_data in G.edges(data=True): + from_node, to_node, data = edge_data + edge_obj = data['edge_obj'] + + if edge_obj.is_default_router(): + default_edges.append((from_node, to_node)) + else: + custom_edges.append((from_node, to_node)) + + # Draw default router edges with dotted lines + if default_edges: + nx.draw_networkx_edges( + G, + pos, + edgelist=default_edges, + edge_color='gray', + arrows=True, + arrowsize=20, + arrowstyle='->', + connectionstyle='arc3,rad=0.1', + style='solid', + width=2, + ax=ax, + ) + + # Draw custom router edges with solid lines + if custom_edges: + nx.draw_networkx_edges( + G, + pos, + edgelist=custom_edges, + edge_color='black', + arrows=True, + arrowsize=20, + arrowstyle='->', + connectionstyle='arc3,rad=0.1', + style='dotted', + width=2, + ax=ax, + ) + + # Add labels + nx.draw_networkx_labels(G, pos, font_size=font_size, font_weight='bold', ax=ax) + + # Add title + plt.title(graph_title, fontsize=16, fontweight='bold', pad=20) + + # Add legend + legend_elements = [ + patches.Patch(color='#90EE90', label='Start Node'), + patches.Patch(color='#FFB6C1', label='End Node'), + patches.Patch(color='#87CEEB', label='Agent'), + patches.Patch(color='#DDA0DD', label='Tool'), + patches.Patch( + facecolor='none', + 
edgecolor='gray', + linestyle='dotted', + linewidth=2, + label='Default Router', + ), + patches.Patch( + facecolor='none', + edgecolor='black', + linestyle='solid', + linewidth=2, + label='Custom Router', + ), + ] + ax.legend(handles=legend_elements, loc='upper right', bbox_to_anchor=(1.15, 1)) + + # Remove axes + ax.axis('off') + + # Adjust layout to prevent legend cutoff + plt.tight_layout() + + # Save the figure + output_path = Path(output_path) + output_path.parent.mkdir(parents=True, exist_ok=True) + + plt.savefig( + str(output_path), + format='png', + dpi=dpi, + bbox_inches='tight', + facecolor='white', + edgecolor='none', + ) + plt.close() + + logger.info(f'Graph visualization saved to: {output_path}') + + def _get_node_type(self, node) -> str: + """Helper method to determine node type for visualization.""" + if isinstance(node, StartNode): + return 'start' + elif isinstance(node, EndNode): + return 'end' + elif isinstance(node, Agent): + return 'agent' + elif isinstance(node, Tool): + return 'tool' + else: + return 'unknown' diff --git a/flo_ai/flo_ai/arium/builder.py b/flo_ai/flo_ai/arium/builder.py new file mode 100644 index 00000000..1b541d1d --- /dev/null +++ b/flo_ai/flo_ai/arium/builder.py @@ -0,0 +1,172 @@ +from typing import List, Optional, Callable, Union, Dict +from flo_ai.arium.arium import Arium +from flo_ai.arium.memory import MessageMemory, BaseMemory +from flo_ai.models.agent import Agent +from flo_ai.tool.base_tool import Tool +from flo_ai.llm.base_llm import ImageMessage + + +class AriumBuilder: + """ + A builder class for creating and configuring Arium instances with a fluent interface. 
+ + Example usage: + result = (AriumBuilder() + .with_memory(my_memory) + .add_agent(agent1) + .add_tool(tool1) + .start_with(agent1) + .add_edge(agent1, [tool1], router_fn) + .end_with(tool1) + .build_and_run(["Hello, world!"])) + """ + + def __init__(self): + self._memory: Optional[BaseMemory] = None + self._agents: List[Agent] = [] + self._tools: List[Tool] = [] + self._start_node: Optional[Union[Agent, Tool]] = None + self._end_nodes: List[Union[Agent, Tool]] = [] + self._edges: List[ + tuple + ] = [] # (from_node, to_nodes, router, navigation_threshold) + self._arium: Optional[Arium] = None + + def with_memory(self, memory: BaseMemory) -> 'AriumBuilder': + """Set the memory for the Arium.""" + self._memory = memory + return self + + def add_agent(self, agent: Agent) -> 'AriumBuilder': + """Add an agent to the Arium.""" + self._agents.append(agent) + return self + + def add_agents(self, agents: List[Agent]) -> 'AriumBuilder': + """Add multiple agents to the Arium.""" + self._agents.extend(agents) + return self + + def add_tool(self, tool: Tool) -> 'AriumBuilder': + """Add a tool to the Arium.""" + self._tools.append(tool) + return self + + def add_tools(self, tools: List[Tool]) -> 'AriumBuilder': + """Add multiple tools to the Arium.""" + self._tools.extend(tools) + return self + + def start_with(self, node: Union[Agent, Tool]) -> 'AriumBuilder': + """Set the starting node for the Arium.""" + self._start_node = node + return self + + def end_with(self, node: Union[Agent, Tool]) -> 'AriumBuilder': + """Add an ending node to the Arium.""" + if node not in self._end_nodes: + self._end_nodes.append(node) + return self + + def add_edge( + self, + from_node: Union[Agent, Tool], + to_nodes: List[Union[Agent, Tool]], + router: Optional[Callable] = None, + navigation_threshold: Dict[str, int] = {}, + ) -> 'AriumBuilder': + """Add an edge between nodes with an optional router function.""" + self._edges.append((from_node, to_nodes, router, navigation_threshold)) + return 
self + + def connect( + self, + from_node: Union[Agent, Tool], + to_node: Union[Agent, Tool], + navigation_threshold: Dict[str, int] = {}, + ) -> 'AriumBuilder': + """Simple connection between two nodes without a router.""" + return self.add_edge( + from_node, [to_node], navigation_threshold=navigation_threshold + ) + + def build(self) -> Arium: + """Build the Arium instance from the configured components.""" + # Use default memory if none provided + if self._memory is None: + self._memory = MessageMemory() + + # Create Arium instance + arium = Arium(self._memory) + + # Add all nodes + all_nodes = [] + all_nodes.extend(self._agents) + all_nodes.extend(self._tools) + + if not all_nodes: + raise ValueError('No agents or tools added to the Arium') + + arium.add_nodes(all_nodes) + + # Set start node + if self._start_node is None: + raise ValueError( + 'No start node specified. Use start_with() to set a start node.' + ) + + arium.start_at(self._start_node) + + # Add edges + for from_node, to_nodes, router, navigation_threshold in self._edges: + arium.add_edge( + from_node.name, + [node.name for node in to_nodes], + router, + navigation_threshold, + ) + + # Add end nodes + if not self._end_nodes: + raise ValueError('No end nodes specified. 
Use end_with() to add end nodes.') + + for end_node in self._end_nodes: + arium.add_end_to(end_node) + + # Compile the Arium + arium.compile() + + self._arium = arium + return arium + + async def build_and_run(self, inputs: List[Union[str, ImageMessage]]) -> List[dict]: + """Build the Arium and run it with the given inputs.""" + arium = self.build() + return await arium.run(inputs) + + def visualize( + self, output_path: str = 'arium_graph.png', title: str = 'Arium Workflow' + ) -> 'AriumBuilder': + """Generate a visualization of the Arium graph.""" + if self._arium is None: + self.build() + + self._arium.visualize_graph(output_path=output_path, graph_title=title) + return self + + def reset(self) -> 'AriumBuilder': + """Reset the builder to start fresh.""" + self._memory = None + self._agents = [] + self._tools = [] + self._start_node = None + self._end_nodes = [] + self._edges = [] + self._arium = None + return self + + +# Convenience function for creating a builder +def create_arium() -> AriumBuilder: + """Create a new AriumBuilder instance.""" + return AriumBuilder() diff --git a/flo_ai/flo_ai/arium/examples.py b/flo_ai/flo_ai/arium/examples.py new file mode 100644 index 00000000..cbdacc44 --- /dev/null +++ b/flo_ai/flo_ai/arium/examples.py @@ -0,0 +1,170 @@ +""" +Examples demonstrating how to use the AriumBuilder pattern for creating and running Arium workflows. 
+""" + +from typing import Literal +from flo_ai.arium import AriumBuilder, create_arium +from flo_ai.models.agent import Agent +from flo_ai.tool.base_tool import Tool +from flo_ai.arium.memory import MessageMemory + + +# Example 1: Simple Linear Workflow +async def example_linear_workflow(): + """Example of a simple linear workflow: Agent -> Tool -> Agent""" + + # Create some example agents and tools (these would be your actual implementations) + analyzer_agent = Agent(name='analyzer', prompt='Analyze the input') + processing_tool = Tool(name='processor') + summarizer_agent = Agent(name='summarizer', prompt='Summarize the results') + + # Build and run the workflow + result = await ( + AriumBuilder() + .add_agent(analyzer_agent) + .add_tool(processing_tool) + .add_agent(summarizer_agent) + .start_with(analyzer_agent) + .connect(analyzer_agent, processing_tool) + .connect(processing_tool, summarizer_agent) + .end_with(summarizer_agent) + .build_and_run(['Analyze this text']) + ) + + return result + + +# Example 2: Branching Workflow with Router +async def example_branching_workflow(): + """Example of a branching workflow with conditional routing""" + + # Create agents and tools + classifier_agent = Agent(name='classifier', prompt='Classify the input type') + text_processor = Tool(name='text_processor') + image_processor = Tool(name='image_processor') + final_agent = Agent(name='final', prompt='Provide final response') + + # Router function for conditional branching + def content_router(memory) -> Literal['text_processor', 'image_processor']: + # Simple example logic (in real use, this would analyze the memory) + last_message = memory[-1]['content'] + if 'image' in last_message.lower(): + return 'image_processor' + return 'text_processor' + + # Build the workflow + result = await ( + AriumBuilder() + .add_agent(classifier_agent) + .add_tool(text_processor) + .add_tool(image_processor) + .add_agent(final_agent) + .start_with(classifier_agent) + 
.add_edge(classifier_agent, [text_processor, image_processor], content_router) + .connect(text_processor, final_agent) + .connect(image_processor, final_agent) + .end_with(final_agent) + .build_and_run(['Process this content']) + ) + + return result + + +# Example 3: Complex Multi-Agent Workflow +async def example_complex_workflow(): + """Example of a more complex workflow with multiple agents and tools""" + + # Create multiple agents and tools + input_agent = Agent(name='input_handler', prompt='Handle initial input') + researcher_agent = Agent(name='researcher', prompt='Research the topic') + analyzer_agent = Agent(name='analyzer', prompt='Analyze findings') + writer_agent = Agent(name='writer', prompt='Write the final report') + + search_tool = Tool(name='search_tool') + data_tool = Tool(name='data_processor') + + # Router for deciding next step after analysis + def analysis_router(memory) -> Literal['writer', 'researcher']: + # Example logic: if we need more research, go back to researcher + # otherwise go to writer + return 'writer' # Simplified for example + + # Build complex workflow + arium = ( + AriumBuilder() + .add_agents([input_agent, researcher_agent, analyzer_agent, writer_agent]) + .add_tools([search_tool, data_tool]) + .with_memory(MessageMemory()) + .start_with(input_agent) + .connect(input_agent, researcher_agent) + .connect(researcher_agent, search_tool) + .connect(search_tool, data_tool) + .connect(data_tool, analyzer_agent) + .add_edge(analyzer_agent, [writer_agent, researcher_agent], analysis_router) + .end_with(writer_agent) + .build() + ) + + # You can also visualize the workflow + arium.visualize_graph( + output_path='complex_workflow.png', graph_title='Complex Multi-Agent Workflow' + ) + + # Run the workflow + result = await arium.run(['Research and write a report on AI trends']) + return result + + +# Example 4: Using the convenience function +async def example_convenience_function(): + """Example using the create_arium convenience 
function""" + + agent1 = Agent(name='agent1', prompt='First agent') + agent2 = Agent(name='agent2', prompt='Second agent') + + result = await ( + create_arium() + .add_agent(agent1) + .add_agent(agent2) + .start_with(agent1) + .connect(agent1, agent2) + .end_with(agent2) + .build_and_run(['Hello']) + ) + + return result + + +# Example 5: Building and reusing an Arium +async def example_build_and_reuse(): + """Example of building an Arium once and reusing it""" + + agent = Agent(name='echo_agent', prompt='Echo the input') + + # Build the Arium + arium = AriumBuilder().add_agent(agent).start_with(agent).end_with(agent).build() + + # Run it multiple times with different inputs + result1 = await arium.run(['First input']) + result2 = await arium.run(['Second input']) + + return result1, result2 + + +if __name__ == '__main__': + import asyncio + + # Run the examples + async def main(): + print('Running AriumBuilder examples...') + + # You can uncomment and run these examples + # result1 = await example_linear_workflow() + # result2 = await example_branching_workflow() + # result3 = await example_complex_workflow() + # result4 = await example_convenience_function() + # result5 = await example_build_and_reuse() + + print('Examples completed!') + + asyncio.run(main()) diff --git a/flo_ai/flo_ai/arium/memory.py b/flo_ai/flo_ai/arium/memory.py new file mode 100644 index 00000000..88e9f113 --- /dev/null +++ b/flo_ai/flo_ai/arium/memory.py @@ -0,0 +1,25 @@ +from abc import ABC, abstractmethod +from typing import TypeVar, Generic, List, Dict + +# Define the generic type variable +T = TypeVar('T') + + +class BaseMemory(ABC, Generic[T]): + @abstractmethod + def add(self, m: T): + pass + + def get(self) -> List[T]: + pass + + +class MessageMemory(BaseMemory[Dict[str, str]]): + def __init__(self): + self.messages = [] + + def add(self, message: Dict[str, str]): + self.messages.append(message) + + def get(self) -> List[Dict[str, str]]: + return self.messages diff --git 
a/flo_ai/flo_ai/arium/models.py b/flo_ai/flo_ai/arium/models.py new file mode 100644 index 00000000..43320222 --- /dev/null +++ b/flo_ai/flo_ai/arium/models.py @@ -0,0 +1,37 @@ +from dataclasses import dataclass, field +from typing import Callable, Dict, List +from functools import partial +from flo_ai.arium.memory import BaseMemory + + +def default_router( + to_node: str, + memory: BaseMemory, + navigation_thresholds: Dict[str, int] = {}, +) -> str: + nt = navigation_thresholds.get(to_node, None) + if nt and nt <= 0: + raise ValueError(f'Navigation threshold for {to_node} hit, in default router') + return to_node + + +@dataclass +class StartNode: + name = '__start__' + + +@dataclass +class EndNode: + name = '__end__' + + +@dataclass +class Edge: + router_fn: Callable | partial + to_nodes: List[str] + navigation_threshold: Dict[str, int] = field(default_factory=dict) + + def is_default_router(self) -> bool: + if isinstance(self.router_fn, partial): + return self.router_fn.func.__name__ == 'default_router' + return False diff --git a/flo_ai/flo_ai/llm/base_llm.py b/flo_ai/flo_ai/llm/base_llm.py index 74577a90..2a5888df 100644 --- a/flo_ai/flo_ai/llm/base_llm.py +++ b/flo_ai/flo_ai/llm/base_llm.py @@ -6,10 +6,10 @@ @dataclass class ImageMessage: - image_url: str - image_file_path: str - image_base64: str - mime_type: str + image_url: Optional[str] = None + image_file_path: Optional[str] = None + image_base64: Optional[str] = None + mime_type: Optional[str] = None class BaseLLM(ABC): diff --git a/flo_ai/flo_ai/llm/gemini_llm.py b/flo_ai/flo_ai/llm/gemini_llm.py index d01a8d1a..94670620 100644 --- a/flo_ai/flo_ai/llm/gemini_llm.py +++ b/flo_ai/flo_ai/llm/gemini_llm.py @@ -27,34 +27,37 @@ async def generate( ) -> Dict[str, Any]: # Convert messages to Gemini format # Gemini uses a simple content string format - content = '' + contents = [] + system_prompt = '' for msg in messages: role = msg['role'] message_content = msg['content'] if role == 'system': - content += 
f'System: {message_content}\n' - elif role == 'user': - content += f'User: {message_content}\n' - elif role == 'assistant': - content += f'Assistant: {message_content}\n' + system_prompt += f'{message_content}\n' + else: + contents.append(message_content) # Add output schema instruction if provided if output_schema: - content += f'\nPlease provide your response in JSON format according to this schema:\n{json.dumps(output_schema, indent=2)}\n' + contents += f'\nPlease provide your response in JSON format according to this schema:\n{json.dumps(output_schema, indent=2)}\n' # Add function information if provided if functions: - content += f'\nAvailable functions:\n{json.dumps(functions, indent=2)}\n' + contents += f'\nAvailable functions:\n{json.dumps(functions, indent=2)}\n' try: # Prepare generation config - generation_config = {'temperature': self.temperature, **self.kwargs} + generation_config = genai.types.GenerateContentConfig( + temperature=self.temperature, + system_instruction=system_prompt, + **self.kwargs, + ) # Make the API call response = self.client.models.generate_content( model=self.model, - contents=content, + contents=contents, config=generation_config if generation_config else None, ) diff --git a/flo_ai/flo_ai/models/agent.py b/flo_ai/flo_ai/models/agent.py index c57f675b..fd1cbc84 100644 --- a/flo_ai/flo_ai/models/agent.py +++ b/flo_ai/flo_ai/models/agent.py @@ -1,9 +1,9 @@ +import json from typing import Dict, Any, List, Optional from flo_ai.models.base_agent import BaseAgent, AgentType, ReasoningPattern from flo_ai.llm.base_llm import BaseLLM, ImageMessage from flo_ai.tool.base_tool import Tool, ToolExecutionError from flo_ai.models.agent_error import AgentError -import json from flo_ai.utils.logger import logger @@ -132,6 +132,7 @@ async def _run_with_tools(self, retry_count: int = 0) -> str: while tool_call_count < max_tool_calls: formatted_tools = self.llm.format_tools_for_llm(self.tools) + print(messages) response = await self.llm.generate( 
messages, functions=formatted_tools, @@ -170,7 +171,7 @@ async def _run_with_tools(self, retry_count: int = 0) -> str: { 'role': 'function', 'name': function_name, - 'content': str(function_response), + 'content': f'Here is the result of the tool call: \n {str(function_response)}', } ) diff --git a/flo_ai/flo_ai/utils/logger.py b/flo_ai/flo_ai/utils/logger.py index e2376466..a6ebfbc4 100644 --- a/flo_ai/flo_ai/utils/logger.py +++ b/flo_ai/flo_ai/utils/logger.py @@ -1,7 +1,7 @@ import logging import os -log_level = os.environ.get('LOG_LEVEL', 'DEBUG') +log_level = os.environ.get('LOG_LEVEL', 'INFO') logging.getLogger('uvicorn').setLevel(log_level) log_format = ( '%(asctime)s | %(levelname)-8s | %(name)s | %(filename)s:%(lineno)d | %(message)s' diff --git a/flo_ai/poetry.lock b/flo_ai/poetry.lock index 2e480bb3..8617815f 100644 --- a/flo_ai/poetry.lock +++ b/flo_ai/poetry.lock @@ -37,6 +37,32 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "anthropic" +version = "0.57.1" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "anthropic-0.57.1-py3-none-any.whl", hash = "sha256:33afc1f395af207d07ff1bffc0a3d1caac53c371793792569c5d2f09283ea306"}, + {file = "anthropic-0.57.1.tar.gz", hash = "sha256:7815dd92245a70d21f65f356f33fc80c5072eada87fb49437767ea2918b2c4b0"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.25.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + +[package.extras] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.6)"] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth[requests] (>=2,<3)"] + [[package]] name = "antiorm" version = "1.2.1" @@ -472,6 +498,99 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = 
"contourpy" +version = "1.3.2" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934"}, + {file = "contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989"}, + {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9be002b31c558d1ddf1b9b415b162c603405414bacd6932d031c5b5a8b757f0d"}, + {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d2e74acbcba3bfdb6d9d8384cdc4f9260cae86ed9beee8bd5f54fee49a430b9"}, + {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e259bced5549ac64410162adc973c5e2fb77f04df4a439d00b478e57a0e65512"}, + {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad687a04bc802cbe8b9c399c07162a3c35e227e2daccf1668eb1f278cb698631"}, + {file = "contourpy-1.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cdd22595308f53ef2f891040ab2b93d79192513ffccbd7fe19be7aa773a5e09f"}, + {file = "contourpy-1.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4f54d6a2defe9f257327b0f243612dd051cc43825587520b1bf74a31e2f6ef2"}, + {file = "contourpy-1.3.2-cp310-cp310-win32.whl", hash = "sha256:f939a054192ddc596e031e50bb13b657ce318cf13d264f095ce9db7dc6ae81c0"}, + {file = "contourpy-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c440093bbc8fc21c637c03bafcbef95ccd963bc6e0514ad887932c18ca2a759a"}, + {file = "contourpy-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a37a2fb93d4df3fc4c0e363ea4d16f83195fc09c891bc8ce072b9d084853445"}, + {file = "contourpy-1.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b7cd50c38f500bbcc9b6a46643a40e0913673f869315d8e70de0438817cb7773"}, + {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6658ccc7251a4433eebd89ed2672c2ed96fba367fd25ca9512aa92a4b46c4f1"}, + {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:70771a461aaeb335df14deb6c97439973d253ae70660ca085eec25241137ef43"}, + {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a887a6e8c4cd0897507d814b14c54a8c2e2aa4ac9f7686292f9769fcf9a6ab"}, + {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3859783aefa2b8355697f16642695a5b9792e7a46ab86da1118a4a23a51a33d7"}, + {file = "contourpy-1.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eab0f6db315fa4d70f1d8ab514e527f0366ec021ff853d7ed6a2d33605cf4b83"}, + {file = "contourpy-1.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d91a3ccc7fea94ca0acab82ceb77f396d50a1f67412efe4c526f5d20264e6ecd"}, + {file = "contourpy-1.3.2-cp311-cp311-win32.whl", hash = "sha256:1c48188778d4d2f3d48e4643fb15d8608b1d01e4b4d6b0548d9b336c28fc9b6f"}, + {file = "contourpy-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:5ebac872ba09cb8f2131c46b8739a7ff71de28a24c869bcad554477eb089a878"}, + {file = "contourpy-1.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4caf2bcd2969402bf77edc4cb6034c7dd7c0803213b3523f111eb7460a51b8d2"}, + {file = "contourpy-1.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82199cb78276249796419fe36b7386bd8d2cc3f28b3bc19fe2454fe2e26c4c15"}, + {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106fab697af11456fcba3e352ad50effe493a90f893fca6c2ca5c033820cea92"}, + {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d14f12932a8d620e307f715857107b1d1845cc44fdb5da2bc8e850f5ceba9f87"}, + {file = 
"contourpy-1.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:532fd26e715560721bb0d5fc7610fce279b3699b018600ab999d1be895b09415"}, + {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b383144cf2d2c29f01a1e8170f50dacf0eac02d64139dcd709a8ac4eb3cfe"}, + {file = "contourpy-1.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c49f73e61f1f774650a55d221803b101d966ca0c5a2d6d5e4320ec3997489441"}, + {file = "contourpy-1.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d80b2c0300583228ac98d0a927a1ba6a2ba6b8a742463c564f1d419ee5b211e"}, + {file = "contourpy-1.3.2-cp312-cp312-win32.whl", hash = "sha256:90df94c89a91b7362e1142cbee7568f86514412ab8a2c0d0fca72d7e91b62912"}, + {file = "contourpy-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c942a01d9163e2e5cfb05cb66110121b8d07ad438a17f9e766317bcb62abf73"}, + {file = "contourpy-1.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:de39db2604ae755316cb5967728f4bea92685884b1e767b7c24e983ef5f771cb"}, + {file = "contourpy-1.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f9e896f447c5c8618f1edb2bafa9a4030f22a575ec418ad70611450720b5b08"}, + {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71e2bd4a1c4188f5c2b8d274da78faab884b59df20df63c34f74aa1813c4427c"}, + {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de425af81b6cea33101ae95ece1f696af39446db9682a0b56daaa48cfc29f38f"}, + {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:977e98a0e0480d3fe292246417239d2d45435904afd6d7332d8455981c408b85"}, + {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:434f0adf84911c924519d2b08fc10491dd282b20bdd3fa8f60fd816ea0b48841"}, + {file = "contourpy-1.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c66c4906cdbc50e9cba65978823e6e00b45682eb09adbb78c9775b74eb222422"}, + {file = "contourpy-1.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8b7fc0cd78ba2f4695fd0a6ad81a19e7e3ab825c31b577f384aa9d7817dc3bef"}, + {file = "contourpy-1.3.2-cp313-cp313-win32.whl", hash = "sha256:15ce6ab60957ca74cff444fe66d9045c1fd3e92c8936894ebd1f3eef2fff075f"}, + {file = "contourpy-1.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e1578f7eafce927b168752ed7e22646dad6cd9bca673c60bff55889fa236ebf9"}, + {file = "contourpy-1.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0475b1f6604896bc7c53bb070e355e9321e1bc0d381735421a2d2068ec56531f"}, + {file = "contourpy-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c85bb486e9be652314bb5b9e2e3b0d1b2e643d5eec4992c0fbe8ac71775da739"}, + {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:745b57db7758f3ffc05a10254edd3182a2a83402a89c00957a8e8a22f5582823"}, + {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:970e9173dbd7eba9b4e01aab19215a48ee5dd3f43cef736eebde064a171f89a5"}, + {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6c4639a9c22230276b7bffb6a850dfc8258a2521305e1faefe804d006b2e532"}, + {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc829960f34ba36aad4302e78eabf3ef16a3a100863f0d4eeddf30e8a485a03b"}, + {file = "contourpy-1.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d32530b534e986374fc19eaa77fcb87e8a99e5431499949b828312bdcd20ac52"}, + {file = "contourpy-1.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e298e7e70cf4eb179cc1077be1c725b5fd131ebc81181bf0c03525c8abc297fd"}, + {file = "contourpy-1.3.2-cp313-cp313t-win32.whl", hash = "sha256:d0e589ae0d55204991450bb5c23f571c64fe43adaa53f93fc902a84c96f52fe1"}, + {file = "contourpy-1.3.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:78e9253c3de756b3f6a5174d024c4835acd59eb3f8e2ca13e775dbffe1558f69"}, + {file = "contourpy-1.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fd93cc7f3139b6dd7aab2f26a90dde0aa9fc264dbf70f6740d498a70b860b82c"}, + {file = "contourpy-1.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107ba8a6a7eec58bb475329e6d3b95deba9440667c4d62b9b6063942b61d7f16"}, + {file = "contourpy-1.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ded1706ed0c1049224531b81128efbd5084598f18d8a2d9efae833edbd2b40ad"}, + {file = "contourpy-1.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f5964cdad279256c084b69c3f412b7801e15356b16efa9d78aa974041903da0"}, + {file = "contourpy-1.3.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b65a95d642d4efa8f64ba12558fcb83407e58a2dfba9d796d77b63ccfcaff5"}, + {file = "contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5"}, + {file = "contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54"}, +] + +[package.dependencies] +numpy = ">=1.23" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.15.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs 
= ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + [[package]] name = "db" version = "0.1.1" @@ -640,6 +759,72 @@ redshift-connector = "^2.1.5" type = "directory" url = "../flo_ai_tools" +[[package]] +name = "fonttools" +version = "4.58.5" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fonttools-4.58.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d500d399aa4e92d969a0d21052696fa762385bb23c3e733703af4a195ad9f34c"}, + {file = "fonttools-4.58.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b00530b84f87792891874938bd42f47af2f7f4c2a1d70466e6eb7166577853ab"}, + {file = "fonttools-4.58.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5579fb3744dfec151b5c29b35857df83e01f06fe446e8c2ebaf1effd7e6cdce"}, + {file = "fonttools-4.58.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adf440deecfcc2390998e649156e3bdd0b615863228c484732dc06ac04f57385"}, + {file = "fonttools-4.58.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a81769fc4d473c808310c9ed91fbe01b67f615e3196fb9773e093939f59e6783"}, + {file = "fonttools-4.58.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0162a6a37b0ca70d8505311d541e291cd6cab54d1a986ae3d2686c56c0581e8f"}, + {file = "fonttools-4.58.5-cp310-cp310-win32.whl", hash = "sha256:1cde303422198fdc7f502dbdf1bf65306166cdb9446debd6c7fb826b4d66a530"}, + {file = "fonttools-4.58.5-cp310-cp310-win_amd64.whl", hash = "sha256:75cf8c2812c898dd3d70d62b2b768df4eeb524a83fb987a512ddb3863d6a8c54"}, + {file = "fonttools-4.58.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cda226253bf14c559bc5a17c570d46abd70315c9a687d91c0e01147f87736182"}, + {file = "fonttools-4.58.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83a96e4a4e65efd6c098da549ec34f328f08963acd2d7bc910ceba01d2dc73e6"}, + {file = 
"fonttools-4.58.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2d172b92dff59ef8929b4452d5a7b19b8e92081aa87bfb2d82b03b1ff14fc667"}, + {file = "fonttools-4.58.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0bfddfd09aafbbfb3bd98ae67415fbe51eccd614c17db0c8844fe724fbc5d43d"}, + {file = "fonttools-4.58.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cfde5045f1bc92ad11b4b7551807564045a1b38cb037eb3c2bc4e737cd3a8d0f"}, + {file = "fonttools-4.58.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3515ac47a9a5ac025d2899d195198314023d89492340ba86e4ba79451f7518a8"}, + {file = "fonttools-4.58.5-cp311-cp311-win32.whl", hash = "sha256:9f7e2ab9c10b6811b4f12a0768661325a48e664ec0a0530232c1605896a598db"}, + {file = "fonttools-4.58.5-cp311-cp311-win_amd64.whl", hash = "sha256:126c16ec4a672c9cb5c1c255dc438d15436b470afc8e9cac25a2d39dd2dc26eb"}, + {file = "fonttools-4.58.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c3af3fefaafb570a03051a0d6899b8374dcf8e6a4560e42575843aef33bdbad6"}, + {file = "fonttools-4.58.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:688137789dbd44e8757ad77b49a771539d8069195ffa9a8bcf18176e90bbd86d"}, + {file = "fonttools-4.58.5-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af65836cf84cd7cb882d0b353bdc73643a497ce23b7414c26499bb8128ca1af"}, + {file = "fonttools-4.58.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2d79cfeb456bf438cb9fb87437634d4d6f228f27572ca5c5355e58472d5519d"}, + {file = "fonttools-4.58.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0feac9dda9a48a7a342a593f35d50a5cee2dbd27a03a4c4a5192834a4853b204"}, + {file = "fonttools-4.58.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36555230e168511e83ad8637232268649634b8dfff6ef58f46e1ebc057a041ad"}, + {file = "fonttools-4.58.5-cp312-cp312-win32.whl", 
hash = "sha256:26ec05319353842d127bd02516eacb25b97ca83966e40e9ad6fab85cab0576f4"}, + {file = "fonttools-4.58.5-cp312-cp312-win_amd64.whl", hash = "sha256:778a632e538f82c1920579c0c01566a8f83dc24470c96efbf2fbac698907f569"}, + {file = "fonttools-4.58.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f4b6f1360da13cecc88c0d60716145b31e1015fbe6a59e32f73a4404e2ea92cf"}, + {file = "fonttools-4.58.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a036822e915692aa2c03e2decc60f49a8190f8111b639c947a4f4e5774d0d7a"}, + {file = "fonttools-4.58.5-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d7709fcf4577b0f294ee6327088884ca95046e1eccde87c53bbba4d5008541"}, + {file = "fonttools-4.58.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9b5099ca99b79d6d67162778b1b1616fc0e1de02c1a178248a0da8d78a33852"}, + {file = "fonttools-4.58.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3f2c05a8d82a4d15aebfdb3506e90793aea16e0302cec385134dd960647a36c0"}, + {file = "fonttools-4.58.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79f0c4b1cc63839b61deeac646d8dba46f8ed40332c2ac1b9997281462c2e4ba"}, + {file = "fonttools-4.58.5-cp313-cp313-win32.whl", hash = "sha256:a1a9a2c462760976882131cbab7d63407813413a2d32cd699e86a1ff22bf7aa5"}, + {file = "fonttools-4.58.5-cp313-cp313-win_amd64.whl", hash = "sha256:bca61b14031a4b7dc87e14bf6ca34c275f8e4b9f7a37bc2fe746b532a924cf30"}, + {file = "fonttools-4.58.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:082410bc40014db55be5457836043f0dd1e6b3817c7d11a0aeb44eaa862890af"}, + {file = "fonttools-4.58.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0b0983be58d8c8acb11161fdd3b43d64015cef8c3d65ad9289a252243b236128"}, + {file = "fonttools-4.58.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5a0e28fb6abc31ba45a2d11dc2fe826e5a074013d13b7b447b441e8236e5f1c"}, + 
{file = "fonttools-4.58.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d506652abc285934ee949a5f3a952c5d52a09257bc2ba44a92db3ec2804c76fe"}, + {file = "fonttools-4.58.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9e2d71676025dd74a21d682be36d4846aa03644c619f2c2d695a11a7262433f6"}, + {file = "fonttools-4.58.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb46a73759efc8a7eca40203843241cd3c79aa983ed7f7515548ed3d82073761"}, + {file = "fonttools-4.58.5-cp39-cp39-win32.whl", hash = "sha256:bf09f14d73a18c62eb9ad1cac98a37569241ba3cd5789cc578286c128cc29f7f"}, + {file = "fonttools-4.58.5-cp39-cp39-win_amd64.whl", hash = "sha256:8ddb7c0c3e91b187acc1bed31857376926569a18a348ac58d6a71eb8a6b22393"}, + {file = "fonttools-4.58.5-py3-none-any.whl", hash = "sha256:e48a487ed24d9b611c5c4b25db1e50e69e9854ca2670e39a3486ffcd98863ec4"}, + {file = "fonttools-4.58.5.tar.gz", hash = "sha256:b2a35b0a19f1837284b3a23dd64fd7761b8911d50911ecd2bdbaf5b2d1b5df9c"}, +] + +[package.extras] +all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr ; sys_platform == \"darwin\""] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] +woff = ["brotli 
(>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] + [[package]] name = "gitdb" version = "4.0.12" @@ -1169,6 +1354,96 @@ traitlets = ">=5.3" docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] +[[package]] +name = "kiwisolver" +version = "1.4.8" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, + {file = "kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed"}, + {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f"}, + {file = 
"kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605"}, + {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751"}, + {file = "kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561"}, + {file = "kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6"}, + {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc"}, + {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34"}, + {file = "kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31"}, + {file = "kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb"}, + {file = 
"kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d"}, + {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50"}, + {file = "kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:1c8ceb754339793c24aee1c9fb2485b5b1f5bb1c2c214ff13368431e51fc9a09"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a62808ac74b5e55a04a408cda6156f986cefbcf0ada13572696b507cc92fa1"}, + {file = "kiwisolver-1.4.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68269e60ee4929893aad82666821aaacbd455284124817af45c11e50a4b42e3c"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d142fba9c464bc3bbfeff15c96eab0e7310343d6aefb62a79d51421fcc5f1b"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc373e0eef45b59197de815b1b28ef89ae3955e7722cc9710fb91cd77b7f47"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77e6f57a20b9bd4e1e2cedda4d0b986ebd0216236f0106e55c28aea3d3d69b16"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08e77738ed7538f036cd1170cbed942ef749137b1311fa2bbe2a7fda2f6bf3cc"}, + {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5ce1e481a74b44dd5e92ff03ea0cb371ae7a0268318e202be06c8f04f4f1246"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc2ace710ba7c1dfd1a3b42530b62b9ceed115f19a1656adefce7b1782a37794"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3452046c37c7692bd52b0e752b87954ef86ee2224e624ef7ce6cb21e8c41cc1b"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e9a60b50fe8b2ec6f448fe8d81b07e40141bfced7f896309df271a0b92f80f3"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:918139571133f366e8362fa4a297aeba86c7816b7ecf0bc79168080e2bd79957"}, + {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:e063ef9f89885a1d68dd8b2e18f5ead48653176d10a0e324e3b0030e3a69adeb"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_amd64.whl", hash = "sha256:a17b7c4f5b2c51bb68ed379defd608a03954a1845dfed7cc0117f1cc8a9b7fd2"}, + {file = "kiwisolver-1.4.8-cp313-cp313-win_arm64.whl", hash = "sha256:3cd3bc628b25f74aedc6d374d5babf0166a92ff1317f46267f12d2ed54bc1d30"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:370fd2df41660ed4e26b8c9d6bbcad668fbe2560462cba151a721d49e5b6628c"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:84a2f830d42707de1d191b9490ac186bf7997a9495d4e9072210a1296345f7dc"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a3ad337add5148cf51ce0b55642dc551c0b9d6248458a757f98796ca7348712"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7506488470f41169b86d8c9aeff587293f530a23a23a49d6bc64dab66bedc71e"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f0121b07b356a22fb0414cec4666bbe36fd6d0d759db3d37228f496ed67c880"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6d6bd87df62c27d4185de7c511c6248040afae67028a8a22012b010bc7ad062"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:291331973c64bb9cce50bbe871fb2e675c4331dab4f31abe89f175ad7679a4d7"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:893f5525bb92d3d735878ec00f781b2de998333659507d29ea4466208df37bed"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b47a465040146981dc9db8647981b8cb96366fbc8d452b031e4f8fdffec3f26d"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:99cea8b9dd34ff80c521aef46a1dddb0dcc0283cf18bde6d756f1e6f31772165"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:151dffc4865e5fe6dafce5480fab84f950d14566c480c08a53c663a0020504b6"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:577facaa411c10421314598b50413aa1ebcf5126f704f1e5d72d7e4e9f020d90"}, + {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:be4816dc51c8a471749d664161b434912eee82f2ea66bd7628bd14583a833e85"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b"}, + {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b"}, + {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"}, +] + [[package]] name = "lxml" version = "5.4.0" @@ -1389,6 +1664,64 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +[[package]] +name = "matplotlib" +version = "3.10.3" +description = "Python plotting package" +optional = false 
+python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "matplotlib-3.10.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:213fadd6348d106ca7db99e113f1bea1e65e383c3ba76e8556ba4a3054b65ae7"}, + {file = "matplotlib-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3bec61cb8221f0ca6313889308326e7bb303d0d302c5cc9e523b2f2e6c73deb"}, + {file = "matplotlib-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c21ae75651c0231b3ba014b6d5e08fb969c40cdb5a011e33e99ed0c9ea86ecb"}, + {file = "matplotlib-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e39755580b08e30e3620efc659330eac5d6534ab7eae50fa5e31f53ee4e30"}, + {file = "matplotlib-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf4636203e1190871d3a73664dea03d26fb019b66692cbfd642faafdad6208e8"}, + {file = "matplotlib-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:fd5641a9bb9d55f4dd2afe897a53b537c834b9012684c8444cc105895c8c16fd"}, + {file = "matplotlib-3.10.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0ef061f74cd488586f552d0c336b2f078d43bc00dc473d2c3e7bfee2272f3fa8"}, + {file = "matplotlib-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96985d14dc5f4a736bbea4b9de9afaa735f8a0fc2ca75be2fa9e96b2097369d"}, + {file = "matplotlib-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5f0283da91e9522bdba4d6583ed9d5521566f63729ffb68334f86d0bb98049"}, + {file = "matplotlib-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdfa07c0ec58035242bc8b2c8aae37037c9a886370eef6850703d7583e19964b"}, + {file = "matplotlib-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c0b9849a17bce080a16ebcb80a7b714b5677d0ec32161a2cc0a8e5a6030ae220"}, + {file = "matplotlib-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:eef6ed6c03717083bc6d69c2d7ee8624205c29a8e6ea5a31cd3492ecdbaee1e1"}, + {file = "matplotlib-3.10.3-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:0ab1affc11d1f495ab9e6362b8174a25afc19c081ba5b0775ef00533a4236eea"}, + {file = "matplotlib-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2a818d8bdcafa7ed2eed74487fdb071c09c1ae24152d403952adad11fa3c65b4"}, + {file = "matplotlib-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748ebc3470c253e770b17d8b0557f0aa85cf8c63fd52f1a61af5b27ec0b7ffee"}, + {file = "matplotlib-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed70453fd99733293ace1aec568255bc51c6361cb0da94fa5ebf0649fdb2150a"}, + {file = "matplotlib-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dbed9917b44070e55640bd13419de83b4c918e52d97561544814ba463811cbc7"}, + {file = "matplotlib-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:cf37d8c6ef1a48829443e8ba5227b44236d7fcaf7647caa3178a4ff9f7a5be05"}, + {file = "matplotlib-3.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9f2efccc8dcf2b86fc4ee849eea5dcaecedd0773b30f47980dc0cbeabf26ec84"}, + {file = "matplotlib-3.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ddbba06a6c126e3301c3d272a99dcbe7f6c24c14024e80307ff03791a5f294e"}, + {file = "matplotlib-3.10.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748302b33ae9326995b238f606e9ed840bf5886ebafcb233775d946aa8107a15"}, + {file = "matplotlib-3.10.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80fcccbef63302c0efd78042ea3c2436104c5b1a4d3ae20f864593696364ac7"}, + {file = "matplotlib-3.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:55e46cbfe1f8586adb34f7587c3e4f7dedc59d5226719faf6cb54fc24f2fd52d"}, + {file = "matplotlib-3.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:151d89cb8d33cb23345cd12490c76fd5d18a56581a16d950b48c6ff19bb2ab93"}, + {file = "matplotlib-3.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c26dd9834e74d164d06433dc7be5d75a1e9890b926b3e57e74fa446e1a62c3e2"}, + {file = 
"matplotlib-3.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:24853dad5b8c84c8c2390fc31ce4858b6df504156893292ce8092d190ef8151d"}, + {file = "matplotlib-3.10.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f7878214d369d7d4215e2a9075fef743be38fa401d32e6020bab2dfabaa566"}, + {file = "matplotlib-3.10.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6929fc618cb6db9cb75086f73b3219bbb25920cb24cee2ea7a12b04971a4158"}, + {file = "matplotlib-3.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c7818292a5cc372a2dc4c795e5c356942eb8350b98ef913f7fda51fe175ac5d"}, + {file = "matplotlib-3.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4f23ffe95c5667ef8a2b56eea9b53db7f43910fa4a2d5472ae0f72b64deab4d5"}, + {file = "matplotlib-3.10.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:86ab63d66bbc83fdb6733471d3bff40897c1e9921cba112accd748eee4bce5e4"}, + {file = "matplotlib-3.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a48f9c08bf7444b5d2391a83e75edb464ccda3c380384b36532a0962593a1751"}, + {file = "matplotlib-3.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb73d8aa75a237457988f9765e4dfe1c0d2453c5ca4eabc897d4309672c8e014"}, + {file = "matplotlib-3.10.3.tar.gz", hash = "sha256:2f82d2c5bb7ae93aaaa4cd42aca65d76ce6376f83304fa3a630b569aca274df0"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1441,6 +1774,49 @@ files = [ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] +[[package]] +name = 
"networkx" +version = "3.4.2" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, + {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, +] + +[package.extras] +default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] + +[[package]] +name = "networkx" +version = "3.5" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, + {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, +] + +[package.extras] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext 
(>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + [[package]] name = "nodeenv" version = "1.9.1" @@ -1459,7 +1835,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1533,7 +1909,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -2184,6 +2560,21 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pypdf" 
version = "4.3.1" @@ -2255,7 +2646,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2766,7 +3157,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -3225,4 +3616,4 @@ requests = ">=2.0.0,<3.0.0" [metadata] lock-version = "2.1" python-versions = ">=3.10,<4.0" -content-hash = "6be7804c087d66ec92ce52fc606c00fc63d4ea92abbbbdf5413fb5c2579c50ab" +content-hash = "5bf507493ea56e65677e99bf1afd428d12fa4473b1cf0ef47977177c75de2d00" diff --git a/flo_ai/pyproject.toml b/flo_ai/pyproject.toml index 318b3e6d..3606c739 100644 --- a/flo_ai/pyproject.toml +++ b/flo_ai/pyproject.toml @@ -16,6 +16,10 @@ pillow = "^10.3.0" pydantic = "^2.9.2" openai = "^1.77.0" google-genai = "^1.25.0" +# TODO: make into optional dependency +matplotlib = "^3.7.0" +networkx = "^3.0" +anthropic = "^0.57.1" [tool.poetry.group.dev.dependencies] diff --git a/flo_ai/tests/test_arium_builder.py b/flo_ai/tests/test_arium_builder.py new file mode 100644 index 00000000..4e6b1308 --- /dev/null +++ b/flo_ai/tests/test_arium_builder.py @@ -0,0 +1,210 @@ +""" +Basic tests for the AriumBuilder to ensure it works correctly. 
+""" + +import pytest +from unittest.mock import Mock +from flo_ai.arium.builder import AriumBuilder, create_arium +from flo_ai.arium.memory import MessageMemory +from flo_ai.models.agent import Agent +from flo_ai.tool.base_tool import Tool + + +class TestAriumBuilder: + def test_builder_initialization(self): + """Test that builder initializes correctly""" + builder = AriumBuilder() + assert builder._memory is None + assert builder._agents == [] + assert builder._tools == [] + assert builder._start_node is None + assert builder._end_nodes == [] + assert builder._edges == [] + assert builder._arium is None + + def test_create_arium_convenience_function(self): + """Test the create_arium convenience function""" + builder = create_arium() + assert isinstance(builder, AriumBuilder) + + def test_add_agent(self): + """Test adding a single agent""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'test_agent' + + result = builder.add_agent(agent) + assert result is builder # Should return self for chaining + assert agent in builder._agents + + def test_add_agents(self): + """Test adding multiple agents""" + builder = AriumBuilder() + agents = [Mock(spec=Agent) for _ in range(3)] + for i, agent in enumerate(agents): + agent.name = f'agent_{i}' + + result = builder.add_agents(agents) + assert result is builder + assert all(agent in builder._agents for agent in agents) + + def test_add_tool(self): + """Test adding a single tool""" + builder = AriumBuilder() + tool = Mock(spec=Tool) + tool.name = 'test_tool' + + result = builder.add_tool(tool) + assert result is builder + assert tool in builder._tools + + def test_add_tools(self): + """Test adding multiple tools""" + builder = AriumBuilder() + tools = [Mock(spec=Tool) for _ in range(3)] + for i, tool in enumerate(tools): + tool.name = f'tool_{i}' + + result = builder.add_tools(tools) + assert result is builder + assert all(tool in builder._tools for tool in tools) + + def test_with_memory(self): + """Test 
setting custom memory""" + builder = AriumBuilder() + memory = Mock(spec=MessageMemory) + + result = builder.with_memory(memory) + assert result is builder + assert builder._memory is memory + + def test_start_with(self): + """Test setting start node""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'start_agent' + + result = builder.start_with(agent) + assert result is builder + assert builder._start_node is agent + + def test_end_with(self): + """Test adding end node""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'end_agent' + + result = builder.end_with(agent) + assert result is builder + assert agent in builder._end_nodes + + # Test that duplicate end nodes aren't added + builder.end_with(agent) + assert builder._end_nodes.count(agent) == 1 + + def test_connect(self): + """Test simple connection between nodes""" + builder = AriumBuilder() + agent1 = Mock(spec=Agent) + agent1.name = 'agent1' + agent2 = Mock(spec=Agent) + agent2.name = 'agent2' + + result = builder.connect(agent1, agent2) + assert result is builder + assert (agent1, [agent2], None) in builder._edges + + def test_add_edge(self): + """Test adding edge with router""" + builder = AriumBuilder() + agent1 = Mock(spec=Agent) + agent1.name = 'agent1' + agent2 = Mock(spec=Agent) + agent2.name = 'agent2' + + def router(memory): + return 'agent2' + + result = builder.add_edge(agent1, [agent2], router) + assert result is builder + assert (agent1, [agent2], router) in builder._edges + + def test_reset(self): + """Test resetting the builder""" + builder = AriumBuilder() + + # Add some data + agent = Mock(spec=Agent) + agent.name = 'test_agent' + builder.add_agent(agent) + builder.start_with(agent) + + # Reset and verify everything is cleared + result = builder.reset() + assert result is builder + assert builder._memory is None + assert builder._agents == [] + assert builder._tools == [] + assert builder._start_node is None + assert builder._end_nodes == [] + 
assert builder._edges == [] + assert builder._arium is None + + def test_build_validation_no_nodes(self): + """Test that build fails when no nodes are added""" + builder = AriumBuilder() + + with pytest.raises(ValueError, match='No agents or tools added'): + builder.build() + + def test_build_validation_no_start_node(self): + """Test that build fails when no start node is specified""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'test_agent' + builder.add_agent(agent) + + with pytest.raises(ValueError, match='No start node specified'): + builder.build() + + def test_build_validation_no_end_nodes(self): + """Test that build fails when no end nodes are specified""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'test_agent' + builder.add_agent(agent) + builder.start_with(agent) + + with pytest.raises(ValueError, match='No end nodes specified'): + builder.build() + + def test_method_chaining(self): + """Test that all methods return self for chaining""" + builder = AriumBuilder() + agent = Mock(spec=Agent) + agent.name = 'test_agent' + tool = Mock(spec=Tool) + tool.name = 'test_tool' + memory = Mock(spec=MessageMemory) + + # This should not raise any errors and should work with chaining + result = ( + builder.with_memory(memory) + .add_agent(agent) + .add_tool(tool) + .start_with(agent) + .connect(agent, tool) + .end_with(tool) + .reset() + ) + + assert result is builder + + +if __name__ == '__main__': + # Run a simple test + test_builder = TestAriumBuilder() + test_builder.test_builder_initialization() + test_builder.test_add_agent() + test_builder.test_method_chaining() + print('Basic tests passed!') diff --git a/flo_ai/test/test_flo_tool.py b/flo_ai/tests/test_flo_tool.py similarity index 100% rename from flo_ai/test/test_flo_tool.py rename to flo_ai/tests/test_flo_tool.py