diff --git a/flo_ai/examples/chat_history.py b/flo_ai/examples/chat_history.py
new file mode 100644
index 00000000..028828c1
--- /dev/null
+++ b/flo_ai/examples/chat_history.py
@@ -0,0 +1,43 @@
+import asyncio
+from typing import Any
+from flo_ai.builder.agent_builder import AgentBuilder
+from flo_ai.llm import OpenAI
+from flo_ai.models.agent import Agent
+from flo_ai.models.chat_message import ChatMessage
+
+
+async def main() -> None:
+    # Create a simple conversational agent
+    agent: Agent = (
+        AgentBuilder()
+        .with_name('Math Tutor')
+        .with_prompt('You are a helpful math tutor.')
+        .with_llm(OpenAI(model='gpt-4o-mini'))
+        .build()
+    )
+
+    response: Any = await agent.run(
+        [
+            ChatMessage(
+                role='user', content='What is the formula for the area of a circle?'
+            ),
+            ChatMessage(
+                role='assistant',
+                content='The formula for the area of a circle is πr^2.',
+            ),
+            ChatMessage(
+                role='user', content='What is the formula for the area of a rectangle?'
+            ),
+            ChatMessage(
+                role='assistant',
+                content='The formula for the area of a rectangle is length * width.',
+            ),
+            ChatMessage(
+                role='user', content='What is the formula for the area of a triangle?'
+            ),
+        ]
+    )
+    print(f'Response: {response}')
+
+
+asyncio.run(main())
diff --git a/flo_ai/flo_ai/models/__init__.py b/flo_ai/flo_ai/models/__init__.py
index 2f7c5d80..ff2b0df4 100644
--- a/flo_ai/flo_ai/models/__init__.py
+++ b/flo_ai/flo_ai/models/__init__.py
@@ -6,6 +6,7 @@
 from .agent_error import AgentError
 from .base_agent import BaseAgent, AgentType, ReasoningPattern
 from .document import DocumentMessage, DocumentType
+from .chat_message import ChatMessage
 
 __all__ = [
     'Agent',
@@ -15,4 +16,5 @@
     'ReasoningPattern',
     'DocumentMessage',
     'DocumentType',
+    'ChatMessage',
 ]
diff --git a/flo_ai/flo_ai/models/agent.py b/flo_ai/flo_ai/models/agent.py
index de203acf..3d133e2d 100644
--- a/flo_ai/flo_ai/models/agent.py
+++ b/flo_ai/flo_ai/models/agent.py
@@ -3,6 +3,7 @@
 from flo_ai.models.base_agent import BaseAgent, AgentType, ReasoningPattern
 from flo_ai.llm.base_llm import BaseLLM, ImageMessage
 from flo_ai.models.document import DocumentMessage
+from flo_ai.models.chat_message import ChatMessage
 from flo_ai.tool.base_tool import Tool, ToolExecutionError
 from flo_ai.models.agent_error import AgentError
 from flo_ai.utils.logger import logger
@@ -57,7 +58,7 @@ def __init__(
     @trace_agent_execution()
     async def run(
         self,
-        inputs: List[str | ImageMessage | DocumentMessage] | str,
+        inputs: List[str | ImageMessage | DocumentMessage | ChatMessage] | str,
         variables: Optional[Dict[str, Any]] = None,
     ) -> str:
         variables = variables or {}
@@ -87,6 +88,9 @@ async def run(
             elif isinstance(input, DocumentMessage):
                 formatted_doc = await self.llm.format_document_in_message(input)
                 self.add_to_history('user', formatted_doc)
+            elif isinstance(input, ChatMessage):
+                resolved_content = resolve_variables(input.content, variables)
+                self.add_to_history(input.role, resolved_content)
             else:
                 # Resolve variables in text input
                 resolved_input = resolve_variables(input, variables)
@@ -103,6 +107,8 @@
                 elif isinstance(input, DocumentMessage):
                     formatted_doc = await self.llm.format_document_in_message(input)
                     self.add_to_history('user', formatted_doc)
+                elif isinstance(input, ChatMessage):
+                    self.add_to_history(input.role, input.content)
                 else:
                     self.add_to_history('user', input)
 
@@ -129,7 +135,6 @@ async def _run_conversational(
             if self.reasoning_pattern == ReasoningPattern.COT
             else resolve_variables(self.system_prompt, variables)
         )
-
         messages = [
             {
                 'role': 'system',
diff --git a/flo_ai/flo_ai/models/chat_message.py b/flo_ai/flo_ai/models/chat_message.py
new file mode 100644
index 00000000..db58f7d6
--- /dev/null
+++ b/flo_ai/flo_ai/models/chat_message.py
@@ -0,0 +1,8 @@
+from typing import Literal
+from dataclasses import dataclass
+
+
+@dataclass
+class ChatMessage:
+    role: Literal['user', 'assistant']
+    content: str