Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 5 additions & 9 deletions .github/workflows/python-app.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ name: Python application

on:
push:
branches: [ "main" ]
branches: [ "main", "dev" ]
pull_request:
branches: [ "main" ]

Expand All @@ -26,14 +26,10 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install poetry
poetry lock
poetry install
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest
poetry run pytest
3 changes: 3 additions & 0 deletions src/api/controllers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .chat import new_message as chat_new_message

__all__ = ["chat_new_message"]
23 changes: 23 additions & 0 deletions src/api/controllers/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from src.infrastructure.database import MongoDB, get_service_details
from src.infrastructure.config import LLM
from src.services import CustomChat


async def new_message(
    db: MongoDB,
    model: LLM,
    message: str,
    service_name: str,
) -> str:
    """Handle one chat message: resolve the service's system prompt and run the model.

    Args:
        db: MongoDB handle used to look up per-service configuration.
        model: instantiated LLM backend (see the LLM factory).
        message: the user's message text.
        service_name: key used to fetch service details; may have no match.

    Returns:
        The model's response to the message.
    """
    # Fall back to a generic prompt when the service has no stored details.
    sys_prompt = "You are a helpful assistant. Be kind!"
    details = get_service_details(service_name, db)
    if details:
        sys_prompt = details["prompt"]

    chat_session = CustomChat(model=model, sys_prompt=sys_prompt)
    return await chat_session(message)
3 changes: 3 additions & 0 deletions src/api/models/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .api import APIResponse, APIRequest

__all__ = ["APIResponse", "APIRequest"]
14 changes: 14 additions & 0 deletions src/api/models/api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from typing import Optional
from pydantic import BaseModel


class APIResponse(BaseModel):
    """Standard response envelope returned by every API route."""

    # Application-level status code (mirrors HTTP semantics: 200 success, 500 error).
    status_code: int
    # Human-readable status or error text; None on the normal success path.
    status_message: Optional[str] = None
    # Route payload, e.g. {"user": ..., "ai": ...} from the chat route.
    response: Optional[dict] = None


class APIRequest(BaseModel):
    """Request body accepted by the chat endpoints."""

    # The user's message text.
    message: str
    # Identifier of the requesting user.
    user_id: str
    # Optional service key used to select a system prompt; a generic
    # prompt is used when omitted or unknown.
    service_name: Optional[str] = None
44 changes: 44 additions & 0 deletions src/api/routes/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
from fastapi import APIRouter, status, Request, Depends

from src.api.models import APIResponse, APIRequest
from src.api.controllers import chat_new_message


# Router for all chat endpoints; mounted under the /chat prefix.
router = APIRouter(
    prefix="/chat",
    tags=["chat"],
    # dependencies=[Depends(validate_user)]
)


@router.get("/", status_code=status.HTTP_200_OK)
async def router_test() -> APIResponse:
    """Health-check endpoint confirming the chat router is mounted."""
    return APIResponse(status_code=200, status_message="-- CHAT ROUTER WORKING! --")


@router.post("/new_message", status_code=status.HTTP_200_OK)
async def new_message(api_request: APIRequest, req: Request) -> APIResponse:
    """Forward a user message to the chat controller and wrap the result.

    NOTE(review): failures are reported via the body's status_code while the
    HTTP status stays 200 — confirm this is the intended API contract.
    """
    try:
        ai_reply = await chat_new_message(
            req.app.database,
            req.app.llm,
            api_request.message,
            api_request.service_name,
        )
    except Exception as e:
        # Boundary handler: surface the error in the response envelope.
        return APIResponse(status_code=500, status_message=f"Error: {e}")

    return APIResponse(
        status_code=200,
        response={"user": api_request.message, "ai": ai_reply},
    )
4 changes: 4 additions & 0 deletions src/infrastructure/config/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from .settings import settings
from .llm import LLM

__all__ = ["settings", "LLM"]
31 changes: 31 additions & 0 deletions src/infrastructure/config/llm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
from .settings import settings

from langchain_ollama.llms import OllamaLLM
from langchain_openai import ChatOpenAI


class LLM:
    """Factory returning a concrete LangChain model for a backend name.

    `LLM("ollama")` / `LLM("openai")` return an OllamaLLM / ChatOpenAI
    configured from `settings`; callers receive the model instance itself,
    not an `LLM` object (hence `__new__`).
    """

    def __new__(cls, model_name: str):
        """Instantiate the backend selected by `model_name`.

        Raises:
            ValueError: if `model_name` is unsupported, or if the backend
                fails to instantiate (original exception chained as cause).
        """
        try:
            if model_name == "ollama":
                return OllamaLLM(
                    model=settings.MODEL_NAME,
                    base_url=settings.MODEL_URL,
                    temperature=settings.MODEL_TEMPERATURE
                )

            elif model_name == "openai":
                return ChatOpenAI(
                    model=settings.MODEL_NAME,
                    base_url=settings.MODEL_URL,
                    temperature=settings.MODEL_TEMPERATURE,
                    api_key=settings.MODEL_API_KEY
                )
            # More models can be added here
        except Exception as e:
            # Chain the cause so the underlying error stays visible.
            raise ValueError(
                f"Problem instantiating the model {model_name}: {e}"
            ) from e

        # Raised OUTSIDE the try: previously this ValueError was caught by the
        # broad except above and re-wrapped as "Problem instantiating...",
        # masking the real reason (unsupported name).
        raise ValueError(f"Model {model_name} not supported")
33 changes: 33 additions & 0 deletions src/infrastructure/config/settings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application configuration loaded from the environment / .env file."""

    # MongoDB
    MONGO_USER: str
    MONGO_PASSWORD: str
    MONGO_HOST: str = "localhost"
    MONGO_PORT: str = "27017"
    MONGO_DB: str

    # ChromaDB
    CHROMA_HOST: str = "localhost"
    CHROMA_PORT: str = "8000"
    CHROMA_DB: str

    # General Settings
    TIMEZONE: str = "America/Sao_Paulo"

    # LLM
    MODEL: str = "ollama"  # backend selector: "ollama" or "openai" (see LLM factory)
    MODEL_NAME: str = "llama3"
    MODEL_URL: str = "http://localhost:11434"
    MODEL_TEMPERATURE: float = 0.2
    MODEL_API_KEY: str = ''  # only needed for the "openai" backend

    class Config:
        # Read variables from a local .env file; ignore unrecognized keys.
        env_file = ".env"
        extra = "ignore"


# Module-level singleton; import `settings` rather than re-instantiating.
settings = Settings()
3 changes: 3 additions & 0 deletions src/services/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .custom_chat.chat import CustomChat

__all__ = ["CustomChat"]
23 changes: 23 additions & 0 deletions src/services/custom_chat/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from src.infrastructure.config import LLM
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import BaseTool
from typing import List


class CustomChat:
    """Minimal chat pipeline: a fixed system prompt followed by the user's message."""

    def __init__(self, model: LLM, sys_prompt: str):
        """Build the prompt-template -> model chain once, up front.

        Args:
            model: instantiated LangChain-compatible model (see LLM factory).
            sys_prompt: system prompt injected before every user message.
        """
        self.model = model
        self.sys_prompt = sys_prompt
        self._prompt_template = ChatPromptTemplate.from_messages([
            ("system", self.sys_prompt),
            ("user", "{input}")
        ])
        # Built once here instead of on every call (previously recreated in
        # __call__, so self.chain only existed after the first invocation).
        # Rebuild if tools are ever bound.
        self.chain = self._prompt_template | self.model

    async def __call__(self, user_input: str):
        # TO DO: add bind_tools
        # NOTE(review): `.invoke` is synchronous and blocks the event loop;
        # consider `await self.chain.ainvoke(...)` — confirm backend support.
        return self.chain.invoke({"input": user_input})

    def add_tools(self, tools: List[BaseTool]):
        # Placeholder: will eventually bind `tools` onto the model.
        pass
4 changes: 4 additions & 0 deletions src/services/custom_chat/tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
"""
TO DO:
- Tools can be added here in order to bind_tools onto the model.
"""
Loading