Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,18 @@ class AskChatbotLangchain(AskChatbotPort):
def __init__(self, chain: Chain, chatHistoryManager: ChatHistoryManager):
    # Langchain chain used to answer questions, plus the manager that
    # supplies per-chat conversation memory (see askChatbot).
    self.chain = chain
    self.chatHistoryManager = chatHistoryManager

def askChatbot(self, message: Message, chatId: ChatId) -> MessageResponse:
embeddingModel = LangchainEmbeddingModel()
if chatId is not None:
self.chain.memory = self.chatHistoryManager.getChatHistory(chatId)
answer = self.chain.run(message.content)
print(answer, flush=True)
answer = self.chain.invoke({"question": message.content, "chat_history": ""})

return MessageResponse(
True,
Message(content=answer,
timestamp=datetime.now(timezone.utc),
relevantDocuments=[DocumentId("DocumentoRilevante.pdf")],
sender=MessageSender.CHATBOT),
chatId
Message(
answer["answer"],
datetime.now(timezone.utc),
list(set(DocumentId(relevantDocumentId.metadata.get("source")) for relevantDocumentId in answer["source_documents"])),
MessageSender.CHATBOT
), chatId
)
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from typing import List

from domain.chat.message import Message
from domain.chat.message import Message, MessageSender
from domain.chat.chat_id import ChatId
from domain.chat.chat_operation_response import ChatOperationResponse
from adapter.out.persistence.postgres.postgres_message import PostgresMessage
from adapter.out.persistence.postgres.postgres_message import PostgresMessage, PostgresMessageSenderType

from application.port.out.persist_chat_port import PersistChatPort
from adapter.out.persistence.postgres.postgres_chat_orm import PostgresChatORM
Expand All @@ -13,13 +13,15 @@ def __init__(self, postgresChatORM: PostgresChatORM):
self.postgresChatORM = postgresChatORM

def persistChat(self, messages: List[Message], chatId: ChatId) -> ChatOperationResponse:
postgresChatOpeartionResponse = self.postgresChatORM.persistChat([self.toPostgresMessageFrom(message) for message in messages], chatId)
return postgresChatOpeartionResponse.toChatOperationResponse()
for message in messages:
print(self.toPostgresMessageFrom(message).sender.name, flush=True)
postgresChatOperationResponse = self.postgresChatORM.persistChat([self.toPostgresMessageFrom(message) for message in messages], chatId)
return postgresChatOperationResponse.toChatOperationResponse()

def toPostgresMessageFrom(self, message: Message) -> PostgresMessage:
return PostgresMessage(
content=message.content,
timestamp=message.timestamp,
relevantDocuments=[relevantDocumentId.id for relevantDocumentId in message.relevantDocuments] if message.relevantDocuments else None,
sender=message.sender.value
sender=PostgresMessageSenderType.USER if message.sender.value == MessageSender.USER.value else PostgresMessageSenderType.CHATBOT
)
13 changes: 7 additions & 6 deletions 3 - PB/MVP/src/backend/adapter/out/configuration_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,24 +165,25 @@ def getAskChatbotPort(self) -> AskChatbotPort:
configuredVectorStore = VectorStoreChromaDBManager()
else:
raise ConfigurationException('Vector store non configurato.')

if configuration.embeddingModel == PostgresEmbeddingModelType.HUGGINGFACE:
configuredEmbeddingModel = HuggingFaceEmbeddingModel()
elif configuration.embeddingModel == PostgresEmbeddingModelType.OPENAI:
configuredEmbeddingModel = OpenAIEmbeddingModel()
else:
raise ConfigurationException('Embeddings model non configurato.')
if configuration.LLMModel == PostgresLLMModelType.HUGGINGFACE:
raise ConfigurationException('Embedding model non configurato.')

if configuration.LLMModel == PostgresLLMModelType.OPENAI:
with open('/run/secrets/openai_key', 'r') as file:
openai_key = file.read()
configuredLLMModel = OpenAI(openai_api_key= openai_key, model_name="gpt-3.5-turbo-instruct", temperature=0.3)
elif configuration.LLMModel == PostgresLLMModelType.OPENAI:
configuredLLMModel = OpenAI(openai_api_key=openai_key, model_name="gpt-3.5-turbo-instruct", temperature=0.01,)
elif configuration.LLMModel == PostgresLLMModelType.HUGGINGFACE:
with open('/run/secrets/huggingface_key', 'r') as file:
hugging_face = file.read()
configuredLLMModel = HuggingFaceEndpoint(repo_id="google/flan-5-large", temperature=0.3, token=hugging_face)
configuredLLMModel = HuggingFaceEndpoint(repo_id="mistralai/Mistral-7B-v0.1", temperature=0.01, huggingfacehub_api_token=hugging_face)
else:
raise ConfigurationException('LLM model non configurato.')


chain = ConversationalRetrievalChain.from_llm(
llm=configuredLLMModel,
retriever=configuredVectorStore.getRetriever(configuredEmbeddingModel),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@ def __init__(self, postgresChatORM: PostgresChatORM):
self.postgresORM = postgresChatORM

def deleteChats(self, chatsIdsList: List[ChatId]) -> List[ChatOperationResponse]:
postgresOpearationResponseList = self.postgresORM.deleteChats([chatId.id for chatId in chatsIdsList])
return [postgresChatOpearationResponse.toChatOperationResponse() for postgresChatOpearationResponse in postgresOpearationResponseList]
postgresOperationResponseList = self.postgresORM.deleteChats([chatId.id for chatId in chatsIdsList])
return [postgresChatOperationResponse.toChatOperationResponse() for postgresChatOperationResponse in postgresOperationResponseList]
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from sqlalchemy import Column, Integer, String, Enum as SQLEnum, ForeignKey, Text, JSON
from enum import Enum
from sqlalchemy.orm import relationship

from adapter.out.persistence.postgres.database import Base, db_session

class Chat(Base):
    """ORM model for the 'chat' table: a chat session identified by a unique title."""
    __tablename__ = 'chat'
    id = Column('id', Integer, primary_key=True, autoincrement=True)
    title = Column('title', Text, unique=True, nullable=False)

    def __init__(self, title: str) -> None:
        self.title = title

    def __repr__(self):
        # Compact debug form: (id, title).
        return '({0}, {1})'.format(self.id, self.title)

class MessageStore(Base):
    """ORM model for the 'message_store' table: one chat message stored in a
    JSON column and linked to its owning chat through session_id."""
    __tablename__ = 'message_store'
    id = Column('id', Integer, primary_key=True, autoincrement=True)
    sessionId = Column('session_id', Integer, ForeignKey('chat.id'))
    message = Column('message', JSON)

    # Relationship enforcing that session_id points at an existing Chat row.
    chatIdConstraint = relationship(Chat, foreign_keys=[sessionId])

    def __init__(self, sessionId: int, message: str) -> None:
        # Fix: sessionId was annotated `str`, but the column is an Integer
        # foreign key to chat.id — corrected to `int`.
        # NOTE(review): `message` lands in a JSON column; confirm whether
        # callers pass a serialized JSON string or an already-parsed dict.
        self.sessionId = sessionId
        self.message = message

    def __repr__(self):
        return f'({self.id}, {self.sessionId}, {self.message})'

class MessageRelevantDocuments(Base):
    """Association table linking a stored message to the documents relevant to
    its answer; the composite primary key is (message id, document id)."""
    __tablename__ = 'message_relevant_documents'
    id = Column('id', Integer, ForeignKey('message_store.id'), primary_key=True)
    documentId = Column('document_id', Text, primary_key=True)

def initChat():
    """Create all tables registered on Base (including the chat models above)
    against the session's bound engine, if they do not already exist."""
    Base.metadata.create_all(bind=db_session.bind)
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
from sqlalchemy import Column, Integer, String, Enum as SQLEnum, ForeignKey
from enum import Enum
from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.orm import relationship

from domain.configuration.document_store_configuration import DocumentStoreConfiguration
from domain.configuration.embedding_model_configuration import EmbeddingModelConfiguration
from domain.configuration.llm_model_configuration import LLMModelConfiguration
from domain.configuration.vector_store_configuration import VectorStoreConfiguration

Base = declarative_base()
from adapter.out.persistence.postgres.database import Base, db_session

class PostgresDocumentStoreType(Enum):
AWS = 1
Expand Down Expand Up @@ -154,4 +154,19 @@ def __init__(self, userId: int, vectorStore: PostgresVectorStoreType, embeddingM
self.documentStore = documentStore

def __repr__(self):
return f'({self.userId}, {self.vectorStore}, {self.embeddingModel}, {self.LLMModel}, {self.documentStore})'
return f'({self.userId}, {self.vectorStore}, {self.embeddingModel}, {self.LLMModel}, {self.documentStore})'

def initConfiguration():
    """Create the configuration tables and, on first run, seed them with the
    default catalogue of stores/models plus user 1's default choice."""
    Base.metadata.create_all(bind=db_session.bind)

    # Guard clause: the presence of user 1's configuration choice marks a
    # database that was already seeded.
    if db_session.query(PostgresConfigurationChoice).filter(PostgresConfigurationChoice.userId == 1).first() is not None:
        return

    db_session.add_all([
        PostgresVectorStoreConfiguration(name=PostgresVectorStoreType.CHROMA_DB, organization='Chroma', description='Chroma DB is an open-source vector store.', type='Open-source', costIndicator='Free'),
        PostgresVectorStoreConfiguration(name=PostgresVectorStoreType.PINECONE, organization='Pinecone', description='Pinecone is a vector database for building real-time applications.', type='On cloud', costIndicator='Paid'),
        PostgresEmbeddingModelConfiguration(name=PostgresEmbeddingModelType.HUGGINGFACE, organization='Hugging Face', description='Hugging Face is a company that provides a large number of pre-trained models for natural language processing.', type='Local', costIndicator='Free'),
        PostgresEmbeddingModelConfiguration(name=PostgresEmbeddingModelType.OPENAI, organization='OpenAI', description='OpenAI is an artificial intelligence research laboratory.', type='Commercial', costIndicator='Paid'),
        PostgresLLMModelConfiguration(name=PostgresLLMModelType.HUGGINGFACE, organization='Hugging Face', description='Hugging Face is a company that provides a large number of pre-trained models for natural language processing.', type='Local', costIndicator='Free'),
        PostgresLLMModelConfiguration(name=PostgresLLMModelType.OPENAI, organization='OpenAI', description='OpenAI is an artificial intelligence research laboratory.', type='Commercial', costIndicator='Paid'),
        PostgresDocumentStoreConfiguration(name=PostgresDocumentStoreType.AWS, organization='Amazon', description='Amazon Web Services is a subsidiary of Amazon providing on-demand cloud computing platforms and APIs to individuals.', type='On cloud', costIndicator='Paid'),
    ])
    # Commit the catalogue first so the choice row references persisted options.
    db_session.commit()
    db_session.add(PostgresConfigurationChoice(userId=1, vectorStore=PostgresVectorStoreType.CHROMA_DB, embeddingModel=PostgresEmbeddingModelType.HUGGINGFACE, LLMModel=PostgresLLMModelType.HUGGINGFACE, documentStore=PostgresDocumentStoreType.AWS))
    db_session.commit()
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import os

from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker

# Shared declarative base that all ORM models in this package register on.
Base = declarative_base()

# NOTE(review): os.environ.get returns None when DATABASE_URL is unset, which
# makes create_engine fail with an obscure error — consider requiring the
# variable explicitly (os.environ['DATABASE_URL']).
engine = create_engine(os.environ.get('DATABASE_URL'))
# Thread-local session registry bound to the single shared engine.
db_session = scoped_session(sessionmaker(bind=engine))

def init_db():
    """Create the schema and seed default data for all persistence modules.

    The imports are function-local to break a circular import: the model
    modules themselves import Base and db_session from this module.
    """
    from adapter.out.persistence.postgres.configuration_models import initConfiguration
    from adapter.out.persistence.postgres.chat_models import initChat
    initConfiguration()
    initChat()
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
from dataclasses import dataclass
from datetime import datetime
from typing import List

from adapter.out.persistence.postgres.postgres_message import PostgresMessage
from domain.chat.chat import Chat
from domain.chat.chat_id import ChatId



@dataclass
class PostgresChat:
def __init__(self, id:int, title:str, timestamp:datetime, messages: List[PostgresMessage]):
self.id = id
self.title = title
self.timestamp = timestamp
self.messages = messages
id: int
title: str
messages: List[PostgresMessage]

def toChat(self):
listOfMessages = []
for message in self.messages:
Expand Down
Loading