diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py
index dfaa6e56..c358110a 100644
--- a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py
@@ -2,6 +2,7 @@
 from flo_ai.helpers.llm_factory import LLMFactory
 from flo_ai.llm import Gemini
 from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+from flo_ai.models.agent import LLMConfigModel
 
 
 class LLMModelFunc:
@@ -15,7 +16,11 @@ def _create_llm_instance(self, config: LlmInferenceConfig):
         Returns:
             LLM instance
         """
-        return LLMFactory.create_llm({'provider': 'rootflo', 'model_id': config.id})
+        llm_config = LLMConfigModel(
+            provider='rootflo',
+            model_id=config.id,
+        )
+        return LLMFactory.create_llm(llm_config)
 
     async def generate_response(
         self,