diff --git a/backend-agent/llm.py b/backend-agent/llm.py
index 7d04adc..7f60a21 100644
--- a/backend-agent/llm.py
+++ b/backend-agent/llm.py
@@ -72,6 +72,11 @@
         'gemini-2.5-flash',
         'gemini-2.5-pro',
     ],
+    'perplexity-ai':
+    [
+        'sonar',
+        'sonar-pro',
+    ],
 }
 
 
@@ -91,13 +96,19 @@ def from_model_name(cls, model_name: str) -> 'LLM':
         # Foundation-models scenarios in AI Core
         if model_name in AICORE_MODELS['azure-openai']:
             return AICoreOpenAILLM(model_name)
+        # IBM models are compatible with OpenAI completion API
         if model_name in AICORE_MODELS['aicore-ibm']:
-            # IBM models are compatible with OpenAI completion API
             return AICoreOpenAILLM(model_name)
         if model_name in AICORE_MODELS['aicore-opensource']:
             return AICoreOpenAILLM(model_name, False)
+        # Mistral models are compatible with OpenAI completion API
         if model_name in AICORE_MODELS['aicore-mistralai']:
             return AICoreOpenAILLM(model_name, False)
+        # Perplexity models are compatible with OpenAI completion API
+        if model_name in AICORE_MODELS['perplexity-ai']:
+            return AICoreOpenAILLM(model_name)
+
+        # Non OpenAI-compatible models in AI Core
         if model_name in AICORE_MODELS['aws-bedrock']:
             if 'titan' in model_name:
                 # Titan models don't support system prompts