diff --git a/packages/types/src/codebase-index.ts b/packages/types/src/codebase-index.ts index 0ad19d8676a..89d5b168d78 100644 --- a/packages/types/src/codebase-index.ts +++ b/packages/types/src/codebase-index.ts @@ -21,7 +21,7 @@ export const CODEBASE_INDEX_DEFAULTS = { export const codebaseIndexConfigSchema = z.object({ codebaseIndexEnabled: z.boolean().optional(), codebaseIndexQdrantUrl: z.string().optional(), - codebaseIndexEmbedderProvider: z.enum(["openai", "ollama", "openai-compatible", "gemini"]).optional(), + codebaseIndexEmbedderProvider: z.enum(["openai", "ollama", "openai-compatible", "gemini", "mistral"]).optional(), codebaseIndexEmbedderBaseUrl: z.string().optional(), codebaseIndexEmbedderModelId: z.string().optional(), codebaseIndexEmbedderModelDimension: z.number().optional(), @@ -47,6 +47,7 @@ export const codebaseIndexModelsSchema = z.object({ ollama: z.record(z.string(), z.object({ dimension: z.number() })).optional(), "openai-compatible": z.record(z.string(), z.object({ dimension: z.number() })).optional(), gemini: z.record(z.string(), z.object({ dimension: z.number() })).optional(), + mistral: z.record(z.string(), z.object({ dimension: z.number() })).optional(), }) export type CodebaseIndexModels = z.infer<typeof codebaseIndexModelsSchema> @@ -62,6 +63,7 @@ export const codebaseIndexProviderSchema = z.object({ codebaseIndexOpenAiCompatibleApiKey: z.string().optional(), codebaseIndexOpenAiCompatibleModelDimension: z.number().optional(), codebaseIndexGeminiApiKey: z.string().optional(), + codebaseIndexMistralApiKey: z.string().optional(), }) export type CodebaseIndexProvider = z.infer<typeof codebaseIndexProviderSchema> diff --git a/packages/types/src/global-settings.ts b/packages/types/src/global-settings.ts index bddb94cc9e3..106d1f6e334 100644 --- a/packages/types/src/global-settings.ts +++ b/packages/types/src/global-settings.ts @@ -162,6 +162,7 @@ export const SECRET_STATE_KEYS = [ "codeIndexQdrantApiKey", "codebaseIndexOpenAiCompatibleApiKey", "codebaseIndexGeminiApiKey", + "codebaseIndexMistralApiKey", ] as 
const satisfies readonly (keyof ProviderSettings)[] export type SecretState = Pick diff --git a/src/core/webview/webviewMessageHandler.ts b/src/core/webview/webviewMessageHandler.ts index 1bb76734db3..fe4ede0c222 100644 --- a/src/core/webview/webviewMessageHandler.ts +++ b/src/core/webview/webviewMessageHandler.ts @@ -1970,6 +1970,12 @@ export const webviewMessageHandler = async ( settings.codebaseIndexGeminiApiKey, ) } + if (settings.codebaseIndexMistralApiKey !== undefined) { + await provider.contextProxy.storeSecret( + "codebaseIndexMistralApiKey", + settings.codebaseIndexMistralApiKey, + ) + } // Send success response first - settings are saved regardless of validation await provider.postMessageToWebview({ @@ -2062,6 +2068,7 @@ export const webviewMessageHandler = async ( "codebaseIndexOpenAiCompatibleApiKey", )) const hasGeminiApiKey = !!(await provider.context.secrets.get("codebaseIndexGeminiApiKey")) + const hasMistralApiKey = !!(await provider.context.secrets.get("codebaseIndexMistralApiKey")) provider.postMessageToWebview({ type: "codeIndexSecretStatus", @@ -2070,6 +2077,7 @@ export const webviewMessageHandler = async ( hasQdrantApiKey, hasOpenAiCompatibleApiKey, hasGeminiApiKey, + hasMistralApiKey, }, }) break diff --git a/src/i18n/locales/ca/embeddings.json b/src/i18n/locales/ca/embeddings.json index 5deed252bf4..651bc2b80fe 100644 --- a/src/i18n/locales/ca/embeddings.json +++ b/src/i18n/locales/ca/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Falta la configuració d'Ollama per crear l'embedder", "openAiCompatibleConfigMissing": "Falta la configuració compatible amb OpenAI per crear l'embedder", "geminiConfigMissing": "Falta la configuració de Gemini per crear l'embedder", + "mistralConfigMissing": "Falta la configuració de Mistral per crear l'embedder", "invalidEmbedderType": "Tipus d'embedder configurat no vàlid: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "No s'ha pogut determinar la dimensió del vector per al 
model '{{modelId}}' amb el proveïdor '{{provider}}'. Assegura't que la 'Dimensió d'incrustació' estigui configurada correctament als paràmetres del proveïdor compatible amb OpenAI.", "vectorDimensionNotDetermined": "No s'ha pogut determinar la dimensió del vector per al model '{{modelId}}' amb el proveïdor '{{provider}}'. Comprova els perfils del model o la configuració.", diff --git a/src/i18n/locales/de/embeddings.json b/src/i18n/locales/de/embeddings.json index 74381747e1b..167abc516cf 100644 --- a/src/i18n/locales/de/embeddings.json +++ b/src/i18n/locales/de/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Ollama-Konfiguration fehlt für die Erstellung des Embedders", "openAiCompatibleConfigMissing": "OpenAI-kompatible Konfiguration fehlt für die Erstellung des Embedders", "geminiConfigMissing": "Gemini-Konfiguration fehlt für die Erstellung des Embedders", + "mistralConfigMissing": "Mistral-Konfiguration fehlt für die Erstellung des Embedders", "invalidEmbedderType": "Ungültiger Embedder-Typ konfiguriert: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Konnte die Vektordimension für Modell '{{modelId}}' mit Anbieter '{{provider}}' nicht bestimmen. Stelle sicher, dass die 'Embedding-Dimension' in den OpenAI-kompatiblen Anbietereinstellungen korrekt eingestellt ist.", "vectorDimensionNotDetermined": "Konnte die Vektordimension für Modell '{{modelId}}' mit Anbieter '{{provider}}' nicht bestimmen. 
Überprüfe die Modellprofile oder Konfiguration.", diff --git a/src/i18n/locales/en/embeddings.json b/src/i18n/locales/en/embeddings.json index 96b3b2dbead..270a8d193b7 100644 --- a/src/i18n/locales/en/embeddings.json +++ b/src/i18n/locales/en/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Ollama configuration missing for embedder creation", "openAiCompatibleConfigMissing": "OpenAI Compatible configuration missing for embedder creation", "geminiConfigMissing": "Gemini configuration missing for embedder creation", + "mistralConfigMissing": "Mistral configuration missing for embedder creation", "invalidEmbedderType": "Invalid embedder type configured: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Could not determine vector dimension for model '{{modelId}}' with provider '{{provider}}'. Please ensure the 'Embedding Dimension' is correctly set in the OpenAI-Compatible provider settings.", "vectorDimensionNotDetermined": "Could not determine vector dimension for model '{{modelId}}' with provider '{{provider}}'. Check model profiles or configuration.", diff --git a/src/i18n/locales/es/embeddings.json b/src/i18n/locales/es/embeddings.json index e47db420ebd..06478f1d506 100644 --- a/src/i18n/locales/es/embeddings.json +++ b/src/i18n/locales/es/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Falta la configuración de Ollama para crear el incrustador", "openAiCompatibleConfigMissing": "Falta la configuración compatible con OpenAI para crear el incrustador", "geminiConfigMissing": "Falta la configuración de Gemini para crear el incrustador", + "mistralConfigMissing": "Falta la configuración de Mistral para la creación del incrustador", "invalidEmbedderType": "Tipo de incrustador configurado inválido: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "No se pudo determinar la dimensión del vector para el modelo '{{modelId}}' con el proveedor '{{provider}}'. 
Asegúrate de que la 'Dimensión de incrustación' esté configurada correctamente en los ajustes del proveedor compatible con OpenAI.", "vectorDimensionNotDetermined": "No se pudo determinar la dimensión del vector para el modelo '{{modelId}}' con el proveedor '{{provider}}'. Verifica los perfiles del modelo o la configuración.", diff --git a/src/i18n/locales/fr/embeddings.json b/src/i18n/locales/fr/embeddings.json index c63d3a7fbcb..167d093e7ac 100644 --- a/src/i18n/locales/fr/embeddings.json +++ b/src/i18n/locales/fr/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Configuration Ollama manquante pour la création de l'embedder", "openAiCompatibleConfigMissing": "Configuration compatible OpenAI manquante pour la création de l'embedder", "geminiConfigMissing": "Configuration Gemini manquante pour la création de l'embedder", + "mistralConfigMissing": "Configuration Mistral manquante pour la création de l'embedder", "invalidEmbedderType": "Type d'embedder configuré invalide : {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Impossible de déterminer la dimension du vecteur pour le modèle '{{modelId}}' avec le fournisseur '{{provider}}'. Assure-toi que la 'Dimension d'embedding' est correctement définie dans les paramètres du fournisseur compatible OpenAI.", "vectorDimensionNotDetermined": "Impossible de déterminer la dimension du vecteur pour le modèle '{{modelId}}' avec le fournisseur '{{provider}}'. 
Vérifie les profils du modèle ou la configuration.", diff --git a/src/i18n/locales/hi/embeddings.json b/src/i18n/locales/hi/embeddings.json index 15709fd7004..ad24cfe9d17 100644 --- a/src/i18n/locales/hi/embeddings.json +++ b/src/i18n/locales/hi/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "एम्बेडर बनाने के लिए Ollama कॉन्फ़िगरेशन गायब है", "openAiCompatibleConfigMissing": "एम्बेडर बनाने के लिए OpenAI संगत कॉन्फ़िगरेशन गायब है", "geminiConfigMissing": "एम्बेडर बनाने के लिए Gemini कॉन्फ़िगरेशन गायब है", + "mistralConfigMissing": "एम्बेडर निर्माण के लिए मिस्ट्रल कॉन्फ़िगरेशन गायब है", "invalidEmbedderType": "अमान्य एम्बेडर प्रकार कॉन्फ़िगर किया गया: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "प्रदाता '{{provider}}' के साथ मॉडल '{{modelId}}' के लिए वेक्टर आयाम निर्धारित नहीं कर सका। कृपया सुनिश्चित करें कि OpenAI-संगत प्रदाता सेटिंग्स में 'एम्बेडिंग आयाम' सही तरीके से सेट है।", "vectorDimensionNotDetermined": "प्रदाता '{{provider}}' के साथ मॉडल '{{modelId}}' के लिए वेक्टर आयाम निर्धारित नहीं कर सका। मॉडल प्रोफ़ाइल या कॉन्फ़िगरेशन की जांच करें।", diff --git a/src/i18n/locales/id/embeddings.json b/src/i18n/locales/id/embeddings.json index e78d39d1ab4..997c6e80186 100644 --- a/src/i18n/locales/id/embeddings.json +++ b/src/i18n/locales/id/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Konfigurasi Ollama tidak ada untuk membuat embedder", "openAiCompatibleConfigMissing": "Konfigurasi yang kompatibel dengan OpenAI tidak ada untuk membuat embedder", "geminiConfigMissing": "Konfigurasi Gemini tidak ada untuk membuat embedder", + "mistralConfigMissing": "Konfigurasi Mistral hilang untuk pembuatan embedder", "invalidEmbedderType": "Tipe embedder yang dikonfigurasi tidak valid: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Tidak dapat menentukan dimensi vektor untuk model '{{modelId}}' dengan penyedia '{{provider}}'. 
Pastikan 'Dimensi Embedding' diatur dengan benar di pengaturan penyedia yang kompatibel dengan OpenAI.", "vectorDimensionNotDetermined": "Tidak dapat menentukan dimensi vektor untuk model '{{modelId}}' dengan penyedia '{{provider}}'. Periksa profil model atau konfigurasi.", diff --git a/src/i18n/locales/it/embeddings.json b/src/i18n/locales/it/embeddings.json index 679b17a25e4..1bc406aecb7 100644 --- a/src/i18n/locales/it/embeddings.json +++ b/src/i18n/locales/it/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Configurazione Ollama mancante per la creazione dell'embedder", "openAiCompatibleConfigMissing": "Configurazione compatibile con OpenAI mancante per la creazione dell'embedder", "geminiConfigMissing": "Configurazione Gemini mancante per la creazione dell'embedder", + "mistralConfigMissing": "Configurazione di Mistral mancante per la creazione dell'embedder", "invalidEmbedderType": "Tipo di embedder configurato non valido: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Impossibile determinare la dimensione del vettore per il modello '{{modelId}}' con il provider '{{provider}}'. Assicurati che la 'Dimensione di embedding' sia impostata correttamente nelle impostazioni del provider compatibile con OpenAI.", "vectorDimensionNotDetermined": "Impossibile determinare la dimensione del vettore per il modello '{{modelId}}' con il provider '{{provider}}'. 
Controlla i profili del modello o la configurazione.", diff --git a/src/i18n/locales/ja/embeddings.json b/src/i18n/locales/ja/embeddings.json index 89136eb1cc2..7152eb52dfd 100644 --- a/src/i18n/locales/ja/embeddings.json +++ b/src/i18n/locales/ja/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "エンベッダー作成のためのOllama設定がありません", "openAiCompatibleConfigMissing": "エンベッダー作成のためのOpenAI互換設定がありません", "geminiConfigMissing": "エンベッダー作成のためのGemini設定がありません", + "mistralConfigMissing": "エンベッダー作成のためのMistral設定がありません", "invalidEmbedderType": "無効なエンベッダータイプが設定されています: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "プロバイダー '{{provider}}' のモデル '{{modelId}}' の埋め込み次元を決定できませんでした。OpenAI互換プロバイダー設定で「埋め込み次元」が正しく設定されていることを確認してください。", "vectorDimensionNotDetermined": "プロバイダー '{{provider}}' のモデル '{{modelId}}' の埋め込み次元を決定できませんでした。モデルプロファイルまたは設定を確認してください。", diff --git a/src/i18n/locales/ko/embeddings.json b/src/i18n/locales/ko/embeddings.json index 7129883ad7a..f1c40f66bce 100644 --- a/src/i18n/locales/ko/embeddings.json +++ b/src/i18n/locales/ko/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "임베더 생성을 위한 Ollama 구성이 누락되었습니다", "openAiCompatibleConfigMissing": "임베더 생성을 위한 OpenAI 호환 구성이 누락되었습니다", "geminiConfigMissing": "임베더 생성을 위한 Gemini 구성이 누락되었습니다", + "mistralConfigMissing": "임베더 생성을 위한 Mistral 구성이 없습니다", "invalidEmbedderType": "잘못된 임베더 유형이 구성되었습니다: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "프로바이더 '{{provider}}'의 모델 '{{modelId}}'에 대한 벡터 차원을 결정할 수 없습니다. OpenAI 호환 프로바이더 설정에서 '임베딩 차원'이 올바르게 설정되어 있는지 확인하세요.", "vectorDimensionNotDetermined": "프로바이더 '{{provider}}'의 모델 '{{modelId}}'에 대한 벡터 차원을 결정할 수 없습니다. 
모델 프로필 또는 구성을 확인하세요.", diff --git a/src/i18n/locales/nl/embeddings.json b/src/i18n/locales/nl/embeddings.json index ede20774acc..19b7bfeaa22 100644 --- a/src/i18n/locales/nl/embeddings.json +++ b/src/i18n/locales/nl/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Ollama-configuratie ontbreekt voor het maken van embedder", "openAiCompatibleConfigMissing": "OpenAI-compatibele configuratie ontbreekt voor het maken van embedder", "geminiConfigMissing": "Gemini-configuratie ontbreekt voor het maken van embedder", + "mistralConfigMissing": "Mistral-configuratie ontbreekt voor het maken van de embedder", "invalidEmbedderType": "Ongeldig embedder-type geconfigureerd: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Kan de vectordimensie voor model '{{modelId}}' met provider '{{provider}}' niet bepalen. Zorg ervoor dat de 'Embedding Dimensie' correct is ingesteld in de OpenAI-compatibele provider-instellingen.", "vectorDimensionNotDetermined": "Kan de vectordimensie voor model '{{modelId}}' met provider '{{provider}}' niet bepalen. Controleer modelprofielen of configuratie.", diff --git a/src/i18n/locales/pl/embeddings.json b/src/i18n/locales/pl/embeddings.json index 70279021bd1..46e761cb8b6 100644 --- a/src/i18n/locales/pl/embeddings.json +++ b/src/i18n/locales/pl/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Brak konfiguracji Ollama do utworzenia embeddera", "openAiCompatibleConfigMissing": "Brak konfiguracji kompatybilnej z OpenAI do utworzenia embeddera", "geminiConfigMissing": "Brak konfiguracji Gemini do utworzenia embeddera", + "mistralConfigMissing": "Brak konfiguracji Mistral do utworzenia embeddera", "invalidEmbedderType": "Skonfigurowano nieprawidłowy typ embeddera: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Nie można określić wymiaru wektora dla modelu '{{modelId}}' z dostawcą '{{provider}}'. 
Upewnij się, że 'Wymiar osadzania' jest poprawnie ustawiony w ustawieniach dostawcy kompatybilnego z OpenAI.", "vectorDimensionNotDetermined": "Nie można określić wymiaru wektora dla modelu '{{modelId}}' z dostawcą '{{provider}}'. Sprawdź profile modelu lub konfigurację.", diff --git a/src/i18n/locales/pt-BR/embeddings.json b/src/i18n/locales/pt-BR/embeddings.json index aea1bb5007a..816b1ecded4 100644 --- a/src/i18n/locales/pt-BR/embeddings.json +++ b/src/i18n/locales/pt-BR/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Configuração do Ollama ausente para criação do embedder", "openAiCompatibleConfigMissing": "Configuração compatível com OpenAI ausente para criação do embedder", "geminiConfigMissing": "Configuração do Gemini ausente para criação do embedder", + "mistralConfigMissing": "Configuração do Mistral ausente para a criação do embedder", "invalidEmbedderType": "Tipo de embedder configurado inválido: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Não foi possível determinar a dimensão do vetor para o modelo '{{modelId}}' com o provedor '{{provider}}'. Certifique-se de que a 'Dimensão de Embedding' esteja configurada corretamente nas configurações do provedor compatível com OpenAI.", "vectorDimensionNotDetermined": "Não foi possível determinar a dimensão do vetor para o modelo '{{modelId}}' com o provedor '{{provider}}'. 
Verifique os perfis do modelo ou a configuração.", diff --git a/src/i18n/locales/ru/embeddings.json b/src/i18n/locales/ru/embeddings.json index a724539b769..fb1688e2ca4 100644 --- a/src/i18n/locales/ru/embeddings.json +++ b/src/i18n/locales/ru/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Отсутствует конфигурация Ollama для создания эмбеддера", "openAiCompatibleConfigMissing": "Отсутствует конфигурация, совместимая с OpenAI, для создания эмбеддера", "geminiConfigMissing": "Отсутствует конфигурация Gemini для создания эмбеддера", + "mistralConfigMissing": "Конфигурация Mistral отсутствует для создания эмбеддера", "invalidEmbedderType": "Настроен недопустимый тип эмбеддера: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Не удалось определить размерность вектора для модели '{{modelId}}' с провайдером '{{provider}}'. Убедитесь, что 'Размерность эмбеддинга' правильно установлена в настройках провайдера, совместимого с OpenAI.", "vectorDimensionNotDetermined": "Не удалось определить размерность вектора для модели '{{modelId}}' с провайдером '{{provider}}'. Проверьте профили модели или конфигурацию.", diff --git a/src/i18n/locales/tr/embeddings.json b/src/i18n/locales/tr/embeddings.json index 3e115ce1038..5023190929a 100644 --- a/src/i18n/locales/tr/embeddings.json +++ b/src/i18n/locales/tr/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Gömücü oluşturmak için Ollama yapılandırması eksik", "openAiCompatibleConfigMissing": "Gömücü oluşturmak için OpenAI uyumlu yapılandırması eksik", "geminiConfigMissing": "Gömücü oluşturmak için Gemini yapılandırması eksik", + "mistralConfigMissing": "Gömücü oluşturmak için Mistral yapılandırması eksik", "invalidEmbedderType": "Geçersiz gömücü türü yapılandırıldı: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "'{{provider}}' sağlayıcısı ile '{{modelId}}' modeli için vektör boyutu belirlenemedi. 
OpenAI uyumlu sağlayıcı ayarlarında 'Gömme Boyutu'nun doğru ayarlandığından emin ol.", "vectorDimensionNotDetermined": "'{{provider}}' sağlayıcısı ile '{{modelId}}' modeli için vektör boyutu belirlenemedi. Model profillerini veya yapılandırmayı kontrol et.", diff --git a/src/i18n/locales/vi/embeddings.json b/src/i18n/locales/vi/embeddings.json index 9ef61105fa5..626f0f68624 100644 --- a/src/i18n/locales/vi/embeddings.json +++ b/src/i18n/locales/vi/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "Thiếu cấu hình Ollama để tạo embedder", "openAiCompatibleConfigMissing": "Thiếu cấu hình tương thích OpenAI để tạo embedder", "geminiConfigMissing": "Thiếu cấu hình Gemini để tạo embedder", + "mistralConfigMissing": "Thiếu cấu hình Mistral để tạo trình nhúng", "invalidEmbedderType": "Loại embedder được cấu hình không hợp lệ: {{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "Không thể xác định kích thước vector cho mô hình '{{modelId}}' với nhà cung cấp '{{provider}}'. Hãy đảm bảo 'Kích thước Embedding' được cài đặt đúng trong cài đặt nhà cung cấp tương thích OpenAI.", "vectorDimensionNotDetermined": "Không thể xác định kích thước vector cho mô hình '{{modelId}}' với nhà cung cấp '{{provider}}'. 
Kiểm tra hồ sơ mô hình hoặc cấu hình.", diff --git a/src/i18n/locales/zh-CN/embeddings.json b/src/i18n/locales/zh-CN/embeddings.json index d3ded6e5a25..3247631bb24 100644 --- a/src/i18n/locales/zh-CN/embeddings.json +++ b/src/i18n/locales/zh-CN/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "创建嵌入器缺少 Ollama 配置", "openAiCompatibleConfigMissing": "创建嵌入器缺少 OpenAI 兼容配置", "geminiConfigMissing": "创建嵌入器缺少 Gemini 配置", + "mistralConfigMissing": "创建嵌入器时缺少 Mistral 配置", "invalidEmbedderType": "配置的嵌入器类型无效:{{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "无法确定提供商 '{{provider}}' 的模型 '{{modelId}}' 的向量维度。请确保在 OpenAI 兼容提供商设置中正确设置了「嵌入维度」。", "vectorDimensionNotDetermined": "无法确定提供商 '{{provider}}' 的模型 '{{modelId}}' 的向量维度。请检查模型配置文件或配置。", diff --git a/src/i18n/locales/zh-TW/embeddings.json b/src/i18n/locales/zh-TW/embeddings.json index 5ab5dcb292c..b3b3231d4ab 100644 --- a/src/i18n/locales/zh-TW/embeddings.json +++ b/src/i18n/locales/zh-TW/embeddings.json @@ -46,6 +46,7 @@ "ollamaConfigMissing": "建立嵌入器缺少 Ollama 設定", "openAiCompatibleConfigMissing": "建立嵌入器缺少 OpenAI 相容設定", "geminiConfigMissing": "建立嵌入器缺少 Gemini 設定", + "mistralConfigMissing": "建立嵌入器時缺少 Mistral 設定", "invalidEmbedderType": "設定的嵌入器類型無效:{{embedderProvider}}", "vectorDimensionNotDeterminedOpenAiCompatible": "無法確定提供商 '{{provider}}' 的模型 '{{modelId}}' 的向量維度。請確保在 OpenAI 相容提供商設定中正確設定了「嵌入維度」。", "vectorDimensionNotDetermined": "無法確定提供商 '{{provider}}' 的模型 '{{modelId}}' 的向量維度。請檢查模型設定檔或設定。", diff --git a/src/services/code-index/config-manager.ts b/src/services/code-index/config-manager.ts index 9958f456c3e..1723f1c2a08 100644 --- a/src/services/code-index/config-manager.ts +++ b/src/services/code-index/config-manager.ts @@ -18,6 +18,7 @@ export class CodeIndexConfigManager { private ollamaOptions?: ApiHandlerOptions private openAiCompatibleOptions?: { baseUrl: string; apiKey: string } private geminiOptions?: { apiKey: string } + private mistralOptions?: { apiKey: string } private qdrantUrl?: string = 
"http://localhost:6333" private qdrantApiKey?: string private searchMinScore?: number @@ -67,6 +68,7 @@ export class CodeIndexConfigManager { const openAiCompatibleBaseUrl = codebaseIndexConfig.codebaseIndexOpenAiCompatibleBaseUrl ?? "" const openAiCompatibleApiKey = this.contextProxy?.getSecret("codebaseIndexOpenAiCompatibleApiKey") ?? "" const geminiApiKey = this.contextProxy?.getSecret("codebaseIndexGeminiApiKey") ?? "" + const mistralApiKey = this.contextProxy?.getSecret("codebaseIndexMistralApiKey") ?? "" // Update instance variables with configuration this.codebaseIndexEnabled = codebaseIndexEnabled ?? true @@ -100,6 +102,8 @@ export class CodeIndexConfigManager { this.embedderProvider = "openai-compatible" } else if (codebaseIndexEmbedderProvider === "gemini") { this.embedderProvider = "gemini" + } else if (codebaseIndexEmbedderProvider === "mistral") { + this.embedderProvider = "mistral" } else { this.embedderProvider = "openai" } @@ -119,6 +123,7 @@ export class CodeIndexConfigManager { : undefined this.geminiOptions = geminiApiKey ? { apiKey: geminiApiKey } : undefined + this.mistralOptions = mistralApiKey ? { apiKey: mistralApiKey } : undefined } /** @@ -135,6 +140,7 @@ export class CodeIndexConfigManager { ollamaOptions?: ApiHandlerOptions openAiCompatibleOptions?: { baseUrl: string; apiKey: string } geminiOptions?: { apiKey: string } + mistralOptions?: { apiKey: string } qdrantUrl?: string qdrantApiKey?: string searchMinScore?: number @@ -153,6 +159,7 @@ export class CodeIndexConfigManager { openAiCompatibleBaseUrl: this.openAiCompatibleOptions?.baseUrl ?? "", openAiCompatibleApiKey: this.openAiCompatibleOptions?.apiKey ?? "", geminiApiKey: this.geminiOptions?.apiKey ?? "", + mistralApiKey: this.mistralOptions?.apiKey ?? "", qdrantUrl: this.qdrantUrl ?? "", qdrantApiKey: this.qdrantApiKey ?? 
"", } @@ -176,6 +183,7 @@ export class CodeIndexConfigManager { ollamaOptions: this.ollamaOptions, openAiCompatibleOptions: this.openAiCompatibleOptions, geminiOptions: this.geminiOptions, + mistralOptions: this.mistralOptions, qdrantUrl: this.qdrantUrl, qdrantApiKey: this.qdrantApiKey, searchMinScore: this.currentSearchMinScore, @@ -208,6 +216,11 @@ export class CodeIndexConfigManager { const qdrantUrl = this.qdrantUrl const isConfigured = !!(apiKey && qdrantUrl) return isConfigured + } else if (this.embedderProvider === "mistral") { + const apiKey = this.mistralOptions?.apiKey + const qdrantUrl = this.qdrantUrl + const isConfigured = !!(apiKey && qdrantUrl) + return isConfigured } return false // Should not happen if embedderProvider is always set correctly } @@ -241,6 +254,7 @@ export class CodeIndexConfigManager { const prevOpenAiCompatibleApiKey = prev?.openAiCompatibleApiKey ?? "" const prevModelDimension = prev?.modelDimension const prevGeminiApiKey = prev?.geminiApiKey ?? "" + const prevMistralApiKey = prev?.mistralApiKey ?? "" const prevQdrantUrl = prev?.qdrantUrl ?? "" const prevQdrantApiKey = prev?.qdrantApiKey ?? "" @@ -277,6 +291,7 @@ export class CodeIndexConfigManager { const currentOpenAiCompatibleApiKey = this.openAiCompatibleOptions?.apiKey ?? "" const currentModelDimension = this.modelDimension const currentGeminiApiKey = this.geminiOptions?.apiKey ?? "" + const currentMistralApiKey = this.mistralOptions?.apiKey ?? "" const currentQdrantUrl = this.qdrantUrl ?? "" const currentQdrantApiKey = this.qdrantApiKey ?? 
"" @@ -295,6 +310,14 @@ export class CodeIndexConfigManager { return true } + if (prevGeminiApiKey !== currentGeminiApiKey) { + return true + } + + if (prevMistralApiKey !== currentMistralApiKey) { + return true + } + // Check for model dimension changes (generic for all providers) if (prevModelDimension !== currentModelDimension) { return true @@ -351,6 +374,7 @@ export class CodeIndexConfigManager { ollamaOptions: this.ollamaOptions, openAiCompatibleOptions: this.openAiCompatibleOptions, geminiOptions: this.geminiOptions, + mistralOptions: this.mistralOptions, qdrantUrl: this.qdrantUrl, qdrantApiKey: this.qdrantApiKey, searchMinScore: this.currentSearchMinScore, diff --git a/src/services/code-index/embedders/__tests__/mistral.spec.ts b/src/services/code-index/embedders/__tests__/mistral.spec.ts new file mode 100644 index 00000000000..50858825030 --- /dev/null +++ b/src/services/code-index/embedders/__tests__/mistral.spec.ts @@ -0,0 +1,193 @@ +import { vitest, describe, it, expect, beforeEach } from "vitest" +import type { MockedClass } from "vitest" +import { MistralEmbedder } from "../mistral" +import { OpenAICompatibleEmbedder } from "../openai-compatible" + +// Mock the OpenAICompatibleEmbedder +vitest.mock("../openai-compatible") + +// Mock TelemetryService +vitest.mock("@roo-code/telemetry", () => ({ + TelemetryService: { + instance: { + captureEvent: vitest.fn(), + }, + }, +})) + +const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass + +describe("MistralEmbedder", () => { + let embedder: MistralEmbedder + + beforeEach(() => { + vitest.clearAllMocks() + }) + + describe("constructor", () => { + it("should create an instance with default model when no model specified", () => { + // Arrange + const apiKey = "test-mistral-api-key" + + // Act + embedder = new MistralEmbedder(apiKey) + + // Assert + expect(MockedOpenAICompatibleEmbedder).toHaveBeenCalledWith( + "https://api.mistral.ai/v1", + apiKey, + "codestral-embed-2505", + 8191, + ) + 
}) + + it("should create an instance with specified model", () => { + // Arrange + const apiKey = "test-mistral-api-key" + const modelId = "custom-embed-model" + + // Act + embedder = new MistralEmbedder(apiKey, modelId) + + // Assert + expect(MockedOpenAICompatibleEmbedder).toHaveBeenCalledWith( + "https://api.mistral.ai/v1", + apiKey, + "custom-embed-model", + 8191, + ) + }) + + it("should throw error when API key is not provided", () => { + // Act & Assert + expect(() => new MistralEmbedder("")).toThrow("validation.apiKeyRequired") + expect(() => new MistralEmbedder(null as any)).toThrow("validation.apiKeyRequired") + expect(() => new MistralEmbedder(undefined as any)).toThrow("validation.apiKeyRequired") + }) + }) + + describe("embedderInfo", () => { + it("should return correct embedder info", () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + + // Act + const info = embedder.embedderInfo + + // Assert + expect(info).toEqual({ + name: "mistral", + }) + }) + + describe("createEmbeddings", () => { + let mockCreateEmbeddings: any + + beforeEach(() => { + mockCreateEmbeddings = vitest.fn() + MockedOpenAICompatibleEmbedder.prototype.createEmbeddings = mockCreateEmbeddings + }) + + it("should use instance model when no model parameter provided", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + const texts = ["test text 1", "test text 2"] + const mockResponse = { + embeddings: [ + [0.1, 0.2], + [0.3, 0.4], + ], + } + mockCreateEmbeddings.mockResolvedValue(mockResponse) + + // Act + const result = await embedder.createEmbeddings(texts) + + // Assert + expect(mockCreateEmbeddings).toHaveBeenCalledWith(texts, "codestral-embed-2505") + expect(result).toEqual(mockResponse) + }) + + it("should use provided model parameter when specified", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key", "custom-embed-model") + const texts = ["test text 1", "test text 2"] + const mockResponse = { + embeddings: [ + 
[0.1, 0.2], + [0.3, 0.4], + ], + } + mockCreateEmbeddings.mockResolvedValue(mockResponse) + + // Act + const result = await embedder.createEmbeddings(texts, "codestral-embed-2505") + + // Assert + expect(mockCreateEmbeddings).toHaveBeenCalledWith(texts, "codestral-embed-2505") + expect(result).toEqual(mockResponse) + }) + + it("should handle errors from OpenAICompatibleEmbedder", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + const texts = ["test text"] + const error = new Error("Embedding failed") + mockCreateEmbeddings.mockRejectedValue(error) + + // Act & Assert + await expect(embedder.createEmbeddings(texts)).rejects.toThrow("Embedding failed") + }) + }) + }) + + describe("validateConfiguration", () => { + let mockValidateConfiguration: any + + beforeEach(() => { + mockValidateConfiguration = vitest.fn() + MockedOpenAICompatibleEmbedder.prototype.validateConfiguration = mockValidateConfiguration + }) + + it("should delegate validation to OpenAICompatibleEmbedder", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + mockValidateConfiguration.mockResolvedValue({ valid: true }) + + // Act + const result = await embedder.validateConfiguration() + + // Assert + expect(mockValidateConfiguration).toHaveBeenCalled() + expect(result).toEqual({ valid: true }) + }) + + it("should pass through validation errors from OpenAICompatibleEmbedder", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + mockValidateConfiguration.mockResolvedValue({ + valid: false, + error: "embeddings:validation.authenticationFailed", + }) + + // Act + const result = await embedder.validateConfiguration() + + // Assert + expect(mockValidateConfiguration).toHaveBeenCalled() + expect(result).toEqual({ + valid: false, + error: "embeddings:validation.authenticationFailed", + }) + }) + + it("should handle validation exceptions", async () => { + // Arrange + embedder = new MistralEmbedder("test-api-key") + 
mockValidateConfiguration.mockRejectedValue(new Error("Validation failed")) + + // Act & Assert + await expect(embedder.validateConfiguration()).rejects.toThrow("Validation failed") + }) + }) +}) diff --git a/src/services/code-index/embedders/mistral.ts b/src/services/code-index/embedders/mistral.ts new file mode 100644 index 00000000000..c23bcbba1d4 --- /dev/null +++ b/src/services/code-index/embedders/mistral.ts @@ -0,0 +1,91 @@ +import { OpenAICompatibleEmbedder } from "./openai-compatible" +import { IEmbedder, EmbeddingResponse, EmbedderInfo } from "../interfaces/embedder" +import { MAX_ITEM_TOKENS } from "../constants" +import { t } from "../../../i18n" +import { TelemetryEventName } from "@roo-code/types" +import { TelemetryService } from "@roo-code/telemetry" + +/** + * Mistral embedder implementation that wraps the OpenAI Compatible embedder + * with configuration for Mistral's embedding API. + * + * Supported models: + * - codestral-embed-2505 (dimension: 1536) + */ +export class MistralEmbedder implements IEmbedder { + private readonly openAICompatibleEmbedder: OpenAICompatibleEmbedder + private static readonly MISTRAL_BASE_URL = "https://api.mistral.ai/v1" + private static readonly DEFAULT_MODEL = "codestral-embed-2505" + private readonly modelId: string + + /** + * Creates a new Mistral embedder + * @param apiKey The Mistral API key for authentication + * @param modelId The model ID to use (defaults to codestral-embed-2505) + */ + constructor(apiKey: string, modelId?: string) { + if (!apiKey) { + throw new Error(t("embeddings:validation.apiKeyRequired")) + } + + // Use provided model or default + this.modelId = modelId || MistralEmbedder.DEFAULT_MODEL + + // Create an OpenAI Compatible embedder with Mistral's configuration + this.openAICompatibleEmbedder = new OpenAICompatibleEmbedder( + MistralEmbedder.MISTRAL_BASE_URL, + apiKey, + this.modelId, + MAX_ITEM_TOKENS, // This is the max token limit (8191), not the embedding dimension + ) + } + + /** + * 
Creates embeddings for the given texts using Mistral's embedding API + * @param texts Array of text strings to embed + * @param model Optional model identifier (uses constructor model if not provided) + * @returns Promise resolving to embedding response + */ + async createEmbeddings(texts: string[], model?: string): Promise { + try { + // Use the provided model or fall back to the instance's model + const modelToUse = model || this.modelId + return await this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse) + } catch (error) { + TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + location: "MistralEmbedder:createEmbeddings", + }) + throw error + } + } + + /** + * Validates the Mistral embedder configuration by delegating to the underlying OpenAI-compatible embedder + * @returns Promise resolving to validation result with success status and optional error message + */ + async validateConfiguration(): Promise<{ valid: boolean; error?: string }> { + try { + // Delegate validation to the OpenAI-compatible embedder + // The error messages will be specific to Mistral since we're using Mistral's base URL + return await this.openAICompatibleEmbedder.validateConfiguration() + } catch (error) { + TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + location: "MistralEmbedder:validateConfiguration", + }) + throw error + } + } + + /** + * Returns information about this embedder + */ + get embedderInfo(): EmbedderInfo { + return { + name: "mistral", + } + } +} diff --git a/src/services/code-index/interfaces/config.ts b/src/services/code-index/interfaces/config.ts index 190a23e2a3e..9098a60091c 100644 --- a/src/services/code-index/interfaces/config.ts +++ b/src/services/code-index/interfaces/config.ts @@ -13,6 +13,7 @@ export interface CodeIndexConfig { ollamaOptions?: ApiHandlerOptions openAiCompatibleOptions?: { baseUrl: string; apiKey: string } geminiOptions?: { apiKey: string } + mistralOptions?: { apiKey: string } qdrantUrl?: string qdrantApiKey?: string searchMinScore?: number @@ -33,6 +34,7 @@ export type PreviousConfigSnapshot = { openAiCompatibleBaseUrl?: string openAiCompatibleApiKey?: string geminiApiKey?: string + mistralApiKey?: string qdrantUrl?: string qdrantApiKey?: string } diff --git a/src/services/code-index/interfaces/embedder.ts b/src/services/code-index/interfaces/embedder.ts index 0a74446d5e7..c5653ea2b7e 100644 --- a/src/services/code-index/interfaces/embedder.ts +++ b/src/services/code-index/interfaces/embedder.ts @@ -28,7 +28,7 @@ export interface EmbeddingResponse { } } -export type AvailableEmbedders = "openai" | "ollama" | "openai-compatible" | "gemini" +export type AvailableEmbedders = "openai" | "ollama" | "openai-compatible" | "gemini" | "mistral" export interface EmbedderInfo { name: AvailableEmbedders diff --git a/src/services/code-index/interfaces/manager.ts b/src/services/code-index/interfaces/manager.ts index 70e3fd97659..fd3b2bfddac 100644 --- a/src/services/code-index/interfaces/manager.ts +++ b/src/services/code-index/interfaces/manager.ts @@ -70,7 +70,7 @@ export interface ICodeIndexManager { } export type IndexingState = "Standby" | "Indexing" | "Indexed" | "Error" -export type EmbedderProvider = "openai" | "ollama" | "openai-compatible" | 
"gemini" +export type EmbedderProvider = "openai" | "ollama" | "openai-compatible" | "gemini" | "mistral" export interface IndexProgressUpdate { systemStatus: IndexingState diff --git a/src/services/code-index/service-factory.ts b/src/services/code-index/service-factory.ts index b7951db7acf..68b0f5c0bc6 100644 --- a/src/services/code-index/service-factory.ts +++ b/src/services/code-index/service-factory.ts @@ -3,6 +3,7 @@ import { OpenAiEmbedder } from "./embedders/openai" import { CodeIndexOllamaEmbedder } from "./embedders/ollama" import { OpenAICompatibleEmbedder } from "./embedders/openai-compatible" import { GeminiEmbedder } from "./embedders/gemini" +import { MistralEmbedder } from "./embedders/mistral" import { EmbedderProvider, getDefaultModelId, getModelDimension } from "../../shared/embeddingModels" import { QdrantVectorStore } from "./vector-store/qdrant-client" import { codeParser, DirectoryScanner, FileWatcher } from "./processors" @@ -64,6 +65,11 @@ export class CodeIndexServiceFactory { throw new Error(t("embeddings:serviceFactory.geminiConfigMissing")) } return new GeminiEmbedder(config.geminiOptions.apiKey, config.modelId) + } else if (provider === "mistral") { + if (!config.mistralOptions?.apiKey) { + throw new Error(t("embeddings:serviceFactory.mistralConfigMissing")) + } + return new MistralEmbedder(config.mistralOptions.apiKey, config.modelId) } throw new Error( diff --git a/src/shared/WebviewMessage.ts b/src/shared/WebviewMessage.ts index e983ce720d9..0317c469871 100644 --- a/src/shared/WebviewMessage.ts +++ b/src/shared/WebviewMessage.ts @@ -244,7 +244,7 @@ export interface WebviewMessage { // Global state settings codebaseIndexEnabled: boolean codebaseIndexQdrantUrl: string - codebaseIndexEmbedderProvider: "openai" | "ollama" | "openai-compatible" | "gemini" + codebaseIndexEmbedderProvider: "openai" | "ollama" | "openai-compatible" | "gemini" | "mistral" codebaseIndexEmbedderBaseUrl?: string codebaseIndexEmbedderModelId: string 
codebaseIndexEmbedderModelDimension?: number // Generic dimension for all providers @@ -257,6 +257,7 @@ export interface WebviewMessage { codeIndexQdrantApiKey?: string codebaseIndexOpenAiCompatibleApiKey?: string codebaseIndexGeminiApiKey?: string + codebaseIndexMistralApiKey?: string } } diff --git a/src/shared/embeddingModels.ts b/src/shared/embeddingModels.ts index f387480c65d..a3cd61e6593 100644 --- a/src/shared/embeddingModels.ts +++ b/src/shared/embeddingModels.ts @@ -2,7 +2,7 @@ * Defines profiles for different embedding models, including their dimensions. */ -export type EmbedderProvider = "openai" | "ollama" | "openai-compatible" | "gemini" // Add other providers as needed +export type EmbedderProvider = "openai" | "ollama" | "openai-compatible" | "gemini" | "mistral" // Add other providers as needed export interface EmbeddingModelProfile { dimension: number @@ -50,6 +50,9 @@ export const EMBEDDING_MODEL_PROFILES: EmbeddingModelProfiles = { "text-embedding-004": { dimension: 768 }, "gemini-embedding-001": { dimension: 3072, scoreThreshold: 0.4 }, }, + mistral: { + "codestral-embed-2505": { dimension: 1536, scoreThreshold: 0.4 }, + }, } /** @@ -137,6 +140,9 @@ export function getDefaultModelId(provider: EmbedderProvider): string { case "gemini": return "gemini-embedding-001" + case "mistral": + return "codestral-embed-2505" + default: // Fallback for unknown providers console.warn(`Unknown provider for default model ID: ${provider}. 
Falling back to OpenAI default.`) diff --git a/webview-ui/src/components/chat/CodeIndexPopover.tsx b/webview-ui/src/components/chat/CodeIndexPopover.tsx index 4385e2e8446..d7683e8c7e6 100644 --- a/webview-ui/src/components/chat/CodeIndexPopover.tsx +++ b/webview-ui/src/components/chat/CodeIndexPopover.tsx @@ -68,6 +68,7 @@ interface LocalCodeIndexSettings { codebaseIndexOpenAiCompatibleBaseUrl?: string codebaseIndexOpenAiCompatibleApiKey?: string codebaseIndexGeminiApiKey?: string + codebaseIndexMistralApiKey?: string } // Validation schema for codebase index settings @@ -126,6 +127,14 @@ const createValidationSchema = (provider: EmbedderProvider, t: any) => { .min(1, t("settings:codeIndex.validation.modelSelectionRequired")), }) + case "mistral": + return baseSchema.extend({ + codebaseIndexMistralApiKey: z.string().min(1, t("settings:codeIndex.validation.mistralApiKeyRequired")), + codebaseIndexEmbedderModelId: z + .string() + .min(1, t("settings:codeIndex.validation.modelSelectionRequired")), + }) + default: return baseSchema } @@ -169,6 +178,7 @@ export const CodeIndexPopover: React.FC = ({ codebaseIndexOpenAiCompatibleBaseUrl: "", codebaseIndexOpenAiCompatibleApiKey: "", codebaseIndexGeminiApiKey: "", + codebaseIndexMistralApiKey: "", }) // Initial settings state - stores the settings when popover opens @@ -202,6 +212,7 @@ export const CodeIndexPopover: React.FC = ({ codebaseIndexOpenAiCompatibleBaseUrl: codebaseIndexConfig.codebaseIndexOpenAiCompatibleBaseUrl || "", codebaseIndexOpenAiCompatibleApiKey: "", codebaseIndexGeminiApiKey: "", + codebaseIndexMistralApiKey: "", } setInitialSettings(settings) setCurrentSettings(settings) @@ -293,6 +304,9 @@ export const CodeIndexPopover: React.FC = ({ if (!prev.codebaseIndexGeminiApiKey || prev.codebaseIndexGeminiApiKey === SECRET_PLACEHOLDER) { updated.codebaseIndexGeminiApiKey = secretStatus.hasGeminiApiKey ? 
SECRET_PLACEHOLDER : "" } + if (!prev.codebaseIndexMistralApiKey || prev.codebaseIndexMistralApiKey === SECRET_PLACEHOLDER) { + updated.codebaseIndexMistralApiKey = secretStatus.hasMistralApiKey ? SECRET_PLACEHOLDER : "" + } return updated } @@ -364,7 +378,8 @@ export const CodeIndexPopover: React.FC = ({ if ( key === "codeIndexOpenAiKey" || key === "codebaseIndexOpenAiCompatibleApiKey" || - key === "codebaseIndexGeminiApiKey" + key === "codebaseIndexGeminiApiKey" || + key === "codebaseIndexMistralApiKey" ) { dataToValidate[key] = "placeholder-valid" } @@ -606,6 +621,9 @@ export const CodeIndexPopover: React.FC = ({ {t("settings:codeIndex.geminiProvider")} + + {t("settings:codeIndex.mistralProvider")} + @@ -933,6 +951,71 @@ export const CodeIndexPopover: React.FC = ({ )} + {currentSettings.codebaseIndexEmbedderProvider === "mistral" && ( + <> +
+ + + updateSetting("codebaseIndexMistralApiKey", e.target.value) + } + placeholder={t("settings:codeIndex.mistralApiKeyPlaceholder")} + className={cn("w-full", { + "border-red-500": formErrors.codebaseIndexMistralApiKey, + })} + /> + {formErrors.codebaseIndexMistralApiKey && ( +

+ {formErrors.codebaseIndexMistralApiKey} +

+ )} +
+ +
+ + + updateSetting("codebaseIndexEmbedderModelId", e.target.value) + } + className={cn("w-full", { + "border-red-500": formErrors.codebaseIndexEmbedderModelId, + })}> + + {t("settings:codeIndex.selectModel")} + + {getAvailableModels().map((modelId) => { + const model = + codebaseIndexModels?.[ + currentSettings.codebaseIndexEmbedderProvider + ]?.[modelId] + return ( + + {modelId}{" "} + {model + ? t("settings:codeIndex.modelDimensions", { + dimension: model.dimension, + }) + : ""} + + ) + })} + + {formErrors.codebaseIndexEmbedderModelId && ( +

+ {formErrors.codebaseIndexEmbedderModelId} +

+ )} +
+ + )} + {/* Qdrant Settings */}