Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
f672801
Added MistralAI ChatCompletion
nmoellerms Jul 2, 2024
1155291
Integrated Feedback of PR
nmoellerms Jul 2, 2024
2a34856
Merge remote-tracking branch 'origin/main' into issue-6499-Mistral-Ai…
nmoellerms Jul 2, 2024
27b7cbb
added mistral to unit test dependencies
nmoellerms Jul 2, 2024
0ae805d
removed tools from settings
nmoellerms Jul 2, 2024
6b2025f
fixed comment and pytestfixture and lock file
nmoellerms Jul 2, 2024
e986b01
adjusted test cases to not contain tools
nmoellerms Jul 2, 2024
b6cc9e3
handle function choice behavior
nmoellerms Jul 2, 2024
29efd0b
Merge branch 'main' into issue-6499-Mistral-Ai-Connector-chat-completion
nmoeller Jul 2, 2024
11295d7
fixed mypy issues except liskov
nmoellerms Jul 3, 2024
a894710
increased test coverage
nmoellerms Jul 3, 2024
87a8a67
Merge branch 'main' into issue-6499-Mistral-Ai-Connector-chat-completion
nmoeller Jul 3, 2024
14218da
full test coverage
nmoellerms Jul 3, 2024
0bebbf4
Merge branch 'issue-6499-Mistral-Ai-Connector-chat-completion' of htt…
nmoellerms Jul 3, 2024
46c997f
Integrated PR Feedback and skip Int Tests if Mistral is not configured
nmoellerms Jul 4, 2024
0ff93c8
Merge branch 'main' into issue-6499-Mistral-Ai-Connector-chat-completion
nmoeller Jul 4, 2024
b55ed11
small fix for skipping integration tests
nmoellerms Jul 4, 2024
297373f
Merge branch 'issue-6499-Mistral-Ai-Connector-chat-completion' of htt…
nmoellerms Jul 4, 2024
88ce112
Merge branch 'main' into issue-6499-Mistral-Ai-Connector-chat-completion
nmoeller Jul 4, 2024
a5f8bea
skipped MistralConstructor in TestSetup
nmoellerms Jul 4, 2024
cda788c
Merge branch 'issue-6499-Mistral-Ai-Connector-chat-completion' of htt…
nmoellerms Jul 4, 2024
a8e90ae
Merge branch 'main' into issue-6499-Mistral-Ai-Connector-chat-completion
nmoeller Jul 4, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/python-integration-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,8 @@ jobs:
AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
MISTRALAI_API_KEY: ${{secrets.MISTRALAI_API_KEY}}
MISTRALAI_CHAT_MODEL_ID: ${{ vars.MISTRALAI_CHAT_MODEL_ID }}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest
Expand Down Expand Up @@ -163,6 +165,8 @@ jobs:
AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
MISTRALAI_API_KEY: ${{secrets.MISTRALAI_API_KEY}}
MISTRALAI_CHAT_MODEL_ID: ${{ vars.MISTRALAI_CHAT_MODEL_ID }}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest
Expand Down
21 changes: 19 additions & 2 deletions python/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 7 additions & 2 deletions python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ ipykernel = { version = "^6.21.1", optional = true}
# milvus
pymilvus = { version = ">=2.3,<2.4.4", optional = true}
milvus = { version = ">=2.3,<2.3.8", markers = 'sys_platform != "win32"', optional = true}
# mistralai
mistralai = { version = "^0.4.1", optional = true}
# pinecone
pinecone-client = { version = ">=3.0.0", optional = true}
# postgres
Expand All @@ -65,7 +67,6 @@ usearch = { version = "^2.9", optional = true}
pyarrow = { version = ">=12.0.1,<17.0.0", optional = true}
weaviate-client = { version = ">=3.18,<5.0", optional = true}

# Groups are for development only (installed through Poetry)
[tool.poetry.group.dev.dependencies]
pre-commit = ">=3.7.1"
ruff = ">=0.4.5"
Expand All @@ -86,6 +87,7 @@ azure-ai-inference = {version = "^1.0.0b1", allow-prereleases = true}
azure-search-documents = {version = "11.6.0b4", allow-prereleases = true}
azure-core = "^1.28.0"
azure-cosmos = "^4.7.0"
mistralai = "^0.4.1"
Comment thread
nmoeller marked this conversation as resolved.
transformers = { version = "^4.28.1", extras=["torch"]}
sentence-transformers = "^2.2.2"

Expand All @@ -108,6 +110,8 @@ sentence-transformers = "^2.2.2"
# milvus
pymilvus = ">=2.3,<2.4.4"
milvus = { version = ">=2.3,<2.3.8", markers = 'sys_platform != "win32"'}
# mistralai
mistralai = "^0.4.1"
# mongodb
motor = "^3.3.2"
# pinecone
Expand All @@ -126,12 +130,13 @@ weaviate-client = ">=3.18,<5.0"

# Extras are exposed to pip, this allows a user to easily add the right dependencies to their environment
[tool.poetry.extras]
all = ["transformers", "sentence-transformers", "qdrant-client", "chromadb", "pymilvus", "milvus", "weaviate-client", "pinecone-client", "psycopg", "redis", "azure-ai-inference", "azure-search-documents", "azure-core", "azure-identity", "azure-cosmos", "usearch", "pyarrow", "ipykernel", "motor"]
all = ["transformers", "sentence-transformers", "qdrant-client", "chromadb", "pymilvus", "milvus", "mistralai", "weaviate-client", "pinecone-client", "psycopg", "redis", "azure-ai-inference", "azure-search-documents", "azure-core", "azure-identity", "azure-cosmos", "usearch", "pyarrow", "ipykernel", "motor"]

azure = ["azure-ai-inference", "azure-search-documents", "azure-core", "azure-identity", "azure-cosmos", "msgraph-sdk"]
chromadb = ["chromadb"]
hugging_face = ["transformers", "sentence-transformers"]
milvus = ["pymilvus", "milvus"]
mistralai = ["mistralai"]
mongo = ["motor"]
notebooks = ["ipykernel"]
pinecone = ["pinecone-client"]
Expand Down
86 changes: 86 additions & 0 deletions python/samples/concepts/chat_completion/chat_mistral_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# Copyright (c) Microsoft. All rights reserved.
"""Sample: interactive console chat against the MistralAI chat-completion connector."""

import asyncio

from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.mistral_ai import MistralAIChatCompletion
from semantic_kernel.contents import ChatHistory

# Persona prompt injected as the system message of the conversation.
system_message = """
You are a chat bot. Your name is Mosscap and
you have one goal: figure out what people need.
Your full name, should you need to know it, is
Splendid Speckled Mosscap. You communicate
effectively, but you tend to answer with long
flowery prose.
"""

kernel = Kernel()

# Register the MistralAI service; API key / model id are read from the
# environment (MISTRALAI_API_KEY, MISTRALAI_CHAT_MODEL_ID) by the connector.
service_id = "mistral-ai-chat"
kernel.add_service(MistralAIChatCompletion(service_id=service_id))

# Pull the service-specific execution settings and tune the sampling knobs.
settings = kernel.get_prompt_execution_settings_from_service_id(service_id)
settings.max_tokens = 2000
settings.temperature = 0.7
settings.top_p = 0.8

# A single prompt function that renders the running history plus the new input.
chat_function = kernel.add_function(
    plugin_name="ChatBot",
    function_name="Chat",
    prompt="{{$chat_history}}{{$user_input}}",
    template_format="semantic-kernel",
    prompt_execution_settings=settings,
)

# Seed the conversation with a short scripted exchange.
chat_history = ChatHistory(system_message=system_message)
chat_history.add_user_message("Hi there, who are you?")
chat_history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need")
chat_history.add_user_message("I want to find a hotel in Seattle with free wifi and a pool.")

async def chat() -> bool:
    """Run one conversation turn.

    Reads a line from stdin, sends it to the chat function, prints the
    assistant's reply, and records the exchange in the module-level
    ``chat_history``.

    Returns:
        False when the user wants to stop (Ctrl+C, EOF, or "exit"),
        True to keep chatting.
    """
    try:
        user_input = input("User:> ")
    except (KeyboardInterrupt, EOFError):
        # Treat interrupt and end-of-input the same way: clean shutdown.
        print("\n\nExiting chat...")
        return False

    if user_input == "exit":
        print("\n\nExiting chat...")
        return False

    stream = True
    if stream:
        answer = kernel.invoke_stream(
            chat_function,
            user_input=user_input,
            chat_history=chat_history,
        )
        print("Mosscap:> ", end="")
        # Accumulate the streamed chunks so the exchange can be recorded in
        # chat_history — the original streaming path returned without updating
        # the history, so later turns lost all prior context.
        chunks: list[str] = []
        async for message in answer:
            chunk = str(message[0])
            chunks.append(chunk)
            print(chunk, end="")
        print("\n")
        chat_history.add_user_message(user_input)
        chat_history.add_assistant_message("".join(chunks))
        return True
    answer = await kernel.invoke(
        chat_function,
        user_input=user_input,
        chat_history=chat_history,
    )
    print(f"Mosscap:> {answer}")
    chat_history.add_user_message(user_input)
    chat_history.add_assistant_message(str(answer))
    return True


async def main() -> None:
    """Drive the REPL: keep taking turns until chat() signals exit."""
    while await chat():
        pass


if __name__ == "__main__":
    asyncio.run(main())
11 changes: 11 additions & 0 deletions python/semantic_kernel/connectors/ai/mistral_ai/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Copyright (c) Microsoft. All rights reserved.
"""Public surface of the MistralAI connector: chat completion service and its settings."""

from semantic_kernel.connectors.ai.mistral_ai.prompt_execution_settings.mistral_ai_prompt_execution_settings import (
    MistralAIChatPromptExecutionSettings,
)
from semantic_kernel.connectors.ai.mistral_ai.services.mistral_ai_chat_completion import MistralAIChatCompletion

__all__ = [
    "MistralAIChatCompletion",
    "MistralAIChatPromptExecutionSettings",
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Copyright (c) Microsoft. All rights reserved.

import logging
from typing import Any, Literal

from pydantic import Field, model_validator

from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings

logger = logging.getLogger(__name__)


class MistralAIPromptExecutionSettings(PromptExecutionSettings):
    """Common request settings for MistralAI services."""

    # Serialized as "model" so the dumped payload matches the Mistral API schema.
    ai_model_id: str | None = Field(None, serialization_alias="model")


class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings):
    """Specific settings for the Chat Completion endpoint.

    Field constraints mirror the Mistral chat API: max_tokens must be
    positive, temperature lies in [0, 2], and top_p in [0, 1].
    """

    response_format: dict[Literal["type"], Literal["text", "json_object"]] | None = None
    # Pre-rendered message dicts; when set they are sent as-is to the API.
    messages: list[dict[str, Any]] | None = None
    safe_mode: bool = False
    safe_prompt: bool = False
    max_tokens: int | None = Field(None, gt=0)
    seed: int | None = None
    temperature: float | None = Field(None, ge=0.0, le=2.0)
    top_p: float | None = Field(None, ge=0.0, le=1.0)
    # NOTE(review): both `seed` and `random_seed` are declared; the Mistral
    # chat API parameter is `random_seed` — confirm whether `seed` is a leftover.
    random_seed: int | None = None

    @model_validator(mode="after")
    def check_function_call_behavior(self) -> "MistralAIChatPromptExecutionSettings":
        """Check if the user is requesting function call behavior."""
        # Tool/function calling is not implemented by this connector yet;
        # fail fast instead of silently dropping the requested behavior.
        if self.function_choice_behavior is not None:
            raise NotImplementedError("MistralAI does not support function call behavior.")

        return self
Empty file.
Loading