diff --git a/examples/anthropic/anthropic_function_call.py b/examples/anthropic/anthropic_function_call.py
new file mode 100644
index 0000000..21dd36b
--- /dev/null
+++ b/examples/anthropic/anthropic_function_call.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+"""Smoke-test Anthropic tool/function calling routed through Javelin."""
+import os
+import json
+import asyncio
+from javelin_sdk import JavelinClient, JavelinConfig
+
+# Load environment variables
+from dotenv import load_dotenv
+load_dotenv()
+
+# Javelin Setup
+config = JavelinConfig(
+    base_url=os.getenv("JAVELIN_BASE_URL"),
+    javelin_api_key=os.getenv("JAVELIN_API_KEY"),
+)
+client = JavelinClient(config)
+
+# Anthropic Headers
+headers = {
+    "Content-Type": "application/json",
+    "x-javelin-route": "amazon_univ",
+    "x-javelin-model": "claude-3-5-sonnet-20240620",
+    "x-api-key": os.getenv("ANTHROPIC_API_KEY"),
+}
+
+# Messages and dummy tool call (check if tool support throws any error)
+messages = [
+    {"role": "user", "content": "Please call the tool to fetch today's weather in Paris."}
+]
+
+tools = [
+    {
+        "name": "get_weather",
+        "description": "Get weather info by city",
+        # Anthropic's Messages API takes the tool's JSON schema under
+        # "input_schema" — the OpenAI-style "parameters" key is rejected.
+        "input_schema": {
+            "type": "object",
+            "properties": {
+                "city": {"type": "string", "description": "Name of the city"},
+            },
+            "required": ["city"]
+        }
+    }
+]
+
+async def run_anthropic_test():
+    """Send one tool-enabled Messages request via Javelin and dump the raw reply."""
+    print("\n==== Testing Anthropic Function Calling Support via Javelin ====")
+    try:
+        body = {
+            "messages": messages,
+            "tools": tools,  # test tool support
+            # Anthropic expects tool_choice as an object, not the string "auto".
+            "tool_choice": {"type": "auto"},
+            "anthropic_version": "bedrock-2023-05-31",
+            "max_tokens": 256,
+            "temperature": 0.7,
+        }
+        result = client.query_unified_endpoint(
+            provider_name="anthropic",
+            endpoint_type="messages",
+            query_body=body,
+            headers=headers,
+        )
+        print("Raw Response:")
+        print(json.dumps(result, indent=2))
+    except Exception as e:
+        print(f"Function/tool call failed for Anthropic: {str(e)}")
+
+if __name__ == "__main__":
+    asyncio.run(run_anthropic_test())
diff --git a/examples/azure-openai/azure_function_call.py
b/examples/azure-openai/azure_function_call.py
new file mode 100644
index 0000000..4d1bea1
--- /dev/null
+++ b/examples/azure-openai/azure_function_call.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+"""Exercise Azure OpenAI function-calling and tool-calling through a Javelin route."""
+import os
+import json
+from dotenv import load_dotenv
+from openai import AzureOpenAI
+from javelin_sdk import JavelinClient, JavelinConfig
+
+load_dotenv()
+
+def init_azure_client_with_javelin():
+    """Build an AzureOpenAI client and register it with Javelin; raises on missing keys."""
+    azure_api_key = os.getenv("AZURE_OPENAI_API_KEY")
+    javelin_api_key = os.getenv("JAVELIN_API_KEY")
+
+    if not azure_api_key or not javelin_api_key:
+        raise ValueError("Missing AZURE_OPENAI_API_KEY or JAVELIN_API_KEY")
+
+    # Azure OpenAI setup
+    azure_client = AzureOpenAI(
+        api_version="2023-07-01-preview",
+        azure_endpoint="https://javelinpreview.openai.azure.com",
+        api_key=azure_api_key
+    )
+
+    # Register with Javelin
+    config = JavelinConfig(javelin_api_key=javelin_api_key)
+    client = JavelinClient(config)
+    client.register_azureopenai(azure_client, route_name="azureopenai_univ")
+
+    return azure_client
+
+def run_function_call_test(azure_client):
+    """Probe the legacy functions / function_call API path."""
+    print("\n==== Azure OpenAI Function Calling via Javelin ====")
+
+    try:
+        response = azure_client.chat.completions.create(
+            model="gpt35",  # Your Azure model deployment name
+            messages=[{"role": "user", "content": "Get weather in Tokyo in Celsius."}],
+            functions=[
+                {
+                    "name": "get_weather",
+                    "description": "Provides weather information",
+                    "parameters": {
+                        "type": "object",
+                        "properties": {
+                            "city": {"type": "string", "description": "City name"},
+                            "unit": {
+                                "type": "string",
+                                "enum": ["celsius", "fahrenheit"],
+                                "description": "Temperature unit"
+                            }
+                        },
+                        "required": ["city"]
+                    }
+                }
+            ],
+            function_call="auto"
+        )
+        print("Function Call Output:")
+        print(response.to_json(indent=2))
+    except Exception as e:
+        print("Azure Function Calling Error:", e)
+
+def run_tool_call_test(azure_client):
+    """Probe the newer tools / tool_choice API path."""
+    print("\n==== Azure OpenAI Tool Calling via Javelin ====")
+
+    try:
+        response = azure_client.chat.completions.create(
+            model="gpt35",  # Your Azure deployment name
+            messages=[{"role": "user", "content": "Get a random motivational quote."}],
+            tools=[
+                {
+                    "type": "function",
+                    "function": {
+                        "name": "get_motivation",
+                        "description": "Returns a motivational quote",
+                        "parameters": {
+                            "type": "object",
+                            "properties": {
+                                "category": {"type": "string", "description": "e.g. success, life"}
+                            },
+                            "required": []
+                        }
+                    }
+                }
+            ],
+            tool_choice="auto"
+        )
+        print("Tool Call Output:")
+        print(response.to_json(indent=2))
+    except Exception as e:
+        print("Azure Tool Calling Error:", e)
+
+def main():
+    client = init_azure_client_with_javelin()
+    run_function_call_test(client)
+    run_tool_call_test(client)
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/bedrock/bedrock_function_tool_call.py b/examples/bedrock/bedrock_function_tool_call.py
new file mode 100644
index 0000000..76a32f5
--- /dev/null
+++ b/examples/bedrock/bedrock_function_tool_call.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+"""Exercise Bedrock function-calling and tool-calling through a Javelin route."""
+import asyncio
+import json
+import os
+from typing import Dict, Any
+
+from javelin_sdk import JavelinClient, JavelinConfig
+
+# Load ENV
+from dotenv import load_dotenv
+load_dotenv()
+
+# Print response utility
+def print_response(provider: str, response: Dict[str, Any]) -> None:
+    print(f"\n=== Response from {provider} ===")
+    print(json.dumps(response, indent=2))
+
+
+# Setup Bedrock Javelin client
+config = JavelinConfig(
+    base_url=os.getenv("JAVELIN_BASE_URL"),
+    javelin_api_key=os.getenv("JAVELIN_API_KEY"),
+)
+client = JavelinClient(config)
+
+headers = {
+    "Content-Type": "application/json",
+    "x-javelin-route": "amazon_univ",
+    "x-javelin-model": "amazon.titan-text-express-v1",  # replace if needed
+    "x-api-key": os.getenv("JAVELIN_API_KEY"),
+}
+
+
+async def test_function_call():
+    print("\n==== Bedrock Function Calling Test ====")
+    try:
+        query_body = {
+            "messages": [{"role": "user", "content": "Get weather for Paris in Celsius"}],
+            "functions": [
+                {
+                    "name":
+                        "get_weather",
+                    "description": "Returns weather info for a city",
+                    "parameters": {
+                        "type": "object",
+                        "properties": {
+                            "city": {"type": "string"},
+                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
+                        },
+                        "required": ["city"]
+                    }
+                }
+            ],
+            "function_call": "auto",
+            "max_tokens": 100,
+            "temperature": 0.7,
+        }
+
+        response = client.query_unified_endpoint(
+            provider_name="bedrock",
+            endpoint_type="invoke",
+            query_body=query_body,
+            headers=headers,
+            model_id="amazon.titan-text-express-v1",
+        )
+        print_response("Bedrock Function Call", response)
+    except Exception as e:
+        print(f"Function call failed: {str(e)}")
+
+
+async def test_tool_call():
+    print("\n==== Bedrock Tool Calling Test ====")
+    try:
+        query_body = {
+            "messages": [{"role": "user", "content": "Give me a motivational quote"}],
+            "tools": [
+                {
+                    "type": "function",
+                    "function": {
+                        "name": "get_motivation",
+                        "description": "Returns motivational quote",
+                        "parameters": {
+                            "type": "object",
+                            "properties": {
+                                "category": {"type": "string", "description": "e.g. success, life"}
+                            },
+                            "required": []
+                        }
+                    }
+                }
+            ],
+            "tool_choice": "auto",
+            "max_tokens": 100,
+            "temperature": 0.7,
+        }
+
+        response = client.query_unified_endpoint(
+            provider_name="bedrock",
+            endpoint_type="invoke",
+            query_body=query_body,
+            headers=headers,
+            model_id="amazon.titan-text-express-v1",
+        )
+        print_response("Bedrock Tool Call", response)
+    except Exception as e:
+        print(f"Tool call failed: {str(e)}")
+
+
+async def main():
+    await test_function_call()
+    await test_tool_call()
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/examples/gemini/gemini_function_tool_call.py b/examples/gemini/gemini_function_tool_call.py
new file mode 100644
index 0000000..e6328fe
--- /dev/null
+++ b/examples/gemini/gemini_function_tool_call.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+"""Exercise Gemini tool/function calling via its OpenAI-compatible endpoint and Javelin."""
+import os
+import json
+from dotenv import load_dotenv
+from openai import OpenAI
+from javelin_sdk import JavelinClient, JavelinConfig
+
+load_dotenv()
+
+def init_gemini_client():
+    """Build an OpenAI-compatible Gemini client and register it with Javelin."""
+    gemini_api_key = os.getenv("GEMINI_API_KEY")
+    javelin_api_key = os.getenv("JAVELIN_API_KEY")
+
+    if not gemini_api_key or not javelin_api_key:
+        raise ValueError("Missing GEMINI_API_KEY or JAVELIN_API_KEY")
+
+    gemini_client = OpenAI(
+        api_key=gemini_api_key,
+        base_url="https://generativelanguage.googleapis.com/v1beta/openai/"
+    )
+
+    config = JavelinConfig(javelin_api_key=javelin_api_key)
+    client = JavelinClient(config)
+    client.register_gemini(gemini_client, route_name="google_univ")
+
+    return gemini_client
+
+def test_function_call(client):
+    print("\n==== Gemini Function Calling Test ====")
+    try:
+        tools = [{
+            "type": "function",
+            "function": {
+                "name": "get_weather",
+                "description": "Get weather info for a given location",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "location": {"type": "string", "description": "e.g. Tokyo"},
+                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
+                    },
+                    "required": ["location"]
+                }
+            }
+        }]
+        messages = [{"role": "user", "content": "What's the weather like in Tokyo today?"}]
+        response = client.chat.completions.create(
+            model="gemini-1.5-flash",
+            messages=messages,
+            tools=tools,
+            tool_choice="auto"
+        )
+        print("Response:")
+        print(response.model_dump_json(indent=2))
+    except Exception as e:
+        print(f"Function calling failed: {e}")
+
+def test_tool_call(client):
+    print("\n==== Gemini Tool Calling Test ====")
+    try:
+        tools = [{
+            "type": "function",
+            "function": {
+                "name": "get_quote",
+                "description": "Returns a motivational quote",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "category": {"type": "string", "description": "e.g. success"}
+                    },
+                    "required": []
+                }
+            }
+        }]
+        messages = [{"role": "user", "content": "Give me a quote about perseverance."}]
+        response = client.chat.completions.create(
+            model="gemini-1.5-flash",
+            messages=messages,
+            tools=tools,
+            tool_choice="auto"
+        )
+        print("Response:")
+        print(response.model_dump_json(indent=2))
+    except Exception as e:
+        print(f"Tool calling failed: {e}")
+
+def main():
+    print("=== Gemini Javelin Tool/Function Test ===")
+    try:
+        gemini_client = init_gemini_client()
+    except Exception as e:
+        print(f"Initialization failed: {e}")
+        return
+
+    test_function_call(gemini_client)
+    test_tool_call(gemini_client)
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/mistral/mistral_function_tool_call.py b/examples/mistral/mistral_function_tool_call.py
new file mode 100644
index 0000000..dbb3e58
--- /dev/null
+++ b/examples/mistral/mistral_function_tool_call.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+"""Exercise Mistral prompting and tool/function calling via LangChain through Javelin."""
+import os
+import dotenv
+from langchain.chat_models import init_chat_model
+
+dotenv.load_dotenv()
+
+def init_mistral_model():
+    """Build a LangChain chat model pointed at the Javelin OpenAI-compatible proxy."""
+    # init_chat_model's first parameter is `model`, not `model_name` — the
+    # original keyword raises a TypeError in LangChain's init_chat_model.
+    return init_chat_model(
+        model="mistral-large-latest",
+        model_provider="openai",
+        base_url=f"{os.getenv('JAVELIN_BASE_URL')}/v1",
+        extra_headers={
+            "x-javelin-route": "mistral_univ",
+            "x-api-key": os.environ.get("OPENAI_API_KEY"),
+            "Authorization": f"Bearer {os.environ.get('MISTRAL_API_KEY')}"
+        }
+    )
+
+def run_basic_prompt(model):
+    print("\n==== Mistral Prompt Test ====")
+    try:
+        response = model.invoke("Write a haiku about sunrise.")
+        print("Response:\n", response)
+    except Exception as e:
+        print("Prompt failed:", e)
+
+def run_function_calling(model):
+    print("\n==== Mistral Function Calling Test ====")
+    try:
+        messages = [{"role": "user", "content": "Get the current weather in Mumbai"}]
+        functions = [
+            {
+                "name": "get_weather",
+                "description": "Fetch current weather",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "location": {"type": "string", "description": "City name"},
+                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
+                    },
+                    "required": ["location"]
+                }
+            }
+        ]
+        # NOTE(review): predict_messages is deprecated in LangChain — verify it still
+        # accepts dict messages here, or switch to model.invoke(...).
+        response = model.predict_messages(messages=messages, functions=functions, function_call="auto")
+        print("Function Response:\n", response)
+    except Exception as e:
+        print("Function calling failed:", e)
+
+def run_tool_calling(model):
+    print("\n==== Mistral Tool Calling Test ====")
+    try:
+        messages = [{"role": "user", "content": "Tell me a motivational quote"}]
+        tools = [
+            {
+                "type": "function",
+                "function": {
+                    "name": "get_quote",
+                    "description": "Returns a motivational quote",
+                    "parameters": {
+                        "type": "object",
+                        "properties": {
+                            "category": {"type": "string", "description": "e.g. life, success"}
+                        },
+                        "required": []
+                    }
+                }
+            }
+        ]
+        # NOTE(review): same deprecation caveat as run_function_calling above.
+        response = model.predict_messages(messages=messages, tools=tools, tool_choice="auto")
+        print("Tool Response:\n", response)
+    except Exception as e:
+        print("Tool calling failed:", e)
+
+def main():
+    try:
+        model = init_mistral_model()
+    except Exception as e:
+        print(f"Failed to initialize model: {e}")
+        return
+
+    run_basic_prompt(model)
+    run_function_calling(model)
+    run_tool_calling(model)
+
+if __name__ == "__main__":
+    main()