Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 68 additions & 0 deletions examples/anthropic/anthropic_function_call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
#!/usr/bin/env python
import os
import json
import asyncio
from javelin_sdk import JavelinClient, JavelinConfig

# Load environment variables from a local .env file
# (expects JAVELIN_BASE_URL, JAVELIN_API_KEY, ANTHROPIC_API_KEY).
from dotenv import load_dotenv
load_dotenv()

# Javelin Setup: gateway client configured from the environment.
config = JavelinConfig(
    base_url=os.getenv("JAVELIN_BASE_URL"),
    javelin_api_key=os.getenv("JAVELIN_API_KEY"),
)
client = JavelinClient(config)

# Anthropic Headers: route/model selection for the Javelin gateway plus the
# provider API key. NOTE(review): route is named "amazon_univ" — confirm this
# is the intended route for Anthropic traffic.
headers = {
    "Content-Type": "application/json",
    "x-javelin-route": "amazon_univ",
    "x-javelin-model": "claude-3-5-sonnet-20240620",
    "x-api-key": os.getenv("ANTHROPIC_API_KEY"),
}

# Messages and dummy tool call (check if tool support throws any error)
messages = [
    {"role": "user", "content": "Please call the tool to fetch today's weather in Paris."}
]

# Single tool definition used to probe tool-calling support.
tools = [
    {
        "name": "get_weather",
        "description": "Get weather info by city",
        "parameters": {
            "type": "object",
            "properties": {
                "city": {"type": "string", "description": "Name of the city"},
            },
            "required": ["city"]
        }
    }
]

async def run_anthropic_test():
    """Probe Anthropic tool/function-calling support through the Javelin gateway.

    Builds a messages request carrying the module-level ``tools`` definition and
    sends it via ``client.query_unified_endpoint``. Prints the raw JSON response
    on success; on failure prints the error message and the full traceback so
    the failure point is debuggable.
    """
    print("\n==== Testing Anthropic Function Calling Support via Javelin ====")
    try:
        body = {
            "messages": messages,
            "tools": tools,  # test tool support
            "tool_choice": "auto",
            "anthropic_version": "bedrock-2023-05-31",
            "max_tokens": 256,
            "temperature": 0.7,
        }
        result = client.query_unified_endpoint(
            provider_name="anthropic",
            endpoint_type="messages",
            query_body=body,
            headers=headers,
        )
        print("Raw Response:")
        print(json.dumps(result, indent=2))
    except Exception as e:
        # Per review: include the full traceback, not just the message.
        import traceback
        print(f"Function/tool call failed for Anthropic: {str(e)}")
        traceback.print_exc()
Comment on lines +64 to +65
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Consider logging the full exception traceback for better debugging. This will provide more context when debugging failures.

Suggested change
except Exception as e:
print(f"Function/tool call failed for Anthropic: {str(e)}")
except Exception as e:
print(f"Function/tool call failed for Anthropic: {str(e)}")
import traceback
traceback.print_exc()


# Script entry point: drive the async probe on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(run_anthropic_test())
99 changes: 99 additions & 0 deletions examples/azure-openai/azure_function_call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
#!/usr/bin/env python
import os
import json
from dotenv import load_dotenv
from openai import AzureOpenAI
from javelin_sdk import JavelinClient, JavelinConfig

load_dotenv()

def init_azure_client_with_javelin():
    """Create an AzureOpenAI client and register it with the Javelin gateway.

    Reads AZURE_OPENAI_API_KEY and JAVELIN_API_KEY from the environment.

    Returns:
        AzureOpenAI: a client whose traffic is routed through Javelin.

    Raises:
        ValueError: if any required environment variable is missing; the
            message names exactly which ones, for easier debugging.
    """
    azure_api_key = os.getenv("AZURE_OPENAI_API_KEY")
    javelin_api_key = os.getenv("JAVELIN_API_KEY")

    # Per review: name every missing variable so misconfiguration is obvious.
    missing_vars = [
        name
        for name, value in (
            ("AZURE_OPENAI_API_KEY", azure_api_key),
            ("JAVELIN_API_KEY", javelin_api_key),
        )
        if not value
    ]
    if missing_vars:
        raise ValueError(f"Missing environment variables: {', '.join(missing_vars)}")

    # Azure OpenAI setup. NOTE(review): endpoint and API version are hard-coded
    # for this example deployment — confirm before reuse.
    azure_client = AzureOpenAI(
        api_version="2023-07-01-preview",
        azure_endpoint="https://javelinpreview.openai.azure.com",
        api_key=azure_api_key
    )

    # Register with Javelin so requests flow through the gateway route.
    config = JavelinConfig(javelin_api_key=javelin_api_key)
    client = JavelinClient(config)
    client.register_azureopenai(azure_client, route_name="azureopenai_univ")

    return azure_client

def run_function_call_test(azure_client):
    """Exercise legacy ``functions=`` calling on an Azure OpenAI deployment via Javelin."""
    print("\n==== Azure OpenAI Function Calling via Javelin ====")

    # Schema for the single function offered to the model.
    weather_function = {
        "name": "get_weather",
        "description": "Provides weather information",
        "parameters": {
            "type": "object",
            "properties": {
                "city": {"type": "string", "description": "City name"},
                "unit": {
                    "type": "string",
                    "enum": ["celsius", "fahrenheit"],
                    "description": "Temperature unit"
                }
            },
            "required": ["city"]
        }
    }

    try:
        response = azure_client.chat.completions.create(
            model="gpt35",  # Your Azure model deployment name
            messages=[{"role": "user", "content": "Get weather in Tokyo in Celsius."}],
            functions=[weather_function],
            function_call="auto"
        )
        print("Function Call Output:")
        print(response.to_json(indent=2))
    except Exception as e:
        print("Azure Function Calling Error:", e)

def run_tool_call_test(azure_client):
    """Exercise tool calling (the ``tools=`` API) on an Azure OpenAI deployment via Javelin."""
    print("\n==== Azure OpenAI Tool Calling via Javelin ====")

    # Schema for the single tool offered to the model.
    motivation_tool = {
        "type": "function",
        "function": {
            "name": "get_motivation",
            "description": "Returns a motivational quote",
            "parameters": {
                "type": "object",
                "properties": {
                    "category": {"type": "string", "description": "e.g. success, life"}
                },
                "required": []
            }
        }
    }

    try:
        response = azure_client.chat.completions.create(
            model="gpt35",  # Your Azure deployment name
            messages=[{"role": "user", "content": "Get a random motivational quote."}],
            tools=[motivation_tool],
            tool_choice="auto"
        )
        print("Tool Call Output:")
        print(response.to_json(indent=2))
    except Exception as e:
        print("Azure Tool Calling Error:", e)

def main():
    """Initialise the Javelin-registered Azure client and run both call styles."""
    azure_client = init_azure_client_with_javelin()
    run_function_call_test(azure_client)
    run_tool_call_test(azure_client)


if __name__ == "__main__":
    main()
114 changes: 114 additions & 0 deletions examples/bedrock/bedrock_function_tool_call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
#!/usr/bin/env python
import asyncio
import json
import os
from typing import Dict, Any

from javelin_sdk import JavelinClient, JavelinConfig

# Load ENV
from dotenv import load_dotenv
load_dotenv()

# Utility: pretty-print a provider response as indented JSON.
def print_response(provider: str, response: Dict[str, Any]) -> None:
    """Print *response* under a header naming *provider*."""
    banner = f"\n=== Response from {provider} ==="
    print(banner)
    print(json.dumps(response, indent=2))


# Setup Bedrock Javelin client: gateway config read from the environment.
config = JavelinConfig(
    base_url=os.getenv("JAVELIN_BASE_URL"),
    javelin_api_key=os.getenv("JAVELIN_API_KEY"),
)
client = JavelinClient(config)

# Gateway routing headers. NOTE(review): "x-api-key" is set to the Javelin key
# here, while the other examples send the provider key — confirm which is intended.
headers = {
    "Content-Type": "application/json",
    "x-javelin-route": "amazon_univ",
    "x-javelin-model": "amazon.titan-text-express-v1",  # replace if needed
    "x-api-key": os.getenv("JAVELIN_API_KEY"),
}


async def test_function_call():
    """Probe legacy function-calling support on Bedrock through Javelin.

    Sends a ``functions=``-style request to the Bedrock ``invoke`` endpoint and
    prints the raw response; on failure prints the error with a full traceback
    (matching the review feedback on the sibling examples).
    """
    print("\n==== Bedrock Function Calling Test ====")
    try:
        query_body = {
            "messages": [{"role": "user", "content": "Get weather for Paris in Celsius"}],
            "functions": [
                {
                    "name": "get_weather",
                    "description": "Returns weather info for a city",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "city": {"type": "string"},
                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
                        },
                        "required": ["city"]
                    }
                }
            ],
            "function_call": "auto",
            "max_tokens": 100,
            "temperature": 0.7,
        }

        response = client.query_unified_endpoint(
            provider_name="bedrock",
            endpoint_type="invoke",
            query_body=query_body,
            headers=headers,
            model_id="amazon.titan-text-express-v1",
        )
        print_response("Bedrock Function Call", response)
    except Exception as e:
        # Include the full traceback, not just the message, for debuggability.
        import traceback
        print(f"Function call failed: {str(e)}")
        traceback.print_exc()


async def test_tool_call():
    """Probe tool-calling support on Bedrock through Javelin.

    Sends a ``tools=``-style request to the Bedrock ``invoke`` endpoint and
    prints the raw response; on failure prints the error with a full traceback
    (matching the review feedback on the sibling examples).
    """
    print("\n==== Bedrock Tool Calling Test ====")
    try:
        query_body = {
            "messages": [{"role": "user", "content": "Give me a motivational quote"}],
            "tools": [
                {
                    "type": "function",
                    "function": {
                        "name": "get_motivation",
                        "description": "Returns motivational quote",
                        "parameters": {
                            "type": "object",
                            "properties": {
                                "category": {"type": "string", "description": "e.g. success, life"}
                            },
                            "required": []
                        }
                    }
                }
            ],
            "tool_choice": "auto",
            "max_tokens": 100,
            "temperature": 0.7,
        }

        response = client.query_unified_endpoint(
            provider_name="bedrock",
            endpoint_type="invoke",
            query_body=query_body,
            headers=headers,
            model_id="amazon.titan-text-express-v1",
        )
        print_response("Bedrock Tool Call", response)
    except Exception as e:
        # Include the full traceback, not just the message, for debuggability.
        import traceback
        print(f"Tool call failed: {str(e)}")
        traceback.print_exc()


async def main():
    # Run both probes sequentially so their printed output doesn't interleave.
    await test_function_call()
    await test_tool_call()


# Script entry point: drive the async probes on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
99 changes: 99 additions & 0 deletions examples/gemini/gemini_function_tool_call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
#!/usr/bin/env python
import os
import json
from dotenv import load_dotenv
from openai import OpenAI
from javelin_sdk import JavelinClient, JavelinConfig

load_dotenv()

def init_gemini_client():
    """Create an OpenAI-compatible Gemini client registered with Javelin.

    Reads GEMINI_API_KEY and JAVELIN_API_KEY from the environment.

    Returns:
        OpenAI: client pointed at Gemini's OpenAI-compatibility endpoint.

    Raises:
        ValueError: if any required environment variable is missing; the
            message names exactly which ones, for easier debugging.
    """
    gemini_api_key = os.getenv("GEMINI_API_KEY")
    javelin_api_key = os.getenv("JAVELIN_API_KEY")

    # Name every missing variable so misconfiguration is obvious at a glance
    # (consistent with the review feedback on the Azure example).
    missing_vars = [
        name
        for name, value in (
            ("GEMINI_API_KEY", gemini_api_key),
            ("JAVELIN_API_KEY", javelin_api_key),
        )
        if not value
    ]
    if missing_vars:
        raise ValueError(f"Missing environment variables: {', '.join(missing_vars)}")

    # Gemini's OpenAI-compatibility endpoint lets us reuse the OpenAI SDK.
    gemini_client = OpenAI(
        api_key=gemini_api_key,
        base_url="https://generativelanguage.googleapis.com/v1beta/openai/"
    )

    # Register with Javelin so requests flow through the gateway route.
    config = JavelinConfig(javelin_api_key=javelin_api_key)
    client = JavelinClient(config)
    client.register_gemini(gemini_client, route_name="google_univ")

    return gemini_client

def test_function_call(client):
    """Ask Gemini a weather question with a single function tool attached."""
    print("\n==== Gemini Function Calling Test ====")

    # Schema for the single tool offered to the model.
    weather_tool = {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get weather info for a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "e.g. Tokyo"},
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
                },
                "required": ["location"]
            }
        }
    }
    prompt = [{"role": "user", "content": "What's the weather like in Tokyo today?"}]

    try:
        response = client.chat.completions.create(
            model="gemini-1.5-flash",
            messages=prompt,
            tools=[weather_tool],
            tool_choice="auto"
        )
        print("Response:")
        print(response.model_dump_json(indent=2))
    except Exception as e:
        print(f"Function calling failed: {e}")

def test_tool_call(client):
    """Ask Gemini for a quote with a single quote-fetching tool attached."""
    print("\n==== Gemini Tool Calling Test ====")

    # Schema for the single tool offered to the model.
    quote_tool = {
        "type": "function",
        "function": {
            "name": "get_quote",
            "description": "Returns a motivational quote",
            "parameters": {
                "type": "object",
                "properties": {
                    "category": {"type": "string", "description": "e.g. success"}
                },
                "required": []
            }
        }
    }
    prompt = [{"role": "user", "content": "Give me a quote about perseverance."}]

    try:
        response = client.chat.completions.create(
            model="gemini-1.5-flash",
            messages=prompt,
            tools=[quote_tool],
            tool_choice="auto"
        )
        print("Response:")
        print(response.model_dump_json(indent=2))
    except Exception as e:
        print(f"Tool calling failed: {e}")

def main():
    """Run the Gemini tool/function-calling probes end to end.

    Initialisation failures are printed with a full traceback (per review
    feedback) and abort the run; the two probes then execute sequentially.
    """
    print("=== Gemini Javelin Tool/Function Test ===")
    try:
        gemini_client = init_gemini_client()
    except Exception as e:
        # Per review: include the full traceback, not just the message.
        import traceback
        print(f"Initialization failed: {e}")
        traceback.print_exc()
        return

    test_function_call(gemini_client)
    test_tool_call(gemini_client)

if __name__ == "__main__":
    main()
Loading
Loading