Merged
211 changes: 139 additions & 72 deletions examples/bedrock/bedrock_client_universal.py
@@ -15,19 +15,15 @@ def init_bedrock():
2) Register them with Javelin (optional but often recommended),
3) Return the bedrock_runtime_client for direct 'invoke_model' calls.
"""
# Configure the bedrock-runtime and bedrock service clients
bedrock_runtime_client = boto3.client(
service_name="bedrock-runtime", region_name="us-west-2"
)
bedrock_client = boto3.client(service_name="bedrock", region_name="us-west-2")

# Initialize Javelin Client (if you want the route registered)
config = JavelinConfig(
javelin_api_key=os.getenv("JAVELIN_API_KEY") # Replace with your Javelin API key
)
javelin_client = JavelinClient(config)

# Register the bedrock clients with Javelin under route "bedrock"
javelin_client.register_bedrock(
bedrock_runtime_client=bedrock_runtime_client,
bedrock_client=bedrock_client,
@@ -37,65 +33,45 @@ def init_bedrock():


def bedrock_invoke_example(bedrock_runtime_client):
Contributor (medium):
The docstring for this function has been removed. It's important to keep docstrings to explain the purpose and usage of the function. Please add it back.

def bedrock_invoke_example(bedrock_runtime_client):
    """
    Demonstrates a basic 'invoke' style call (non-streaming).
    Returns a JSON-formatted string of the response.
    """

"""
Demonstrates a basic 'invoke' style call (non-streaming).
Returns a JSON-formatted string of the response.
"""
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-5-sonnet-20240620-v1:0", # Example model ID
body=json.dumps(
{
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is machine learning?"}],
}
),
modelId="anthropic.claude-3-5-sonnet-20240620-v1:0",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is machine learning?"}]
}),
contentType="application/json",
)

response_body = json.loads(response["body"].read())
return json.dumps(response_body, indent=2)
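
For readers who only want the generated text rather than the full JSON dump: Anthropic Messages responses carry a list of content blocks, so the text can be joined out of the parsed body. A minimal sketch, reusing the response_body parsed above and assuming a text-only completion:

    # Anthropic Messages responses return a list of content blocks;
    # plain completions have blocks of type "text".
    text = "".join(
        block["text"]
        for block in response_body["content"]
        if block.get("type") == "text"
    )
    print(text)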


def bedrock_converse_example(bedrock_runtime_client):
Contributor (medium):
The docstring for this function has been removed. It's important to keep docstrings to explain the purpose and usage of the function. Please add it back.

def bedrock_converse_example(bedrock_runtime_client):
    """
    Demonstrates a 'converse' style call by including 'system' text plus a user message.
    Still uses 'invoke_model', but the request body includes additional fields.
    """

"""
Demonstrates a 'converse' style call by including 'system' text plus a user message.
Still uses 'invoke_model', but the request body includes additional fields.
"""
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-5-sonnet-20240620-v1:0",
- body=json.dumps(
- {
- "anthropic_version": "bedrock-2023-05-31",
- "max_tokens": 500,
- "system": "You are an economist with access to lots of data",
- "messages": [
- {
- "role": "user",
- "content": "Write an article about the impact of high inflation on a country's GDP"
- }
- ],
- }
- ),
+ body=json.dumps({
+ "anthropic_version": "bedrock-2023-05-31",
+ "max_tokens": 500,
+ "system": "You are an economist with access to lots of data",
+ "messages": [{
+ "role": "user",
+ "content": "Write an article about the impact of high inflation on a country's GDP"
+ }]
+ }),
contentType="application/json",
)
response_body = json.loads(response["body"].read())
return json.dumps(response_body, indent=2)


def bedrock_invoke_stream_example(bedrock_runtime_client):
Contributor (medium):
The docstring for this function has been removed. It's important to keep docstrings to explain the purpose and usage of the function. Please add it back.

def bedrock_invoke_stream_example(bedrock_runtime_client):
    """
    Demonstrates a streaming 'invoke' call by processing the response tokens as they arrive.
    Iterates over the streaming response lines and prints them in real-time.
    """

"""
Demonstrates a streaming 'invoke' call by processing the response tokens as they arrive.
Iterates over the streaming response lines and prints them in real-time.
"""
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-5-sonnet-20240620-v1:0",
- body=json.dumps(
- {
- "anthropic_version": "bedrock-2023-05-31",
- "max_tokens": 100,
- "messages": [{"role": "user", "content": "What is machine learning?"}],
- }
- ),
+ body=json.dumps({
+ "anthropic_version": "bedrock-2023-05-31",
+ "max_tokens": 100,
+ "messages": [{"role": "user", "content": "What is machine learning?"}]
+ }),
contentType="application/json",
)
tokens = []
@@ -109,26 +85,19 @@ def bedrock_invoke_stream_example(bedrock_runtime_client):
print("Error streaming invoke response:", e)
return "".join(tokens)


def bedrock_converse_stream_example(bedrock_runtime_client):
Contributor (medium):
The docstring for this function has been removed. It's important to keep docstrings to explain the purpose and usage of the function. Please add it back.

def bedrock_converse_stream_example(bedrock_runtime_client):
    """
    Demonstrates a streaming 'converse' call by processing the response tokens as they arrive.
    Iterates over the streaming response lines for a conversation style input.
    """

"""
Demonstrates a streaming 'converse' call by processing the response tokens as they arrive.
Iterates over the streaming response lines for a conversation style input.
"""
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-5-sonnet-20240620-v1:0",
- body=json.dumps(
- {
- "anthropic_version": "bedrock-2023-05-31",
- "max_tokens": 500,
- "system": "You are an economist with access to lots of data",
- "messages": [
- {
- "role": "user",
- "content": "Write an article about the impact of high inflation on a country's GDP"
- }
- ],
- }
- ),
+ body=json.dumps({
+ "anthropic_version": "bedrock-2023-05-31",
+ "max_tokens": 500,
+ "system": "You are an economist with access to lots of data",
+ "messages": [{
+ "role": "user",
+ "content": "Write an article about the impact of high inflation on a country's GDP"
+ }]
+ }),
contentType="application/json",
)
tokens = []
@@ -141,6 +110,8 @@ def bedrock_converse_stream_example(bedrock_runtime_client):
except Exception as e:
print("Error streaming converse response:", e)
return "".join(tokens)


def test_claude_v2_invoke(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-v2 / invoke ---")
try:
@@ -151,13 +122,14 @@ def test_claude_v2_invoke(bedrock_runtime_client):
"max_tokens": 100,
"messages": [{"role": "user", "content": "Explain quantum computing"}]
}),
contentType="application/json"
contentType="application/json",
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("❌ Error:", e)


def test_claude_v2_stream(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-v2 / invoke-with-response-stream ---")
try:
@@ -168,7 +140,7 @@ def test_claude_v2_stream(bedrock_runtime_client):
"max_tokens": 100,
"messages": [{"role": "user", "content": "Tell me about LLMs"}]
}),
contentType="application/json"
contentType="application/json",
)
output = ""
for part in response["body"]:
@@ -180,6 +152,7 @@ def test_claude_v2_stream(bedrock_runtime_client):
except Exception as e:
print("❌ Error:", e)


def test_haiku_v3_invoke(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke ---")
try:
@@ -190,13 +163,14 @@ def test_haiku_v3_invoke(bedrock_runtime_client):
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is generative AI?"}]
}),
contentType="application/json"
contentType="application/json",
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("❌ Error:", e)


def test_haiku_v3_stream(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke-with-response-stream ---")
try:
@@ -207,7 +181,7 @@ def test_haiku_v3_stream(bedrock_runtime_client):
"max_tokens": 100,
"messages": [{"role": "user", "content": "What are AI guardrails?"}]
}),
contentType="application/json"
contentType="application/json",
)
output = ""
for part in response["body"]:
@@ -281,7 +255,7 @@ def main():
"max_tokens": 100,
"messages": [{"role": "user", "content": "Explain quantum computing"}]
}),
contentType="application/json"
contentType="application/json",
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
@@ -298,7 +272,7 @@ def main():
"max_tokens": 100,
"messages": [{"role": "user", "content": "Tell me about LLMs"}]
}),
contentType="application/json"
contentType="application/json",
)
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
@@ -318,7 +292,7 @@ def main():
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is generative AI?"}]
}),
contentType="application/json"
contentType="application/json",
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
@@ -335,7 +309,7 @@ def main():
"max_tokens": 100,
"messages": [{"role": "user", "content": "What are AI guardrails?"}]
}),
contentType="application/json"
contentType="application/json",
)
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
@@ -345,8 +319,101 @@ def main():
except Exception as e:
print("Error in haiku stream:", e)

print("\nScript complete.")
# 9) Test amazon.titan-text-lite-v1 / invoke-with-response-stream
print("\n--- Test: amazon.titan-text-lite-v1 / invoke-with-response-stream ---")
try:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="amazon.titan-text-lite-v1",
body=json.dumps({"inputText": "Test prompt for titan-lite"}),
contentType="application/json",
)
for part in response["body"]:
print(part)
print("\nStreamed Output Complete.")
except Exception as e:
print("Error in titan-text-lite-v1 stream:", e)

# 10–11) Test amazon.titan-text-premier-v1 across invoke types
for mode in ["invoke", "invoke-with-response-stream"]:
print(f"\n--- Test: amazon.titan-text-premier-v1 / {mode} ---")
try:
if mode == "invoke":
response = bedrock_runtime_client.invoke_model(
modelId="amazon.titan-text-premier-v1",
body=json.dumps({"inputText": "Premier test input"}),
contentType="application/json",
)
else:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="amazon.titan-text-premier-v1",
body=json.dumps({"inputText": "Premier test input"}),
contentType="application/json",
)
if "stream" in mode:
for part in response["body"]:
print(part)
print("\nStreamed Output Complete.")
else:
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
if "provided model identifier is invalid" in str(e):
print("✅ Skipped amazon.titan-text-premier-v1 test (model identifier invalid)")
Comment on lines +360 to +361
Contributor (medium):
The check for skipping the amazon.titan-text-premier-v1 test relies on string matching the error message. This might not be robust, as the error message could change. Consider using a more specific exception type or a more reliable way to determine if the model is unavailable.

            except Exception as e:
                if isinstance(e, some_specific_exception):
                    print("✅ Skipped amazon.titan-text-premier-v1 test (model identifier invalid)")
                elif "provided model identifier is invalid" in str(e):
                    print("✅ Skipped amazon.titan-text-premier-v1 test (model identifier invalid)")

else:
print(f"Error in titan-text-premier-v1 / {mode}:", e)

# 12–13) Test amazon.titan-text-premier-v1 across converse types
for mode in ["converse", "converse-stream"]:
print(f"\n--- Test: amazon.titan-text-premier-v1 / {mode} ---")
try:
if mode == "converse":
response = bedrock_runtime_client.converse(
modelId="amazon.titan-text-premier-v1",
messages=[{"role": "user", "content": [{"text": "Premier converse test input"}]}]
)
print(response)
else:
response = bedrock_runtime_client.converse_stream(
modelId="amazon.titan-text-premier-v1",
messages=[{"role": "user", "content": [{"text": "Premier converse test input"}]}]
)
for part in response["stream"]:
print(part)
except Exception as e:
if "provided model identifier is invalid" in str(e):
print("✅ Skipped amazon.titan-text-premier-v1 test (model identifier invalid)")
else:
print(f"Error in titan-text-premier-v1 / {mode}:", e)

# 14–16) Test cohere.command-light-text-v14 across modes
for mode in ["invoke", "converse", "converse-stream"]:
print(f"\n--- Test: cohere.command-light-text-v14 / {mode} ---")
try:
if mode == "invoke":
response = bedrock_runtime_client.invoke_model(
modelId="cohere.command-light-text-v14",
body=json.dumps({"prompt": "Cohere light model test"}),
contentType="application/json",
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
elif mode == "converse":
response = bedrock_runtime_client.converse(
modelId="cohere.command-light-text-v14",
messages=[{"role": "user", "content": [{"text": "Cohere converse test"}]}]
)
print(response)
else:
response = bedrock_runtime_client.converse_stream(
modelId="cohere.command-light-text-v14",
messages=[{"role": "user", "content": [{"text": "Cohere converse test"}]}]
)
for part in response["stream"]:
print(part)
except Exception as e:
print(f"Error in cohere.command-light-text-v14 / {mode}:", e)

print("\nScript complete.")


if __name__ == "__main__":