Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
152 changes: 152 additions & 0 deletions examples/bedrock/bedrock_client_universal.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,83 @@ def bedrock_converse_stream_example(bedrock_runtime_client):
except Exception as e:
print("Error streaming converse response:", e)
return "".join(tokens)
def test_claude_v2_invoke(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-v2 / invoke ---")
try:
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-v2",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "Explain quantum computing"}]
}),
contentType="application/json"
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("❌ Error:", e)
Comment on lines +144 to +159
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This test function and the following ones share a lot of common structure. Consider refactoring the common parts into a reusable helper function to reduce duplication and improve maintainability. For example, the model invocation logic, json parsing, and error handling could be generalized.

def invoke_model_and_print(bedrock_runtime_client, model_id, messages):
    try:
        response = bedrock_runtime_client.invoke_model(
            modelId=model_id,
            body=json.dumps({
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": 100,
                "messages": messages
            }),
            contentType="application/json"
        )
        result = json.loads(response["body"].read())
        print(json.dumps(result, indent=2))
    except Exception as e:
        print("❌ Error:", e)

def test_claude_v2_invoke(bedrock_runtime_client):
    print("\n--- Test: anthropic.claude-v2 / invoke ---")
    invoke_model_and_print(bedrock_runtime_client, "anthropic.claude-v2",
                           [{"role": "user", "content": "Explain quantum computing"}])

Comment on lines +158 to +159
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The error message here is very generic. Consider adding more context to the error message, such as the model ID or the specific operation that failed. This will make it easier to diagnose issues when they occur.

    except Exception as e:
        print(f"❌ Error invoking anthropic.claude-v2: {e}")


def test_claude_v2_stream(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-v2 / invoke-with-response-stream ---")
try:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="anthropic.claude-v2",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "Tell me about LLMs"}]
}),
contentType="application/json"
)
output = ""
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
delta = chunk.get("delta", {}).get("text", "")
output += delta
print(delta, end="", flush=True)
print("\nStreamed Output Complete.")
except Exception as e:
print("❌ Error:", e)
Comment on lines +161 to +181
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Similar to the test_claude_v2_invoke function, this test function shares a lot of common code with other streaming test functions. Consider creating a reusable helper function to reduce duplication.

def invoke_model_stream_and_print(bedrock_runtime_client, model_id, messages):
    try:
        response = bedrock_runtime_client.invoke_model_with_response_stream(
            modelId=model_id,
            body=json.dumps({
                "anthropic_version": "bedrock-2023-05-31",
                "max_tokens": 100,
                "messages": messages
            }),
            contentType="application/json"
        )
        output = ""
        for part in response["body"]:
            chunk = json.loads(part["chunk"]["bytes"].decode())
            delta = chunk.get("delta", {}).get("text", "")
            output += delta
            print(delta, end="", flush=True)
        print("\nStreamed Output Complete.")
    except Exception as e:
        print("❌ Error:", e)

def test_claude_v2_stream(bedrock_runtime_client):
    print("\n--- Test: anthropic.claude-v2 / invoke-with-response-stream ---")
    invoke_model_stream_and_print(bedrock_runtime_client, "anthropic.claude-v2",
                                  [{"role": "user", "content": "Tell me about LLMs"}])


def test_haiku_v3_invoke(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke ---")
try:
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-haiku-20240307-v1:0",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is generative AI?"}]
}),
contentType="application/json"
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("❌ Error:", e)

def test_haiku_v3_stream(bedrock_runtime_client):
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke-with-response-stream ---")
try:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="anthropic.claude-3-haiku-20240307-v1:0",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What are AI guardrails?"}]
}),
contentType="application/json"
)
output = ""
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
delta = chunk.get("delta", {}).get("text", "")
output += delta
print(delta, end="", flush=True)
print("\nStreamed Output Complete.")
except Exception as e:
print("❌ Error:", e)


def main():
Expand Down Expand Up @@ -194,8 +271,83 @@ def main():
except Exception as e:
print("Error in bedrock_converse_stream_example:", e)

# 5) Test anthropic.claude-v2 / invoke
print("\n--- Test: anthropic.claude-v2 / invoke ---")
try:
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-v2",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "Explain quantum computing"}]
}),
contentType="application/json"
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("Error in claude-v2 invoke:", e)
Comment on lines +274 to +289
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This block of code is duplicated from the test_claude_v2_invoke function. Refactor this into a reusable function to avoid duplication.


# 6) Test anthropic.claude-v2 / invoke-with-response-stream
print("\n--- Test: anthropic.claude-v2 / invoke-with-response-stream ---")
try:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="anthropic.claude-v2",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "Tell me about LLMs"}]
}),
contentType="application/json"
)
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
delta = chunk.get("delta", {}).get("text", "")
print(delta, end="", flush=True)
print("\nStreamed Output Complete.")
except Exception as e:
print("Error in claude-v2 stream:", e)
Comment on lines +291 to +309
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This block of code is duplicated from the test_claude_v2_stream function. Refactor this into a reusable function to avoid duplication.


# 7) Test anthropic.claude-3-haiku-20240307-v1:0 / invoke
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke ---")
try:
response = bedrock_runtime_client.invoke_model(
modelId="anthropic.claude-3-haiku-20240307-v1:0",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What is generative AI?"}]
}),
contentType="application/json"
)
result = json.loads(response["body"].read())
print(json.dumps(result, indent=2))
except Exception as e:
print("Error in haiku invoke:", e)
Comment on lines +311 to +326
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This block of code is duplicated from the test_claude_v2_invoke function. Refactor this into a reusable function to avoid duplication.


# 8) Test anthropic.claude-3-haiku-20240307-v1:0 / invoke-with-response-stream
print("\n--- Test: anthropic.claude-3-haiku-20240307-v1:0 / invoke-with-response-stream ---")
try:
response = bedrock_runtime_client.invoke_model_with_response_stream(
modelId="anthropic.claude-3-haiku-20240307-v1:0",
body=json.dumps({
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 100,
"messages": [{"role": "user", "content": "What are AI guardrails?"}]
}),
contentType="application/json"
)
for part in response["body"]:
chunk = json.loads(part["chunk"]["bytes"].decode())
delta = chunk.get("delta", {}).get("text", "")
print(delta, end="", flush=True)
print("\nStreamed Output Complete.")
except Exception as e:
print("Error in haiku stream:", e)
Comment on lines +328 to +346
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This block of code is duplicated from the test_claude_v2_stream function. Refactor this into a reusable function to avoid duplication.


print("\nScript complete.")



# Standard script entry-point guard: run the example suite only when this
# file is executed directly, not when it is imported as a module.
if __name__ == "__main__":
    main()
Loading