From 4356d4af7abe3e7c05fdc879a440f14508da9c7f Mon Sep 17 00:00:00 2001 From: Dhruvj07 Date: Tue, 8 Apr 2025 16:27:20 +0530 Subject: [PATCH 1/3] fix: Remove extra comma --- javelin_sdk/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javelin_sdk/client.py b/javelin_sdk/client.py index ee216de..439b830 100644 --- a/javelin_sdk/client.py +++ b/javelin_sdk/client.py @@ -708,7 +708,7 @@ def override_endpoint_url(request: Any, **kwargs) -> None: updated_url = original_url._replace( scheme=parsed_base.scheme, netloc=parsed_base.netloc, - path=f"/v1{original_url.path}", + path=f"{original_url.path}", ) request.url = urlunparse(updated_url) From a47b158d7f6331c87f9c8004eb0d67428542cfab Mon Sep 17 00:00:00 2001 From: Dhruvj07 Date: Tue, 8 Apr 2025 16:41:44 +0530 Subject: [PATCH 2/3] fix: Revert client changes --- javelin_sdk/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/javelin_sdk/client.py b/javelin_sdk/client.py index 439b830..ee216de 100644 --- a/javelin_sdk/client.py +++ b/javelin_sdk/client.py @@ -708,7 +708,7 @@ def override_endpoint_url(request: Any, **kwargs) -> None: updated_url = original_url._replace( scheme=parsed_base.scheme, netloc=parsed_base.netloc, - path=f"{original_url.path}", + path=f"/v1{original_url.path}", ) request.url = urlunparse(updated_url) From 70d0d36587d8aaaaca89cdf88f098994f2612eca Mon Sep 17 00:00:00 2001 From: Dhruvj07 Date: Thu, 10 Apr 2025 01:25:15 +0530 Subject: [PATCH 3/3] fix: Update models suffix endpoint for Azure-deployed model; update payload for Bedrock LLM call --- examples/azure-openai/azure-universal.py | 4 ++-- examples/bedrock/bedrock_client_universal.py | 18 +++++------------- 2 files changed, 7 insertions(+), 15 deletions(-) diff --git a/examples/azure-openai/azure-universal.py b/examples/azure-openai/azure-universal.py index b13b611..3489c8e 100644 --- a/examples/azure-openai/azure-universal.py +++ b/examples/azure-openai/azure-universal.py @@ -54,7 +54,7 @@ def 
get_chat_completion_sync(azure_client, messages): """ response = azure_client.chat.completions.create( - model="gpt-4", messages=messages # Adjust to your Azure deployment name + model="gpt35", messages=messages # Adjust to your Azure deployment name ) return response.to_json() @@ -65,7 +65,7 @@ def get_chat_completion_stream(azure_client, messages): Returns the concatenated text from the streamed chunks. """ response = azure_client.chat.completions.create( - model="gpt-4", # Adjust to your Azure deployment name + model="gpt35", # Adjust to your Azure deployment name messages=messages, stream=True, ) diff --git a/examples/bedrock/bedrock_client_universal.py b/examples/bedrock/bedrock_client_universal.py index 71d2b9e..a50c81e 100644 --- a/examples/bedrock/bedrock_client_universal.py +++ b/examples/bedrock/bedrock_client_universal.py @@ -68,32 +68,27 @@ def bedrock_converse_example(bedrock_runtime_client): { "anthropic_version": "bedrock-2023-05-31", "max_tokens": 500, - "system": [ - {"type": "text", "text": "You are an economist with access to lots of data"} - ], + "system": "You are an economist with access to lots of data", "messages": [ { "role": "user", - "content": [{"type": "text", "text": "Write an article about the impact of high inflation on a country's GDP"}] + "content": "Write an article about the impact of high inflation on a country's GDP" } ], } ), contentType="application/json", ) - response_body = json.loads(response["body"].read()) return json.dumps(response_body, indent=2) - - def bedrock_invoke_stream_example(bedrock_runtime_client): """ Demonstrates a streaming 'invoke' call by processing the response tokens as they arrive. Iterates over the streaming response lines and prints them in real-time. 
""" response = bedrock_runtime_client.invoke_model( - modelId="anthropic.claude-3-5-sonnet-20240620-v1:0", # Example model ID + modelId="anthropic.claude-3-5-sonnet-20240620-v1:0", body=json.dumps( { "anthropic_version": "bedrock-2023-05-31", @@ -114,7 +109,6 @@ def bedrock_invoke_stream_example(bedrock_runtime_client): print("Error streaming invoke response:", e) return "".join(tokens) - def bedrock_converse_stream_example(bedrock_runtime_client): """ Demonstrates a streaming 'converse' call by processing the response tokens as they arrive. @@ -126,13 +120,11 @@ def bedrock_converse_stream_example(bedrock_runtime_client): { "anthropic_version": "bedrock-2023-05-31", "max_tokens": 500, - "system": [ - {"type": "text", "text": "You are an economist with access to lots of data"} - ], + "system": "You are an economist with access to lots of data", "messages": [ { "role": "user", - "content": [{"type": "text", "text": "Write an article about the impact of high inflation on a country's GDP"}] + "content": "Write an article about the impact of high inflation on a country's GDP" } ], }