From 90b7aa64ce2557d22bc12eb33e52474a071092bf Mon Sep 17 00:00:00 2001
From: jknapp
Date: Sun, 16 Mar 2025 22:53:48 -0700
Subject: [PATCH] Fixing bug with max tokens

---
 claude_sonnet-3-7-bedrock.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/claude_sonnet-3-7-bedrock.py b/claude_sonnet-3-7-bedrock.py
index 1327c03..fc2543d 100644
--- a/claude_sonnet-3-7-bedrock.py
+++ b/claude_sonnet-3-7-bedrock.py
@@ -174,7 +174,7 @@ class Pipeline:
         payload = {"modelId": model_id,
                    "messages": processed_messages,
                    "system": [{'text': system_message['content'] if system_message else 'you are an intelligent ai assistant'}],
-                   "inferenceConfig": {"temperature": 1, "maxTokens": max_tokens },
+                   "inferenceConfig": {"temperature": 1, "maxTokens": MAX_COMBINED_TOKENS },
                    "additionalModelRequestFields": reasoning_config
                    }
         if body.get("stream", False):
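
Note (not part of the patch): the change replaces the per-request max_tokens value with a module-level MAX_COMBINED_TOKENS cap in the Converse inferenceConfig. Below is a minimal, self-contained sketch of the same request shape sent through the boto3 Bedrock Converse API. The constant value, model ID, prompt, and thinking budget are illustrative assumptions and not values taken from the patched file.

    import boto3

    # Assumed constant: the patch references MAX_COMBINED_TOKENS defined elsewhere
    # in claude_sonnet-3-7-bedrock.py; the value here is illustrative only.
    MAX_COMBINED_TOKENS = 64000

    bedrock = boto3.client("bedrock-runtime", region_name="us-west-2")

    payload = {
        "modelId": "us.anthropic.claude-3-7-sonnet-20250219-v1:0",  # hypothetical model ID
        "messages": [{"role": "user", "content": [{"text": "Hello"}]}],
        "system": [{"text": "you are an intelligent ai assistant"}],
        # The fix: cap maxTokens with the module-level constant rather than the
        # per-request max_tokens value, which could exceed the model's limit.
        "inferenceConfig": {"temperature": 1, "maxTokens": MAX_COMBINED_TOKENS},
        # Assumed extended-thinking config; the pipeline builds reasoning_config itself.
        "additionalModelRequestFields": {"thinking": {"type": "enabled", "budget_tokens": 1024}},
    }

    # Non-streaming call; the pipeline switches to converse_stream when body["stream"] is set.
    response = bedrock.converse(**payload)
    print(response["output"]["message"]["content"][0]["text"])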