From 588b0a434d6e7b742e8c7b105bc375b1326c8b36 Mon Sep 17 00:00:00 2001 From: jknapp Date: Sun, 16 Mar 2025 22:50:26 -0700 Subject: [PATCH] Fixing bug with max tokens --- claude_sonnet-3-7-bedrock.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/claude_sonnet-3-7-bedrock.py b/claude_sonnet-3-7-bedrock.py index d44a9d6..1327c03 100644 --- a/claude_sonnet-3-7-bedrock.py +++ b/claude_sonnet-3-7-bedrock.py @@ -165,10 +165,16 @@ class Pipeline: } else: reasoning_config = {} + + # If budget_tokens is positive and exceeds max_tokens, raise max_tokens to MAX_COMBINED_TOKENS + max_tokens = body.get("max_tokens", MAX_COMBINED_TOKENS) + if max_tokens < budget_tokens and budget_tokens > 0: + max_tokens = MAX_COMBINED_TOKENS + payload = {"modelId": model_id, "messages": processed_messages, "system": [{'text': system_message['content'] if system_message else 'you are an intelligent ai assistant'}], - "inferenceConfig": {"temperature": 1, "maxTokens": body.get("max_tokens", MAX_COMBINED_TOKENS)}, + "inferenceConfig": {"temperature": 1, "maxTokens": max_tokens }, "additionalModelRequestFields": reasoning_config } if body.get("stream", False):