From 39fc0b3dfe446a269cff3aa122afeb60915c6a08 Mon Sep 17 00:00:00 2001 From: jemeza-codegen Date: Tue, 18 Mar 2025 11:24:14 -0700 Subject: [PATCH] fix!: increase output token limit to 12000 for claude-3-7 and xAI models --- src/codegen/extensions/langchain/llm.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/codegen/extensions/langchain/llm.py b/src/codegen/extensions/langchain/llm.py index 0d4795740..8191c7151 100644 --- a/src/codegen/extensions/langchain/llm.py +++ b/src/codegen/extensions/langchain/llm.py @@ -89,7 +89,8 @@ def _get_model(self) -> BaseChatModel: if not os.getenv("ANTHROPIC_API_KEY"): msg = "ANTHROPIC_API_KEY not found in environment. Please set it in your .env file or environment variables." raise ValueError(msg) - return ChatAnthropic(**self._get_model_kwargs(), max_tokens=8192, max_retries=10, timeout=1000) + max_tokens = 12000 if "claude-3-7" in self.model_name else 8192 + return ChatAnthropic(**self._get_model_kwargs(), max_tokens=max_tokens, max_retries=10, timeout=1000) elif self.model_provider == "openai": if not os.getenv("OPENAI_API_KEY"): @@ -101,7 +102,7 @@ def _get_model(self) -> BaseChatModel: if not os.getenv("XAI_API_KEY"): msg = "XAI_API_KEY not found in environment. Please set it in your .env file or environment variables." raise ValueError(msg) - return ChatXAI(**self._get_model_kwargs(), max_tokens=8192) + return ChatXAI(**self._get_model_kwargs(), max_tokens=12000) msg = f"Unknown model provider: {self.model_provider}. Must be one of: anthropic, openai, xai" raise ValueError(msg)