
Commit 6c57663

fix
1 parent 9fff8cb commit 6c57663

File tree

2 files changed, +13 −23 lines changed

apps/sim/providers/bedrock/index.ts

Lines changed: 7 additions & 18 deletions
@@ -20,11 +20,7 @@ import {
   generateToolUseId,
   getBedrockInferenceProfileId,
 } from '@/providers/bedrock/utils'
-import {
-  getMaxOutputTokensForModel,
-  getProviderDefaultModel,
-  getProviderModels,
-} from '@/providers/models'
+import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
 import type {
   ProviderConfig,
   ProviderRequest,
@@ -261,18 +257,11 @@ export const bedrockProvider: ProviderConfig = {
 
     const systemPromptWithSchema = systemContent
 
-    const inferenceConfig = {
+    const inferenceConfig: { temperature: number; maxTokens?: number } = {
       temperature: Number.parseFloat(String(request.temperature ?? 0.7)),
-      maxTokens:
-        Number.parseInt(String(request.maxTokens)) ||
-        getMaxOutputTokensForModel(request.model, request.stream ?? false),
     }
-
-    const toolLoopInferenceConfig = {
-      ...inferenceConfig,
-      maxTokens:
-        Number.parseInt(String(request.maxTokens)) ||
-        getMaxOutputTokensForModel(request.model, false),
+    if (request.maxTokens != null) {
+      inferenceConfig.maxTokens = Number.parseInt(String(request.maxTokens))
     }
 
     const shouldStreamToolCalls = request.streamToolCalls ?? false
@@ -381,7 +370,7 @@ export const bedrockProvider: ProviderConfig = {
       modelId: bedrockModelId,
       messages,
       system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
-      inferenceConfig: toolLoopInferenceConfig,
+      inferenceConfig,
       toolConfig,
     })
@@ -627,7 +616,7 @@ export const bedrockProvider: ProviderConfig = {
       modelId: bedrockModelId,
       messages: currentMessages,
       system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
-      inferenceConfig: toolLoopInferenceConfig,
+      inferenceConfig: inferenceConfig,
       toolConfig: bedrockTools?.length
         ? { tools: bedrockTools, toolChoice: nextToolChoice }
         : undefined,
@@ -694,7 +683,7 @@ export const bedrockProvider: ProviderConfig = {
       modelId: bedrockModelId,
       messages: currentMessages,
       system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
-      inferenceConfig: toolLoopInferenceConfig,
+      inferenceConfig: inferenceConfig,
       toolConfig: {
         tools: [structuredOutputTool],
         toolChoice: { tool: { name: structuredOutputToolName } },
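Taken together, these hunks stop the provider from synthesizing a maxTokens value: the single inferenceConfig gains an optional maxTokens that is set only when the request supplies one, and toolLoopInferenceConfig, whose only job was to call the fallback with a different streaming flag, disappears along with the getMaxOutputTokensForModel import. A minimal sketch of the new construction, with the request type trimmed to the two fields involved (the real ProviderRequest carries many more):

// Sketch only: RequestSubset is a stand-in for the fields this hunk touches.
interface RequestSubset {
  temperature?: number | string
  maxTokens?: number | string
}

function buildInferenceConfig(request: RequestSubset) {
  const inferenceConfig: { temperature: number; maxTokens?: number } = {
    temperature: Number.parseFloat(String(request.temperature ?? 0.7)),
  }
  // maxTokens is forwarded only when the caller set one. Omitting the field
  // entirely lets Bedrock apply the model's own output ceiling, which is why
  // the getMaxOutputTokensForModel fallback could be deleted.
  if (request.maxTokens != null) {
    inferenceConfig.maxTokens = Number.parseInt(String(request.maxTokens))
  }
  return inferenceConfig
}

// buildInferenceConfig({})                    → { temperature: 0.7 }
// buildInferenceConfig({ maxTokens: '4096' }) → { temperature: 0.7, maxTokens: 4096 }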

apps/sim/providers/models.ts

Lines changed: 6 additions & 5 deletions
@@ -34,10 +34,15 @@ export interface ModelCapabilities {
   toolUsageControl?: boolean
   computerUse?: boolean
   nativeStructuredOutputs?: boolean
+  /**
+   * Max output tokens configuration for Anthropic SDK's streaming timeout workaround.
+   * The Anthropic SDK throws an error for non-streaming requests that may take >10 minutes.
+   * This only applies to direct Anthropic API calls, not Bedrock (which uses AWS SDK).
+   */
   maxOutputTokens?: {
     /** Maximum tokens for streaming requests */
     max: number
-    /** Safe default for non-streaming requests (to avoid timeout issues) */
+    /** Safe default for non-streaming requests (to avoid Anthropic SDK timeout errors) */
     default: number
   }
   reasoningEffort?: {
@@ -1709,7 +1714,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
     capabilities: {
       temperature: { min: 0, max: 1 },
       nativeStructuredOutputs: true,
-      maxOutputTokens: { max: 64000, default: 8192 },
     },
     contextWindow: 200000,
   },
@@ -1723,7 +1727,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
     capabilities: {
       temperature: { min: 0, max: 1 },
       nativeStructuredOutputs: true,
-      maxOutputTokens: { max: 64000, default: 8192 },
     },
     contextWindow: 200000,
   },
@@ -1737,7 +1740,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
     capabilities: {
       temperature: { min: 0, max: 1 },
       nativeStructuredOutputs: true,
-      maxOutputTokens: { max: 64000, default: 8192 },
     },
     contextWindow: 200000,
   },
@@ -1751,7 7 +1753,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
     capabilities: {
       temperature: { min: 0, max: 1 },
       nativeStructuredOutputs: true,
-      maxOutputTokens: { max: 64000, default: 8192 },
     },
     contextWindow: 200000,
   },
