Commit 65bc216

improvement(block-inputs): must parse json accurately + models max_tokens fix (#3033)
* improvement(block-inputs): must parse json accurately
* fix sheets typing
* add reference comment
* fix models
* revert blocks changes
* fix param to follow openai new convention
1 parent ef613ef commit 65bc216

File tree

13 files changed: +62, -17 lines


apps/sim/executor/execution/block-executor.ts

Lines changed: 49 additions & 4 deletions
@@ -28,6 +28,7 @@ import type {
 } from '@/executor/types'
 import { streamingResponseFormatProcessor } from '@/executor/utils'
 import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
+import { isJSONString } from '@/executor/utils/json'
 import { filterOutputForLog } from '@/executor/utils/output-filter'
 import { validateBlockType } from '@/executor/utils/permission-check'
 import type { VariableResolver } from '@/executor/variables/resolver'
@@ -86,7 +87,7 @@ export class BlockExecutor {
       resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)

       if (blockLog) {
-        blockLog.input = resolvedInputs
+        blockLog.input = this.parseJsonInputs(resolvedInputs)
       }
     } catch (error) {
       cleanupSelfReference?.()
@@ -157,7 +158,14 @@
       const displayOutput = filterOutputForLog(block.metadata?.id || '', normalizedOutput, {
         block,
       })
-      this.callOnBlockComplete(ctx, node, block, resolvedInputs, displayOutput, duration)
+      this.callOnBlockComplete(
+        ctx,
+        node,
+        block,
+        this.parseJsonInputs(resolvedInputs),
+        displayOutput,
+        duration
+      )
     }

     return normalizedOutput
@@ -233,7 +241,7 @@
       blockLog.durationMs = duration
       blockLog.success = false
       blockLog.error = errorMessage
-      blockLog.input = input
+      blockLog.input = this.parseJsonInputs(input)
       blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
     }
@@ -248,7 +256,14 @@

     if (!isSentinel) {
       const displayOutput = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
-      this.callOnBlockComplete(ctx, node, block, input, displayOutput, duration)
+      this.callOnBlockComplete(
+        ctx,
+        node,
+        block,
+        this.parseJsonInputs(input),
+        displayOutput,
+        duration
+      )
     }

     const hasErrorPort = this.hasErrorPortEdge(node)
@@ -336,6 +351,36 @@
     return { result: output }
   }

+  /**
+   * Parse JSON string inputs to objects for log display only.
+   * Attempts to parse any string that looks like JSON.
+   * Returns a new object - does not mutate the original inputs.
+   */
+  private parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
+    let result = inputs
+    let hasChanges = false
+
+    for (const [key, value] of Object.entries(inputs)) {
+      // isJSONString is a quick heuristic (checks for { or [), not a validator.
+      // Invalid JSON is safely caught below - this just avoids JSON.parse on every string.
+      if (typeof value !== 'string' || !isJSONString(value)) {
+        continue
+      }
+
+      try {
+        if (!hasChanges) {
+          result = { ...inputs }
+          hasChanges = true
+        }
+        result[key] = JSON.parse(value.trim())
+      } catch {
+        // Not valid JSON, keep original string
+      }
+    }
+
+    return result
+  }
+
   private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void {
     const blockId = node.id
     const blockName = block.metadata?.name ?? blockId
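
Note: the new parseJsonInputs helper only changes what gets logged; the inputs actually passed to execution are untouched. Below is a minimal standalone sketch of the copy-on-write pattern it uses, with a stand-in isJSONString (an assumption — the real helper lives in @/executor/utils/json and may differ):

// Stand-in for '@/executor/utils/json' (assumption): a cheap heuristic,
// not a validator — it only checks whether the string *looks* like JSON.
function isJSONString(value: string): boolean {
  const trimmed = value.trim()
  return trimmed.startsWith('{') || trimmed.startsWith('[')
}

function parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
  let result = inputs
  let hasChanges = false

  for (const [key, value] of Object.entries(inputs)) {
    if (typeof value !== 'string' || !isJSONString(value)) continue
    try {
      // Clone lazily: the inputs object is copied at most once,
      // and only when some value actually parses and needs replacing.
      if (!hasChanges) {
        result = { ...inputs }
        hasChanges = true
      }
      result[key] = JSON.parse(value.trim())
    } catch {
      // Looked like JSON but wasn't — keep the original string.
    }
  }
  return result
}

// Only `payload` is converted; the malformed string and the number pass through.
parseJsonInputs({ payload: '{"a": 1}', note: '{broken', count: 3 })
// => { payload: { a: 1 }, note: '{broken', count: 3 }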

apps/sim/executor/handlers/agent/agent-handler.ts

Lines changed: 2 additions & 2 deletions
@@ -936,8 +936,8 @@ export class AgentBlockHandler implements BlockHandler {
       systemPrompt: validMessages ? undefined : inputs.systemPrompt,
       context: validMessages ? undefined : stringifyJSON(messages),
       tools: formattedTools,
-      temperature: inputs.temperature,
-      maxTokens: inputs.maxTokens,
+      temperature: inputs.temperature != null ? Number(inputs.temperature) : undefined,
+      maxTokens: inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined,
       apiKey: inputs.apiKey,
       azureEndpoint: inputs.azureEndpoint,
       azureApiVersion: inputs.azureApiVersion,
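
These ternaries matter because resolved block inputs can arrive as strings (e.g. '0.7' from a template): Number(...) coerces them, and the loose != null guard collapses both null and undefined to undefined so the field is simply omitted downstream. A small sketch of the semantics (the helper name is made up for illustration, mirroring the inline ternary above):

// Hypothetical helper, not in the codebase — same logic as the ternary.
function coerceNumber(value: unknown): number | undefined {
  // `!= null` is deliberately loose: it matches both null and undefined.
  return value != null ? Number(value) : undefined
}

coerceNumber('0.7')     // 0.7  — template-resolved strings become numbers
coerceNumber(2048)      // 2048 — numbers pass through unchanged
coerceNumber(null)      // undefined
coerceNumber(undefined) // undefined
coerceNumber('abc')     // NaN  — caveat: Number() coerces, it does not validate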

apps/sim/providers/azure-openai/index.ts

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ export const azureOpenAIProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
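
Two things change in this one line: the parameter is renamed to max_completion_tokens, and the guard loosens from !== undefined to != null. The old guard would copy an explicit null onto the payload; the new one filters it out. A minimal illustration of the difference (payload shape assumed):

const request: { maxTokens: number | null | undefined } = { maxTokens: null }
const payload: Record<string, unknown> = {}

// Old guard: null !== undefined is true, so `max_tokens: null` would be sent.
if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens

// New guard: null == null, so nothing is set for null *or* undefined.
if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

payload // => { max_tokens: null } — only the old-style branch fired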

apps/sim/providers/cerebras/index.ts

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ export const cerebrasProvider: ProviderConfig = {
       messages: allMessages,
     }
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
     if (request.responseFormat) {
       payload.response_format = {
         type: 'json_schema',

apps/sim/providers/deepseek/index.ts

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ export const deepseekProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens

     let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null

apps/sim/providers/gemini/core.ts

Lines changed: 1 addition & 1 deletion
@@ -349,7 +349,7 @@ export async function executeGeminiRequest(
   if (request.temperature !== undefined) {
     geminiConfig.temperature = request.temperature
   }
-  if (request.maxTokens !== undefined) {
+  if (request.maxTokens != null) {
     geminiConfig.maxOutputTokens = request.maxTokens
   }
   if (systemInstruction) {

apps/sim/providers/groq/index.ts

Lines changed: 1 addition & 1 deletion
@@ -74,7 +74,7 @@ export const groqProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.responseFormat) {
       payload.response_format = {

apps/sim/providers/mistral/index.ts

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ export const mistralProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens

     if (request.responseFormat) {
       payload.response_format = {

apps/sim/providers/ollama/index.ts

Lines changed: 1 addition & 1 deletion
@@ -105,7 +105,7 @@ export const ollamaProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens

     if (request.responseFormat) {
       payload.response_format = {

apps/sim/providers/openai/index.ts

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ export const openaiProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
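
Per the commit message ("fix param to follow openai new convention"), the OpenAI-style providers in this commit (OpenAI, Azure OpenAI, Groq, Cerebras) switch to max_completion_tokens, which OpenAI introduced as the successor to the deprecated max_tokens on chat completions; DeepSeek, Mistral, and Ollama keep the old name. A rough sketch of the resulting request body (model and values are placeholders):

const body = {
  model: 'gpt-4o', // placeholder
  messages: [{ role: 'user', content: 'Hello' }],
  temperature: 0.7,
  // Newer OpenAI convention; replaces the deprecated top-level `max_tokens`.
  max_completion_tokens: 1024,
}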
