53 changes: 49 additions & 4 deletions apps/sim/executor/execution/block-executor.ts
@@ -28,6 +28,7 @@ import type {
 } from '@/executor/types'
 import { streamingResponseFormatProcessor } from '@/executor/utils'
 import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
+import { isJSONString } from '@/executor/utils/json'
 import { filterOutputForLog } from '@/executor/utils/output-filter'
 import { validateBlockType } from '@/executor/utils/permission-check'
 import type { VariableResolver } from '@/executor/variables/resolver'
@@ -86,7 +87,7 @@ export class BlockExecutor {
       resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
 
       if (blockLog) {
-        blockLog.input = resolvedInputs
+        blockLog.input = this.parseJsonInputs(resolvedInputs)
       }
     } catch (error) {
       cleanupSelfReference?.()
@@ -157,7 +158,14 @@
       const displayOutput = filterOutputForLog(block.metadata?.id || '', normalizedOutput, {
         block,
       })
-      this.callOnBlockComplete(ctx, node, block, resolvedInputs, displayOutput, duration)
+      this.callOnBlockComplete(
+        ctx,
+        node,
+        block,
+        this.parseJsonInputs(resolvedInputs),
+        displayOutput,
+        duration
+      )
     }
 
     return normalizedOutput
@@ -233,7 +241,7 @@
       blockLog.durationMs = duration
       blockLog.success = false
       blockLog.error = errorMessage
-      blockLog.input = input
+      blockLog.input = this.parseJsonInputs(input)
       blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
     }
 
@@ -248,7 +256,14 @@
 
     if (!isSentinel) {
       const displayOutput = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
-      this.callOnBlockComplete(ctx, node, block, input, displayOutput, duration)
+      this.callOnBlockComplete(
+        ctx,
+        node,
+        block,
+        this.parseJsonInputs(input),
+        displayOutput,
+        duration
+      )
     }
 
     const hasErrorPort = this.hasErrorPortEdge(node)
@@ -336,6 +351,36 @@
     return { result: output }
   }
 
+  /**
+   * Parse JSON string inputs to objects for log display only.
+   * Attempts to parse any string that looks like JSON.
+   * Returns a new object; does not mutate the original inputs.
+   */
+  private parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
+    let result = inputs
+    let hasChanges = false
+
+    for (const [key, value] of Object.entries(inputs)) {
+      // isJSONString is a quick heuristic (checks for { or [), not a validator.
+      // Invalid JSON is safely caught below; this just avoids JSON.parse on every string.
+      if (typeof value !== 'string' || !isJSONString(value)) {
+        continue
+      }
+
+      try {
+        if (!hasChanges) {
+          result = { ...inputs }
+          hasChanges = true
+        }
+        result[key] = JSON.parse(value.trim())
+      } catch {
+        // Not valid JSON; keep the original string
+      }
+    }
+
+    return result
+  }
+
   private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void {
     const blockId = node.id
     const blockName = block.metadata?.name ?? blockId
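The new helper only changes what block logs display; the resolved inputs handed to block handlers are untouched. A quick standalone sketch of the intended behavior, using hypothetical input values (not taken from the PR):

```ts
// Hypothetical values: only string values that look like JSON get parsed;
// everything else, including strings that fail to parse, passes through unchanged.
const logged = parseJsonInputs({
  config: '{"retries": 3}', // passes the heuristic and parses: displayed as an object
  note: 'hello world',      // plain string: kept as-is
  broken: '{not json',      // passes the heuristic but JSON.parse throws: kept as-is
  count: 7,                 // non-string: skipped
})
// => { config: { retries: 3 }, note: 'hello world', broken: '{not json', count: 7 }
```

The copy-on-write spread (`result = { ...inputs }`) means callers whose inputs contain no JSON-looking strings get the original object back, so the common path allocates nothing.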
4 changes: 2 additions & 2 deletions apps/sim/executor/handlers/agent/agent-handler.ts
@@ -936,8 +936,8 @@ export class AgentBlockHandler implements BlockHandler {
       systemPrompt: validMessages ? undefined : inputs.systemPrompt,
       context: validMessages ? undefined : stringifyJSON(messages),
       tools: formattedTools,
-      temperature: inputs.temperature,
-      maxTokens: inputs.maxTokens,
+      temperature: inputs.temperature != null ? Number(inputs.temperature) : undefined,
+      maxTokens: inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined,
       apiKey: inputs.apiKey,
       azureEndpoint: inputs.azureEndpoint,
       azureApiVersion: inputs.azureApiVersion,
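Both parameters previously passed through as whatever type the workflow resolved, often strings from form fields. The loose `!= null` check excludes both `null` and `undefined` in one comparison, and `Number()` normalizes string values. A minimal sketch of the coercion semantics (standalone, not from the PR):

```ts
// `v != null` is false only for null and undefined, so 0 still passes through.
const coerce = (v: unknown) => (v != null ? Number(v) : undefined)

coerce('0.7')     // 0.7, a string from a form field becomes a number
coerce(0)         // 0, falsy but valid, not dropped
coerce(null)      // undefined
coerce(undefined) // undefined
// Caveat: Number('abc') is NaN; this presumes non-numeric strings are rejected upstream.
```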
2 changes: 1 addition & 1 deletion apps/sim/providers/azure-openai/index.ts
@@ -102,7 +102,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
 
     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
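This is the first of several provider files that switch the payload key from `max_tokens` to `max_completion_tokens` (Azure OpenAI, Cerebras, Groq, OpenAI, vLLM, and xAI below), matching the newer OpenAI-style chat completions parameter, while DeepSeek, Mistral, Ollama, and OpenRouter keep `max_tokens`. The guard also loosens from `!== undefined` to `!= null`, so a null maxTokens no longer leaks into the payload. A simplified sketch of the shared pattern (assumed request shape, not the full provider config):

```ts
// Simplified sketch: request.maxTokens may be a number, null, or undefined.
interface ProviderRequest {
  maxTokens?: number | null
}

function applyMaxTokens(request: ProviderRequest, payload: Record<string, unknown>): void {
  // `!= null` filters both null and undefined, so the key is only set when a
  // real value is present; max_completion_tokens is the newer OpenAI-style
  // name for the legacy max_tokens parameter.
  if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
}
```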
2 changes: 1 addition & 1 deletion apps/sim/providers/cerebras/index.ts
@@ -77,7 +77,7 @@
       messages: allMessages,
     }
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
     if (request.responseFormat) {
       payload.response_format = {
         type: 'json_schema',
2 changes: 1 addition & 1 deletion apps/sim/providers/deepseek/index.ts
@@ -81,7 +81,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
 
     let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
 
2 changes: 1 addition & 1 deletion apps/sim/providers/gemini/core.ts
@@ -349,7 +349,7 @@ export async function executeGeminiRequest(
   if (request.temperature !== undefined) {
     geminiConfig.temperature = request.temperature
   }
-  if (request.maxTokens !== undefined) {
+  if (request.maxTokens != null) {
     geminiConfig.maxOutputTokens = request.maxTokens
   }
   if (systemInstruction) {
2 changes: 1 addition & 1 deletion apps/sim/providers/groq/index.ts
@@ -74,7 +74,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
 
     if (request.responseFormat) {
       payload.response_format = {
2 changes: 1 addition & 1 deletion apps/sim/providers/mistral/index.ts
@@ -91,7 +91,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
 
     if (request.responseFormat) {
       payload.response_format = {
2 changes: 1 addition & 1 deletion apps/sim/providers/ollama/index.ts
@@ -105,7 +105,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
 
     if (request.responseFormat) {
       payload.response_format = {
2 changes: 1 addition & 1 deletion apps/sim/providers/openai/index.ts
@@ -81,7 +81,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
 
     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
2 changes: 1 addition & 1 deletion apps/sim/providers/openrouter/index.ts
@@ -121,7 +121,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
 
     let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
     let hasActiveTools = false
2 changes: 1 addition & 1 deletion apps/sim/providers/vllm/index.ts
@@ -135,7 +135,7 @@
     }
 
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
 
     if (request.responseFormat) {
       payload.response_format = {
2 changes: 1 addition & 1 deletion apps/sim/providers/xai/index.ts
@@ -92,7 +92,7 @@
     }
 
     if (request.temperature !== undefined) basePayload.temperature = request.temperature
-    if (request.maxTokens !== undefined) basePayload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) basePayload.max_completion_tokens = request.maxTokens
     let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
 
     if (tools?.length) {