Skip to content

Commit d9438a1

Browse files
committed
fix(gemini): token count
1 parent b4a389a commit d9438a1

3 files changed

Lines changed: 13 additions & 6 deletions

File tree

apps/sim/executor/handlers/agent/agent-handler.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -936,8 +936,12 @@ export class AgentBlockHandler implements BlockHandler {
936936
systemPrompt: validMessages ? undefined : inputs.systemPrompt,
937937
context: validMessages ? undefined : stringifyJSON(messages),
938938
tools: formattedTools,
939-
temperature: inputs.temperature != null ? Number(inputs.temperature) : undefined,
940-
maxTokens: inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined,
939+
temperature:
940+
inputs.temperature != null && inputs.temperature !== ''
941+
? Number(inputs.temperature)
942+
: undefined,
943+
maxTokens:
944+
inputs.maxTokens != null && inputs.maxTokens !== '' ? Number(inputs.maxTokens) : undefined,
941945
apiKey: inputs.apiKey,
942946
azureEndpoint: inputs.azureEndpoint,
943947
azureApiVersion: inputs.azureApiVersion,

apps/sim/executor/handlers/agent/types.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ export interface AgentInputs {
1414
slidingWindowSize?: string // For message-based sliding window
1515
slidingWindowTokens?: string // For token-based sliding window
1616
// LLM parameters
17-
temperature?: number
18-
maxTokens?: number
17+
temperature?: string
18+
maxTokens?: string
1919
apiKey?: string
2020
azureEndpoint?: string
2121
azureApiVersion?: string

apps/sim/providers/google/utils.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -123,13 +123,16 @@ export function extractFunctionCallPart(candidate: Candidate | undefined): Part
123123
}
124124

125125
/**
126-
* Converts usage metadata from SDK response to our format
126+
* Converts usage metadata from SDK response to our format.
127+
* Includes thinking tokens in candidatesTokenCount for correct billing.
127128
*/
128129
export function convertUsageMetadata(
129130
usageMetadata: GenerateContentResponseUsageMetadata | undefined
130131
): GeminiUsage {
131132
const promptTokenCount = usageMetadata?.promptTokenCount ?? 0
132-
const candidatesTokenCount = usageMetadata?.candidatesTokenCount ?? 0
133+
const thoughtsTokenCount = usageMetadata?.thoughtsTokenCount ?? 0
134+
// Include thinking tokens in output count for correct billing
135+
const candidatesTokenCount = (usageMetadata?.candidatesTokenCount ?? 0) + thoughtsTokenCount
133136
return {
134137
promptTokenCount,
135138
candidatesTokenCount,

0 commit comments

Comments (0)