7 files changed, +8 -8 lines changed
@@ -936,8 +936,8 @@ export class AgentBlockHandler implements BlockHandler {
       systemPrompt: validMessages ? undefined : inputs.systemPrompt,
       context: validMessages ? undefined : stringifyJSON(messages),
       tools: formattedTools,
-      temperature: inputs.temperature,
-      maxTokens: inputs.maxTokens,
+      temperature: inputs.temperature != null ? Number(inputs.temperature) : undefined,
+      maxTokens: inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined,
       apiKey: inputs.apiKey,
       azureEndpoint: inputs.azureEndpoint,
       azureApiVersion: inputs.azureApiVersion,
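Before this change, temperature and maxTokens were forwarded as-is, so values that arrive as strings from the block's inputs would reach the provider request untouched. A minimal TypeScript sketch of the new coercion, assuming string inputs; the RawInputs type and the literal values below are illustrative, not taken from the PR:

type RawInputs = { temperature?: string | number; maxTokens?: string | number }

const inputs: RawInputs = { temperature: '0.7', maxTokens: '1024' }

// Coerce to numbers; the `!= null` guard covers both null and undefined,
// so unset inputs stay undefined instead of becoming NaN or 0.
const temperature = inputs.temperature != null ? Number(inputs.temperature) : undefined
const maxTokens = inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined
console.log(temperature, maxTokens) // 0.7 1024 (numbers, not strings)

const empty: RawInputs = {}
console.log(empty.maxTokens != null ? Number(empty.maxTokens) : undefined) // undefined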
@@ -102,7 +102,7 @@ export const azureOpenAIProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
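This Azure OpenAI hunk and the five provider diffs that follow (Cerebras, Groq, OpenAI, vLLM, xAI) make the same one-line change: the token cap is now sent as max_completion_tokens rather than max_tokens. In the OpenAI Chat Completions API, max_tokens is deprecated in favor of max_completion_tokens and newer reasoning models reject the old field; the other OpenAI-compatible backends here are assumed to accept the same parameter name. A short sketch of the resulting payload, using a hypothetical ProviderRequest shape and buildPayload helper rather than the project's actual types:

interface ProviderRequest {
  model: string
  temperature?: number
  maxTokens?: number
}

function buildPayload(request: ProviderRequest): Record<string, unknown> {
  const payload: Record<string, unknown> = { model: request.model }
  if (request.temperature !== undefined) payload.temperature = request.temperature
  // The cap is mapped to max_completion_tokens, mirroring the renamed field in the diff.
  if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
  return payload
}

console.log(buildPayload({ model: 'gpt-4o', temperature: 0.2, maxTokens: 512 }))
// { model: 'gpt-4o', temperature: 0.2, max_completion_tokens: 512 }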
@@ -77,7 +77,7 @@ export const cerebrasProvider: ProviderConfig = {
       messages: allMessages,
     }
     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
     if (request.responseFormat) {
       payload.response_format = {
         type: 'json_schema',
@@ -74,7 +74,7 @@ export const groqProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.responseFormat) {
       payload.response_format = {
@@ -81,7 +81,7 @@ export const openaiProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
     if (request.verbosity !== undefined) payload.verbosity = request.verbosity
@@ -135,7 +135,7 @@ export const vllmProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) payload.temperature = request.temperature
-    if (request.maxTokens != null) payload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens

     if (request.responseFormat) {
       payload.response_format = {
@@ -92,7 +92,7 @@ export const xAIProvider: ProviderConfig = {
     }

     if (request.temperature !== undefined) basePayload.temperature = request.temperature
-    if (request.maxTokens != null) basePayload.max_tokens = request.maxTokens
+    if (request.maxTokens != null) basePayload.max_completion_tokens = request.maxTokens
     let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null

     if (tools?.length) {