
Commit a0c00bc

fix(models): remove temp from models that don't support it
1 parent bd07f80 commit a0c00bc

File tree

2 files changed: 22 additions & 18 deletions


apps/sim/providers/models.ts

Lines changed: 0 additions & 12 deletions
@@ -71,7 +71,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -84,7 +83,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -97,7 +95,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -110,7 +107,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -123,7 +119,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -136,7 +131,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-17',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -171,7 +165,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-15',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -184,7 +177,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-15',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -197,7+189,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-15',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -210,7 +201,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-06-15',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -330,7 +320,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-03-21',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },
@@ -356,7 +345,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
       updatedAt: '2025-03-21',
     },
     capabilities: {
-      temperature: { min: 0, max: 2 },
       toolUsageControl: true,
     },
   },

apps/sim/providers/utils.test.ts

Lines changed: 22 additions & 6 deletions
@@ -118,7 +118,6 @@ describe('Model Capabilities', () => {
       'grok-3-latest',
       'grok-3-fast-latest',
       'deepseek-v3',
-      'deepseek-chat',
     ]

     for (const model of supportedModels) {
@@ -131,6 +130,21 @@ describe('Model Capabilities', () => {
       'unsupported-model',
       'cerebras/llama-3.3-70b', // Cerebras models don't have temperature defined
       'groq/meta-llama/llama-4-scout-17b-16e-instruct', // Groq models don't have temperature defined
+      // Reasoning models that don't support temperature
+      'o1',
+      'o3',
+      'o4-mini',
+      'azure/o3',
+      'azure/o4-mini',
+      'deepseek-r1',
+      // Chat models that don't support temperature
+      'deepseek-chat',
+      // GPT-4.1 family models that don't support temperature
+      'gpt-4.1',
+      'gpt-4.1-nano',
+      'gpt-4.1-mini',
+      'azure/gpt-4.1',
+      'azure/model-router',
     ]

     for (const model of unsupportedModels) {
@@ -148,15 +162,10 @@ describe('Model Capabilities', () => {
   it.concurrent('should return 2 for models with temperature range 0-2', () => {
     const modelsRange02 = [
       'gpt-4o',
-      'o1',
-      'o3',
-      'o4-mini',
       'azure/gpt-4o',
       'gemini-2.5-pro',
       'gemini-2.5-flash',
       'deepseek-v3',
-      'deepseek-chat',
-      'deepseek-r1',
     ]

     for (const model of modelsRange02) {
@@ -183,6 +192,13 @@ describe('Model Capabilities', () => {
     expect(getMaxTemperature('unsupported-model')).toBeUndefined()
     expect(getMaxTemperature('cerebras/llama-3.3-70b')).toBeUndefined()
     expect(getMaxTemperature('groq/meta-llama/llama-4-scout-17b-16e-instruct')).toBeUndefined()
+    // Reasoning models that don't support temperature
+    expect(getMaxTemperature('o1')).toBeUndefined()
+    expect(getMaxTemperature('o3')).toBeUndefined()
+    expect(getMaxTemperature('o4-mini')).toBeUndefined()
+    expect(getMaxTemperature('azure/o3')).toBeUndefined()
+    expect(getMaxTemperature('azure/o4-mini')).toBeUndefined()
+    expect(getMaxTemperature('deepseek-r1')).toBeUndefined()
   })

   it.concurrent('should be case insensitive', () => {
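The updated assertions rely on getMaxTemperature from apps/sim/providers/utils.ts, which this commit does not modify. A plausible reading of its behavior, consistent with these tests and stated here only as an assumption, is that it looks the model up in PROVIDER_DEFINITIONS case-insensitively and returns capabilities.temperature?.max, so deleting the temperature entry in models.ts is enough to make it return undefined:

// Hypothetical sketch, not the real implementation in utils.ts.
// getMaxTemperature and PROVIDER_DEFINITIONS exist in the repo; the lookup details below are assumed.
function getMaxTemperatureSketch(modelId: string): number | undefined {
  const id = modelId.toLowerCase() // the suite also asserts case-insensitive lookups
  for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
    for (const model of provider.models ?? []) {
      if (model.id.toLowerCase() === id) {
        // Once temperature is removed from the definition, this is undefined,
        // which is what the new unsupported-model assertions expect.
        return model.capabilities?.temperature?.max
      }
    }
  }
  return undefined
}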
