@@ -221,53 +221,30 @@ export class PearAIGenericHandler extends BaseProvider implements SingleCompleti
 	}
 
 	override getModel(): { id: string; info: ModelInfo } {
-		const modelId = this.options.openAiModelId ?? "none"
-
-		// PATCH for issue with update
-		const fallbackModelInfo: ModelInfo = {
-			maxTokens: 8192,
-			contextWindow: 200_000,
-			supportsImages: true,
-			supportsComputerUse: true,
-			supportsPromptCache: true,
-			inputPrice: 3.0,
-			outputPrice: 15.0,
-			cacheWritesPrice: 3.75,
-			cacheReadsPrice: 0.3,
-		}
-
+		const modelId = this.options.openAiModelId
 		// Prioritize serverside model info
-		if (this.options.apiModelId && this.options.pearaiAgentModels) {
+		if (modelId && this.options.pearaiAgentModels) {
 			let modelInfo = null
-			if (this.options.apiModelId.startsWith("pearai")) {
-				modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId].underlyingModelUpdated
+			if (modelId.startsWith("pearai")) {
+				modelInfo = this.options.pearaiAgentModels.models[modelId].underlyingModelUpdated
 			} else {
-				modelInfo = this.options.pearaiAgentModels.models[this.options.apiModelId || "pearai-model"]
+				modelInfo = this.options.pearaiAgentModels.models[modelId || "pearai-model"]
 			}
 			if (modelInfo) {
 				const result = {
-					id: this.options.apiModelId,
+					id: modelId,
 					info: modelInfo,
 				}
-				// If model info is missing or has undefined context window, use fallback
-				if (!result.info || !result.info.contextWindow) {
-					result.info = fallbackModelInfo
-				}
 				return result
 			}
 		}
 
 		const result = {
-			id: modelId,
-			info: allModels[modelId],
-		}
-		// If model info is missing or has undefined context window, use fallback
-		if (!result.info || !result.info.contextWindow) {
-			result.info = fallbackModelInfo
+			id: modelId ?? pearAiDefaultModelId,
+			info: allModels[modelId ?? pearAiDefaultModelId],
 		}
 		return result
 	}
-
 	async completePrompt(prompt: string): Promise<string> {
 		try {
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
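For context, the diff replaces the hard-coded `fallbackModelInfo` object with a fallback to `pearAiDefaultModelId` when no `openAiModelId` is configured. The sketch below shows that resolution path in isolation; the default id value and the `allModels` entries are illustrative assumptions, only the `?? pearAiDefaultModelId` pattern comes from the change itself.

```ts
// Minimal sketch of the new fallback path in getModel().
// The concrete values here are placeholders, not the repository's real data.
interface ModelInfo {
	maxTokens: number
	contextWindow: number
}

const pearAiDefaultModelId = "pearai-model" // assumed default id for illustration
const allModels: Record<string, ModelInfo> = {
	"pearai-model": { maxTokens: 8192, contextWindow: 200_000 },
}

function resolveModel(openAiModelId?: string): { id: string; info: ModelInfo } {
	// With fallbackModelInfo removed, an unset id now resolves to the
	// default model id instead of the previous "none" placeholder.
	const id = openAiModelId ?? pearAiDefaultModelId
	return { id, info: allModels[id] }
}

console.log(resolveModel()) // -> { id: "pearai-model", info: { maxTokens: 8192, ... } }
```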