4 changes: 2 additions & 2 deletions src/core/webview/ClineProvider.ts
@@ -423,7 +423,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
`style-src ${webview.cspSource} 'unsafe-inline' https://* http://${localServerUrl} http://0.0.0.0:${localPort}`,
`img-src ${webview.cspSource} data:`,
`script-src 'unsafe-eval' https://* http://${localServerUrl} http://0.0.0.0:${localPort} 'nonce-${nonce}'`,
- `connect-src https://* ws://${localServerUrl} ws://0.0.0.0:${localPort} http://${localServerUrl} http://0.0.0.0:${localPort}`,
+ `connect-src https://* ws://${localServerUrl} ws://0.0.0.0:${localPort} http://${localServerUrl} http://0.0.0.0:${localPort} http://localhost:8000 http://0.0.0.0:8000 https://stingray-app-gb2an.ondigitalocean.app`,
]

return /*html*/ `
@@ -507,7 +507,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no">
<meta name="theme-color" content="#000000">
- <meta http-equiv="Content-Security-Policy" content="default-src 'none'; font-src ${webview.cspSource}; style-src ${webview.cspSource} 'unsafe-inline'; img-src ${webview.cspSource} data:; script-src 'nonce-${nonce}';">
+ <meta http-equiv="Content-Security-Policy" content="default-src 'none'; font-src ${webview.cspSource}; style-src ${webview.cspSource} 'unsafe-inline'; img-src ${webview.cspSource} data:; script-src 'nonce-${nonce}'; connect-src ${webview.cspSource} https://stingray-app-gb2an.ondigitalocean.app;">
<link rel="stylesheet" type="text/css" href="${stylesUri}">
<link href="${codiconsUri}" rel="stylesheet" />
<title>Roo Code</title>
57 changes: 13 additions & 44 deletions src/shared/api.ts
@@ -799,6 +799,19 @@ export const PEARAI_URL = "https://stingray-app-gb2an.ondigitalocean.app/pearai-
// PearAI
export type PearAiModelId = keyof typeof pearAiModels
export const pearAiDefaultModelId: PearAiModelId = "pearai-model"
+ export const pearAiDefaultModelInfo: ModelInfo = {
+ maxTokens: 8192,
+ contextWindow: 64000,
+ supportsImages: false,
+ supportsPromptCache: true,
+ inputPrice: 0.014,
+ outputPrice: 0.28,
+ cacheWritesPrice: 0.27,
+ cacheReadsPrice: 0.07,
+ description:
+ "DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.",
+ }

export const pearAiModels = {
"pearai-model": {
maxTokens: 8192,
@@ -812,48 +825,4 @@ export const pearAiModels = {
description:
"DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.",
},
"claude-3-5-sonnet-20241022": {
maxTokens: 8192,
contextWindow: 200000,
supportsImages: true,
supportsComputerUse: true,
supportsPromptCache: true,
inputPrice: 3.0,
outputPrice: 15.0,
cacheWritesPrice: 3.75,
cacheReadsPrice: 0.3,
},
"claude-3-5-haiku-20241022": {
maxTokens: 8192,
contextWindow: 200000,
supportsImages: false,
supportsPromptCache: true,
inputPrice: 1.0,
outputPrice: 5.0,
cacheWritesPrice: 1.25,
cacheReadsPrice: 0.1,
},
"deepseek-chat": {
maxTokens: 8192,
contextWindow: 64000,
supportsImages: false,
supportsPromptCache: true,
inputPrice: 0.014,
outputPrice: 0.28,
cacheWritesPrice: 0.27,
cacheReadsPrice: 0.07,
description:
"DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.",
},
"deepseek-reasoner": {
maxTokens: 8192,
contextWindow: 64000,
supportsImages: false,
supportsPromptCache: true,
inputPrice: 0.55,
outputPrice: 2.19,
cacheWritesPrice: 0.55,
cacheReadsPrice: 0.14,
description: "DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks.",
},
} as const satisfies Record<string, ModelInfo>
78 changes: 61 additions & 17 deletions webview-ui/src/components/settings/ApiOptions.tsx
@@ -1,4 +1,4 @@
- import { Fragment, memo, useCallback, useEffect, useMemo, useState } from "react"
+ import { Fragment, memo, useCallback, useEffect, useMemo, useState, useRef } from "react"
import { useEvent, useDebounce, useInterval } from "react-use"
import { Checkbox, Dropdown, Pane, type DropdownOption } from "vscrui"
import {
@@ -40,6 +40,8 @@ import {
requestyDefaultModelInfo,
pearAiModels,
pearAiDefaultModelId,
+ pearAiDefaultModelInfo,
+ PEARAI_URL,
} from "../../../../src/shared/api"
import { ExtensionMessage } from "../../../../src/shared/ExtensionMessage"

@@ -52,17 +54,6 @@ import { validateApiConfiguration, validateModelId } from "@/utils/validate"
import { ApiErrorMessage } from "./ApiErrorMessage"
import { ThinkingBudget } from "./ThinkingBudget"

- const modelsByProvider: Record<string, Record<string, ModelInfo>> = {
- anthropic: anthropicModels,
- bedrock: bedrockModels,
- vertex: vertexModels,
- gemini: geminiModels,
- "openai-native": openAiNativeModels,
- deepseek: deepSeekModels,
- mistral: mistralModels,
- pearai: pearAiModels,
- }

interface ApiOptionsProps {
uriScheme: string | undefined
apiConfiguration: ApiConfiguration
@@ -101,6 +92,9 @@ const ApiOptions = ({
})

const [openAiModels, setOpenAiModels] = useState<Record<string, ModelInfo> | null>(null)
+ const [pearAiModels, setPearAiModels] = useState<Record<string, ModelInfo>>({
+ [pearAiDefaultModelId]: pearAiDefaultModelInfo,
+ })

const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl)
const [azureApiVersionSelected, setAzureApiVersionSelected] = useState(!!apiConfiguration?.azureApiVersion)
@@ -123,10 +117,16 @@ const ApiOptions = ({
[setApiConfigurationField],
)

- const { selectedProvider, selectedModelId, selectedModelInfo } = useMemo(
- () => normalizeApiConfiguration(apiConfiguration),
- [apiConfiguration],
- )
+ const { selectedProvider, selectedModelId, selectedModelInfo } = useMemo(() => {
+ const result = normalizeApiConfiguration(apiConfiguration)
+ if (result.selectedProvider === "pearai") {
+ return {
+ ...result,
+ selectedModelInfo: pearAiModels[result.selectedModelId] || pearAiModels[pearAiDefaultModelId],
+ }
+ }
+ return result
+ }, [apiConfiguration, pearAiModels])

// Debounced refresh model updates, only executed 250ms after the user
// stops typing.
@@ -167,6 +167,28 @@ const ApiOptions = ({
],
)

+ // Fetch PearAI models when provider is selected
+ useEffect(() => {
+ if (selectedProvider === "pearai") {
+ const fetchPearAiModels = async () => {
+ try {
+ const res = await fetch(`${PEARAI_URL}/getPearAIAgentModels`)
+ if (!res.ok) throw new Error("Failed to fetch models")
+ const config = await res.json()
+
+ if (config.models && Object.keys(config.models).length > 0) {
+ console.log("Models successfully loaded from server")
+ setPearAiModels(config.models)
+ }
+ } catch (error) {
+ console.error("Error fetching PearAI models:", error)
+ }
+ }
+
+ fetchPearAiModels()
+ }
+ }, [selectedProvider, setPearAiModels])

useEffect(() => {
const apiValidationResult =
validateApiConfiguration(apiConfiguration) ||
@@ -227,6 +249,28 @@

useEvent("message", onMessage)

+ const modelsByProvider = useMemo(
+ () => ({
+ anthropic: anthropicModels,
+ bedrock: bedrockModels,
+ vertex: vertexModels,
+ gemini: geminiModels,
+ "openai-native": openAiNativeModels,
+ deepseek: deepSeekModels,
+ mistral: mistralModels,
+ pearai: pearAiModels,
+ glama: glamaModels,
+ openrouter: openRouterModels,
+ unbound: unboundModels,
+ requesty: requestyModels,
+ openai: openAiModels || {},
+ ollama: {},
+ lmstudio: {},
+ "vscode-lm": {},
+ }),
+ [pearAiModels, glamaModels, openRouterModels, unboundModels, requestyModels, openAiModels],
+ )

const selectedProviderModelOptions: DropdownOption[] = useMemo(
() =>
modelsByProvider[selectedProvider]
@@ -238,7 +282,7 @@
})),
]
: [],
- [selectedProvider],
+ [selectedProvider, modelsByProvider],
)

return (
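For reference, the model-fetching flow this PR wires into ApiOptions.tsx can be read as the standalone sketch below (TypeScript). It is illustrative only: the `{ models: Record<string, ModelInfo> }` response shape is inferred from the diff's `config.models` check, and the import path and exported `ModelInfo` type are assumed to match src/shared/api.ts.

// Sketch only: mirrors the fetch added in ApiOptions.tsx, pulled out of React for clarity.
// Assumes src/shared/api.ts exports ModelInfo and that the server responds with
// { models: Record<string, ModelInfo> } (inferred from the diff's `config.models` check).
import { PEARAI_URL, pearAiDefaultModelId, pearAiDefaultModelInfo, type ModelInfo } from "../../../../src/shared/api"

export async function loadPearAiModels(): Promise<Record<string, ModelInfo>> {
	// Bundled default, so callers always have at least one model to show.
	const fallback: Record<string, ModelInfo> = { [pearAiDefaultModelId]: pearAiDefaultModelInfo }

	try {
		const res = await fetch(`${PEARAI_URL}/getPearAIAgentModels`)
		if (!res.ok) throw new Error(`Failed to fetch models (HTTP ${res.status})`)

		const config = await res.json()

		// Only replace the fallback when the server actually returned a non-empty model map.
		if (config.models && Object.keys(config.models).length > 0) {
			return config.models as Record<string, ModelInfo>
		}
	} catch (error) {
		console.error("Error fetching PearAI models:", error)
	}

	return fallback
}

In the PR itself this logic lives in a useEffect keyed on `selectedProvider === "pearai"`, with the result stored in component state and merged into the `modelsByProvider` memo.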