Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
162 changes: 148 additions & 14 deletions src/api/providers/deepseek.ts
Original file line number Diff line number Diff line change
@@ -1,24 +1,158 @@
import { OpenAiHandler, OpenAiHandlerOptions } from "./openai"
import { ModelInfo } from "../../shared/api"
import { deepSeekModels, deepSeekDefaultModelId } from "../../shared/api"

export class DeepSeekHandler extends OpenAiHandler {
constructor(options: OpenAiHandlerOptions) {
super({
...options,
openAiApiKey: options.deepSeekApiKey ?? "not-provided",
openAiModelId: options.apiModelId ?? deepSeekDefaultModelId,
openAiBaseUrl: options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1",
openAiStreamingEnabled: true,
includeMaxTokens: true,
import { Anthropic } from "@anthropic-ai/sdk"
import { ApiHandlerOptions, ModelInfo, deepSeekModels, deepSeekDefaultModelId } from "../../shared/api"
import { ApiHandler, SingleCompletionHandler } from "../index"
import { convertToR1Format } from "../transform/r1-format"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStream } from "../transform/stream"

/**
 * Token accounting payload DeepSeek attaches to streaming responses.
 * Extends the standard OpenAI-style usage object with DeepSeek's
 * prompt-cache counters.
 */
interface DeepSeekUsage {
	// Total prompt tokens for the request (includes cache hits).
	prompt_tokens: number
	// Tokens generated in the completion.
	completion_tokens: number
	// Prompt tokens that missed DeepSeek's prompt cache — presumably billed
	// at the full rate; TODO confirm against DeepSeek pricing docs.
	prompt_cache_miss_tokens?: number
	// Prompt tokens served from DeepSeek's prompt cache.
	prompt_cache_hit_tokens?: number
}

export class DeepSeekHandler implements ApiHandler, SingleCompletionHandler {
private options: ApiHandlerOptions

constructor(options: ApiHandlerOptions) {
if (!options.deepSeekApiKey) {
throw new Error("DeepSeek API key is required. Please provide it in the settings.")
}
this.options = options
}

private get baseUrl(): string {
return this.options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1"
}

async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
const modelInfo = this.getModel().info
const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
const isReasoner = modelId.includes("deepseek-reasoner")

const systemMessage = { role: "system", content: systemPrompt }
const formattedMessages = isReasoner
? convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
: [systemMessage, ...convertToOpenAiMessages(messages)]

const response = await fetch(`${this.baseUrl}/chat/completions`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${this.options.deepSeekApiKey}`,
},
body: JSON.stringify({
model: modelId,
messages: formattedMessages,
temperature: 0,
stream: true,
max_tokens: modelInfo.maxTokens,
}),
})

if (!response.ok) {
throw new Error(`DeepSeek API error: ${response.statusText}`)
}

if (!response.body) {
throw new Error("No response body received from DeepSeek API")
}

const reader = response.body.getReader()
const decoder = new TextDecoder()
let buffer = ""

try {
while (true) {
const { done, value } = await reader.read()
if (done) break

buffer += decoder.decode(value, { stream: true })
const lines = buffer.split("\n")
buffer = lines.pop() || ""

for (const line of lines) {
if (line.trim() === "") continue
if (!line.startsWith("data: ")) continue

const data = line.slice(6)
if (data === "[DONE]") continue

try {
const chunk = JSON.parse(data)
const delta = chunk.choices[0]?.delta ?? {}

if (delta.content) {
yield {
type: "text",
text: delta.content,
}
}

if ("reasoning_content" in delta && delta.reasoning_content) {
yield {
type: "reasoning",
text: delta.reasoning_content,
}
}

if (chunk.usage) {
const usage = chunk.usage as DeepSeekUsage
let inputTokens = (usage.prompt_tokens || 0) - (usage.prompt_cache_hit_tokens || 0)
yield {
type: "usage",
inputTokens: inputTokens,
outputTokens: usage.completion_tokens || 0,
cacheReadTokens: usage.prompt_cache_hit_tokens || 0,
cacheWriteTokens: usage.prompt_cache_miss_tokens || 0,
}
}
} catch (error) {
console.error("Error parsing DeepSeek response:", error)
}
}
}
} finally {
reader.releaseLock()
}
}

override getModel(): { id: string; info: ModelInfo } {
getModel(): { id: string; info: ModelInfo } {
const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
return {
id: modelId,
info: deepSeekModels[modelId as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId],
}
}

async completePrompt(prompt: string): Promise<string> {
try {
const response = await fetch(`${this.baseUrl}/chat/completions`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${this.options.deepSeekApiKey}`,
},
body: JSON.stringify({
model: this.getModel().id,
messages: [{ role: "user", content: prompt }],
temperature: 0,
stream: false,
}),
})

if (!response.ok) {
throw new Error(`DeepSeek API error: ${response.statusText}`)
}

const data = await response.json()
return data.choices[0]?.message?.content || ""
} catch (error) {
if (error instanceof Error) {
throw new Error(`DeepSeek completion error: ${error.message}`)
}
throw error
}
}
}
99 changes: 86 additions & 13 deletions src/api/providers/pearai.ts
Original file line number Diff line number Diff line change
@@ -1,36 +1,100 @@
import { OpenAiHandler } from "./openai"
import * as vscode from "vscode"
import { AnthropicModelId, ApiHandlerOptions, ModelInfo, PEARAI_URL } from "../../shared/api"
import { ApiHandlerOptions, PEARAI_URL, ModelInfo } from "../../shared/api"
import { AnthropicHandler } from "./anthropic"
import { DeepSeekHandler } from "./deepseek"

/**
 * Shape of the response from PearAI's model-listing endpoint
 * (`/getPearAIAgentModels`).
 */
interface PearAiModelsResponse {
	// Map of PearAI model id -> model metadata.
	models: {
		[key: string]: {
			// Concrete provider model id PearAI routes this alias to
			// (e.g. a Claude or DeepSeek model id).
			underlyingModel?: string
			// NOTE(review): remaining fields are unspecified here — the open
			// index signature accepts anything; confirm the server contract.
			[key: string]: any
		}
	}
	// Model id the server recommends when the client has no preference.
	defaultModelId: string
}

export class PearAiHandler {
private handler!: AnthropicHandler | DeepSeekHandler

export class PearAiHandler extends AnthropicHandler {
constructor(options: ApiHandlerOptions) {
if (!options.pearaiApiKey) {
vscode.window.showErrorMessage("PearAI API key not found.", "Login to PearAI").then(async (selection) => {
if (selection === "Login to PearAI") {
const extensionUrl = `${vscode.env.uriScheme}://pearai.pearai/auth`
const callbackUri = await vscode.env.asExternalUri(vscode.Uri.parse(extensionUrl))

vscode.env.openExternal(
await vscode.env.asExternalUri(
vscode.Uri.parse(
`https://trypear.ai/signin?callback=${callbackUri.toString()}`, // Change to localhost if running locally
),
vscode.Uri.parse(`https://trypear.ai/signin?callback=${callbackUri.toString()}`),
),
)
}
})
throw new Error("PearAI API key not found. Please login to PearAI.")
}
super({
...options,
apiKey: options.pearaiApiKey,
anthropicBaseUrl: PEARAI_URL,

this.initializeHandler(options).catch((error) => {
console.error("Failed to initialize PearAI handler:", error)
throw error
})
}

override getModel(): { id: AnthropicModelId; info: ModelInfo } {
const baseModel = super.getModel()
private async initializeHandler(options: ApiHandlerOptions): Promise<void> {
const modelId = options.apiModelId || ""

if (modelId === "pearai-model") {
try {
const response = await fetch(`${PEARAI_URL}/getPearAIAgentModels`)
if (!response.ok) {
throw new Error(`Failed to fetch models: ${response.statusText}`)
}
const data = (await response.json()) as PearAiModelsResponse
const underlyingModel = data.models[modelId]?.underlyingModel || "claude-3-5-sonnet-20241022"

if (underlyingModel.startsWith("deepseek")) {
this.handler = new DeepSeekHandler({
...options,
deepSeekApiKey: options.pearaiApiKey,
deepSeekBaseUrl: PEARAI_URL,
apiModelId: underlyingModel,
})
} else {
// Default to Claude
this.handler = new AnthropicHandler({
...options,
apiKey: options.pearaiApiKey,
anthropicBaseUrl: PEARAI_URL,
apiModelId: underlyingModel,
})
}
} catch (error) {
console.error("Error fetching PearAI models:", error)
// Default to Claude if there's an error
this.handler = new AnthropicHandler({
...options,
apiKey: options.pearaiApiKey,
anthropicBaseUrl: PEARAI_URL,
apiModelId: "claude-3-5-sonnet-20241022",
})
}
} else if (modelId.startsWith("claude")) {
this.handler = new AnthropicHandler({
...options,
apiKey: options.pearaiApiKey,
anthropicBaseUrl: PEARAI_URL,
})
} else if (modelId.startsWith("deepseek")) {
this.handler = new DeepSeekHandler({
...options,
deepSeekApiKey: options.pearaiApiKey,
deepSeekBaseUrl: PEARAI_URL,
})
} else {
throw new Error(`Unsupported model: ${modelId}`)
}
}

getModel(): { id: string; info: ModelInfo } {
const baseModel = this.handler.getModel()
return {
id: baseModel.id,
info: {
Expand All @@ -42,4 +106,13 @@ export class PearAiHandler extends AnthropicHandler {
},
}
}

async *createMessage(systemPrompt: string, messages: any[]): AsyncGenerator<any> {
const generator = this.handler.createMessage(systemPrompt, messages)
yield* generator
}

	/** Non-streaming single-prompt completion, delegated to the underlying handler. */
	async completePrompt(prompt: string): Promise<string> {
		return this.handler.completePrompt(prompt)
	}
}
14 changes: 1 addition & 13 deletions src/core/webview/ClineProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
`style-src ${webview.cspSource} 'unsafe-inline' https://* http://${localServerUrl} http://0.0.0.0:${localPort}`,
`img-src ${webview.cspSource} data:`,
`script-src 'unsafe-eval' https://* http://${localServerUrl} http://0.0.0.0:${localPort} 'nonce-${nonce}'`,
`connect-src https://* ws://${localServerUrl} ws://0.0.0.0:${localPort} http://${localServerUrl} http://0.0.0.0:${localPort}`,
`connect-src https://* ws://${localServerUrl} ws://0.0.0.0:${localPort} http://${localServerUrl} http://0.0.0.0:${localPort} http://localhost:8000`,
]

return /*html*/ `
Expand Down Expand Up @@ -1691,9 +1691,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
requestyModelInfo,
modelTemperature,
modelMaxTokens,
pearaiBaseUrl,
pearaiModelId,
pearaiModelInfo,
} = apiConfiguration
await Promise.all([
this.updateGlobalState("apiProvider", apiProvider),
Expand Down Expand Up @@ -1743,9 +1740,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
this.updateGlobalState("requestyModelInfo", requestyModelInfo),
this.updateGlobalState("modelTemperature", modelTemperature),
this.updateGlobalState("modelMaxTokens", modelMaxTokens),
await this.updateGlobalState("pearaiBaseUrl", PEARAI_URL),
await this.updateGlobalState("pearaiModelId", pearaiModelId),
await this.updateGlobalState("pearaiModelInfo", pearaiModelInfo),
])
if (this.cline) {
this.cline.api = buildApiHandler(apiConfiguration)
Expand Down Expand Up @@ -2190,8 +2184,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
pearaiApiKey,
pearaiRefreshKey,
pearaiBaseUrl,
pearaiModelId,
pearaiModelInfo,
mistralCodestralUrl,
azureApiVersion,
openAiStreamingEnabled,
Expand Down Expand Up @@ -2279,8 +2271,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
this.getSecret("pearai-token") as Promise<string | undefined>,
this.getSecret("pearai-refresh") as Promise<string | undefined>,
this.getGlobalState("pearaiBaseUrl") as Promise<string | undefined>,
this.getGlobalState("pearaiModelId") as Promise<string | undefined>,
this.getGlobalState("pearaiModelInfo") as Promise<ModelInfo | undefined>,
this.getGlobalState("mistralCodestralUrl") as Promise<string | undefined>,
this.getGlobalState("azureApiVersion") as Promise<string | undefined>,
this.getGlobalState("openAiStreamingEnabled") as Promise<boolean | undefined>,
Expand Down Expand Up @@ -2384,8 +2374,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
mistralApiKey,
pearaiApiKey,
pearaiBaseUrl,
pearaiModelId,
pearaiModelInfo,
mistralCodestralUrl,
azureApiVersion,
openAiStreamingEnabled,
Expand Down
Loading
Loading