Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions core/llm/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ export abstract class BaseLLM implements ILLM {
// Provider capabilities (overridable by subclasses)
protected supportsReasoningField: boolean = false;
protected supportsReasoningDetailsField: boolean = false;
protected supportsReasoningContentField: boolean = false;

get providerName(): string {
return (this.constructor as typeof BaseLLM).providerName;
Expand Down Expand Up @@ -1182,6 +1183,7 @@ export abstract class BaseLLM implements ILLM {
let body = toChatBody(messages, completionOptions, {
includeReasoningField: this.supportsReasoningField,
includeReasoningDetailsField: this.supportsReasoningDetailsField,
includeReasoningContentField: this.supportsReasoningContentField,
});
body = this.modifyChatBody(body);

Expand Down
3 changes: 2 additions & 1 deletion core/llm/llms/Deepseek.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@ import OpenAI from "./OpenAI.js";

class Deepseek extends OpenAI {
static providerName = "deepseek";
protected supportsReasoningField = true;
protected supportsReasoningField = false;
protected supportsReasoningDetailsField = false;
protected supportsReasoningContentField = true;
static defaultOptions: Partial<LLMOptions> = {
apiBase: "https://api.deepseek.com/",
model: "deepseek-coder",
Expand Down
14 changes: 7 additions & 7 deletions core/llm/llms/OpenAI.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@ import {
} from "openai/resources/index";

import { streamSse } from "@continuedev/fetch";
import {
ResponseCreateParamsBase,
ResponseInputItem,
ResponseInputMessageContentList,
Tool as ResponsesTool,
} from "openai/resources/responses/responses.mjs";
import {
ChatMessage,
CompletionOptions,
Expand All @@ -19,13 +25,6 @@ import {
toChatBody,
toResponsesInput,
} from "../openaiTypeConverters.js";
import {
ResponseInput,
ResponseInputItem,
ResponseInputMessageContentList,
ResponseCreateParamsBase,
Tool as ResponsesTool,
} from "openai/resources/responses/responses.mjs";

const NON_CHAT_MODELS = [
"text-davinci-002",
Expand Down Expand Up @@ -279,6 +278,7 @@ class OpenAI extends BaseLLM {
const finalOptions = toChatBody(messages, options, {
includeReasoningField: this.supportsReasoningField,
includeReasoningDetailsField: this.supportsReasoningDetailsField,
includeReasoningContentField: this.supportsReasoningContentField,
});

finalOptions.stop = options.stop?.slice(0, this.getMaxStopWords());
Expand Down
12 changes: 11 additions & 1 deletion core/llm/openaiTypeConverters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,20 +40,24 @@ import {
function appendReasoningFieldsIfSupported(
msg: ChatCompletionAssistantMessageParam & {
reasoning?: string;
reasoning_content?: string;
reasoning_details?: any[];
},
options: CompletionOptions,
prevMessage?: ChatMessage,
providerFlags?: {
includeReasoningField?: boolean;
includeReasoningDetailsField?: boolean;
includeReasoningContentField?: boolean;
},
) {
if (!prevMessage || prevMessage.role !== "thinking") return;

const includeReasoning = !!providerFlags?.includeReasoningField;
const includeReasoningDetails = !!providerFlags?.includeReasoningDetailsField;
if (!includeReasoning && !includeReasoningDetails) return;
const includeReasoningContent = !!providerFlags?.includeReasoningContentField;
if (!includeReasoning && !includeReasoningDetails && !includeReasoningContent)
return;

const reasoningDetailsValue =
prevMessage.reasoning_details ||
Expand Down Expand Up @@ -84,6 +88,9 @@ function appendReasoningFieldsIfSupported(
if (includeReasoning) {
msg.reasoning = prevMessage.content as string;
}
if (includeReasoningContent) {
msg.reasoning_content = prevMessage.content as string;
}
}

export function toChatMessage(
Expand All @@ -93,6 +100,7 @@ export function toChatMessage(
providerFlags?: {
includeReasoningField?: boolean;
includeReasoningDetailsField?: boolean;
includeReasoningContentField?: boolean;
},
): ChatCompletionMessageParam | null {
if (message.role === "tool") {
Expand All @@ -117,6 +125,7 @@ export function toChatMessage(
// Base assistant message
const msg: ChatCompletionAssistantMessageParam & {
reasoning?: string;
reasoning_content?: string;
reasoning_details?: {
[key: string]: any;
signature?: string | undefined;
Expand Down Expand Up @@ -191,6 +200,7 @@ export function toChatBody(
providerFlags?: {
includeReasoningField?: boolean;
includeReasoningDetailsField?: boolean;
includeReasoningContentField?: boolean;
},
): ChatCompletionCreateParams {
const params: ChatCompletionCreateParams = {
Expand Down
Loading