92 changes: 92 additions & 0 deletions src/api/transform/__tests__/openai-format.spec.ts
@@ -327,6 +327,98 @@ describe("convertToOpenAiMessages", () => {
expect(toolMessage.content).toBe("(empty)")
})

describe("empty text block filtering", () => {
it("should filter out empty text blocks from user messages (Gemini compatibility)", () => {
// This test ensures that user messages with empty text blocks are filtered out
// to prevent "must include at least one parts field" error from Gemini (via OpenRouter).
// Empty text blocks can occur in edge cases during message construction.
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
{
role: "user",
content: [
{
type: "text",
text: "", // Empty text block should be filtered out
},
{
type: "text",
text: "Hello, how are you?",
},
],
},
]

const openAiMessages = convertToOpenAiMessages(anthropicMessages)
expect(openAiMessages).toHaveLength(1)
expect(openAiMessages[0].role).toBe("user")

const content = openAiMessages[0].content as Array<{ type: string; text?: string }>
// Should only have the non-empty text block
expect(content).toHaveLength(1)
expect(content[0]).toEqual({ type: "text", text: "Hello, how are you?" })
})

it("should not create user message when all text blocks are empty (Gemini compatibility)", () => {
// If all text blocks are empty, no user message should be created
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
{
role: "user",
content: [
{
type: "text",
text: "", // Empty
},
{
type: "text",
text: "", // Also empty
},
],
},
]

const openAiMessages = convertToOpenAiMessages(anthropicMessages)
// No messages should be created since all content is empty
expect(openAiMessages).toHaveLength(0)
})

it("should preserve image blocks when filtering empty text blocks", () => {
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
{
role: "user",
content: [
{
type: "text",
text: "", // Empty text block should be filtered out
},
{
type: "image",
source: {
type: "base64",
media_type: "image/png",
data: "base64data",
},
},
],
},
]

const openAiMessages = convertToOpenAiMessages(anthropicMessages)
expect(openAiMessages).toHaveLength(1)
expect(openAiMessages[0].role).toBe("user")

const content = openAiMessages[0].content as Array<{
type: string
image_url?: { url: string }
}>
// Should only have the image block
expect(content).toHaveLength(1)
expect(content[0]).toEqual({
type: "image_url",
image_url: { url: "data:image/png;base64,base64data" },
})
})
})

describe("mergeToolResultText option", () => {
it("should merge text content into last tool message when mergeToolResultText is true", () => {
const anthropicMessages: Anthropic.Messages.MessageParam[] = [
14 changes: 10 additions & 4 deletions src/api/transform/openai-format.ts
@@ -138,11 +138,17 @@ export function convertToOpenAiMessages(
// }

// Process non-tool messages
if (nonToolMessages.length > 0) {
// Filter out empty text blocks to prevent "must include at least one parts field" error
// from Gemini (via OpenRouter). Images always have content (base64 data).
const filteredNonToolMessages = nonToolMessages.filter(
(part) => part.type === "image" || (part.type === "text" && part.text),
)

if (filteredNonToolMessages.length > 0) {
// Check if we should merge text into the last tool message
// This is critical for reasoning/thinking models where a user message
// after tool results causes the model to drop all previous reasoning_content
const hasOnlyTextContent = nonToolMessages.every((part) => part.type === "text")
const hasOnlyTextContent = filteredNonToolMessages.every((part) => part.type === "text")
const hasToolMessages = toolMessages.length > 0
const shouldMergeIntoToolMessage =
options?.mergeToolResultText && hasToolMessages && hasOnlyTextContent
@@ -153,7 +159,7 @@
openAiMessages.length - 1
] as OpenAI.Chat.ChatCompletionToolMessageParam
if (lastToolMessage?.role === "tool") {
const additionalText = nonToolMessages
const additionalText = filteredNonToolMessages
.map((part) => (part as Anthropic.TextBlockParam).text)
.join("\n")
lastToolMessage.content = `${lastToolMessage.content}\n\n${additionalText}`
Expand All @@ -162,7 +168,7 @@ export function convertToOpenAiMessages(
// Standard behavior: add user message with text/image content
openAiMessages.push({
role: "user",
content: nonToolMessages.map((part) => {
content: filteredNonToolMessages.map((part) => {
if (part.type === "image") {
return {
type: "image_url",
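For orientation, here is a minimal standalone sketch of the filtering rule introduced above, assuming a simplified content-block shape; the names below (ContentBlock, filterEmptyTextBlocks) are illustrative and not part of the PR:

// Simplified stand-in for Anthropic's content block union; illustration only.
type ContentBlock =
	| { type: "text"; text: string }
	| { type: "image"; source: { type: "base64"; media_type: string; data: string } }

// Keep image blocks unconditionally (they always carry base64 data) and keep
// text blocks only when their text is non-empty, mirroring the predicate
// added to convertToOpenAiMessages in this diff.
function filterEmptyTextBlocks(blocks: ContentBlock[]): ContentBlock[] {
	return blocks.filter((part) => part.type === "image" || (part.type === "text" && part.text.length > 0))
}

// Example: the empty text block is dropped and the image block survives,
// so the resulting OpenAI user message still has at least one content part.
const kept = filterEmptyTextBlocks([
	{ type: "text", text: "" },
	{ type: "image", source: { type: "base64", media_type: "image/png", data: "base64data" } },
])
console.log(kept.length) // 1

If every block is filtered out, the converter emits no user message at all, which is what the second test above asserts.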