Skip to content

Commit f405233

Browse files
committed
refactor(web): improve type safety by replacing any types
Replace unsafe 'any' types with proper TypeScript interfaces:
- Add web/src/llm-api/types.ts with ChatCompletionRequestBody, ChatMessage, CodebuffMetadata, and OpenRouter response types
- Fix app/api/v1/_helpers.ts: define UserInfo interface
- Fix app/api/v1/chat/completions/_post.ts: use typed body
- Fix llm-api/openrouter.ts: proper parameter and response types
- Fix llm-api/openai.ts: remove unnecessary type assertions
- Fix llm-api/helpers.ts: use proper types for body access
- Fix util/logger.ts: use LogData/LogArgs types instead of any
- Fix server/agents-transform.ts: define AgentData interface
- Fix error handling in stripe webhook and sessions routes
1 parent 5b7c494 commit f405233

File tree

10 files changed

+273
-71
lines changed

10 files changed

+273
-71
lines changed

web/src/app/api/sessions/route.ts

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ async function revokeStandardSessions(
7373
eq(schema.session.userId, userId),
7474
inArray(schema.session.sessionToken, tokensToDelete),
7575
// Explicitly restrict to web/cli to avoid PATs here
76-
inArray(schema.session.type, ['web', 'cli'] as any),
76+
inArray(schema.session.type, ['web', 'cli'] as const),
7777
),
7878
)
7979
.returning({ sessionToken: schema.session.sessionToken })
@@ -109,12 +109,13 @@ export async function DELETE(req: NextRequest) {
109109
return new NextResponse('Unauthorized', { status: 401 })
110110
}
111111

112-
const {
113-
sessionIds,
114-
tokenIds,
115-
}: { sessionIds?: string[]; tokenIds?: string[] } = await req
116-
.json()
117-
.catch(() => ({}) as any)
112+
let body: { sessionIds?: string[]; tokenIds?: string[] } = {}
113+
try {
114+
body = await req.json()
115+
} catch {
116+
body = {}
117+
}
118+
const { sessionIds, tokenIds } = body
118119

119120
const userId = session.user.id
120121

@@ -137,11 +138,13 @@ export async function DELETE(req: NextRequest) {
137138
}
138139

139140
return NextResponse.json({ revokedSessions, revokedTokens })
140-
} catch (e: any) {
141+
} catch (e: unknown) {
142+
const errorMessage = e instanceof Error ? e.message : String(e)
143+
const stack = e instanceof Error ? e.stack : undefined
141144
logger.error(
142-
{ error: e?.message ?? String(e), stack: e?.stack },
145+
{ error: errorMessage, stack },
143146
'Error in DELETE /api/sessions',
144147
)
145-
return new NextResponse(e?.message ?? 'Internal error', { status: 500 })
148+
return new NextResponse(errorMessage, { status: 500 })
146149
}
147150
}

web/src/app/api/stripe/webhook/route.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -336,13 +336,13 @@ const webhookHandler = async (req: NextRequest): Promise<NextResponse> => {
336336
env.STRIPE_WEBHOOK_SECRET_KEY,
337337
)
338338
} catch (err) {
339-
const error = err as Error
339+
const errorMessage = err instanceof Error ? err.message : String(err)
340340
logger.error(
341-
{ error: error.message },
341+
{ error: errorMessage },
342342
'Webhook signature verification failed',
343343
)
344344
return NextResponse.json(
345-
{ error: { message: `Webhook Error: ${error.message}` } },
345+
{ error: { message: `Webhook Error: ${errorMessage}` } },
346346
{ status: 400 },
347347
)
348348
}
@@ -550,13 +550,13 @@ const webhookHandler = async (req: NextRequest): Promise<NextResponse> => {
550550
}
551551
return NextResponse.json({ received: true })
552552
} catch (err) {
553-
const error = err as Error
553+
const errorMessage = err instanceof Error ? err.message : String(err)
554554
logger.error(
555-
{ error: error.message, eventType: event.type },
555+
{ error: errorMessage, eventType: event.type },
556556
'Error processing webhook',
557557
)
558558
return NextResponse.json(
559-
{ error: { message: `Webhook handler error: ${error.message}` } },
559+
{ error: { message: `Webhook handler error: ${errorMessage}` } },
560560
{ status: 500 },
561561
)
562562
}

web/src/app/api/v1/_helpers.ts

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,18 @@ import type { ZodType } from 'zod'
1717

1818
import { extractApiKeyFromHeader } from '@/util/auth'
1919

20+
/**
21+
* User information returned from API key validation
22+
*/
23+
export interface UserInfo {
24+
id: string
25+
email: string
26+
discord_id: string | null
27+
referral_code?: string | null
28+
stripe_customer_id?: string | null
29+
banned?: boolean
30+
}
31+
2032
export type HandlerResult<T> =
2133
| { ok: true; data: T }
2234
| { ok: false; response: NextResponse }
@@ -77,7 +89,7 @@ export const requireUserFromApiKey = async (params: {
7789
trackEvent: TrackEventFn
7890
authErrorEvent: AnalyticsEvent
7991
}): Promise<
80-
HandlerResult<{ userId: string; userInfo: any; logger: Logger }>
92+
HandlerResult<{ userId: string; userInfo: UserInfo; logger: Logger }>
8193
> => {
8294
const {
8395
req,

web/src/app/api/v1/chat/completions/_post.ts

Lines changed: 18 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ import type {
1919
} from '@codebuff/common/types/contracts/logger'
2020
import type { NextRequest } from 'next/server'
2121

22+
import type { ChatCompletionRequestBody } from '@/llm-api/types'
23+
2224
import {
2325
handleOpenAINonStream,
2426
OPENAI_SUPPORTED_MODELS,
@@ -109,8 +111,9 @@ export async function postChatCompletions(params: {
109111
)
110112
}
111113

112-
const bodyStream = 'stream' in body && body.stream
113-
const runId = (body as any)?.codebuff_metadata?.run_id
114+
const typedBody = body as unknown as ChatCompletionRequestBody
115+
const bodyStream = typedBody.stream ?? false
116+
const runId = typedBody.codebuff_metadata?.run_id
114117

115118
// Extract and validate API key
116119
const apiKey = extractApiKeyFromHeader(req)
@@ -204,8 +207,7 @@ export async function postChatCompletions(params: {
204207
}
205208

206209
// Extract and validate agent run ID
207-
const runIdFromBody: string | undefined = (body as any).codebuff_metadata
208-
?.run_id
210+
const runIdFromBody = typedBody.codebuff_metadata?.run_id
209211
if (!runIdFromBody || typeof runIdFromBody !== 'string') {
210212
trackEvent({
211213
event: AnalyticsEvent.CHAT_COMPLETIONS_VALIDATION_ERROR,
@@ -269,7 +271,7 @@ export async function postChatCompletions(params: {
269271
if (bodyStream) {
270272
// Streaming request
271273
const stream = await handleOpenRouterStream({
272-
body,
274+
body: typedBody,
273275
userId,
274276
stripeCustomerId,
275277
agentId,
@@ -299,21 +301,20 @@ export async function postChatCompletions(params: {
299301
})
300302
} else {
301303
// Non-streaming request
302-
const model = (body as any)?.model
303-
const shortModelName =
304-
typeof model === 'string' ? model.split('/')[1] : undefined
304+
const model = typedBody.model
305+
const modelParts = model.split('/')
306+
const shortModelName = modelParts.length > 1 ? modelParts[1] : model
305307
const isOpenAIDirectModel =
306-
typeof model === 'string' &&
307308
model.startsWith('openai/') &&
308-
OPENAI_SUPPORTED_MODELS.includes(shortModelName as any)
309+
(OPENAI_SUPPORTED_MODELS as readonly string[]).includes(shortModelName)
309310
// Only use OpenAI endpoint for OpenAI models with n parameter
310311
// All other models (including non-OpenAI with n parameter) should use OpenRouter
311312
const shouldUseOpenAIEndpoint =
312-
isOpenAIDirectModel && (body as any)?.codebuff_metadata?.n
313+
isOpenAIDirectModel && typedBody.codebuff_metadata?.n !== undefined
313314

314315
const nonStreamRequest = shouldUseOpenAIEndpoint
315316
? handleOpenAINonStream({
316-
body,
317+
body: typedBody,
317318
userId,
318319
stripeCustomerId,
319320
agentId,
@@ -322,7 +323,7 @@ export async function postChatCompletions(params: {
322323
insertMessageBigquery,
323324
})
324325
: handleOpenRouterNonStream({
325-
body,
326+
body: typedBody,
326327
userId,
327328
stripeCustomerId,
328329
agentId,
@@ -360,13 +361,13 @@ export async function postChatCompletions(params: {
360361
userId,
361362
agentId,
362363
runId: runIdFromBody,
363-
model: (body as any)?.model,
364+
model: typedBody.model,
364365
streaming: !!bodyStream,
365366
hasByokKey: !!openrouterApiKey,
366-
messageCount: Array.isArray((body as any)?.messages)
367-
? (body as any).messages.length
367+
messageCount: Array.isArray(typedBody.messages)
368+
? typedBody.messages.length
368369
: 0,
369-
messages: (body as any)?.messages,
370+
messages: typedBody.messages,
370371
openrouterStatusCode: openrouterError?.statusCode,
371372
openrouterStatusText: openrouterError?.statusText,
372373
openrouterErrorCode: errorDetails?.error?.code,

web/src/llm-api/helpers.ts

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@ import { PROFIT_MARGIN } from '@codebuff/common/old-constants'
1010
import type { InsertMessageBigqueryFn } from '@codebuff/common/types/contracts/bigquery'
1111
import type { Logger } from '@codebuff/common/types/contracts/logger'
1212

13+
import type { ChatCompletionRequestBody } from './types'
14+
1315
export type UsageData = {
1416
inputTokens: number
1517
outputTokens: number
@@ -24,21 +26,24 @@ export function extractRequestMetadata(params: {
2426
}) {
2527
const { body, logger } = params
2628

27-
const rawClientId = (body as any)?.codebuff_metadata?.client_id
29+
const typedBody = body as ChatCompletionRequestBody | undefined
30+
const metadata = typedBody?.codebuff_metadata
31+
32+
const rawClientId = metadata?.client_id
2833
const clientId = typeof rawClientId === 'string' ? rawClientId : null
2934
if (!clientId) {
3035
logger.warn({ body }, 'Received request without client_id')
3136
}
3237

33-
const rawRunId = (body as any)?.codebuff_metadata?.run_id
38+
const rawRunId = metadata?.run_id
3439
const clientRequestId: string | null =
3540
typeof rawRunId === 'string' ? rawRunId : null
3641
if (!clientRequestId) {
3742
logger.warn({ body }, 'Received request without run_id')
3843
}
3944

40-
const n = (body as any)?.codebuff_metadata?.n
41-
const rawCostMode = (body as any)?.codebuff_metadata?.cost_mode
45+
const n = metadata?.n
46+
const rawCostMode = metadata?.cost_mode
4247
const costMode = typeof rawCostMode === 'string' ? rawCostMode : undefined
4348
return { clientId, clientRequestId, costMode, ...(n && { n }) }
4449
}

web/src/llm-api/openai.ts

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import {
99
import type { UsageData } from './helpers'
1010
import type { InsertMessageBigqueryFn } from '@codebuff/common/types/contracts/bigquery'
1111
import type { Logger } from '@codebuff/common/types/contracts/logger'
12+
import type { ChatCompletionRequestBody } from './types'
1213

1314
export const OPENAI_SUPPORTED_MODELS = ['gpt-5', 'gpt-5.1'] as const
1415
export type OpenAIModel = (typeof OPENAI_SUPPORTED_MODELS)[number]
@@ -71,7 +72,7 @@ export async function handleOpenAINonStream({
7172
logger,
7273
insertMessageBigquery,
7374
}: {
74-
body: any
75+
body: ChatCompletionRequestBody
7576
userId: string
7677
stripeCustomerId?: string | null
7778
agentId: string
@@ -108,7 +109,7 @@ export async function handleOpenAINonStream({
108109
// Transform max_tokens to max_completion_tokens
109110
openaiBody.max_completion_tokens =
110111
openaiBody.max_completion_tokens ?? openaiBody.max_tokens
111-
delete (openaiBody as any).max_tokens
112+
delete openaiBody.max_tokens
112113

113114
// Transform reasoning to reasoning_effort
114115
if (openaiBody.reasoning && typeof openaiBody.reasoning === 'object') {
@@ -122,14 +123,14 @@ export async function handleOpenAINonStream({
122123
openaiBody.reasoning_effort = reasoning.effort ?? 'medium'
123124
}
124125
}
125-
delete (openaiBody as any).reasoning
126+
delete openaiBody.reasoning
126127

127128
// Remove fields that OpenAI doesn't support
128-
delete (openaiBody as any).stop
129-
delete (openaiBody as any).usage
130-
delete (openaiBody as any).provider
131-
delete (openaiBody as any).transforms
132-
delete (openaiBody as any).codebuff_metadata
129+
delete openaiBody.stop
130+
delete openaiBody.usage
131+
delete openaiBody.provider
132+
delete openaiBody.transforms
133+
delete openaiBody.codebuff_metadata
133134

134135
const response = await fetch('https://api.openai.com/v1/chat/completions', {
135136
method: 'POST',

web/src/llm-api/openrouter.ts

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,10 @@ import type { UsageData } from './helpers'
1616
import type { OpenRouterStreamChatCompletionChunk } from './type/openrouter'
1717
import type { InsertMessageBigqueryFn } from '@codebuff/common/types/contracts/bigquery'
1818
import type { Logger } from '@codebuff/common/types/contracts/logger'
19+
import type {
20+
ChatCompletionRequestBody,
21+
OpenRouterErrorMetadata,
22+
} from './types'
1923

2024
type StreamState = { responseText: string; reasoningText: string }
2125

@@ -26,7 +30,7 @@ type LineResult = {
2630
}
2731

2832
function createOpenRouterRequest(params: {
29-
body: any
33+
body: ChatCompletionRequestBody
3034
openrouterApiKey: string | null
3135
fetch: typeof globalThis.fetch
3236
}) {
@@ -61,7 +65,8 @@ function extractRequestMetadataWithN(params: {
6165
}) {
6266
const { body, logger } = params
6367
const { clientId, clientRequestId, costMode } = extractRequestMetadata({ body, logger })
64-
const n = (body as any)?.codebuff_metadata?.n
68+
const typedBody = body as ChatCompletionRequestBody | undefined
69+
const n = typedBody?.codebuff_metadata?.n
6570
return { clientId, clientRequestId, costMode, ...(n && { n }) }
6671
}
6772

@@ -75,7 +80,7 @@ export async function handleOpenRouterNonStream({
7580
logger,
7681
insertMessageBigquery,
7782
}: {
78-
body: any
83+
body: ChatCompletionRequestBody
7984
userId: string
8085
stripeCustomerId?: string | null
8186
agentId: string
@@ -98,7 +103,7 @@ export async function handleOpenRouterNonStream({
98103
const byok = openrouterApiKey !== null
99104

100105
// If n > 1, make n parallel requests
101-
if (n > 1) {
106+
if (n && n > 1) {
102107
const requests = Array.from({ length: n }, () =>
103108
createOpenRouterRequest({ body, openrouterApiKey, fetch }),
104109
)
@@ -259,7 +264,7 @@ export async function handleOpenRouterStream({
259264
logger,
260265
insertMessageBigquery,
261266
}: {
262-
body: any
267+
body: ChatCompletionRequestBody
263268
userId: string
264269
stripeCustomerId?: string | null
265270
agentId: string

0 commit comments

Comments
 (0)