import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { tableExportFormatSchema, tableIdParamsSchema } from '@/lib/api/contracts/tables'
import { getValidationErrorMessage } from '@/lib/api/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
import { queryRows } from '@/lib/table/service'
import { accessError, checkAccess } from '@/app/api/table/utils'

const logger = createLogger('TableExport')

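// Number of rows fetched per queryRows call while streaming an export.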
const EXPORT_BATCH_SIZE = 1000

type ExportFormat = 'csv' | 'json'

interface RouteParams {
  params: Promise<{ tableId: string }>
}

/** GET /api/table/[tableId]/export - Streams the full table contents as CSV or JSON. */
export const GET = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => {
  const requestId = generateRequestId()
  const { tableId } = tableIdParamsSchema.parse(await params)

  const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
  if (!auth.success || !auth.userId) {
    return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
  }

  const { searchParams } = new URL(request.url)
  const formatValidation = tableExportFormatSchema.safeParse(
    searchParams.get('format') ?? undefined
  )
  if (!formatValidation.success) {
    return NextResponse.json(
      { error: getValidationErrorMessage(formatValidation.error) },
      { status: 400 }
    )
  }
  const format: ExportFormat = formatValidation.data

  const access = await checkAccess(tableId, auth.userId, 'read')
  if (!access.ok) return accessError(access, requestId, tableId)
  const { table } = access

  const columns = table.schema.columns
  const safeName = sanitizeFilename(table.name)
  const filename = `${safeName}.${format}`

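  // Encode the export incrementally: rows are fetched in batches and enqueued onto the response stream.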
  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      const encoder = new TextEncoder()
      try {
        if (format === 'csv') {
          controller.enqueue(encoder.encode(`${toCsvRow(columns.map((c) => c.name))}\n`))
        } else {
          controller.enqueue(encoder.encode('['))
        }

        let offset = 0
        let firstJsonRow = true
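        // Page through the table with offset pagination; a batch shorter than EXPORT_BATCH_SIZE is the last page.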
        while (true) {
          const result = await queryRows(
            tableId,
            table.workspaceId,
            { limit: EXPORT_BATCH_SIZE, offset, includeTotal: false },
            requestId
          )

          for (const row of result.rows) {
            if (format === 'csv') {
              const values = columns.map((c) => formatCsvValue(row.data[c.name]))
              controller.enqueue(encoder.encode(`${toCsvRow(values)}\n`))
            } else {
              const prefix = firstJsonRow ? '' : ','
              firstJsonRow = false
              controller.enqueue(encoder.encode(prefix + JSON.stringify({ ...row.data })))
            }
          }

          if (result.rows.length < EXPORT_BATCH_SIZE) break
          offset += result.rows.length
        }

        if (format === 'json') controller.enqueue(encoder.encode(']'))
        controller.close()

        logger.info(`[${requestId}] Exported table ${tableId}`, {
          format,
          rowCount: table.rowCount,
        })
      } catch (err) {
        logger.error(`[${requestId}] Export failed for table ${tableId}`, err)
        controller.error(err)
      }
    },
  })

  return new NextResponse(stream, {
    status: 200,
    headers: {
      'Content-Type': format === 'csv' ? 'text/csv; charset=utf-8' : 'application/json',
      'Content-Disposition': `attachment; filename="${filename}"`,
      'Cache-Control': 'no-store',
    },
  })
})

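/** Replaces characters that are unsafe in a download filename with underscores, falling back to 'table'. */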
function sanitizeFilename(name: string): string {
  const cleaned = name.replace(/[^a-zA-Z0-9_-]+/g, '_').replace(/^_+|_+$/g, '')
  return cleaned || 'table'
}

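/** Converts a cell value to its CSV text: empty for null/undefined, ISO 8601 for dates, JSON for other objects. */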
function formatCsvValue(value: unknown): string {
  if (value === null || value === undefined) return ''
  if (value instanceof Date) return value.toISOString()
  if (typeof value === 'object') return JSON.stringify(value)
  return String(value)
}

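/** Joins already-stringified values into one escaped CSV record. */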
function toCsvRow(values: string[]): string {
  return values.map(escapeCsvField).join(',')
}

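/** Wraps fields containing commas, quotes, or line breaks in double quotes and doubles embedded quotes (RFC 4180 style). */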
function escapeCsvField(field: string): string {
  if (/[",\n\r]/.test(field)) {
    return `"${field.replace(/"/g, '""')}"`
  }
  return field
}
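
/*
 * Illustrative client-side usage, not part of this change (the fetch call and
 * credentials mode shown here are assumptions about how the endpoint is consumed):
 *
 *   const res = await fetch(`/api/table/${tableId}/export?format=csv`, {
 *     credentials: 'include',
 *   })
 *   if (!res.ok) throw new Error(`Export failed with status ${res.status}`)
 *   const csv = await res.text()
 */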