diff --git a/.gitignore b/.gitignore index 6fc7b59..14132fc 100644 --- a/.gitignore +++ b/.gitignore @@ -183,3 +183,6 @@ operations/convert_institution_id_to_string.py operations/verify_institution_id.py .vercel .env.deploy + +# Test upload fixtures (generated — do not commit) +data/test_uploads/ diff --git a/codebenders-dashboard/app/admin/layout.tsx b/codebenders-dashboard/app/admin/layout.tsx new file mode 100644 index 0000000..feaab0e --- /dev/null +++ b/codebenders-dashboard/app/admin/layout.tsx @@ -0,0 +1,3 @@ +export default function AdminLayout({ children }: { children: React.ReactNode }) { + return <>{children} +} diff --git a/codebenders-dashboard/app/admin/upload/history/page.tsx b/codebenders-dashboard/app/admin/upload/history/page.tsx new file mode 100644 index 0000000..46e80dc --- /dev/null +++ b/codebenders-dashboard/app/admin/upload/history/page.tsx @@ -0,0 +1,181 @@ +"use client" + +import { useState, useEffect, useCallback } from "react" +import { useRouter } from "next/navigation" +import { Button } from "@/components/ui/button" +import { Loader2 } from "lucide-react" + +interface UploadEntry { + id: number + userEmail: string + filename: string + fileType: string + rowsInserted: number + rowsSkipped: number + errorCount: number + status: "success" | "partial" | "failed" + uploadedAt: string +} + +const FILE_TYPE_COLORS: Record = { + pdp_cohort_ar: "bg-green-50 text-green-700", + pdp_cohort_submission: "bg-green-50 text-green-700", + course_ar: "bg-blue-50 text-blue-700", + course_submission: "bg-blue-50 text-blue-700", + ml_predictions: "bg-purple-50 text-purple-700", +} + +const FILE_TYPE_LABELS: Record = { + pdp_cohort_ar: "PDP Cohort AR", + pdp_cohort_submission: "PDP Cohort Submission", + course_ar: "Course AR", + course_submission: "Course Submission", + ml_predictions: "ML Predictions", +} + +const STATUS_STYLES: Record = { + success: "bg-green-100 text-green-700", + partial: "bg-amber-100 text-amber-700", + failed: "bg-red-100 
text-red-700", +} + +export default function UploadHistoryPage() { + const router = useRouter() + const [entries, setEntries] = useState([]) + const [total, setTotal] = useState(0) + const [page, setPage] = useState(1) + const [loading, setLoading] = useState(true) + const [statusCounts, setStatusCounts] = useState>({}) + const pageSize = 20 + + const fetchHistory = useCallback(async (p: number) => { + setLoading(true) + try { + const res = await fetch( + `/api/admin/upload/history?page=${p}&pageSize=${pageSize}` + ) + if (!res.ok) throw new Error(`HTTP ${res.status}`) + const data = await res.json() + setEntries(data.data ?? []) + setTotal(data.total ?? 0) + setStatusCounts(data.statusCounts ?? {}) + } catch { + setEntries([]) + setTotal(0) + } finally { + setLoading(false) + } + }, []) + + useEffect(() => { + fetchHistory(page) + }, [page, fetchHistory]) + + const pageCount = Math.ceil(total / pageSize) + + return ( +
+
+
+

Upload History

+

+ All data file uploads by admin and IR users +

+
+ +
+ +
+
+
Total Uploads
+
{total}
+
+
+
Successful
+
{statusCounts.success ?? 0}
+
+
+
Partial
+
{statusCounts.partial ?? 0}
+
+
+
Failed
+
{statusCounts.failed ?? 0}
+
+
+ + {loading ? ( +
+ Loading… +
+ ) : entries.length === 0 ? ( +
+ No uploads yet. Click "+ New Upload" to get started. +
+ ) : ( +
+ + + + + + + + + + + + + + + {entries.map((e, i) => ( + + + + + + + + + + + ))} + +
FileTypeInsertedSkippedErrorsStatusUploaded ByDate
{e.filename} + + {FILE_TYPE_LABELS[e.fileType] ?? e.fileType} + + {e.rowsInserted.toLocaleString()}{e.rowsSkipped}{e.errorCount} + + {e.status} + + {e.userEmail}{new Date(e.uploadedAt).toLocaleDateString()}
+
+ )} + + {pageCount > 1 && ( +
+ Showing {(page - 1) * pageSize + 1}–{Math.min(page * pageSize, total)} of {total} uploads +
+ + {Array.from({ length: pageCount }, (_, i) => i + 1).slice(0, 5).map((p) => ( + + ))} + +
+
+ )} +
+ ) +} diff --git a/codebenders-dashboard/app/admin/upload/page.tsx b/codebenders-dashboard/app/admin/upload/page.tsx new file mode 100644 index 0000000..be3870f --- /dev/null +++ b/codebenders-dashboard/app/admin/upload/page.tsx @@ -0,0 +1,380 @@ +"use client" + +import { useState, useCallback, useEffect } from "react" +import { useRouter } from "next/navigation" +import { DropZone } from "@/components/upload/drop-zone" +import { ColumnMapper } from "@/components/upload/column-mapper" +import { DataPreview } from "@/components/upload/data-preview" +import { UploadSummary } from "@/components/upload/upload-summary" +import { Button } from "@/components/ui/button" +import { AlertCircle, CheckCircle, Loader2 } from "lucide-react" +import { SCHEMAS, CONFIDENT_THRESHOLD, type ColumnMapping } from "@/lib/upload-schemas" + +type Step = "upload" | "preview" | "complete" + +interface PreviewData { + detectedSchema: string | null + detectedSchemaLabel: string | null + confidence: number + scores: Array<{ schemaId: string; label: string; score: number }> + columns: ColumnMapping[] + sampleRows: Record[] + totalRows: number + warnings: string[] + errors: string[] +} + +interface CommitResult { + inserted: number + skipped: number + errors: Array<{ row: number; message: string }> + uploadId: number +} + +interface HistoryEntry { + id: number + filename: string + fileType: string + rowsInserted: number + status: string + uploadedAt: string +} + +export default function UploadPage() { + const router = useRouter() + const [step, setStep] = useState("upload") + const [file, setFile] = useState(null) + const [preview, setPreview] = useState(null) + const [columns, setColumns] = useState([]) + const [selectedSchema, setSelectedSchema] = useState(null) + const [showSchemaOverride, setShowSchemaOverride] = useState(false) + const [commitResult, setCommitResult] = useState(null) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + const 
[recentUploads, setRecentUploads] = useState([]) + + useEffect(() => { + fetch("/api/admin/upload/history?pageSize=5") + .then((r) => r.json()) + .then((d) => setRecentUploads(d.data ?? [])) + .catch(() => {}) + }, []) + + const handleFile = useCallback(async (f: File) => { + setFile(f) + setError(null) + setLoading(true) + + try { + const formData = new FormData() + formData.append("file", f) + + const res = await fetch("/api/admin/upload/preview", { + method: "POST", + body: formData, + }) + + if (!res.ok) { + const data = await res.json() + setError(data.error ?? "Preview failed") + setLoading(false) + return + } + + const data: PreviewData = await res.json() + setPreview(data) + setColumns(data.columns) + setSelectedSchema(data.detectedSchema) + setStep("preview") + } catch (err) { + setError((err as Error).message) + } finally { + setLoading(false) + } + }, []) + + const handleSchemaOverride = useCallback( + (schemaId: string) => { + setSelectedSchema(schemaId) + }, + [] + ) + + const handleCommit = useCallback(async () => { + if (!file || !selectedSchema) return + setLoading(true) + setError(null) + + try { + const formData = new FormData() + formData.append("file", file) + formData.append("schemaId", selectedSchema) + formData.append("columnMapping", JSON.stringify(columns)) + + const res = await fetch("/api/admin/upload/commit", { + method: "POST", + body: formData, + }) + + if (!res.ok) { + const data = await res.json() + setError(data.error ?? 
"Upload failed") + setLoading(false) + return + } + + const data: CommitResult = await res.json() + setCommitResult(data) + setStep("complete") + } catch (err) { + setError((err as Error).message) + } finally { + setLoading(false) + } + }, [file, selectedSchema, columns]) + + const resetWizard = useCallback(() => { + setStep("upload") + setFile(null) + setPreview(null) + setColumns([]) + setSelectedSchema(null) + setShowSchemaOverride(false) + setCommitResult(null) + setError(null) + }, []) + + const headers = preview?.sampleRows?.[0] ? Object.keys(preview.sampleRows[0]) : [] + const hasRequiredErrors = (preview?.errors?.length ?? 0) > 0 + const stepLabels = ["Upload", "Preview & Map", "Complete"] + const stepIndex = step === "upload" ? 0 : step === "preview" ? 1 : 2 + const selectedSchemaObj = SCHEMAS.find((s) => s.id === selectedSchema) ?? null + const selectedSchemaLabel = selectedSchemaObj?.label ?? null + const matchedCount = columns.filter((c) => c.status === "matched").length + const unmappedCount = columns.filter((c) => c.status === "unmapped").length + + return ( +
+
+

Upload Data

+

+ Drop a PDP, course, or prediction file — we'll detect the format + automatically +

+
+ + {/* Step indicator */} +
+ {stepLabels.map((label, i) => ( + + {i > 0 && } + + {i < stepIndex ? `${label} ✓` : `${i + 1}. ${label}`} + + + ))} +
+ + {/* Error banner */} + {error && ( +
+ + {error} +
+ )} + + {/* Step 1: Upload */} + {step === "upload" && ( +
+ + {loading && ( +
+ Parsing file… +
+ )} + + {recentUploads.length > 0 && ( +
+

Recent Uploads

+
+ {recentUploads.map((u) => ( +
+
+ {u.filename} + + {u.fileType} + +
+
+ {u.rowsInserted} rows + {new Date(u.uploadedAt).toLocaleDateString()} + +
+
+ ))} +
+
+ )} +
+ )} + + {/* Step 2: Preview & Map */} + {step === "preview" && preview && ( +
+ {preview.confidence >= CONFIDENT_THRESHOLD ? ( + <> +
+
+ + {selectedSchemaLabel} + + — {file?.name} — {preview.totalRows} rows,{" "} + {matchedCount}/ + {columns.length} columns matched + +
+ +
+ {showSchemaOverride && ( +
+ {preview.scores.filter((s) => s.score > 0.1).map((s) => ( + + ))} +
+ )} + + ) : ( +
+
+ + + Couldn't confidently detect the file type + +
+

+ {file?.name} has {columns.length} columns — it partially matches + multiple schemas. Please select the correct type: +

+
+ {preview.scores + .filter((s) => s.score > 0.1) + .map((s) => ( + + ))} +
+
+ )} + +
+

Column Mapping

+ +
+ + + +
+
+ + 📊 {preview.totalRows} rows + + + ✓ {matchedCount} matched + + {unmappedCount > 0 && ( + + ⚠{" "} + {unmappedCount}{" "} + unmapped + + )} + {hasRequiredErrors && ( + + ✗ {preview.errors.length} errors + + )} +
+
+ + +
+
+
+ )} + + {/* Step 3: Complete */} + {step === "complete" && commitResult && ( + router.push("/admin/upload/history")} + /> + )} +
+ ) +} diff --git a/codebenders-dashboard/app/api/admin/upload/commit/route.ts b/codebenders-dashboard/app/api/admin/upload/commit/route.ts new file mode 100644 index 0000000..b3feedb --- /dev/null +++ b/codebenders-dashboard/app/api/admin/upload/commit/route.ts @@ -0,0 +1,217 @@ +import { NextRequest, NextResponse } from "next/server" +import { parseFileBuffer, getFileType, validateFileSize } from "@/lib/upload-parser" +import { SCHEMAS, type UploadSchema, type ColumnMapping } from "@/lib/upload-schemas" +import { getPool } from "@/lib/db" + +const BATCH_SIZE = 500 + +export async function POST(request: NextRequest) { + const userId = request.headers.get("x-user-id") ?? "" + const userEmail = request.headers.get("x-user-email") ?? "" + + try { + const formData = await request.formData() + const file = formData.get("file") as File | null + const schemaId = formData.get("schemaId") as string | null + const mappingJson = formData.get("columnMapping") as string | null + + if (!file || !schemaId || !mappingJson) { + return NextResponse.json( + { error: "Missing file, schemaId, or columnMapping" }, + { status: 400 } + ) + } + + if (!validateFileSize(file.size)) { + return NextResponse.json({ error: "File exceeds 50 MB limit" }, { status: 413 }) + } + + const schema = SCHEMAS.find((s) => s.id === schemaId) + if (!schema) { + return NextResponse.json({ error: `Unknown schema: ${schemaId}` }, { status: 400 }) + } + + let columnMapping: ColumnMapping[] + try { + columnMapping = JSON.parse(mappingJson) + } catch { + return NextResponse.json({ error: "Invalid column mapping JSON" }, { status: 400 }) + } + + const fileType = getFileType(file.name) + if (!fileType) { + return NextResponse.json({ error: "Unsupported file type" }, { status: 400 }) + } + + const buffer = Buffer.from(await file.arrayBuffer()) + const { rows } = await parseFileBuffer(buffer, fileType) + + const result = await upsertRows(rows, columnMapping, schema) + + const pool = getPool() + const status = + 
result.errors.length > 0 && result.inserted === 0
+        ? "failed"
+        : result.errors.length > 0
+          ? "partial"
+          : "success"
+
+    const { rows: historyRows } = await pool.query(
+      `INSERT INTO upload_history (user_id, user_email, filename, file_type, rows_inserted, rows_skipped, error_count, status)
+       VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id`,
+      [userId, userEmail, file.name, schemaId, result.inserted, result.skipped, result.errors.length, status]
+    )
+
+    return NextResponse.json({
+      inserted: result.inserted,
+      skipped: result.skipped,
+      errors: result.errors.slice(0, 50),
+      uploadId: historyRows[0].id,
+    })
+  } catch (err) {
+    console.error("Upload commit error:", err)
+    return NextResponse.json(
+      { error: `Upload failed: ${(err as Error).message}` },
+      { status: 500 }
+    )
+  }
+}
+
+interface UpsertResult {
+  inserted: number
+  skipped: number
+  errors: Array<{ row: number; column?: string; message: string }>
+}
+
+async function upsertRows(
+  rows: Record<string, string>[],
+  columnMapping: ColumnMapping[],
+  schema: UploadSchema
+): Promise<UpsertResult> {
+  const pool = getPool()
+  let inserted = 0
+  let skipped = 0
+  const errors: UpsertResult["errors"] = []
+
+  // Build the header→dbColumn map from the user-confirmed mapping
+  const headerToDb = new Map<string, string>()
+  for (const col of columnMapping) {
+    if (col.mappedTo) {
+      headerToDb.set(col.header, col.mappedTo)
+    }
+  }
+
+  // Build transform lookup from schema
+  const transforms = new Map<string, (v: string) => string>()
+  for (const col of schema.columns) {
+    if (col.transform) {
+      transforms.set(col.name, col.transform)
+    }
+  }
+
+  // Check required columns are mapped
+  const mappedDbCols = new Set(headerToDb.values())
+  for (const col of schema.columns) {
+    if (col.required && !mappedDbCols.has(col.name)) {
+      errors.push({ row: 0, column: col.name, message: `Required column not mapped: ${col.name}` })
+    }
+  }
+  if (errors.length > 0) return { inserted: 0, skipped: 0, errors }
+
+  // Build SQL template once — columns are determined by the mapping, not per-row
+  const cols = Array.from(headerToDb.values())
+  const conflictClause = schema.upsertKey.join(", ")
+  const updateSet = cols
+    .filter((c) => !schema.upsertKey.includes(c))
+    .map((c) => `${c} = EXCLUDED.${c}`)
+    .join(", ")
+
+  // Cache required column names
+  const requiredDbCols = schema.columns
+    .filter((c) => c.required)
+    .map((c) => c.name)
+
+  // Process in batches — one INSERT per batch (N+1 → N/BATCH_SIZE queries)
+  for (let i = 0; i < rows.length; i += BATCH_SIZE) {
+    const batch = rows.slice(i, i + BATCH_SIZE)
+    const batchValues: string[] = []
+    const batchParams: string[] = []
+    let paramIdx = 1
+
+    for (let j = 0; j < batch.length; j++) {
+      const row = batch[j]
+      const rowIndex = i + j + 1
+
+      const dbRow: Record<string, string> = {}
+      for (const [header, dbCol] of headerToDb) {
+        let value = row[header] ?? ""
+        const transform = transforms.get(dbCol)
+        if (transform) value = transform(value)
+        dbRow[dbCol] = value
+      }
+
+      // Check required fields have values
+      const missing = requiredDbCols.find((c) => !dbRow[c] || dbRow[c].trim() === "")
+      if (missing) {
+        skipped++
+        errors.push({ row: rowIndex, column: missing, message: `Empty required field: ${missing}` })
+        continue
+      }
+
+      const rowPlaceholders = cols.map(() => `$${paramIdx++}`)
+      batchValues.push(`(${rowPlaceholders.join(", ")})`)
+      batchParams.push(...cols.map((c) => dbRow[c]))
+    }
+
+    if (batchValues.length === 0) continue
+
+    try {
+      const batchSql = updateSet
+        ? `INSERT INTO ${schema.targetTable} (${cols.join(", ")})
+           VALUES ${batchValues.join(", ")}
+           ON CONFLICT (${conflictClause}) DO UPDATE SET ${updateSet}`
+        : `INSERT INTO ${schema.targetTable} (${cols.join(", ")})
+           VALUES ${batchValues.join(", ")}
+           ON CONFLICT (${conflictClause}) DO NOTHING`
+
+      const result = await pool.query(batchSql, batchParams)
+      inserted += result.rowCount ?? 0
+    } catch (err) {
+      // If batch fails, fall back to per-row to identify the bad row(s)
+      for (let j = 0; j < batch.length; j++) {
+        const row = batch[j]
+        const rowIndex = i + j + 1
+
+        const dbRow: Record<string, string> = {}
+        for (const [header, dbCol] of headerToDb) {
+          let value = row[header] ?? ""
+          const transform = transforms.get(dbCol)
+          if (transform) value = transform(value)
+          dbRow[dbCol] = value
+        }
+
+        const missing = requiredDbCols.find((c) => !dbRow[c] || dbRow[c].trim() === "")
+        if (missing) continue // already counted above
+
+        try {
+          const singlePlaceholders = cols.map((_, idx) => `$${idx + 1}`)
+          const singleSql = updateSet
+            ? `INSERT INTO ${schema.targetTable} (${cols.join(", ")})
+               VALUES (${singlePlaceholders.join(", ")})
+               ON CONFLICT (${conflictClause}) DO UPDATE SET ${updateSet}`
+            : `INSERT INTO ${schema.targetTable} (${cols.join(", ")})
+               VALUES (${singlePlaceholders.join(", ")})
+               ON CONFLICT (${conflictClause}) DO NOTHING`
+
+          await pool.query(singleSql, cols.map((c) => dbRow[c]))
+          inserted++
+        } catch (rowErr) {
+          skipped++
+          errors.push({ row: rowIndex, message: (rowErr as Error).message })
+        }
+      }
+    }
+  }
+
+  return { inserted, skipped, errors }
+}
diff --git a/codebenders-dashboard/app/api/admin/upload/history/route.ts b/codebenders-dashboard/app/api/admin/upload/history/route.ts
new file mode 100644
index 0000000..eed9a00
--- /dev/null
+++ b/codebenders-dashboard/app/api/admin/upload/history/route.ts
@@ -0,0 +1,58 @@
+import { NextRequest, NextResponse } from "next/server"
+import { getPool } from "@/lib/db"
+
+export async function GET(request: NextRequest) {
+  try {
+    const { searchParams } = new URL(request.url)
+    const page = Math.max(1, parseInt(searchParams.get("page") ?? "1") || 1)
+    const pageSize = Math.min(50, Math.max(1, parseInt(searchParams.get("pageSize") ?? "20") || 20))
+    const offset = (page - 1) * pageSize
+
+    const pool = getPool()
+
+    const [dataResult, countResult, statusResult] = await Promise.all([
+      pool.query(
+        `SELECT id, user_email, filename, file_type, rows_inserted, rows_skipped,
+                error_count, status, uploaded_at
+           FROM upload_history
+          ORDER BY uploaded_at DESC
+          LIMIT $1 OFFSET $2`,
+        [pageSize, offset]
+      ),
+      pool.query(`SELECT COUNT(*)::int AS total FROM upload_history`),
+      pool.query(
+        `SELECT status, COUNT(*)::int AS count FROM upload_history GROUP BY status`
+      ),
+    ])
+
+    const total = countResult.rows[0].total
+    const statusCounts: Record<string, number> = {}
+    for (const row of statusResult.rows) {
+      statusCounts[row.status] = row.count
+    }
+
+    return NextResponse.json({
+      data: dataResult.rows.map((row) => ({
+        id: row.id,
+        userEmail: row.user_email,
+        filename: row.filename,
+        fileType: row.file_type,
+        rowsInserted: row.rows_inserted,
+        rowsSkipped: row.rows_skipped,
+        errorCount: row.error_count,
+        status: row.status,
+        uploadedAt: row.uploaded_at,
+      })),
+      total,
+      page,
+      pageSize,
+      statusCounts,
+    })
+  } catch (err) {
+    console.error("Upload history error:", err)
+    return NextResponse.json(
+      { error: `Failed to fetch upload history: ${(err as Error).message}` },
+      { status: 500 }
+    )
+  }
+}
diff --git a/codebenders-dashboard/app/api/admin/upload/preview/route.ts b/codebenders-dashboard/app/api/admin/upload/preview/route.ts
new file mode 100644
index 0000000..8542fab
--- /dev/null
+++ b/codebenders-dashboard/app/api/admin/upload/preview/route.ts
@@ -0,0 +1,71 @@
+import { NextRequest, NextResponse } from "next/server"
+import { parseFileBuffer, getFileType, validateFileSize } from "@/lib/upload-parser"
+import { detectSchema, mapColumns } from "@/lib/upload-schemas"
+
+export async function POST(request: NextRequest) {
+  try {
+    const formData = await request.formData()
+    const file = formData.get("file") as File | null
+
+    if (!file) {
+      return NextResponse.json({ error: "No file provided" }, {
status: 400 }) + } + + if (!validateFileSize(file.size)) { + return NextResponse.json( + { error: "File exceeds 50 MB limit" }, + { status: 413 } + ) + } + + const fileType = getFileType(file.name) + if (!fileType) { + return NextResponse.json( + { error: "Unsupported file type. Please upload a .csv or .xlsx file." }, + { status: 400 } + ) + } + + const buffer = Buffer.from(await file.arrayBuffer()) + const { headers, rows, totalRows } = await parseFileBuffer(buffer, fileType, 50) + + const detection = detectSchema(headers) + + const columns = detection.schema + ? mapColumns(headers, detection.schema) + : headers.map((h) => ({ header: h, mappedTo: null, status: "unmapped" as const })) + + const missingRequired = detection.schema + ? detection.schema.columns + .filter((c) => c.required) + .filter((c) => !columns.some((col) => col.mappedTo === c.name)) + .map((c) => `Missing required column: ${c.name}`) + : [] + + const warnings = detection.schema + ? detection.schema.columns + .filter((c) => !c.required) + .filter((c) => !columns.some((col) => col.mappedTo === c.name)) + .slice(0, 5) + .map((c) => `Missing optional column: ${c.name}`) + : [] + + return NextResponse.json({ + detectedSchema: detection.schema?.id ?? null, + detectedSchemaLabel: detection.schema?.label ?? 
null, + confidence: Math.round(detection.confidence * 100) / 100, + scores: detection.scores, + columns, + sampleRows: rows.slice(0, 10), + totalRows, + warnings, + errors: missingRequired, + }) + } catch (err) { + console.error("Upload preview error:", err) + return NextResponse.json( + { error: `Failed to parse file: ${(err as Error).message}` }, + { status: 500 } + ) + } +} diff --git a/codebenders-dashboard/components/nav-header.tsx b/codebenders-dashboard/components/nav-header.tsx index ad70255..c86be49 100644 --- a/codebenders-dashboard/components/nav-header.tsx +++ b/codebenders-dashboard/components/nav-header.tsx @@ -12,11 +12,12 @@ interface NavHeaderProps { role: Role } -const NAV_LINKS = [ +const NAV_LINKS: Array<{ href: string; label: string; roles?: Role[] }> = [ { href: "/", label: "Dashboard" }, { href: "/courses", label: "Courses" }, { href: "/students", label: "Students" }, { href: "/query", label: "Query" }, + { href: "/admin/upload", label: "Admin", roles: ["admin", "ir"] }, ] export function NavHeader({ email, role }: NavHeaderProps) { @@ -34,7 +35,7 @@ export function NavHeader({ email, role }: NavHeaderProps) { {/* Nav links */}