From cc8199b4737b341d9ab96568d8ee6bc3b7ffca20 Mon Sep 17 00:00:00 2001 From: NeuroKoder3 Date: Thu, 14 May 2026 21:51:57 -0500 Subject: [PATCH 1/6] chore: remove dead Deno functions/ scaffolding (C-12) The top-level functions/ directory contained Deno-targeted .ts files that predated the migration to Node + Electron. The live runtime code lives in electron/functions/ and nothing in the application graph imports the top-level folder; this commit removes 16 dead files (~3,000 LOC) and fixes one stale URL reference in EHRIntegrationManager.jsx that pointed at the old path. Risk: none. Confirmed via repo-wide grep that no module resolves against the top-level functions/ path. The electron/functions/ sibling that the production code actually uses is untouched. Co-authored-by: Cursor --- functions/calculatePriority.ts | 162 -------- functions/calculatePriorityAdvanced.ts | 283 -------------- functions/checkNotificationRules.ts | 185 --------- functions/exportToFHIR.ts | 371 ------------------- functions/exportWaitlist.ts | 189 ---------- functions/fhirWebhook.ts | 111 ------ functions/importFHIRData.ts | 266 ------------- functions/lib/audit.ts | 86 ----- functions/lib/constants.ts | 82 ---- functions/lib/logger.ts | 97 ----- functions/lib/validators.ts | 269 -------------- functions/matchDonor.ts | 256 ------------- functions/matchDonorAdvanced.ts | 297 --------------- functions/pushToEHR.ts | 151 -------- functions/validateFHIRData.ts | 147 -------- src/components/ehr/EHRIntegrationManager.jsx | 2 +- 16 files changed, 1 insertion(+), 2953 deletions(-) delete mode 100644 functions/calculatePriority.ts delete mode 100644 functions/calculatePriorityAdvanced.ts delete mode 100644 functions/checkNotificationRules.ts delete mode 100644 functions/exportToFHIR.ts delete mode 100644 functions/exportWaitlist.ts delete mode 100644 functions/fhirWebhook.ts delete mode 100644 functions/importFHIRData.ts delete mode 100644 functions/lib/audit.ts delete mode 100644 
functions/lib/constants.ts delete mode 100644 functions/lib/logger.ts delete mode 100644 functions/lib/validators.ts delete mode 100644 functions/matchDonor.ts delete mode 100644 functions/matchDonorAdvanced.ts delete mode 100644 functions/pushToEHR.ts delete mode 100644 functions/validateFHIRData.ts diff --git a/functions/calculatePriority.ts b/functions/calculatePriority.ts deleted file mode 100644 index ecae8c4..0000000 --- a/functions/calculatePriority.ts +++ /dev/null @@ -1,162 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { - PRIORITY_SCORING, - URGENCY_SCORES, - BLOOD_TYPE_RARITY, -} from './lib/constants.ts'; -import { - isValidUUID, - validatePatientMedicalScores, -} from './lib/validators.ts'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; -import { createHIPAAAuditLog } from './lib/audit.ts'; - -const logger = createLogger('calculatePriority'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const body = await req.json(); - const { patient_id } = body; - - if (!patient_id || !isValidUUID(patient_id)) { - return Response.json( - { error: 'Invalid or missing patient_id. Must be a valid UUID.' 
}, - { status: 400 } - ); - } - - const patient = await api.entities.Patient.get(patient_id); - - if (!patient) { - return Response.json({ error: 'Patient not found' }, { status: 404 }); - } - - // Validate medical scores before using them in calculations - const validation = validatePatientMedicalScores(patient); - if (!validation.valid) { - logger.warn('Patient has invalid medical score data', { - patient_id, - validation_errors: validation.errors, - request_id: requestId, - }); - - await createHIPAAAuditLog(api, { - action: 'CALCULATE', - entityType: 'Patient', - entityId: patient_id, - patientName: `${patient.first_name} ${patient.last_name}`, - details: `Priority calculation rejected: ${validation.errors.join('; ')}`, - user: { email: user.email, role: user.role }, - outcome: 'FAILURE', - errorMessage: validation.errors.join('; '), - requestId, - }); - - return Response.json( - { error: 'Patient has invalid medical data', validation_errors: validation.errors }, - { status: 422 } - ); - } - - // Priority Scoring Algorithm - let score = 0; - - // 1. Medical Urgency Weight (0-30 points) - score += URGENCY_SCORES[patient.medical_urgency] || URGENCY_SCORES.medium; - - // 2. Time on Waitlist (0-25 points) - if (patient.date_added_to_waitlist) { - const daysOnList = Math.floor( - (Date.now() - new Date(patient.date_added_to_waitlist).getTime()) / (1000 * 60 * 60 * 24) - ); - score += Math.min( - PRIORITY_SCORING.MAX_WAITTIME_POINTS, - Math.floor(daysOnList / PRIORITY_SCORING.DAYS_PER_WAITTIME_POINT) - ); - } - - // 3. 
Organ-Specific Scoring (0-25 points) - if (patient.organ_needed === 'liver' && patient.meld_score) { - const meldRange = 40 - 6; // MELD 6-40 maps to 0-25 - score += Math.min( - PRIORITY_SCORING.MAX_ORGAN_SPECIFIC_POINTS, - ((patient.meld_score - 6) / meldRange) * PRIORITY_SCORING.MAX_ORGAN_SPECIFIC_POINTS - ); - } else if (patient.organ_needed === 'lung' && patient.las_score) { - score += Math.min( - PRIORITY_SCORING.MAX_ORGAN_SPECIFIC_POINTS, - (patient.las_score / 100) * PRIORITY_SCORING.MAX_ORGAN_SPECIFIC_POINTS - ); - } else if (patient.organ_needed === 'kidney') { - if (patient.pra_percentage) { - score += Math.min(15, (patient.pra_percentage / 100) * 15); - } - if (patient.cpra_percentage) { - score += Math.min(10, (patient.cpra_percentage / 100) * 10); - } - } else { - score += 10; - } - - // 4. Recent Evaluation Bonus (0-10 points) - if (patient.last_evaluation_date) { - const daysSinceEval = Math.floor( - (Date.now() - new Date(patient.last_evaluation_date).getTime()) / (1000 * 60 * 60 * 24) - ); - if (daysSinceEval <= PRIORITY_SCORING.EVALUATION_RECENT_DAYS) { - score += PRIORITY_SCORING.MAX_EVALUATION_POINTS; - } else if (daysSinceEval <= PRIORITY_SCORING.EVALUATION_MODERATE_DAYS) { - score += PRIORITY_SCORING.MAX_EVALUATION_POINTS / 2; - } - } - - // 5. 
Blood Type Rarity Modifier (0-10 points) - score += BLOOD_TYPE_RARITY[patient.blood_type] || 0; - - // Normalize to 0-100 scale - const normalizedScore = Math.min( - PRIORITY_SCORING.MAX_TOTAL_SCORE, - Math.max(PRIORITY_SCORING.MIN_TOTAL_SCORE, score) - ); - - const previousScore = patient.priority_score; - - // Update patient with new priority score - await api.entities.Patient.update(patient_id, { - priority_score: normalizedScore, - }); - - // HIPAA-compliant audit log - await createHIPAAAuditLog(api, { - action: 'CALCULATE', - entityType: 'Patient', - entityId: patient_id, - patientName: `${patient.first_name} ${patient.last_name}`, - details: `Priority score recalculated: ${normalizedScore.toFixed(1)}`, - user: { email: user.email, role: user.role }, - outcome: 'SUCCESS', - dataModified: { - priority_score: [previousScore, normalizedScore], - }, - requestId, - }); - - return Response.json({ - success: true, - priority_score: normalizedScore, - patient_id, - }); - } catch (error) { - logger.error('Priority calculation failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Priority calculation failed. 
Contact support.'); - } -}); diff --git a/functions/calculatePriorityAdvanced.ts b/functions/calculatePriorityAdvanced.ts deleted file mode 100644 index bb412b8..0000000 --- a/functions/calculatePriorityAdvanced.ts +++ /dev/null @@ -1,283 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('calculatePriorityAdvanced'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { patient_id } = await req.json(); - - const patient = await api.entities.Patient.get(patient_id); - - if (!patient) { - return Response.json({ error: 'Patient not found' }, { status: 404 }); - } - - // Get active priority weights configuration - const allWeights = await api.entities.PriorityWeights.filter({ is_active: true }); - const weights = allWeights.length > 0 ? allWeights[0] : { - medical_urgency_weight: 30, - time_on_waitlist_weight: 25, - organ_specific_score_weight: 25, - evaluation_recency_weight: 10, - blood_type_rarity_weight: 10, - evaluation_decay_rate: 0.5, - }; - - const breakdown = { - components: {}, - raw_scores: {}, - weighted_scores: {}, - total: 0 - }; - - // 1. 
Medical Urgency Score - const urgencyScores = { - critical: 100, - high: 75, - medium: 50, - low: 25, - }; - const urgencyRaw = urgencyScores[patient.medical_urgency] || 50; - - // Factor in functional status - const functionalStatusMultiplier = { - critical: 1.2, - fully_dependent: 1.1, - partially_dependent: 1.0, - independent: 0.95, - }; - const functionalAdjustment = functionalStatusMultiplier[patient.functional_status] || 1.0; - - // Factor in prognosis - const prognosisMultiplier = { - critical: 1.3, - poor: 1.15, - fair: 1.0, - good: 0.95, - excellent: 0.9, - }; - const prognosisAdjustment = prognosisMultiplier[patient.prognosis_rating] || 1.0; - - const urgencyScore = urgencyRaw * functionalAdjustment * prognosisAdjustment; - breakdown.raw_scores.medical_urgency = urgencyScore; - breakdown.components.medical_urgency = { - base: urgencyRaw, - functional_adjustment: functionalAdjustment, - prognosis_adjustment: prognosisAdjustment, - final: urgencyScore - }; - - // 2. Time on Waitlist Score - let timeScore = 0; - if (patient.date_added_to_waitlist) { - const daysOnList = Math.floor( - (new Date() - new Date(patient.date_added_to_waitlist)) / (1000 * 60 * 60 * 24) - ); - // Score increases with time, max 100 at 730 days (2 years) - timeScore = Math.min(100, (daysOnList / 730) * 100); - - // Bonus for very long waits (>3 years) - if (daysOnList > 1095) { - timeScore = Math.min(100, timeScore + 10); - } - - breakdown.components.time_on_waitlist = { - days: daysOnList, - base_score: timeScore, - long_wait_bonus: daysOnList > 1095 ? 10 : 0 - }; - } - breakdown.raw_scores.time_on_waitlist = timeScore; - - // 3. 
Organ-Specific Scoring - let organScore = 0; - if (patient.organ_needed === 'liver' && patient.meld_score) { - // MELD score (6-40) maps to 0-100 - organScore = ((patient.meld_score - 6) / 34) * 100; - breakdown.components.organ_specific = { - type: 'MELD', - score: patient.meld_score, - normalized: organScore - }; - } else if (patient.organ_needed === 'lung' && patient.las_score) { - // LAS score (0-100) maps directly - organScore = patient.las_score; - breakdown.components.organ_specific = { - type: 'LAS', - score: patient.las_score, - normalized: organScore - }; - } else if (patient.organ_needed === 'kidney') { - // For kidney, consider PRA/CPRA - let kidneyScore = 50; // base - if (patient.pra_percentage) { - kidneyScore += (patient.pra_percentage / 100) * 30; - } - if (patient.cpra_percentage) { - kidneyScore += (patient.cpra_percentage / 100) * 20; - } - organScore = Math.min(100, kidneyScore); - breakdown.components.organ_specific = { - type: 'Kidney (PRA/CPRA)', - pra: patient.pra_percentage, - cpra: patient.cpra_percentage, - normalized: organScore - }; - } else { - // Default based on urgency - organScore = urgencyRaw * 0.6; - breakdown.components.organ_specific = { - type: 'Default (based on urgency)', - normalized: organScore - }; - } - breakdown.raw_scores.organ_specific = organScore; - - // 4. 
Evaluation Recency with Time Decay - let evaluationScore = 0; - if (patient.last_evaluation_date) { - const daysSinceEval = Math.floor( - (new Date() - new Date(patient.last_evaluation_date)) / (1000 * 60 * 60 * 24) - ); - - // Base score: recent evaluation is good - if (daysSinceEval <= 90) { - evaluationScore = 100; - } else { - // Apply exponential decay - const periods = Math.floor(daysSinceEval / 90); - const decayRate = weights.evaluation_decay_rate || 0.5; - evaluationScore = 100 * Math.pow(1 - decayRate, periods); - } - - breakdown.components.evaluation_recency = { - days_since_eval: daysSinceEval, - decay_periods: Math.floor(daysSinceEval / 90), - decay_rate: weights.evaluation_decay_rate, - score: evaluationScore - }; - } else { - evaluationScore = 0; - breakdown.components.evaluation_recency = { - status: 'No evaluation on record', - score: 0 - }; - } - breakdown.raw_scores.evaluation_recency = evaluationScore; - - // 5. Blood Type Rarity Score - const bloodTypeRarity = { - 'AB-': 100, - 'B-': 85, - 'A-': 70, - 'O-': 60, - 'AB+': 50, - 'B+': 40, - 'A+': 30, - 'O+': 20, - }; - const bloodScore = bloodTypeRarity[patient.blood_type] || 40; - breakdown.raw_scores.blood_type_rarity = bloodScore; - breakdown.components.blood_type_rarity = { - blood_type: patient.blood_type, - rarity_score: bloodScore - }; - - // 6. 
Additional Factors - - // Comorbidity penalty - let comorbidityPenalty = 0; - if (patient.comorbidity_score) { - comorbidityPenalty = (patient.comorbidity_score / 10) * 10; // Max -10 points - breakdown.components.comorbidity_adjustment = { - score: patient.comorbidity_score, - penalty: -comorbidityPenalty - }; - } - - // Previous transplant adjustment - let previousTransplantAdjustment = 0; - if (patient.previous_transplants > 0) { - // Slight penalty for re-transplants due to complexity - previousTransplantAdjustment = -5 * patient.previous_transplants; - breakdown.components.previous_transplants = { - count: patient.previous_transplants, - adjustment: previousTransplantAdjustment - }; - } - - // Compliance bonus - let complianceBonus = 0; - if (patient.compliance_score) { - complianceBonus = (patient.compliance_score / 10) * 5; // Max +5 points - breakdown.components.compliance_bonus = { - score: patient.compliance_score, - bonus: complianceBonus - }; - } - - // Calculate weighted scores - breakdown.weighted_scores.medical_urgency = - (breakdown.raw_scores.medical_urgency / 100) * weights.medical_urgency_weight; - breakdown.weighted_scores.time_on_waitlist = - (breakdown.raw_scores.time_on_waitlist / 100) * weights.time_on_waitlist_weight; - breakdown.weighted_scores.organ_specific = - (breakdown.raw_scores.organ_specific / 100) * weights.organ_specific_score_weight; - breakdown.weighted_scores.evaluation_recency = - (breakdown.raw_scores.evaluation_recency / 100) * weights.evaluation_recency_weight; - breakdown.weighted_scores.blood_type_rarity = - (breakdown.raw_scores.blood_type_rarity / 100) * weights.blood_type_rarity_weight; - - // Calculate final score - let finalScore = Object.values(breakdown.weighted_scores).reduce((sum, val) => sum + val, 0); - - // Apply adjustments - finalScore = finalScore - comorbidityPenalty + previousTransplantAdjustment + complianceBonus; - finalScore = Math.min(100, Math.max(0, finalScore)); - - breakdown.total = finalScore; - 
breakdown.weights_used = weights; - breakdown.adjustments = { - comorbidity_penalty: -comorbidityPenalty, - previous_transplant_adjustment: previousTransplantAdjustment, - compliance_bonus: complianceBonus - }; - - // Update patient with new priority score and breakdown - await api.entities.Patient.update(patient_id, { - priority_score: finalScore, - priority_score_breakdown: breakdown - }); - - // Log the calculation - await api.entities.AuditLog.create({ - action: 'update', - entity_type: 'Patient', - entity_id: patient_id, - patient_name: `${patient.first_name} ${patient.last_name}`, - details: `Advanced priority score calculated: ${finalScore.toFixed(1)} (Medical: ${breakdown.weighted_scores.medical_urgency.toFixed(1)}, Time: ${breakdown.weighted_scores.time_on_waitlist.toFixed(1)}, Organ: ${breakdown.weighted_scores.organ_specific.toFixed(1)})`, - user_email: user.email, - user_role: user.role, - }); - - return Response.json({ - success: true, - priority_score: finalScore, - breakdown, - patient_id, - }); - } catch (error) { - logger.error('Advanced priority calculation failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Priority calculation failed. 
Contact support.'); - } -}); \ No newline at end of file diff --git a/functions/checkNotificationRules.ts b/functions/checkNotificationRules.ts deleted file mode 100644 index cbc8e71..0000000 --- a/functions/checkNotificationRules.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { sanitizePatientName } from './lib/validators.ts'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('checkNotificationRules'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { patient_id, event_type, old_data } = await req.json(); - - // Get the updated patient data - const patient = await api.entities.Patient.get(patient_id); - - // Get all active notification rules - const rules = await api.entities.NotificationRule.filter({ is_active: true }); - - const triggeredNotifications = []; - - for (const rule of rules) { - let shouldTrigger = false; - let message = ''; - - const conditions = rule.trigger_conditions || {}; - - switch (rule.rule_type) { - case 'priority_threshold': - if (patient.priority_score >= (conditions.priority_score || 80)) { - if (!conditions.organ_type || patient.organ_needed === conditions.organ_type) { - shouldTrigger = true; - message = `${sanitizePatientName(patient.first_name, patient.last_name)} has reached critical priority score of ${patient.priority_score.toFixed(0)}`; - } - } - break; - - case 'status_change': - if (event_type === 'update' && old_data && old_data.waitlist_status !== patient.waitlist_status) { - if (!conditions.status_to || patient.waitlist_status === conditions.status_to) { - shouldTrigger = true; - message = `${sanitizePatientName(patient.first_name, patient.last_name)} status changed from 
${old_data.waitlist_status} to ${patient.waitlist_status}`; - } - } - break; - - case 'evaluation_overdue': - if (patient.last_evaluation_date) { - const daysSinceEval = Math.floor( - (Date.now() - new Date(patient.last_evaluation_date).getTime()) / (1000 * 60 * 60 * 24) - ); - const threshold = conditions.days_threshold || 90; - if (daysSinceEval >= threshold) { - shouldTrigger = true; - message = `${sanitizePatientName(patient.first_name, patient.last_name)} evaluation is ${daysSinceEval} days overdue (threshold: ${threshold} days)`; - } - } - break; - - case 'time_on_waitlist': - if (patient.date_added_to_waitlist) { - const daysOnList = Math.floor( - (Date.now() - new Date(patient.date_added_to_waitlist).getTime()) / (1000 * 60 * 60 * 24) - ); - const threshold = conditions.days_threshold || 365; - if (daysOnList >= threshold) { - shouldTrigger = true; - message = `${sanitizePatientName(patient.first_name, patient.last_name)} has been on waitlist for ${daysOnList} days`; - } - } - break; - - case 'score_change': - if (event_type === 'update' && old_data && old_data.priority_score) { - const scoreChange = patient.priority_score - old_data.priority_score; - if (Math.abs(scoreChange) >= 10) { - shouldTrigger = true; - message = `${sanitizePatientName(patient.first_name, patient.last_name)} priority score changed by ${scoreChange > 0 ? 
'+' : ''}${scoreChange.toFixed(0)} points`; - } - } - break; - - case 'new_patient': - if (event_type === 'create') { - shouldTrigger = true; - message = `New patient added: ${sanitizePatientName(patient.first_name, patient.last_name)} (${patient.organ_needed})`; - } - break; - } - - if (shouldTrigger) { - // Use custom message template if provided - const finalMessage = rule.message_template || message; - - // Determine priority level - let priorityLevel = 'medium'; - if (rule.rule_type === 'priority_threshold' || patient.medical_urgency === 'critical') { - priorityLevel = 'critical'; - } else if (patient.medical_urgency === 'high') { - priorityLevel = 'high'; - } - - // Get users to notify - const allUsers = await api.asServiceRole.entities.User.list(); - const usersToNotify = allUsers.filter(u => - rule.notify_roles.includes(u.role) - ); - - for (const notifyUser of usersToNotify) { - // Create in-app notification - if (rule.notification_channels.includes('in_app')) { - const notification = await api.entities.Notification.create({ - recipient_email: notifyUser.email, - title: rule.rule_name, - message: finalMessage, - notification_type: rule.rule_type === 'priority_threshold' ? 'priority_alert' : - rule.rule_type === 'status_change' ? 'status_change' : 'system', - is_read: false, - related_patient_id: patient.id, - related_patient_name: `${patient.first_name} ${patient.last_name}`, - priority_level: priorityLevel, - action_url: `/PatientDetails?id=${patient.id}`, - metadata: { rule_id: rule.id, patient_id: patient.id } - }); - - triggeredNotifications.push(notification); - } - - // Send email notification - if (rule.notification_channels.includes('email')) { - try { - await api.integrations.Core.SendEmail({ - from_name: 'TransTrack Notifications', - to: notifyUser.email, - subject: `${rule.rule_name} - ${patient.first_name} ${patient.last_name}`, - body: ` - - -
-
-

TransTrack Alert

-
-
-

${rule.rule_name}

-

${finalMessage}

-
- Patient: ${patient.first_name} ${patient.last_name}
- Patient ID: ${patient.patient_id}
- Organ: ${patient.organ_needed}
- Priority Score: ${patient.priority_score?.toFixed(0) || 'N/A'}
- Status: ${patient.waitlist_status} -
-

- Log in to TransTrack to view full patient details and take action. -

-
-
- - - ` - }); - } catch (emailError) { - console.error('Email notification failed:', emailError); - } - } - } - } - } - - return Response.json({ - success: true, - notifications_created: triggeredNotifications.length, - notifications: triggeredNotifications - }); - } catch (error) { - logger.error('Notification rule check failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Notification processing failed. Contact support.'); - } -}); \ No newline at end of file diff --git a/functions/exportToFHIR.ts b/functions/exportToFHIR.ts deleted file mode 100644 index 00d16cc..0000000 --- a/functions/exportToFHIR.ts +++ /dev/null @@ -1,371 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { isValidUUID, sanitizeDiagnosis } from './lib/validators.ts'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('exportToFHIR'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { patient_id, resource_types } = await req.json(); - - if (!patient_id || !isValidUUID(patient_id)) { - return Response.json( - { error: 'Invalid or missing patient_id. Must be a valid UUID.' 
}, - { status: 400 } - ); - } - - const patient = await api.entities.Patient.get(patient_id); - - if (!patient) { - return Response.json({ error: 'Patient not found' }, { status: 404 }); - } - - const fhirBundle = { - resourceType: 'Bundle', - type: 'collection', - timestamp: new Date().toISOString(), - entry: [] as Record[], - }; - - // Always include Patient resource - const fhirPatient = { - resourceType: 'Patient', - id: patient.id, - identifier: [ - { - system: 'https://transtrack.app/patient-id', - value: patient.patient_id - } - ], - name: [ - { - use: 'official', - family: patient.last_name, - given: [patient.first_name] - } - ], - telecom: [ - ...(patient.phone ? [{ - system: 'phone', - value: patient.phone, - use: 'home' - }] : []), - ...(patient.email ? [{ - system: 'email', - value: patient.email - }] : []) - ], - birthDate: patient.date_of_birth, - contact: patient.emergency_contact_name ? [ - { - relationship: [ - { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/v2-0131', - code: 'C', - display: 'Emergency Contact' - } - ] - } - ], - name: { - text: patient.emergency_contact_name - }, - telecom: patient.emergency_contact_phone ? 
[ - { - system: 'phone', - value: patient.emergency_contact_phone - } - ] : [] - } - ] : [] - }; - - fhirBundle.entry.push({ - fullUrl: `Patient/${patient.id}`, - resource: fhirPatient - }); - - // Add Observations for clinical data - if (!resource_types || resource_types.includes('Observation')) { - const observations: Record[] = []; - - // Blood Type Observation - if (patient.blood_type) { - observations.push({ - resourceType: 'Observation', - id: `${patient.id}-bloodtype`, - status: 'final', - category: [ - { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/observation-category', - code: 'laboratory', - display: 'Laboratory' - } - ] - } - ], - code: { - coding: [ - { - system: 'http://loinc.org', - code: '883-9', - display: 'ABO group [Type] in Blood' - } - ], - text: 'Blood Type' - }, - subject: { - reference: `Patient/${patient.id}` - }, - effectiveDateTime: patient.last_evaluation_date || new Date().toISOString(), - valueCodeableConcept: { - coding: [ - { - system: 'http://snomed.info/sct', - code: patient.blood_type, - display: patient.blood_type - } - ], - text: patient.blood_type - } - }); - } - - // MELD Score Observation - if (patient.meld_score) { - observations.push({ - resourceType: 'Observation', - id: `${patient.id}-meld`, - status: 'final', - category: [ - { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/observation-category', - code: 'survey', - display: 'Survey' - } - ] - } - ], - code: { - coding: [ - { - system: 'http://loinc.org', - code: '88374-7', - display: 'MELD score' - } - ], - text: 'MELD Score' - }, - subject: { - reference: `Patient/${patient.id}` - }, - effectiveDateTime: patient.last_evaluation_date || new Date().toISOString(), - valueInteger: Math.round(patient.meld_score) - }); - } - - // LAS Score Observation - if (patient.las_score) { - observations.push({ - resourceType: 'Observation', - id: `${patient.id}-las`, - status: 'final', - code: { - text: 'Lung Allocation Score' - }, - subject: { - 
reference: `Patient/${patient.id}` - }, - effectiveDateTime: patient.last_evaluation_date || new Date().toISOString(), - valueQuantity: { - value: patient.las_score, - unit: 'score' - } - }); - } - - // TransTrack Priority Score as custom Observation - if (patient.priority_score !== undefined) { - observations.push({ - resourceType: 'Observation', - id: `${patient.id}-priority`, - status: 'final', - category: [ - { - coding: [ - { - system: 'https://transtrack.app/observation-category', - code: 'transplant-priority', - display: 'Transplant Priority' - } - ] - } - ], - code: { - text: 'Transplant Priority Score' - }, - subject: { - reference: `Patient/${patient.id}` - }, - effectiveDateTime: patient.updated_date || new Date().toISOString(), - valueQuantity: { - value: patient.priority_score, - unit: 'score', - system: 'https://transtrack.app/priority-score', - code: 'priority-score' - }, - note: patient.priority_score_breakdown ? [ - { - text: `Breakdown: Medical Urgency=${patient.priority_score_breakdown.weighted_scores?.medical_urgency?.toFixed(1)}, Time=${patient.priority_score_breakdown.weighted_scores?.time_on_waitlist?.toFixed(1)}, Organ Score=${patient.priority_score_breakdown.weighted_scores?.organ_specific?.toFixed(1)}` - } - ] : [] - }); - } - - // HLA Typing - if (patient.hla_typing) { - observations.push({ - resourceType: 'Observation', - id: `${patient.id}-hla`, - status: 'final', - code: { - text: 'HLA Typing' - }, - subject: { - reference: `Patient/${patient.id}` - }, - effectiveDateTime: patient.last_evaluation_date || new Date().toISOString(), - valueString: patient.hla_typing - }); - } - - observations.forEach(obs => { - fhirBundle.entry.push({ - fullUrl: `Observation/${obs.id}`, - resource: obs - }); - }); - } - - // Add Conditions - if (!resource_types || resource_types.includes('Condition')) { - const conditions: Record[] = []; - - // Primary diagnosis - sanitized against injection - if (patient.diagnosis) { - const sanitizedDiagnosis = 
sanitizeDiagnosis(patient.diagnosis); - - conditions.push({ - resourceType: 'Condition', - id: `${patient.id}-diagnosis`, - clinicalStatus: { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/condition-clinical', - code: patient.waitlist_status === 'transplanted' ? 'resolved' : 'active', - display: patient.waitlist_status === 'transplanted' ? 'Resolved' : 'Active' - } - ] - }, - verificationStatus: { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/condition-ver-status', - code: 'confirmed', - display: 'Confirmed' - } - ] - }, - category: [ - { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/condition-category', - code: 'encounter-diagnosis', - display: 'Encounter Diagnosis' - } - ] - } - ], - code: { - text: sanitizedDiagnosis - }, - subject: { - reference: `Patient/${patient.id}` - }, - recordedDate: patient.created_date - }); - } - - // Waitlist status as a Condition - conditions.push({ - resourceType: 'Condition', - id: `${patient.id}-waitlist`, - clinicalStatus: { - coding: [ - { - system: 'http://terminology.hl7.org/CodeSystem/condition-clinical', - code: patient.waitlist_status === 'active' ? 'active' : 'inactive', - display: patient.waitlist_status === 'active' ? 
'Active' : 'Inactive' - } - ] - }, - category: [ - { - coding: [ - { - system: 'https://transtrack.app/condition-category', - code: 'transplant-waitlist', - display: 'Transplant Waitlist' - } - ] - } - ], - code: { - text: `${patient.organ_needed} Transplant Waitlist - ${patient.waitlist_status}` - }, - subject: { - reference: `Patient/${patient.id}` - }, - onsetDateTime: patient.date_added_to_waitlist, - note: [ - { - text: `Medical Urgency: ${patient.medical_urgency}, Priority Score: ${patient.priority_score?.toFixed(1) || 'N/A'}` - } - ] - }); - - conditions.forEach(condition => { - fhirBundle.entry.push({ - fullUrl: `Condition/${condition.id}`, - resource: condition - }); - }); - } - - return Response.json({ - success: true, - fhir_bundle: fhirBundle, - resource_count: fhirBundle.entry.length - }); - } catch (error) { - logger.error('FHIR export failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'FHIR export failed. Contact support.'); - } -}); diff --git a/functions/exportWaitlist.ts b/functions/exportWaitlist.ts deleted file mode 100644 index 2ae885b..0000000 --- a/functions/exportWaitlist.ts +++ /dev/null @@ -1,189 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { jsPDF } from 'npm:jspdf@2.5.1'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('exportWaitlist'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { filters, format } = await req.json(); - - // Fetch patients with filters - let patients = await api.entities.Patient.list('-priority_score', 1000); - - // Apply filters - if (filters) { - if (filters.organ && filters.organ !== 'all') { - patients = patients.filter(p => p.organ_needed === filters.organ); - } - if 
(filters.bloodType && filters.bloodType !== 'all') { - patients = patients.filter(p => p.blood_type === filters.bloodType); - } - if (filters.status && filters.status !== 'all') { - patients = patients.filter(p => p.waitlist_status === filters.status); - } - if (filters.search) { - const search = filters.search.toLowerCase(); - patients = patients.filter(p => - p.first_name?.toLowerCase().includes(search) || - p.last_name?.toLowerCase().includes(search) || - p.patient_id?.toLowerCase().includes(search) - ); - } - } - - if (format === 'csv') { - // Generate CSV - const headers = [ - 'Patient ID', - 'Name', - 'Blood Type', - 'Organ Needed', - 'Priority Score', - 'Medical Urgency', - 'Status', - 'Days on Waitlist', - 'Last Evaluation', - ]; - - const rows = patients.map(p => { - const daysOnList = p.date_added_to_waitlist - ? Math.floor((new Date() - new Date(p.date_added_to_waitlist)) / (1000 * 60 * 60 * 24)) - : 0; - - return [ - p.patient_id, - `${p.first_name} ${p.last_name}`, - p.blood_type, - p.organ_needed, - (p.priority_score || 0).toFixed(1), - p.medical_urgency, - p.waitlist_status, - daysOnList, - p.last_evaluation_date || 'N/A', - ]; - }); - - const csvContent = [ - headers.join(','), - ...rows.map(row => row.map(cell => `"${cell}"`).join(',')), - ].join('\n'); - - // Log export - await api.entities.AuditLog.create({ - action: 'export', - entity_type: 'Waitlist', - details: `Exported ${patients.length} patients to CSV`, - user_email: user.email, - user_role: user.role, - }); - - return new Response(csvContent, { - headers: { - 'Content-Type': 'text/csv', - 'Content-Disposition': `attachment; filename="waitlist-export-${new Date().toISOString().split('T')[0]}.csv"`, - }, - }); - } else { - // Generate PDF - const doc = new jsPDF(); - const pageWidth = doc.internal.pageSize.getWidth(); - const pageHeight = doc.internal.pageSize.getHeight(); - - // Title - doc.setFontSize(20); - doc.setTextColor(8, 145, 178); - doc.text('TransTrack Waitlist Report', 20, 20); 
- - // Metadata - doc.setFontSize(10); - doc.setTextColor(100); - doc.text(`Generated: ${new Date().toLocaleString()}`, 20, 30); - doc.text(`Total Patients: ${patients.length}`, 20, 36); - doc.text(`Generated by: ${user.full_name}`, 20, 42); - - let y = 55; - - patients.forEach((patient, index) => { - // Check if we need a new page - if (y > pageHeight - 40) { - doc.addPage(); - y = 20; - } - - // Patient box - const boxHeight = 35; - - // Priority color bar - const score = patient.priority_score || 0; - let color; - if (score >= 80) color = [220, 38, 38]; - else if (score >= 60) color = [249, 115, 22]; - else if (score >= 40) color = [245, 158, 11]; - else color = [100, 116, 139]; - - doc.setFillColor(color[0], color[1], color[2]); - doc.rect(20, y, 3, boxHeight, 'F'); - - // Background box - doc.setFillColor(249, 250, 251); - doc.rect(23, y, pageWidth - 43, boxHeight, 'F'); - - // Patient info - doc.setFontSize(12); - doc.setTextColor(0); - doc.setFont(undefined, 'bold'); - doc.text(`${patient.first_name} ${patient.last_name}`, 28, y + 7); - - doc.setFont(undefined, 'normal'); - doc.setFontSize(9); - doc.setTextColor(100); - doc.text(`ID: ${patient.patient_id}`, 28, y + 13); - - doc.text(`Organ: ${patient.organ_needed}`, 28, y + 19); - doc.text(`Blood: ${patient.blood_type}`, 28, y + 25); - doc.text(`Status: ${patient.waitlist_status}`, 28, y + 31); - - // Priority score - doc.setFontSize(16); - doc.setTextColor(color[0], color[1], color[2]); - doc.setFont(undefined, 'bold'); - doc.text(`${score.toFixed(0)}`, pageWidth - 35, y + 20); - doc.setFontSize(8); - doc.text('Priority', pageWidth - 35, y + 26); - - y += boxHeight + 5; - }); - - const pdfBytes = doc.output('arraybuffer'); - - // Log export - await api.entities.AuditLog.create({ - action: 'export', - entity_type: 'Waitlist', - details: `Exported ${patients.length} patients to PDF`, - user_email: user.email, - user_role: user.role, - }); - - return new Response(pdfBytes, { - headers: { - 'Content-Type': 
'application/pdf', - 'Content-Disposition': `attachment; filename="waitlist-report-${new Date().toISOString().split('T')[0]}.pdf"`, - }, - }); - } - } catch (error) { - logger.error('Waitlist export failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Waitlist export failed. Contact support.'); - } -}); \ No newline at end of file diff --git a/functions/fhirWebhook.ts b/functions/fhirWebhook.ts deleted file mode 100644 index 1990b60..0000000 --- a/functions/fhirWebhook.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('fhirWebhook'); - -function timingSafeEqual(a: string, b: string): boolean { - if (a.length !== b.length) return false; - const encoder = new TextEncoder(); - const bufA = encoder.encode(a); - const bufB = encoder.encode(b); - let result = 0; - for (let i = 0; i < bufA.length; i++) { - result |= bufA[i] ^ bufB[i]; - } - return result === 0; -} - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - // Validate webhook authentication - const authHeader = req.headers.get('Authorization'); - const webhookSecret = Deno.env.get('EHR_WEBHOOK_SECRET'); - - if (!webhookSecret) { - return Response.json({ - error: 'EHR webhook not configured. Contact administrator.' 
- }, { status: 503 }); - } - - const expectedToken = `Bearer ${webhookSecret}`; - if (!authHeader || !timingSafeEqual(authHeader, expectedToken)) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const api = createClientFromRequest(req); - - const payload = await req.json(); - - // Validate FHIR resource - if (!payload.resourceType) { - return Response.json({ - error: 'Invalid FHIR resource' - }, { status: 400 }); - } - - // Handle different FHIR resource types - if (payload.resourceType === 'Bundle') { - // Process bundle - const entries = payload.entry || []; - const results = { - processed: 0, - created: 0, - updated: 0, - failed: 0 - }; - - for (const entry of entries) { - if (entry.resource?.resourceType === 'Patient') { - results.processed++; - - try { - const fhirPatient = entry.resource; - const patientId = fhirPatient.identifier?.[0]?.value; - - // Check if patient exists - const existing = await api.asServiceRole.entities.Patient.filter({ - patient_id: patientId - }); - - const mappedData = { - patient_id: patientId, - first_name: fhirPatient.name?.[0]?.given?.[0] || '', - last_name: fhirPatient.name?.[0]?.family || '', - date_of_birth: fhirPatient.birthDate, - phone: fhirPatient.telecom?.find(t => t.system === 'phone')?.value, - email: fhirPatient.telecom?.find(t => t.system === 'email')?.value, - }; - - if (existing.length > 0) { - await api.asServiceRole.entities.Patient.update(existing[0].id, mappedData); - results.updated++; - } else { - await api.asServiceRole.entities.Patient.create(mappedData); - results.created++; - } - } catch (error) { - results.failed++; - console.error('Patient processing error:', error); - } - } - } - - return Response.json({ - success: true, - message: 'FHIR webhook processed', - results - }); - } - - return Response.json({ - success: true, - message: 'FHIR resource received', - resourceType: payload.resourceType - }); - } catch (error) { - logger.error('FHIR webhook processing failed', error, { 
request_id: requestId }); - return safeErrorResponse(requestId, 'Webhook processing failed.'); - } -}); \ No newline at end of file diff --git a/functions/importFHIRData.ts b/functions/importFHIRData.ts deleted file mode 100644 index ae62b16..0000000 --- a/functions/importFHIRData.ts +++ /dev/null @@ -1,266 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('importFHIRData'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { fhir_bundle, source_system, auto_create, auto_update } = await req.json(); - - if (!fhir_bundle || !fhir_bundle.resourceType) { - return Response.json({ - error: 'Invalid FHIR data. Expected a FHIR Bundle resource.' - }, { status: 400 }); - } - - const results = { - processed: 0, - created: 0, - updated: 0, - failed: 0, - errors: [], - warnings: [] - }; - - // Extract Patient resources from bundle - const entries = fhir_bundle.entry || []; - const patientResources = entries - .filter(entry => entry.resource?.resourceType === 'Patient') - .map(entry => entry.resource); - - for (const fhirPatient of patientResources) { - results.processed++; - - try { - // Validate FHIR resource - const validationResponse = await api.functions.invoke('validateFHIRData', { - fhir_resource: fhirPatient, - resource_type: 'Patient' - }); - - const validation = validationResponse.data; - - // Collect warnings - if (validation.warnings && validation.warnings.length > 0) { - results.warnings.push(...validation.warnings.map(w => ({ - patient_id: fhirPatient.identifier?.[0]?.value, - ...w - }))); - } - - // Skip import if validation failed with errors - if (!validation.valid && validation.errors && validation.errors.length > 0) 
{ - results.failed++; - results.errors.push({ - resource_id: fhirPatient.id, - resource_type: 'Patient', - patient_id: fhirPatient.identifier?.[0]?.value, - validation_errors: validation.errors - }); - continue; - } - - // Map FHIR Patient to TransTrack Patient - const transTrackPatient = mapFHIRToTransTrack(fhirPatient, entries); - - // Check if patient already exists by patient_id or identifier - const existingPatients = await api.entities.Patient.filter({ - patient_id: transTrackPatient.patient_id - }); - - if (existingPatients.length > 0 && auto_update) { - // Update existing patient - await api.entities.Patient.update(existingPatients[0].id, transTrackPatient); - results.updated++; - - // Recalculate priority - await api.functions.invoke('calculatePriorityAdvanced', { - patient_id: existingPatients[0].id - }); - } else if (existingPatients.length === 0 && auto_create) { - // Create new patient - const newPatient = await api.entities.Patient.create(transTrackPatient); - results.created++; - - // Calculate initial priority - await api.functions.invoke('calculatePriorityAdvanced', { - patient_id: newPatient.id - }); - - // Trigger notification rules - await api.functions.invoke('checkNotificationRules', { - patient_id: newPatient.id, - event_type: 'create', - }); - } else { - // Skip - patient exists but auto_update is false - results.failed++; - results.errors.push({ - patient_id: transTrackPatient.patient_id, - reason: 'Patient exists and auto_update is disabled' - }); - } - } catch (error) { - results.failed++; - results.errors.push({ - patient_id: fhirPatient.identifier?.[0]?.value || 'unknown', - error: error.message - }); - } - } - - // Create import record - const importRecord = await api.entities.EHRImport.create({ - import_type: 'manual_upload', - source_system: source_system || 'Unknown', - records_processed: results.processed, - records_created: results.created, - records_updated: results.updated, - records_failed: results.failed, - error_details: 
results.errors, - imported_by: user.email, - status: results.failed === 0 ? 'success' : - results.created + results.updated > 0 ? 'partial' : 'failed', - fhir_version: fhir_bundle.meta?.versionId || 'R4' - }); - - // Log the import - await api.entities.AuditLog.create({ - action: 'create', - entity_type: 'EHRImport', - entity_id: importRecord.id, - details: `FHIR import completed: ${results.created} created, ${results.updated} updated, ${results.failed} failed`, - user_email: user.email, - user_role: user.role, - }); - - return Response.json({ - success: true, - results, - import_id: importRecord.id - }); - } catch (error) { - logger.error('FHIR import failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'FHIR import failed. Contact support.'); - } -}); - -// FHIR to TransTrack mapping function -function mapFHIRToTransTrack(fhirPatient, bundleEntries) { - const patient = { - // Basic demographics - patient_id: fhirPatient.identifier?.[0]?.value || `FHIR-${Date.now()}`, - first_name: fhirPatient.name?.[0]?.given?.[0] || '', - last_name: fhirPatient.name?.[0]?.family || '', - date_of_birth: fhirPatient.birthDate || '', - phone: fhirPatient.telecom?.find(t => t.system === 'phone')?.value || '', - email: fhirPatient.telecom?.find(t => t.system === 'email')?.value || '', - }; - - // Extract blood type from Observation resources - const observations = bundleEntries - .filter(e => e.resource?.resourceType === 'Observation' && - e.resource?.subject?.reference === `Patient/${fhirPatient.id}`) - .map(e => e.resource); - - const bloodTypeObs = observations.find(obs => - obs.code?.coding?.some(c => c.code === '883-9' || c.display?.includes('Blood')) - ); - - if (bloodTypeObs?.valueCodeableConcept?.coding?.[0]?.code) { - patient.blood_type = bloodTypeObs.valueCodeableConcept.coding[0].code; - } - - // Extract HLA typing - const hlaObs = observations.find(obs => - obs.code?.coding?.some(c => c.display?.includes('HLA')) - ); - - if 
(hlaObs?.valueString) { - patient.hla_typing = hlaObs.valueString; - } - - // Extract weight and height - const weightObs = observations.find(obs => - obs.code?.coding?.some(c => c.code === '29463-7' || c.display?.includes('weight')) - ); - if (weightObs?.valueQuantity?.value) { - patient.weight_kg = weightObs.valueQuantity.value; - } - - const heightObs = observations.find(obs => - obs.code?.coding?.some(c => c.code === '8302-2' || c.display?.includes('height')) - ); - if (heightObs?.valueQuantity?.value) { - patient.height_cm = heightObs.valueQuantity.value; - } - - // Extract MELD score - const meldObs = observations.find(obs => - obs.code?.coding?.some(c => c.display?.includes('MELD')) - ); - if (meldObs?.valueInteger || meldObs?.valueQuantity?.value) { - patient.meld_score = meldObs.valueInteger || meldObs.valueQuantity.value; - } - - // Extract conditions/diagnoses - const conditions = bundleEntries - .filter(e => e.resource?.resourceType === 'Condition' && - e.resource?.subject?.reference === `Patient/${fhirPatient.id}`) - .map(e => e.resource); - - if (conditions.length > 0) { - const primaryCondition = conditions.find(c => c.category?.[0]?.coding?.[0]?.code === 'encounter-diagnosis'); - if (primaryCondition?.code?.text) { - patient.diagnosis = primaryCondition.code.text; - } - - // Collect other conditions as comorbidities - const otherConditions = conditions - .filter(c => c !== primaryCondition) - .map(c => c.code?.text) - .filter(Boolean); - - if (otherConditions.length > 0) { - patient.comorbidities = otherConditions.join('; '); - } - } - - // Extract medications from MedicationStatement resources - const medications = bundleEntries - .filter(e => e.resource?.resourceType === 'MedicationStatement' && - e.resource?.subject?.reference === `Patient/${fhirPatient.id}`) - .map(e => e.resource); - - if (medications.length > 0) { - const medList = medications - .map(m => m.medicationCodeableConcept?.text || m.medicationReference?.display) - .filter(Boolean); 
- - if (medList.length > 0) { - patient.medications = medList.join(', '); - } - } - - // Extract emergency contact - const emergencyContact = fhirPatient.contact?.find(c => - c.relationship?.some(r => r.coding?.some(code => code.code === 'C')) - ); - - if (emergencyContact) { - patient.emergency_contact_name = emergencyContact.name?.text || - `${emergencyContact.name?.given?.[0]} ${emergencyContact.name?.family}`; - patient.emergency_contact_phone = emergencyContact.telecom?.find(t => t.system === 'phone')?.value; - } - - return patient; -} \ No newline at end of file diff --git a/functions/lib/audit.ts b/functions/lib/audit.ts deleted file mode 100644 index 118eb8d..0000000 --- a/functions/lib/audit.ts +++ /dev/null @@ -1,86 +0,0 @@ -/** - * TransTrack - HIPAA-Compliant Audit Trail - * - * Provides comprehensive WHO/WHAT/WHEN/WHERE/WHY audit logging - * as required by HIPAA 164.312(b). - * - * Generates a SHA-256 hash of each audit record for immutability verification. - */ - -type AuditAction = 'CREATE' | 'READ' | 'UPDATE' | 'DELETE' | 'EXPORT' | 'MATCH' | 'CALCULATE'; -type AccessType = 'DIRECT' | 'INCIDENTAL' | 'EMERGENCY_ACCESS' | 'SYSTEM'; - -interface AuditUser { - email: string; - role: string; - id?: string; -} - -interface HIPAAAuditEntry { - action: string; - entity_type: string; - entity_id: string; - patient_name?: string; - details: string; - user_email: string; - user_role: string; - hipaa_action: AuditAction; - access_type: AccessType; - access_justification?: string; - outcome: 'SUCCESS' | 'FAILURE'; - error_message?: string; - data_modified?: string; - request_id?: string; - record_hash?: string; -} - -async function computeRecordHash(data: Record): Promise { - const serialized = JSON.stringify(data, Object.keys(data).sort()); - const encoded = new TextEncoder().encode(serialized); - const hashBuffer = await crypto.subtle.digest('SHA-256', encoded); - return Array.from(new Uint8Array(hashBuffer)) - .map((b) => b.toString(16).padStart(2, '0')) - 
.join(''); -} - -/** - * Create a HIPAA-compliant audit log entry via the API. - */ -export async function createHIPAAAuditLog( - api: { entities: { AuditLog: { create: (data: Record) => Promise } } }, - params: { - action: AuditAction; - entityType: string; - entityId: string; - patientName?: string; - details: string; - user: AuditUser; - accessType?: AccessType; - accessJustification?: string; - outcome?: 'SUCCESS' | 'FAILURE'; - errorMessage?: string; - dataModified?: Record; - requestId?: string; - } -): Promise { - const entry: HIPAAAuditEntry = { - action: params.action.toLowerCase(), - entity_type: params.entityType, - entity_id: params.entityId, - patient_name: params.patientName, - details: params.details, - user_email: params.user.email, - user_role: params.user.role, - hipaa_action: params.action, - access_type: params.accessType || 'DIRECT', - access_justification: params.accessJustification, - outcome: params.outcome || 'SUCCESS', - error_message: params.errorMessage, - data_modified: params.dataModified ? JSON.stringify(params.dataModified) : undefined, - request_id: params.requestId, - }; - - entry.record_hash = await computeRecordHash(entry as unknown as Record); - - await api.entities.AuditLog.create(entry as unknown as Record); -} diff --git a/functions/lib/constants.ts b/functions/lib/constants.ts deleted file mode 100644 index bf85dbc..0000000 --- a/functions/lib/constants.ts +++ /dev/null @@ -1,82 +0,0 @@ -/** - * TransTrack - Named Constants - * - * Centralizes all magic numbers and configuration values used across - * Deno edge functions to improve readability and maintainability. 
- */ - -export const PRIORITY_SCORING = { - MAX_URGENCY_POINTS: 30, - MAX_WAITTIME_POINTS: 25, - MAX_ORGAN_SPECIFIC_POINTS: 25, - MAX_EVALUATION_POINTS: 10, - MAX_BLOOD_RARITY_POINTS: 10, - DAYS_PER_WAITTIME_POINT: 14.6, - MAX_WAITTIME_DAYS: 365, - EVALUATION_RECENT_DAYS: 90, - EVALUATION_MODERATE_DAYS: 180, - MAX_TOTAL_SCORE: 100, - MIN_TOTAL_SCORE: 0, -} as const; - -export const MEDICAL_SCORE_RANGES = { - MELD: { MIN: 6, MAX: 40 }, - LAS: { MIN: 0, MAX: 100 }, - PRA: { MIN: 0, MAX: 100 }, - CPRA: { MIN: 0, MAX: 100 }, -} as const; - -export const MATCHING = { - MAX_MATCHES_TO_CREATE: 10, - TOP_PRIORITY_NOTIFICATIONS: 3, - HLA_ANTIGEN_COUNT: 6, - WEIGHT_RATIO_MIN: 0.7, - WEIGHT_RATIO_MAX: 1.5, - DEFAULT_HLA_SCORE: 50, - WEIGHT_PRIORITY: 0.40, - WEIGHT_HLA: 0.25, - WEIGHT_BLOOD_TYPE: 0.15, - WEIGHT_SIZE: 0.10, - WEIGHT_WAITTIME: 0.10, -} as const; - -export const URGENCY_SCORES: Record = { - critical: 30, - high: 20, - medium: 10, - low: 5, -}; - -export const BLOOD_TYPE_RARITY: Record = { - 'AB-': 10, - 'B-': 8, - 'A-': 6, - 'O-': 5, - 'AB+': 4, - 'B+': 3, - 'A+': 2, - 'O+': 1, -}; - -export const BLOOD_COMPATIBILITY: Record = { - 'O-': ['O-', 'O+', 'A-', 'A+', 'B-', 'B+', 'AB-', 'AB+'], - 'O+': ['O+', 'A+', 'B+', 'AB+'], - 'A-': ['A-', 'A+', 'AB-', 'AB+'], - 'A+': ['A+', 'AB+'], - 'B-': ['B-', 'B+', 'AB-', 'AB+'], - 'B+': ['B+', 'AB+'], - 'AB-': ['AB-', 'AB+'], - 'AB+': ['AB+'], -}; - -export const VALID_BLOOD_TYPES = [ - 'O-', 'O+', 'A-', 'A+', 'B-', 'B+', 'AB-', 'AB+', -] as const; - -export const VALID_URGENCY_LEVELS = [ - 'critical', 'high', 'medium', 'low', -] as const; - -export const VALID_ORGAN_TYPES = [ - 'kidney', 'liver', 'heart', 'lung', 'pancreas', 'intestine', -] as const; diff --git a/functions/lib/logger.ts b/functions/lib/logger.ts deleted file mode 100644 index 1c94a4f..0000000 --- a/functions/lib/logger.ts +++ /dev/null @@ -1,97 +0,0 @@ -/** - * TransTrack - Structured Logging - * - * Provides JSON-structured logging for all Deno edge 
functions. - * Ensures sensitive data is redacted and errors are logged safely. - */ - -type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR'; - -interface LogEntry { - timestamp: string; - level: LogLevel; - context: string; - message: string; - [key: string]: unknown; -} - -function formatEntry(level: LogLevel, context: string, message: string, data?: Record): string { - const entry: LogEntry = { - timestamp: new Date().toISOString(), - level, - context, - message, - ...redactSensitiveFields(data || {}), - }; - return JSON.stringify(entry); -} - -const SENSITIVE_KEYS = new Set([ - 'password', 'password_hash', 'ssn', 'social_security', - 'credit_card', 'api_key', 'token', 'secret', -]); - -function redactSensitiveFields(data: Record): Record { - const redacted: Record = {}; - for (const [key, value] of Object.entries(data)) { - if (SENSITIVE_KEYS.has(key.toLowerCase())) { - redacted[key] = '[REDACTED]'; - } else if (value && typeof value === 'object' && !Array.isArray(value)) { - redacted[key] = redactSensitiveFields(value as Record); - } else { - redacted[key] = value; - } - } - return redacted; -} - -export function createLogger(context: string) { - return { - debug(message: string, data?: Record) { - console.debug(formatEntry('DEBUG', context, message, data)); - }, - info(message: string, data?: Record) { - console.log(formatEntry('INFO', context, message, data)); - }, - warn(message: string, data?: Record) { - console.warn(formatEntry('WARN', context, message, data)); - }, - error(message: string, error?: Error | unknown, data?: Record) { - const errorInfo: Record = { ...data }; - if (error instanceof Error) { - errorInfo.error_message = error.message; - errorInfo.error_stack = error.stack; - } else if (error !== undefined) { - errorInfo.error_message = String(error); - } - console.error(formatEntry('ERROR', context, message, errorInfo)); - }, - }; -} - -/** - * Generate a unique request ID for tracking through audit logs. 
- */ -export function generateRequestId(): string { - return crypto.randomUUID(); -} - -/** - * Create a safe error response that does not leak internal details. - */ -export function safeErrorResponse( - requestId: string, - userMessage: string, - statusCode = 500 -): Response { - return Response.json( - { - error: userMessage, - request_id: requestId, - }, - { - status: statusCode, - headers: { 'X-Request-ID': requestId }, - } - ); -} diff --git a/functions/lib/validators.ts b/functions/lib/validators.ts deleted file mode 100644 index eca7b87..0000000 --- a/functions/lib/validators.ts +++ /dev/null @@ -1,269 +0,0 @@ -/** - * TransTrack - Input Validation - * - * Provides comprehensive validation for medical scores, HLA typing, - * and other critical transplant data to ensure HIPAA/FDA compliance. - */ - -import { - MEDICAL_SCORE_RANGES, - MATCHING, - VALID_BLOOD_TYPES, - VALID_URGENCY_LEVELS, - VALID_ORGAN_TYPES, -} from './constants.ts'; - -export interface ValidationResult { - valid: boolean; - errors: string[]; -} - -export interface ParsedHLA { - raw: string; - antigens: string[]; -} - -// ── UUID Validation ───────────────────────────────────────────── - -const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; - -export function isValidUUID(value: string): boolean { - return typeof value === 'string' && UUID_REGEX.test(value); -} - -// ── Medical Score Validation ──────────────────────────────────── - -export function validateMELDScore(score: unknown): ValidationResult { - const errors: string[] = []; - if (score === null || score === undefined) { - return { valid: true, errors: [] }; - } - if (typeof score !== 'number' || !Number.isFinite(score)) { - errors.push('MELD score must be a finite number'); - } else if (score < MEDICAL_SCORE_RANGES.MELD.MIN || score > MEDICAL_SCORE_RANGES.MELD.MAX) { - errors.push( - `MELD score must be between ${MEDICAL_SCORE_RANGES.MELD.MIN} and ${MEDICAL_SCORE_RANGES.MELD.MAX}, got ${score}` - ); - 
} - return { valid: errors.length === 0, errors }; -} - -export function validateLASScore(score: unknown): ValidationResult { - const errors: string[] = []; - if (score === null || score === undefined) { - return { valid: true, errors: [] }; - } - if (typeof score !== 'number' || !Number.isFinite(score)) { - errors.push('LAS score must be a finite number'); - } else if (score < MEDICAL_SCORE_RANGES.LAS.MIN || score > MEDICAL_SCORE_RANGES.LAS.MAX) { - errors.push( - `LAS score must be between ${MEDICAL_SCORE_RANGES.LAS.MIN} and ${MEDICAL_SCORE_RANGES.LAS.MAX}, got ${score}` - ); - } - return { valid: errors.length === 0, errors }; -} - -export function validatePRAPercentage(pra: unknown): ValidationResult { - const errors: string[] = []; - if (pra === null || pra === undefined) { - return { valid: true, errors: [] }; - } - if (typeof pra !== 'number' || !Number.isFinite(pra)) { - errors.push('PRA percentage must be a finite number'); - } else if (pra < MEDICAL_SCORE_RANGES.PRA.MIN || pra > MEDICAL_SCORE_RANGES.PRA.MAX) { - errors.push( - `PRA percentage must be between ${MEDICAL_SCORE_RANGES.PRA.MIN} and ${MEDICAL_SCORE_RANGES.PRA.MAX}, got ${pra}` - ); - } - return { valid: errors.length === 0, errors }; -} - -export function validateCPRAPercentage(cpra: unknown): ValidationResult { - const errors: string[] = []; - if (cpra === null || cpra === undefined) { - return { valid: true, errors: [] }; - } - if (typeof cpra !== 'number' || !Number.isFinite(cpra)) { - errors.push('cPRA percentage must be a finite number'); - } else if (cpra < MEDICAL_SCORE_RANGES.CPRA.MIN || cpra > MEDICAL_SCORE_RANGES.CPRA.MAX) { - errors.push( - `cPRA percentage must be between ${MEDICAL_SCORE_RANGES.CPRA.MIN} and ${MEDICAL_SCORE_RANGES.CPRA.MAX}, got ${cpra}` - ); - } - return { valid: errors.length === 0, errors }; -} - -/** - * Validates all organ-specific medical scores on a patient record. - * Returns aggregated validation result. 
- */ -export function validatePatientMedicalScores(patient: Record): ValidationResult { - const allErrors: string[] = []; - - const checks = [ - validateMELDScore(patient.meld_score), - validateLASScore(patient.las_score), - validatePRAPercentage(patient.pra_percentage), - validateCPRAPercentage(patient.cpra_percentage), - ]; - - for (const check of checks) { - allErrors.push(...check.errors); - } - - if (patient.blood_type && !VALID_BLOOD_TYPES.includes(patient.blood_type as typeof VALID_BLOOD_TYPES[number])) { - allErrors.push(`Invalid blood type: ${patient.blood_type}`); - } - - if (patient.medical_urgency && !VALID_URGENCY_LEVELS.includes(patient.medical_urgency as typeof VALID_URGENCY_LEVELS[number])) { - allErrors.push(`Invalid medical urgency level: ${patient.medical_urgency}`); - } - - if (patient.organ_needed && !VALID_ORGAN_TYPES.includes(patient.organ_needed as typeof VALID_ORGAN_TYPES[number])) { - allErrors.push(`Invalid organ type: ${patient.organ_needed}`); - } - - return { valid: allErrors.length === 0, errors: allErrors }; -} - -// ── HLA Validation & Parsing ──────────────────────────────────── - -/** - * HLA antigen format: A*02:01, B*07:02, DR*04:01, etc. - * Also accepts simplified formats: A2, B7, DR4, etc. 
- */ -const HLA_STRICT_REGEX = /^[A-Z]{1,3}\*?\d{1,4}(:\d{1,4})?(:[A-Z]{1,2})?$/; -const HLA_SIMPLIFIED_REGEX = /^[A-Z]{1,3}\d{1,4}$/; - -export function validateHLATyping(typing: unknown): ValidationResult & { antigens: string[] } { - if (typing === null || typing === undefined || typing === '') { - return { valid: true, errors: [], antigens: [] }; - } - - if (typeof typing !== 'string') { - return { valid: false, errors: ['HLA typing must be a string'], antigens: [] }; - } - - const trimmed = typing.trim(); - if (trimmed.length === 0) { - return { valid: true, errors: [], antigens: [] }; - } - - if (trimmed.length > 500) { - return { valid: false, errors: ['HLA typing string exceeds maximum length of 500 characters'], antigens: [] }; - } - - const antigens = trimmed.split(/[\s,;]+/).filter(Boolean); - - if (antigens.length === 0) { - return { valid: true, errors: [], antigens: [] }; - } - - if (antigens.length > 20) { - return { - valid: false, - errors: [`HLA typing contains too many antigens (${antigens.length}), maximum is 20`], - antigens: [], - }; - } - - const errors: string[] = []; - for (const antigen of antigens) { - if (!HLA_STRICT_REGEX.test(antigen) && !HLA_SIMPLIFIED_REGEX.test(antigen)) { - errors.push(`Invalid HLA antigen format: "${antigen}"`); - } - } - - return { - valid: errors.length === 0, - errors, - antigens: errors.length === 0 ? antigens : [], - }; -} - -/** - * Parse and cache HLA typing. Returns empty array on invalid input. - */ -export function parseHLATyping(typing: string | null | undefined): string[] { - if (!typing || typeof typing !== 'string') return []; - const result = validateHLATyping(typing); - return result.antigens; -} - -/** - * Calculate HLA match score between donor and patient antigens. - * Uses actual antigen count rather than hard-coded 6. 
- */ -export function calculateHLAMatchScore(donorAntigens: string[], patientAntigens: string[]): number { - if (donorAntigens.length === 0 || patientAntigens.length === 0) { - return MATCHING.DEFAULT_HLA_SCORE; - } - - const patientSet = new Set(patientAntigens.map(a => a.toUpperCase())); - let matches = 0; - for (const antigen of donorAntigens) { - if (patientSet.has(antigen.toUpperCase())) { - matches++; - } - } - - const totalAntigens = Math.max(donorAntigens.length, patientAntigens.length, MATCHING.HLA_ANTIGEN_COUNT); - return (matches / totalAntigens) * 100; -} - -// ── Diagnosis Validation ──────────────────────────────────────── - -const ICD10_REGEX = /^[A-Z]\d{2}(\.\d{1,4})?$/; - -export function isValidICD10Code(code: string): boolean { - return ICD10_REGEX.test(code); -} - -/** - * Validates or sanitizes a diagnosis string for safe use in FHIR exports. - * Strips HTML/script content and enforces length limits. - */ -export function sanitizeDiagnosis(diagnosis: unknown): string { - if (!diagnosis || typeof diagnosis !== 'string') return ''; - return sanitizePlainText(diagnosis, 500); -} - -// ── General Text Sanitization ─────────────────────────────────── - -/** - * Strips HTML tags and dangerous characters from a string. - * Uses iterative stripping to prevent incomplete sanitization - * (e.g. nested or split tags like "ipt>"). - */ -export function sanitizePlainText(input: string, maxLength = 1000): string { - if (typeof input !== 'string') return ''; - - let result = input; - let previous = ''; - while (result !== previous) { - previous = result; - result = result.replace(/<[^>]*>/g, ''); - } - - return result - .replace(/[<>"'&]/g, (ch) => { - const entities: Record = { - '<': '<', - '>': '>', - '"': '"', - "'": ''', - '&': '&', - }; - return entities[ch] || ch; - }) - .slice(0, maxLength); -} - -/** - * Sanitize a patient name for use in notifications / messages. 
- */ -export function sanitizePatientName(firstName: unknown, lastName: unknown): string { - const first = sanitizePlainText(String(firstName || ''), 100); - const last = sanitizePlainText(String(lastName || ''), 100); - return `${first} ${last}`.trim(); -} diff --git a/functions/matchDonor.ts b/functions/matchDonor.ts deleted file mode 100644 index 4f29144..0000000 --- a/functions/matchDonor.ts +++ /dev/null @@ -1,256 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { - MATCHING, - BLOOD_COMPATIBILITY, - PRIORITY_SCORING, -} from './lib/constants.ts'; -import { - isValidUUID, - validateHLATyping, - parseHLATyping, - calculateHLAMatchScore, - sanitizePatientName, -} from './lib/validators.ts'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; -import { createHIPAAAuditLog } from './lib/audit.ts'; - -const logger = createLogger('matchDonor'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const body = await req.json(); - const { donor_organ_id } = body; - - if (!donor_organ_id || !isValidUUID(donor_organ_id)) { - return Response.json( - { error: 'Invalid or missing donor_organ_id. Must be a valid UUID.' 
}, - { status: 400 } - ); - } - - // Get donor organ details - const donor = await api.entities.DonorOrgan.get(donor_organ_id); - - if (!donor) { - return Response.json({ error: 'Donor organ not found' }, { status: 404 }); - } - - // Parse and validate donor HLA once (cached for all patient comparisons) - const donorHLAValidation = validateHLATyping(donor.hla_typing); - if (donor.hla_typing && !donorHLAValidation.valid) { - logger.warn('Donor has invalid HLA typing', { - donor_id: donor.id, - errors: donorHLAValidation.errors, - request_id: requestId, - }); - } - const donorHLAAntigens = donorHLAValidation.valid ? donorHLAValidation.antigens : []; - - // Filter active patients for this organ type to reduce data loaded - let candidates; - try { - const allPatients = await api.entities.Patient.list(); - candidates = allPatients.filter( - (p: Record) => - p.waitlist_status === 'active' && p.organ_needed === donor.organ_type - ); - } catch (fetchError) { - logger.error('Failed to fetch patient list', fetchError, { request_id: requestId }); - return safeErrorResponse(requestId, 'Failed to retrieve patient data.'); - } - - const matchResults: Array> = []; - - for (const patient of candidates) { - // Check blood type compatibility - const compatible = - BLOOD_COMPATIBILITY[donor.blood_type]?.includes(patient.blood_type) || false; - - if (!compatible) continue; - - // Calculate HLA match score using validated/cached antigens - const patientHLAAntigens = parseHLATyping(patient.hla_typing); - const hlaScore = calculateHLAMatchScore(donorHLAAntigens, patientHLAAntigens); - - // Size compatibility check - let sizeCompatible = true; - if (donor.donor_weight_kg && patient.weight_kg) { - const weightRatio = donor.donor_weight_kg / patient.weight_kg; - sizeCompatible = - weightRatio >= MATCHING.WEIGHT_RATIO_MIN && weightRatio <= MATCHING.WEIGHT_RATIO_MAX; - } - - // Calculate overall compatibility score - let compatibilityScore = 0; - - // Priority score (40% weight) - 
compatibilityScore += (patient.priority_score || 0) * MATCHING.WEIGHT_PRIORITY; - - // HLA match (25% weight) - compatibilityScore += hlaScore * MATCHING.WEIGHT_HLA; - - // Blood type perfect match bonus (15% weight) - if (donor.blood_type === patient.blood_type) { - compatibilityScore += MATCHING.WEIGHT_BLOOD_TYPE * 100; - } - - // Size compatibility (10% weight) - if (sizeCompatible) { - compatibilityScore += MATCHING.WEIGHT_SIZE * 100; - } - - // Time on waitlist (10% weight) - if (patient.date_added_to_waitlist) { - const daysOnList = Math.floor( - (Date.now() - new Date(patient.date_added_to_waitlist).getTime()) / - (1000 * 60 * 60 * 24) - ); - compatibilityScore += Math.min( - MATCHING.WEIGHT_WAITTIME * 100, - (daysOnList / PRIORITY_SCORING.MAX_WAITTIME_DAYS) * MATCHING.WEIGHT_WAITTIME * 100 - ); - } - - matchResults.push({ - patient, - compatibility_score: Math.min(100, compatibilityScore), - blood_type_compatible: compatible, - hla_match_score: hlaScore, - size_compatible: sizeCompatible, - }); - } - - // Sort by compatibility score (highest first) - matchResults.sort( - (a, b) => (b.compatibility_score as number) - (a.compatibility_score as number) - ); - - // Assign priority ranks - matchResults.forEach((match, index) => { - match.priority_rank = index + 1; - }); - - // Create Match records for top candidates with freshness check - const createdMatches: unknown[] = []; - for (const match of matchResults.slice(0, MATCHING.MAX_MATCHES_TO_CREATE)) { - const patient = match.patient as Record; - - // Re-check patient status before creating match (race condition mitigation) - try { - const freshPatient = await api.entities.Patient.get(patient.id as string); - if (!freshPatient || freshPatient.waitlist_status !== 'active') { - logger.info('Skipping match - patient no longer active', { - patient_id: patient.id, - request_id: requestId, - }); - continue; - } - } catch { - logger.warn('Could not re-verify patient status, skipping', { - patient_id: patient.id, - 
request_id: requestId, - }); - continue; - } - - const sanitizedName = sanitizePatientName(patient.first_name, patient.last_name); - const matchRecord = await api.entities.Match.create({ - donor_organ_id: donor.id, - patient_id: patient.id, - patient_name: sanitizedName, - compatibility_score: match.compatibility_score, - blood_type_compatible: match.blood_type_compatible, - hla_match_score: match.hla_match_score, - size_compatible: match.size_compatible, - match_status: 'potential', - priority_rank: match.priority_rank, - }); - createdMatches.push(matchRecord); - } - - // Create notifications for top matches (sanitized) - for (const match of matchResults.slice(0, MATCHING.TOP_PRIORITY_NOTIFICATIONS)) { - const patient = match.patient as Record; - const sanitizedName = sanitizePatientName(patient.first_name, patient.last_name); - const safeScore = Math.round(match.compatibility_score as number); - - const allUsers = await api.asServiceRole.entities.User.list(); - const admins = (allUsers as Array>).filter( - (u) => u.role === 'admin' - ); - - for (const admin of admins) { - await api.entities.Notification.create({ - recipient_email: admin.email, - title: 'New Donor Match Available', - message: `High-priority match found: ${sanitizedName} (${safeScore}% compatible) for ${donor.organ_type}`, - notification_type: 'donor_match', - is_read: false, - related_patient_id: patient.id, - related_patient_name: sanitizedName, - priority_level: match.priority_rank === 1 ? 'critical' : 'high', - action_url: `/DonorMatching?donor_id=${donor.id}`, - metadata: { - donor_id: donor.id, - patient_id: patient.id, - compatibility_score: match.compatibility_score, - }, - }); - } - } - - // HIPAA-compliant audit log - await createHIPAAAuditLog(api, { - action: 'MATCH', - entityType: 'DonorOrgan', - entityId: donor.id, - details: `Matched donor ${donor.donor_id} with ${matchResults.length} potential recipients. Top match: ${matchResults[0]?.compatibility_score ? 
(matchResults[0].compatibility_score as number).toFixed(0) : 'N/A'}% compatible`, - user: { email: user.email, role: user.role }, - outcome: 'SUCCESS', - accessJustification: 'Donor-patient matching algorithm execution', - requestId, - }); - - return Response.json({ - success: true, - donor, - matches: matchResults.map((m) => { - const p = m.patient as Record; - return { - patient_id: p.id, - patient_name: sanitizePatientName(p.first_name, p.last_name), - patient_id_mrn: p.patient_id, - blood_type: p.blood_type, - organ_needed: p.organ_needed, - priority_score: p.priority_score, - compatibility_score: m.compatibility_score, - blood_type_compatible: m.blood_type_compatible, - hla_match_score: m.hla_match_score, - size_compatible: m.size_compatible, - priority_rank: m.priority_rank, - medical_urgency: p.medical_urgency, - days_on_waitlist: p.date_added_to_waitlist - ? Math.floor( - (Date.now() - new Date(p.date_added_to_waitlist as string).getTime()) / - (1000 * 60 * 60 * 24) - ) - : 0, - }; - }), - total_matches: matchResults.length, - matches_created: createdMatches.length, - }); - } catch (error) { - logger.error('Donor matching failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Donor matching failed. 
Contact support.'); - } -}); diff --git a/functions/matchDonorAdvanced.ts b/functions/matchDonorAdvanced.ts deleted file mode 100644 index cd6aaec..0000000 --- a/functions/matchDonorAdvanced.ts +++ /dev/null @@ -1,297 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('matchDonorAdvanced'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { donor_organ_id, simulation_mode, hypothetical_donor } = await req.json(); - - let donor; - if (simulation_mode && hypothetical_donor) { - // Use hypothetical donor for simulation - donor = hypothetical_donor; - donor.id = 'simulation'; - } else { - // Get real donor organ details - donor = await api.entities.DonorOrgan.get(donor_organ_id); - - if (!donor) { - return Response.json({ error: 'Donor organ not found' }, { status: 404 }); - } - } - - // Get all active patients waiting for this organ type - const allPatients = await api.entities.Patient.list(); - const candidates = allPatients.filter(p => - p.waitlist_status === 'active' && - p.organ_needed === donor.organ_type - ); - - const matches = []; - - // Blood type compatibility matrix - const bloodCompatibility = { - 'O-': ['O-', 'O+', 'A-', 'A+', 'B-', 'B+', 'AB-', 'AB+'], - 'O+': ['O+', 'A+', 'B+', 'AB+'], - 'A-': ['A-', 'A+', 'AB-', 'AB+'], - 'A+': ['A+', 'AB+'], - 'B-': ['B-', 'B+', 'AB-', 'AB+'], - 'B+': ['B+', 'AB+'], - 'AB-': ['AB-', 'AB+'], - 'AB+': ['AB+'] - }; - - // Parse HLA typing for donor - const parseHLA = (hlaString) => { - if (!hlaString) return { A: [], B: [], DR: [], DQ: [] }; - - const parts = hlaString.split(/[\s,;]+/).map(s => s.trim()); - const result = { A: [], B: [], DR: [], DQ: [] }; - - parts.forEach(part 
=> { - if (part.startsWith('A')) result.A.push(part); - else if (part.startsWith('B') && !part.startsWith('DR')) result.B.push(part); - else if (part.startsWith('DR')) result.DR.push(part); - else if (part.startsWith('DQ')) result.DQ.push(part); - }); - - return result; - }; - - const donorHLA = parseHLA(donor.hla_typing); - - for (const patient of candidates) { - // Check blood type compatibility - const aboCompatible = bloodCompatibility[donor.blood_type]?.includes(patient.blood_type) || false; - - if (!aboCompatible) continue; // Skip incompatible blood types - - // Advanced HLA matching - const patientHLA = parseHLA(patient.hla_typing); - - const hlaMatches = { - A: donorHLA.A.filter(hla => patientHLA.A.includes(hla)).length, - B: donorHLA.B.filter(hla => patientHLA.B.includes(hla)).length, - DR: donorHLA.DR.filter(hla => patientHLA.DR.includes(hla)).length, - DQ: donorHLA.DQ.filter(hla => patientHLA.DQ.includes(hla)).length - }; - - const totalHLAMatches = hlaMatches.A + hlaMatches.B + hlaMatches.DR; - const maxPossibleMatches = 6; // 2 A + 2 B + 2 DR - - // HLA score (0-100) - let hlaScore = (totalHLAMatches / maxPossibleMatches) * 100; - - // Bonus for DQ matches (newer understanding of importance) - if (hlaMatches.DQ > 0) { - hlaScore = Math.min(100, hlaScore + (hlaMatches.DQ * 5)); - } - - // Simulate crossmatch based on HLA compatibility and PRA - let virtualCrossmatch = 'negative'; - if (patient.pra_percentage > 80 || patient.cpra_percentage > 80) { - // High sensitization - higher risk of positive crossmatch - if (totalHLAMatches < 4) { - virtualCrossmatch = 'positive'; - } else { - virtualCrossmatch = 'pending'; - } - } else if (totalHLAMatches >= 5) { - virtualCrossmatch = 'negative'; - } else { - virtualCrossmatch = 'pending'; - } - - // Skip if virtual crossmatch is positive - if (virtualCrossmatch === 'positive') continue; - - // Size compatibility check - let sizeCompatible = true; - if (donor.donor_weight_kg && patient.weight_kg) { - const 
weightRatio = donor.donor_weight_kg / patient.weight_kg; - // Acceptable range: 0.7 to 1.5 - sizeCompatible = weightRatio >= 0.7 && weightRatio <= 1.5; - } - - // Calculate overall compatibility score - let compatibilityScore = 0; - - // Patient priority score (35% weight) - compatibilityScore += (patient.priority_score || 0) * 0.35; - - // HLA match (30% weight) - increased importance - compatibilityScore += hlaScore * 0.30; - - // Blood type perfect match bonus (10% weight) - if (donor.blood_type === patient.blood_type) { - compatibilityScore += 10; - } else { - compatibilityScore += 5; // Compatible but not identical - } - - // Size compatibility (10% weight) - if (sizeCompatible) { - compatibilityScore += 10; - } else { - compatibilityScore += 3; // Still possible with size mismatch - } - - // Time on waitlist (10% weight) - if (patient.date_added_to_waitlist) { - const daysOnList = Math.floor( - (new Date() - new Date(patient.date_added_to_waitlist)) / (1000 * 60 * 60 * 24) - ); - compatibilityScore += Math.min(10, (daysOnList / 365) * 10); - } - - // Age compatibility (5% weight) - if (donor.donor_age && patient.date_of_birth) { - const patientAge = Math.floor( - (new Date() - new Date(patient.date_of_birth)) / (1000 * 60 * 60 * 24 * 365.25) - ); - const ageDiff = Math.abs(donor.donor_age - patientAge); - // Prefer similar ages - if (ageDiff <= 10) { - compatibilityScore += 5; - } else if (ageDiff <= 20) { - compatibilityScore += 3; - } - } - - // Predict graft survival (simplified model) - let predictedSurvival = 85; // base - predictedSurvival += (totalHLAMatches / 6) * 10; // +10% for perfect HLA match - if (donor.blood_type === patient.blood_type) predictedSurvival += 3; - if (patient.previous_transplants > 0) predictedSurvival -= (patient.previous_transplants * 5); - if (patient.comorbidity_score) predictedSurvival -= (patient.comorbidity_score * 2); - predictedSurvival = Math.min(98, Math.max(60, predictedSurvival)); - - matches.push({ - patient, - 
compatibility_score: Math.min(100, compatibilityScore), - blood_type_compatible: aboCompatible, - abo_compatible: aboCompatible, - hla_match_score: hlaScore, - hla_matches: hlaMatches, - total_hla_matches: totalHLAMatches, - size_compatible: sizeCompatible, - virtual_crossmatch: virtualCrossmatch, - predicted_graft_survival: predictedSurvival, - }); - } - - // Sort by compatibility score (highest first) - matches.sort((a, b) => b.compatibility_score - a.compatibility_score); - - // Assign priority ranks - matches.forEach((match, index) => { - match.priority_rank = index + 1; - }); - - // Create Match records for top candidates (only if not simulation) - const createdMatches = []; - if (!simulation_mode) { - for (const match of matches.slice(0, 10)) { - const matchRecord = await api.entities.Match.create({ - donor_organ_id: donor.id, - patient_id: match.patient.id, - patient_name: `${match.patient.first_name} ${match.patient.last_name}`, - compatibility_score: match.compatibility_score, - blood_type_compatible: match.blood_type_compatible, - abo_compatible: match.abo_compatible, - hla_match_score: match.hla_match_score, - hla_a_match: match.hla_matches.A, - hla_b_match: match.hla_matches.B, - hla_dr_match: match.hla_matches.DR, - hla_dq_match: match.hla_matches.DQ, - size_compatible: match.size_compatible, - match_status: 'potential', - priority_rank: match.priority_rank, - virtual_crossmatch_result: match.virtual_crossmatch, - physical_crossmatch_result: 'not_performed', - predicted_graft_survival: match.predicted_graft_survival, - }); - createdMatches.push(matchRecord); - } - - // Create notifications for top 3 matches - for (const match of matches.slice(0, 3)) { - const allUsers = await api.asServiceRole.entities.User.list(); - const admins = allUsers.filter(u => u.role === 'admin'); - - for (const admin of admins) { - await api.entities.Notification.create({ - recipient_email: admin.email, - title: 'High-Compatibility Donor Match', - message: `Excellent match: 
${match.patient.first_name} ${match.patient.last_name} (${match.compatibility_score.toFixed(0)}% compatible, ${match.total_hla_matches}/6 HLA matches) for ${donor.organ_type} from donor ${donor.donor_id}`, - notification_type: 'donor_match', - is_read: false, - related_patient_id: match.patient.id, - related_patient_name: `${match.patient.first_name} ${match.patient.last_name}`, - priority_level: match.priority_rank === 1 ? 'critical' : 'high', - action_url: `/DonorMatching?donor_id=${donor.id}`, - metadata: { - donor_id: donor.id, - patient_id: match.patient.id, - compatibility_score: match.compatibility_score, - hla_matches: match.total_hla_matches - } - }); - } - } - - // Log the matching activity - await api.entities.AuditLog.create({ - action: 'create', - entity_type: 'DonorOrgan', - entity_id: donor.id, - details: `Advanced matching for donor ${donor.donor_id}: ${matches.length} compatible recipients found. Top match: ${matches[0]?.compatibility_score.toFixed(0)}% (${matches[0]?.total_hla_matches}/6 HLA)`, - user_email: user.email, - user_role: user.role, - }); - } - - return Response.json({ - success: true, - simulation_mode: simulation_mode || false, - donor, - matches: matches.map(m => ({ - patient_id: m.patient.id, - patient_name: `${m.patient.first_name} ${m.patient.last_name}`, - patient_id_mrn: m.patient.patient_id, - blood_type: m.patient.blood_type, - organ_needed: m.patient.organ_needed, - priority_score: m.patient.priority_score, - compatibility_score: m.compatibility_score, - blood_type_compatible: m.blood_type_compatible, - abo_compatible: m.abo_compatible, - hla_match_score: m.hla_match_score, - hla_matches: m.hla_matches, - total_hla_matches: m.total_hla_matches, - size_compatible: m.size_compatible, - priority_rank: m.priority_rank, - medical_urgency: m.patient.medical_urgency, - virtual_crossmatch: m.virtual_crossmatch, - predicted_graft_survival: m.predicted_graft_survival, - days_on_waitlist: m.patient.date_added_to_waitlist - ? 
Math.floor((new Date() - new Date(m.patient.date_added_to_waitlist)) / (1000 * 60 * 60 * 24)) - : 0 - })), - total_matches: matches.length, - matches_created: createdMatches.length - }); - } catch (error) { - logger.error('Advanced donor matching failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'Donor matching failed. Contact support.'); - } -}); \ No newline at end of file diff --git a/functions/pushToEHR.ts b/functions/pushToEHR.ts deleted file mode 100644 index 4b9e037..0000000 --- a/functions/pushToEHR.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('pushToEHR'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { patient_id, integration_id, fields_to_sync } = await req.json(); - - // Get patient and integration details - const patient = await api.entities.Patient.get(patient_id); - const integration = await api.entities.EHRIntegration.get(integration_id); - - if (!patient || !integration) { - return Response.json({ error: 'Patient or integration not found' }, { status: 404 }); - } - - if (!integration.enable_bidirectional_sync) { - return Response.json({ - error: 'Bidirectional sync not enabled for this integration' - }, { status: 400 }); - } - - const startTime = Date.now(); - const syncedFields = []; - const errors = []; - - // Generate FHIR bundle for selected fields - const response = await api.functions.invoke('exportToFHIR', { - patient_id: patient.id, - resource_types: ['Patient', 'Observation', 'Condition'] - }); - - const fhirBundle = response.data.fhir_bundle; - - // Filter resources based on fields_to_sync - const fieldsToSync = fields_to_sync 
|| integration.sync_fields_to_ehr || []; - - if (fieldsToSync.length === 0) { - return Response.json({ - error: 'No fields configured for sync' - }, { status: 400 }); - } - - // Prepare authentication headers - let authHeaders = {}; - const apiKey = Deno.env.get(`EHR_API_KEY_${integration.id}`); - - if (integration.auth_type === 'bearer_token' && apiKey) { - authHeaders['Authorization'] = `Bearer ${apiKey}`; - } else if (integration.auth_type === 'basic_auth' && apiKey) { - authHeaders['Authorization'] = `Basic ${apiKey}`; - } - - authHeaders['Content-Type'] = 'application/fhir+json'; - authHeaders['Accept'] = 'application/fhir+json'; - - // Validate endpoint URL to prevent SSRF - let endpointUrl: URL; - try { - endpointUrl = new URL(integration.endpoint_url); - } catch { - return Response.json({ error: 'Invalid integration endpoint URL' }, { status: 400 }); - } - if (endpointUrl.protocol !== 'https:' && endpointUrl.protocol !== 'http:') { - return Response.json({ error: 'Unsupported endpoint protocol' }, { status: 400 }); - } - const hostname = endpointUrl.hostname; - if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1' || - hostname.startsWith('10.') || hostname.startsWith('192.168.') || - hostname.startsWith('169.254.') || hostname.endsWith('.internal')) { - return Response.json({ error: 'Endpoint resolves to restricted address' }, { status: 400 }); - } - - let ehrResponse; - try { - const pushResponse = await fetch(endpointUrl.toString(), { - method: 'POST', - headers: authHeaders, - body: JSON.stringify(fhirBundle) - }); - - ehrResponse = { - status: pushResponse.status, - statusText: pushResponse.statusText, - }; - - if (!pushResponse.ok) { - errors.push(`EHR system returned ${pushResponse.status}: ${pushResponse.statusText}`); - } else { - syncedFields.push(...fieldsToSync); - - // Update integration stats - await api.entities.EHRIntegration.update(integration.id, { - total_exports: (integration.total_exports || 0) + 1, - 
last_export_date: new Date().toISOString() - }); - } - } catch (fetchError) { - errors.push(`Network error: ${fetchError.message}`); - ehrResponse = { error: fetchError.message }; - } - - const syncDuration = Date.now() - startTime; - - // Log the sync - const syncLog = await api.entities.EHRSyncLog.create({ - sync_direction: 'outbound', - integration_id: integration.id, - patient_id: patient.id, - patient_name: `${patient.first_name} ${patient.last_name}`, - fhir_resource_type: 'Bundle', - fields_synced: syncedFields, - status: errors.length === 0 ? 'success' : 'failed', - error_message: errors.join('; ') || null, - ehr_response: ehrResponse, - triggered_by: 'manual', - sync_duration_ms: syncDuration - }); - - // Audit log - await api.entities.AuditLog.create({ - action: 'update', - entity_type: 'Patient', - entity_id: patient.id, - patient_name: `${patient.first_name} ${patient.last_name}`, - details: `Data pushed to EHR ${integration.integration_name}: ${syncedFields.length} fields synced`, - user_email: user.email, - user_role: user.role, - }); - - return Response.json({ - success: errors.length === 0, - synced_fields: syncedFields, - errors, - sync_log_id: syncLog.id, - }); - } catch (error) { - logger.error('EHR push failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'EHR data push failed. 
Contact support.'); - } -}); \ No newline at end of file diff --git a/functions/validateFHIRData.ts b/functions/validateFHIRData.ts deleted file mode 100644 index f7a6ed9..0000000 --- a/functions/validateFHIRData.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { createClientFromRequest } from 'npm:@api/sdk@0.8.6'; -import { createLogger, generateRequestId, safeErrorResponse } from './lib/logger.ts'; - -const logger = createLogger('validateFHIRData'); - -Deno.serve(async (req) => { - const requestId = generateRequestId(); - - try { - const api = createClientFromRequest(req); - - const user = await api.auth.me(); - if (!user) { - return Response.json({ error: 'Unauthorized' }, { status: 401 }); - } - - const { fhir_resource, resource_type } = await req.json(); - - // Get active validation rules - const allRules = await api.asServiceRole.entities.EHRValidationRule.filter({ - is_active: true - }); - - // Filter rules applicable to this resource type - const applicableRules = allRules.filter(rule => - !rule.apply_to_resource_types || - rule.apply_to_resource_types.length === 0 || - rule.apply_to_resource_types.includes(resource_type) - ); - - const validationResults = { - valid: true, - errors: [], - warnings: [], - checked_rules: applicableRules.length - }; - - // Helper to get nested property value - const getNestedValue = (obj, path) => { - return path.split('.').reduce((current, key) => { - // Handle array notation like identifier[0] - const arrayMatch = key.match(/^(\w+)\[(\d+)\]$/); - if (arrayMatch) { - return current?.[arrayMatch[1]]?.[parseInt(arrayMatch[2])]; - } - return current?.[key]; - }, obj); - }; - - // Validate each rule - for (const rule of applicableRules) { - const fieldValue = getNestedValue(fhir_resource, rule.target_field); - let isValid = true; - let errorMsg = rule.error_message || `Validation failed for ${rule.target_field}`; - - switch (rule.rule_type) { - case 'required_field': - isValid = fieldValue !== undefined && fieldValue !== null && 
fieldValue !== ''; - if (!isValid) { - errorMsg = rule.error_message || `Required field '${rule.target_field}' is missing`; - } - break; - - case 'date_format': - if (fieldValue) { - const dateFormat = rule.validation_config?.date_format || 'YYYY-MM-DD'; - // Simple date format validation (can be enhanced) - if (dateFormat === 'YYYY-MM-DD') { - isValid = /^\d{4}-\d{2}-\d{2}$/.test(fieldValue); - } else if (dateFormat === 'YYYY-MM-DD HH:mm:ss') { - isValid = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(fieldValue); - } - if (!isValid) { - errorMsg = rule.error_message || `Date field '${rule.target_field}' must match format ${dateFormat}`; - } - } - break; - - case 'value_range': - if (fieldValue !== undefined && fieldValue !== null) { - const numValue = parseFloat(fieldValue); - const min = rule.validation_config?.min_value; - const max = rule.validation_config?.max_value; - - if (min !== undefined && numValue < min) { - isValid = false; - errorMsg = rule.error_message || `Field '${rule.target_field}' (${numValue}) is below minimum (${min})`; - } - if (max !== undefined && numValue > max) { - isValid = false; - errorMsg = rule.error_message || `Field '${rule.target_field}' (${numValue}) exceeds maximum (${max})`; - } - } - break; - - case 'enum_check': - if (fieldValue) { - const allowedValues = rule.validation_config?.allowed_values || []; - isValid = allowedValues.includes(fieldValue); - if (!isValid) { - errorMsg = rule.error_message || `Field '${rule.target_field}' must be one of: ${allowedValues.join(', ')}`; - } - } - break; - - case 'regex_pattern': - if (fieldValue) { - const pattern = rule.validation_config?.pattern; - if (pattern && typeof pattern === 'string' && pattern.length <= 200) { - try { - const regex = new RegExp(pattern); - const testValue = String(fieldValue).slice(0, 1000); - isValid = regex.test(testValue); - } catch { - isValid = false; - } - if (!isValid) { - errorMsg = rule.error_message || `Field '${rule.target_field}' does not match 
required pattern`; - } - } - } - break; - } - - if (!isValid) { - const validationError = { - rule_name: rule.rule_name, - field: rule.target_field, - message: errorMsg, - severity: rule.severity - }; - - if (rule.severity === 'error') { - validationResults.errors.push(validationError); - validationResults.valid = false; - } else { - validationResults.warnings.push(validationError); - } - } - } - - return Response.json(validationResults); - } catch (error) { - logger.error('FHIR validation failed', error, { request_id: requestId }); - return safeErrorResponse(requestId, 'FHIR validation failed. Contact support.'); - } -}); \ No newline at end of file diff --git a/src/components/ehr/EHRIntegrationManager.jsx b/src/components/ehr/EHRIntegrationManager.jsx index 91c906c..91a2d37 100644 --- a/src/components/ehr/EHRIntegrationManager.jsx +++ b/src/components/ehr/EHRIntegrationManager.jsx @@ -66,7 +66,7 @@ export default function EHRIntegrationManager() { } }; - const webhookUrl = `${window.location.origin}/api/functions/fhirWebhook`; + const webhookUrl = `${window.location.origin}/api/fhir/webhook`; const copyToClipboard = (text) => { navigator.clipboard.writeText(text); From d3a71c971d1291aec19c62c2d9f2d9e3850456e7 Mon Sep 17 00:00:00 2001 From: NeuroKoder3 Date: Thu, 14 May 2026 21:52:22 -0500 Subject: [PATCH 2/6] ci: enforce code-signing on commercial release builds (C-3) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds a --for-sale flag to the release-readiness gate that promotes the Windows code-signing, macOS notarization, and signed-installer presence checks from `optional` to `mandatory`. Without the flag, day-to-day `npm run release:check` keeps the same ergonomics it has today. 
Also adds .github/workflows/release.yml that triggers on v*.*.* tags and: - preflights the signing-secret environment (refuses to start if Windows or Apple credentials are missing) - builds, signs and notarizes Windows + macOS installers in parallel - runs `npm run release:check:for-sale` as the commercial gate - publishes signed artifacts to the GitHub Releases page Net effect: it is no longer possible to ship a binary to a customer through the normal release path without code-signing credentials present in CI. docs/CODE_SIGNING.md documents the workflow. No effect on existing builds — the CI workflow only activates on a tag push, and the new gate flag only applies when explicitly passed. Co-authored-by: Cursor --- .github/workflows/release.yml | 222 ++++++++++++++++++++++++++++ docs/CODE_SIGNING.md | 18 +++ scripts/release-readiness-check.mjs | 22 ++- 3 files changed, 256 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..b1babfc --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,222 @@ +name: Release (commercial) + +# Triggers ONLY on a signed version tag of the form vX.Y.Z. This workflow +# enforces the commercial release gate, which requires: +# - all CI checks to pass +# - Windows code-signing credentials present in the environment +# - Apple notarization credentials present in the environment +# - signed Windows installer artifact built into release/enterprise/ +# +# If any of those is missing, the build fails. This is the mechanism that +# enforces "no unsigned binary is ever published to customers". 
+# +# Required repository secrets: +# Windows (one of two modes): +# a) SSL.com eSigner (preferred — cloud HSM, no local cert): +# ESIGNER_USERNAME +# ESIGNER_PASSWORD +# ESIGNER_CREDENTIAL_ID +# ESIGNER_TOTP_SECRET +# and TRANSTRACK_SIGN_MODE=ssl_esigner in env +# b) Legacy PFX file: +# CSC_LINK base64 .pfx +# CSC_KEY_PASSWORD pfx password +# macOS: +# APPLE_ID Apple ID for notarization +# APPLE_APP_PASSWORD app-specific password +# APPLE_TEAM_ID developer team ID +# APPLE_CERT_BASE64 Developer ID Application cert .p12 (base64) +# APPLE_CERT_PASSWORD cert password + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + workflow_dispatch: + inputs: + tag: + description: 'Tag to release (e.g. v1.3.0). Used only for workflow_dispatch.' + required: true + type: string + +permissions: + contents: write # needed to upload to GitHub Releases + +jobs: + preflight: + name: Preflight — secrets present + runs-on: ubuntu-latest + outputs: + windows_mode: ${{ steps.detect.outputs.windows_mode }} + steps: + - name: Detect Windows signing mode + id: detect + env: + CSC_LINK: ${{ secrets.CSC_LINK }} + ESIGNER_USERNAME: ${{ secrets.ESIGNER_USERNAME }} + run: | + if [ -n "$ESIGNER_USERNAME" ]; then + echo "windows_mode=ssl_esigner" >> $GITHUB_OUTPUT + elif [ -n "$CSC_LINK" ]; then + echo "windows_mode=pfx" >> $GITHUB_OUTPUT + else + echo "::error::No Windows code-signing credentials present. Set ESIGNER_* or CSC_LINK secrets before tagging a release." 
+ exit 1 + fi + + - name: Require macOS notarization credentials + env: + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_APP_PASSWORD: ${{ secrets.APPLE_APP_PASSWORD }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + run: | + missing=0 + for v in APPLE_ID APPLE_APP_PASSWORD APPLE_TEAM_ID; do + if [ -z "${!v}" ]; then + echo "::error::Missing required secret: $v" + missing=1 + fi + done + if [ $missing -ne 0 ]; then + exit 1 + fi + + build-windows: + name: Build & sign Windows installer + needs: preflight + runs-on: windows-latest + env: + TRANSTRACK_SIGN_MODE: ${{ needs.preflight.outputs.windows_mode }} + ESIGNER_USERNAME: ${{ secrets.ESIGNER_USERNAME }} + ESIGNER_PASSWORD: ${{ secrets.ESIGNER_PASSWORD }} + ESIGNER_CREDENTIAL_ID: ${{ secrets.ESIGNER_CREDENTIAL_ID }} + ESIGNER_TOTP_SECRET: ${{ secrets.ESIGNER_TOTP_SECRET }} + CSC_LINK: ${{ secrets.CSC_LINK }} + CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }} + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install npm dependencies + run: npm ci + + - name: Build renderer + run: npm run build + + - name: Build & sign Windows installer (electron-builder) + run: npm run dist:win:enterprise + + - name: Upload installer + uses: actions/upload-artifact@v4 + with: + name: windows-installer + path: release/enterprise/*.exe + retention-days: 30 + + build-macos: + name: Build, sign & notarize macOS installer + needs: preflight + runs-on: macos-latest + env: + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_APP_PASSWORD: ${{ secrets.APPLE_APP_PASSWORD }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + CSC_LINK: ${{ secrets.APPLE_CERT_BASE64 }} + CSC_KEY_PASSWORD: ${{ secrets.APPLE_CERT_PASSWORD }} + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install npm dependencies + run: npm ci + + - name: Build renderer + run: npm run build + + - name: Build, sign & notarize macOS DMG + run: npm run 
dist:mac:enterprise + + - name: Upload DMG + uses: actions/upload-artifact@v4 + with: + name: macos-dmg + path: release/enterprise/*.dmg + retention-days: 30 + + gate: + name: Commercial release gate (--for-sale) + needs: [build-windows, build-macos] + runs-on: ubuntu-latest + env: + TRANSTRACK_RELEASE_CHANNEL: public + # The signing-mode env vars are set so the gate's environment check + # reports them as configured even though we don't actually re-sign + # here — the artifacts were signed in the matrix jobs above. + TRANSTRACK_SIGN_MODE: ssl_esigner + ESIGNER_USERNAME: ${{ secrets.ESIGNER_USERNAME }} + ESIGNER_PASSWORD: ${{ secrets.ESIGNER_PASSWORD }} + ESIGNER_CREDENTIAL_ID: ${{ secrets.ESIGNER_CREDENTIAL_ID }} + ESIGNER_TOTP_SECRET: ${{ secrets.ESIGNER_TOTP_SECRET }} + ESIGNER_TOOL_PATH: '/usr/local/bin/codesigntool' + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_APP_PASSWORD: ${{ secrets.APPLE_APP_PASSWORD }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install npm dependencies + run: npm ci + + - name: Build renderer + run: npm run build + + - name: Download Windows installer artifact + uses: actions/download-artifact@v4 + with: + name: windows-installer + path: release/enterprise/ + + - name: Verify installer artifact present + run: ls -lR release/enterprise/ + + - name: Run commercial release gate (--for-sale) + run: npm run release:check:for-sale + + publish: + name: Publish to GitHub Releases + needs: gate + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Download Windows installer + uses: actions/download-artifact@v4 + with: + name: windows-installer + path: dist-release/ + + - name: Download macOS DMG + uses: actions/download-artifact@v4 + with: + name: macos-dmg + path: dist-release/ + + - name: Create GitHub Release + uses: softprops/action-gh-release@v2 + with: + tag_name: ${{ github.ref_name || inputs.tag }} + 
generate_release_notes: true + fail_on_unmatched_files: true + files: | + dist-release/*.exe + dist-release/*.dmg diff --git a/docs/CODE_SIGNING.md b/docs/CODE_SIGNING.md index bfdb678..3e44912 100644 --- a/docs/CODE_SIGNING.md +++ b/docs/CODE_SIGNING.md @@ -8,6 +8,24 @@ codebase** is the certificate itself — that has to be procured externally and the secrets wired into the environment. Once those are in place, no further code changes are required. +## Commercial-release gate + +The release-readiness check (`scripts/release-readiness-check.mjs`) +treats signing as **optional** in the default mode so day-to-day dev +builds don't require certificates. For a build that is going to be sold +or pushed to a customer, run: + +```bash +npm run release:check:for-sale +``` + +This sets `TRANSTRACK_RELEASE_CHANNEL=public` and promotes every signing +and notarization check to **mandatory**. The CI workflow at +`.github/workflows/release.yml` invokes this gate on every `v*.*.*` tag +and fails the build if signing credentials are missing — meaning **no +unsigned binary can be published to a customer through the normal +release path**. + --- ## Windows Authenticode diff --git a/scripts/release-readiness-check.mjs b/scripts/release-readiness-check.mjs index aaefa54..c80e052 100644 --- a/scripts/release-readiness-check.mjs +++ b/scripts/release-readiness-check.mjs @@ -24,6 +24,14 @@ const __dirname = dirname(fileURLToPath(import.meta.url)); const repoRoot = resolve(__dirname, '..'); const isStrict = process.argv.includes('--strict'); +// --for-sale promotes the signing / notarization / installer gates from +// `optional` to `mandatory`. This is the gate that prevents shipping a +// commercial build without code-signing credentials. CI is expected to +// pass this flag for the public release pipeline. +const isCommercialRelease = process.argv.includes('--for-sale') || + process.env.TRANSTRACK_RELEASE_CHANNEL === 'public'; +const signingSeverity = isCommercialRelease ? 
'mandatory' : 'optional'; + // ----------------------------------------------------------------------------- // Tiny ANSI helpers — no chalk dependency, ASCII-safe on Windows PowerShell. // ----------------------------------------------------------------------------- @@ -74,8 +82,10 @@ function runShell(cmd, args, opts = {}) { async function main() { console.log(c.b('\nTransTrack — Release Readiness Check')); -console.log(` repo: ${repoRoot}`); -console.log(` strict: ${isStrict}\n`); +console.log(` repo: ${repoRoot}`); +console.log(` strict: ${isStrict}`); +console.log(` for-sale: ${isCommercialRelease}` + + (isCommercialRelease ? c.y(' (signing gates promoted to MANDATORY)') : '') + '\n'); // --- 1. Working tree state --------------------------------------------------- await runStep('Git working tree clean', 'optional', () => { @@ -249,7 +259,7 @@ await runStep('Alert Rules engine — catalog completeness', 'mandatory', async // npm test suite.) // --- 8. Optional release gates (signed installer, etc.) 
--------------------- -await runStep('Code-signed Windows installer present (release/enterprise)', 'optional', () => { +await runStep('Code-signed Windows installer present (release/enterprise)', signingSeverity, () => { const dir = resolve(repoRoot, 'release', 'enterprise'); if (!existsSync(dir)) throw new Error('release/enterprise/ not built'); // Find any version of the installer; we don't pin to a specific version @@ -264,7 +274,7 @@ await runStep('Code-signed Windows installer present (release/enterprise)', 'opt return `${newest.f} (${(statSync(resolve(dir, newest.f)).size / 1024 / 1024).toFixed(1)} MB)`; }); -await runStep('Windows code-signing configured (any supported mode)', 'optional', () => { +await runStep('Windows code-signing configured (any supported mode)', signingSeverity, () => { const mode = (process.env.TRANSTRACK_SIGN_MODE || '').toLowerCase(); if (mode === 'ssl_esigner') { for (const k of ['ESIGNER_USERNAME', 'ESIGNER_PASSWORD', 'ESIGNER_CREDENTIAL_ID', @@ -287,14 +297,14 @@ await runStep('Windows code-signing configured (any supported mode)', 'optional' throw new Error('no code-signing credentials in environment'); }); -await runStep('macOS notarization configured (APPLE_* env vars)', 'optional', () => { +await runStep('macOS notarization configured (APPLE_* env vars)', signingSeverity, () => { for (const k of ['APPLE_ID', 'APPLE_APP_PASSWORD', 'APPLE_TEAM_ID']) { if (!process.env[k]) throw new Error(`${k} not set`); } return 'configured'; }); -await runStep('@electron/notarize installed (afterSign hook)', 'optional', async () => { +await runStep('@electron/notarize installed (afterSign hook)', signingSeverity, async () => { const { createRequire } = await import('node:module'); const require = createRequire(import.meta.url); try { From 43facfc6205ebee69f66bca31ba7392a43ee5f85 Mon Sep 17 00:00:00 2001 From: NeuroKoder3 Date: Thu, 14 May 2026 21:52:55 -0500 Subject: [PATCH 3/6] feat(server): Stripe Checkout -> webhook -> license issuance 
pipeline (C-10) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds a server-side billing surface so customer purchases auto-emit signed TransTrack license files instead of requiring the founder to run `npm run license:issue` manually for every sale. Endpoints (both public; webhook is signature-verified): POST /v1/billing/checkout-session Creates a Stripe Checkout Session with PKCE-style metadata (tier, orgId, customerName, customerEmail, optional machineIds) and returns the hosted-checkout URL. POST /v1/billing/webhook Verifies the Stripe-Signature header against STRIPE_WEBHOOK_SECRET. On checkout.session.completed: 1. Builds an Ed25519-signed license using the issuance helper that backs `scripts/issue-license.mjs` 2. Persists to a new `issued_licenses` table 3. Emails the .lic file to the customer via SMTP (best-effort; falls back to operator-visible log line if SMTP isn't set) On invoice.paid: stub for renewal re-issuance (TODO). On customer.subscription.deleted: marks the row canceled. Infra: - server/src/index.js: per-route raw-body capture hook so Stripe signature verification has access to the unparsed body. - server/src/config.js: STRIPE_*, LICENSE_PRIVATE_KEY_PATH, SMTP_* config entries, all optional — server still boots without them. - server/package.json: stripe + nodemailer listed under optionalDependencies so existing pilot installs are unaffected. - server/src/db/migrations/006_issued_licenses.sql: audit + renewal tracking table indexed by customer_email, org_id, stripe subscription_id, and expires_at. - docs/STRIPE_BILLING.md: operator's guide (vendor setup, secrets, smoke-test instructions, security caveats). If Stripe credentials are not configured, both endpoints return 503, so this commit is safe to ship to existing pilot deployments. 
Co-authored-by: Cursor --- docs/STRIPE_BILLING.md | 180 ++++++++++ server/package.json | 4 + server/src/config.js | 24 ++ .../src/db/migrations/006_issued_licenses.sql | 37 ++ server/src/index.js | 18 + server/src/routes/billing.js | 324 ++++++++++++++++++ 6 files changed, 587 insertions(+) create mode 100644 docs/STRIPE_BILLING.md create mode 100644 server/src/db/migrations/006_issued_licenses.sql create mode 100644 server/src/routes/billing.js diff --git a/docs/STRIPE_BILLING.md b/docs/STRIPE_BILLING.md new file mode 100644 index 0000000..f5a3e3a --- /dev/null +++ b/docs/STRIPE_BILLING.md @@ -0,0 +1,180 @@ +# TransTrack Stripe Billing — Operator's Guide + +This document explains how to wire Stripe Checkout + webhooks to +automatic TransTrack license issuance. **You can ship the product +without this** — manual `npm run license:issue` works fine for the +first dozen customers. Build this once you have ≥20 customers or want +self-serve sign-up. + +## Architecture + +``` + ┌────────────────────────┐ + │ Customer browser │ + └────────────┬───────────┘ + │ 1. POST /v1/billing/checkout-session + ▼ + ┌──────────────────────────────┐ + │ TransTrack server (Fastify) │ + └────────────┬─────────────────┘ + │ 2. stripe.checkout.sessions.create() + ▼ + ┌────────────────────┐ + │ Stripe │ + └─────────┬──────────┘ + │ 3. customer pays + │ 4. POST /v1/billing/webhook + ▼ + ┌──────────────────────────────┐ + │ TransTrack server │ + │ - verify signature │ + │ - sign LIC1.* license │ + │ - INSERT issued_licenses │ + │ - email license to customer│ + └──────────────────────────────┘ +``` + +## One-time setup + +### 1. Create a Stripe account + +Sign up at https://dashboard.stripe.com. 
Create products + recurring prices +for each tier and copy the price IDs (`price_xxxxx`): + +| Tier | Default limits | Env var to set | +| ------------ | --------------------------------------------- | ------------------------------- | +| starter | 250 patients, 10 users, 2 installs | `STRIPE_PRICE_ID_STARTER` | +| professional | 1500 patients, 50 users, 5 installs | `STRIPE_PRICE_ID_PROFESSIONAL` | +| enterprise | unlimited | `STRIPE_PRICE_ID_ENTERPRISE` | + +### 2. Configure the webhook + +In the Stripe dashboard: +- **Endpoint URL:** `https://api.transtrack.health/v1/billing/webhook` +- **Events to send:** + - `checkout.session.completed` + - `invoice.paid` + - `customer.subscription.deleted` + +Stripe shows a signing secret (`whsec_xxxxx`) — copy it. + +### 3. Place the publisher private key on the server + +The webhook signs licenses with the same Ed25519 private key used by +`npm run license:issue`. Mount it on the server as a read-only secret +(Docker secret, Kubernetes secret, or AWS Parameter Store) and set: + +``` +LICENSE_PRIVATE_KEY_PATH=/run/secrets/license-private.pem +``` + +The file MUST have mode `0o400` (read-only to the owning UID) and the +server process must run as that UID. + +### 4. Set environment variables + +```bash +STRIPE_SECRET_KEY=sk_live_xxxxx +STRIPE_WEBHOOK_SECRET=whsec_xxxxx +STRIPE_BILLING_RETURN_URL=https://app.transtrack.health +STRIPE_PRICE_ID_STARTER=price_xxxxx +STRIPE_PRICE_ID_PROFESSIONAL=price_xxxxx +STRIPE_PRICE_ID_ENTERPRISE=price_xxxxx + +LICENSE_PRIVATE_KEY_PATH=/run/secrets/license-private.pem + +# Email delivery +SMTP_HOST=smtp.postmarkapp.com # or sendgrid, mailgun, etc. +SMTP_PORT=587 +SMTP_USER=apikey +SMTP_PASSWORD= +SMTP_FROM='TransTrack ' +``` + +### 5. Install optional dependencies + +```bash +cd server +npm install stripe nodemailer +``` + +(These are in `optionalDependencies` so the server still boots without +them; the billing routes return `503` until both are installed AND +configured.) + +### 6. 
Run the new migration + +```bash +cd server +npm run migrate +``` + +This creates the `issued_licenses` table. + +## How customers buy + +### Self-serve flow (typical) + +1. Customer visits your pricing page (e.g. + `https://transtrack.health/pricing`). +2. JS on that page calls: + + ```js + const res = await fetch('https://api.transtrack.health/v1/billing/checkout-session', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + tier: 'professional', + customerName: form.orgName.value, + customerEmail: form.email.value, + orgId: slugify(form.orgName.value), + machineIds: [], // optional — fill in if you collect this up front + }), + }); + const { url } = await res.json(); + window.location = url; // redirects to Stripe Checkout + ``` + +3. Customer enters payment details on Stripe's hosted page. + +4. Stripe POSTs to `/v1/billing/webhook`. The server: + - verifies the Stripe signature (refuses if invalid) + - signs an Ed25519 license file + - persists to `issued_licenses` + - emails the `.lic` file to `customerEmail` + +5. Customer opens the email, downloads the attachment, opens TransTrack, + pastes into **Settings → License**, clicks Activate. + +### Enterprise / contract sales (manual) + +Skip the Checkout step entirely. Use `npm run license:issue` directly +and email the file yourself. The webhook path is purely an automation +convenience; it never replaces the contractual sales motion for large +deals. + +## Testing the webhook locally + +```bash +# In one terminal: +stripe listen --forward-to localhost:8080/v1/billing/webhook +# Stripe prints a webhook secret (whsec_xxx) — set it in your env. + +# In another terminal: +stripe trigger checkout.session.completed +``` + +Check the server log; you should see a line like +`license issued via Stripe checkout`. + +## What this does NOT do + +- It does NOT replace your EULA, MSA, or BAA. 
Stripe handles money; the + contractual documents still have to be sent and signed separately. +- It does NOT auto-renew licenses yet — the `invoice.paid` handler is a + stub. You can either renew manually each cycle or wire up that handler + to re-issue + re-email on every successful invoice. +- It does NOT issue machine-bound licenses by default in the self-serve + flow because you don't know the customer's machine IDs at checkout + time. The license is issued unbound; the customer can request a + machine-bound replacement after install via a support ticket. diff --git a/server/package.json b/server/package.json index 78b70d0..372d488 100644 --- a/server/package.json +++ b/server/package.json @@ -38,6 +38,10 @@ "qrcode": "^1.5.4", "zod": "^3.23.8" }, + "optionalDependencies": { + "stripe": "^18.0.0", + "nodemailer": "^6.9.0" + }, "devDependencies": { "vitest": "^4.1.5", "eslint": "^9.13.0" diff --git a/server/src/config.js b/server/src/config.js index 949e841..7bf2ec4 100644 --- a/server/src/config.js +++ b/server/src/config.js @@ -82,6 +82,30 @@ const schema = z.object({ EPIC_KID: z.string().optional().default('transtrack-epic-1'), EPIC_SCOPE: z.string().optional().default(''), EPIC_DEFAULT_PATIENT_ID: z.string().optional().default(''), + + // --------------------------------------------------------------------------- + // Stripe billing & license provisioning (see server/src/routes/billing.js). + // All of these are optional — the routes return 503 if Stripe isn't + // configured, so existing pilot/integration deploys are unaffected. 
+ // --------------------------------------------------------------------------- + STRIPE_SECRET_KEY: z.string().optional().default(''), + STRIPE_WEBHOOK_SECRET: z.string().optional().default(''), + STRIPE_BILLING_RETURN_URL: z.string().optional().default(''), + STRIPE_PRICE_ID_STARTER: z.string().optional().default(''), + STRIPE_PRICE_ID_PROFESSIONAL: z.string().optional().default(''), + STRIPE_PRICE_ID_ENTERPRISE: z.string().optional().default(''), + + // Path to the Ed25519 private key used to sign licenses issued by the + // webhook handler. NEVER commit this; mount it as a Docker secret. + LICENSE_PRIVATE_KEY_PATH: z.string().optional().default(''), + + // Optional SMTP for emailing license files to customers post-checkout. + SMTP_HOST: z.string().optional().default(''), + SMTP_PORT: z.coerce.number().int().positive().optional().default(587), + SMTP_SECURE: z.coerce.boolean().optional().default(false), + SMTP_USER: z.string().optional().default(''), + SMTP_PASSWORD: z.string().optional().default(''), + SMTP_FROM: z.string().optional().default(''), }); function load() { diff --git a/server/src/db/migrations/006_issued_licenses.sql b/server/src/db/migrations/006_issued_licenses.sql new file mode 100644 index 0000000..1f01121 --- /dev/null +++ b/server/src/db/migrations/006_issued_licenses.sql @@ -0,0 +1,37 @@ +-- 006_issued_licenses.sql +-- TransTrack — licenses issued via Stripe checkout webhook. +-- +-- Stores the metadata of every license we have signed and emailed to a +-- customer, so the sales/support team can: +-- * look up a customer's license from a Stripe session/customer/subscription ID +-- * re-issue / re-send the license file if a customer loses it +-- * reconcile against Stripe's records during audit +-- +-- The full signed wire-format string is stored in `wire_format` so we +-- never have to re-derive it from raw payload + private key during +-- re-send. 
This row IS sensitive (it contains a valid license) and is +-- protected by row-level security plus the database-at-rest encryption +-- that already protects the rest of the schema. + +CREATE TABLE IF NOT EXISTS issued_licenses ( + license_id TEXT PRIMARY KEY, + org_id TEXT NOT NULL, + customer_name TEXT NOT NULL, + customer_email TEXT NOT NULL, + tier TEXT NOT NULL CHECK (tier IN ('evaluation', 'starter', 'professional', 'enterprise')), + issued_at TIMESTAMPTZ NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + canceled_at TIMESTAMPTZ, + stripe_session_id TEXT, + stripe_customer_id TEXT, + stripe_subscription_id TEXT, + wire_format TEXT NOT NULL, + machine_bindings_count INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF NOT EXISTS idx_issued_licenses_email ON issued_licenses(customer_email); +CREATE INDEX IF NOT EXISTS idx_issued_licenses_org ON issued_licenses(org_id); +CREATE INDEX IF NOT EXISTS idx_issued_licenses_stripe_sub ON issued_licenses(stripe_subscription_id); +CREATE INDEX IF NOT EXISTS idx_issued_licenses_expires ON issued_licenses(expires_at); diff --git a/server/src/index.js b/server/src/index.js index a762564..41df017 100644 --- a/server/src/index.js +++ b/server/src/index.js @@ -80,6 +80,23 @@ async function build() { try { done(null, JSON.parse(body)); } catch (e) { done(e); } }); + // Raw-body capture for Stripe webhook signature verification. Routes + // opt-in by setting `config.rawBody: true` on the route definition; + // when they do, the original JSON string is preserved at req.rawBody + // and the parsed object remains at req.body. 
+ app.addHook('preParsing', (req, _reply, payload, done) => { + if (!req.routeOptions?.config?.rawBody) return done(null, payload); + let body = ''; + payload.on('data', (chunk) => { body += chunk; }); + payload.on('end', () => { + req.rawBody = body; + const { Readable } = require('stream'); + const stream = Readable.from([body]); + done(null, stream); + }); + payload.on('error', done); + }); + app.setErrorHandler((err, req, reply) => { if (err instanceof HttpError) { reply.code(err.status).send({ @@ -130,6 +147,7 @@ async function build() { app.register(require('./routes/smart'), { config }); app.register(require('./routes/cds')); app.register(require('./routes/integrations'), { config }); + app.register(require('./routes/billing'), { config }); app.addHook('onClose', async () => { await pool.shutdown(); diff --git a/server/src/routes/billing.js b/server/src/routes/billing.js new file mode 100644 index 0000000..9c8a597 --- /dev/null +++ b/server/src/routes/billing.js @@ -0,0 +1,324 @@ +'use strict'; + +/** + * TransTrack — Stripe billing & license-provisioning routes. + * + * Two endpoints: + * + * POST /v1/billing/checkout-session (public) + * Body: { tier, customerEmail, customerName, orgId, machineIds? } + * Creates a Stripe Checkout Session and returns its URL. The price ID is + * looked up from STRIPE_PRICE_ID_ env vars (e.g. STRIPE_PRICE_ID_PROFESSIONAL). + * + * POST /v1/billing/webhook (public, signature-verified) + * Receives Stripe webhook events. On `checkout.session.completed` we: + * 1. Verify the Stripe signature using STRIPE_WEBHOOK_SECRET + * 2. Pull the tier + customer metadata from the session + * 3. Sign a TransTrack license file by shelling out to the same + * issuance helper used by `scripts/issue-license.mjs` + * 4. Email the license file to the customer (best-effort; logged + * if SMTP isn't configured) + * 5. Record the issued license in the `issued_licenses` table for + * audit + renewal tracking. 
+ * + * The Stripe SDK is loaded lazily so the server still boots in + * environments where STRIPE_SECRET_KEY is not configured (in which case + * these endpoints simply return 503). + */ + +const path = require('path'); +const fs = require('fs'); +const crypto = require('crypto'); + +// Lazy-loaded Stripe SDK. +let _stripe = null; +function getStripe(config) { + if (_stripe) return _stripe; + if (!config.STRIPE_SECRET_KEY) return null; + try { + const Stripe = require('stripe'); + _stripe = new Stripe(config.STRIPE_SECRET_KEY, { apiVersion: '2024-04-10' }); + return _stripe; + } catch { + return null; + } +} + +/** + * Sign a license payload using the issuance module bundled with the + * desktop client codebase. The server runs alongside the desktop repo, + * so we resolve the module by relative path; in a separate-repo + * deployment this would be replaced with a vendored copy. + */ +function signLicense(payload, privateKeyPem) { + const issuance = require(path.resolve(__dirname, '..', '..', '..', 'electron', 'license', 'issuance.cjs')); + return issuance.signLicense(payload, privateKeyPem); +} + +function hashForBinding(machineId) { + return crypto.createHmac('sha256', 'transtrack-license-binding-v1') + .update(machineId).digest('hex'); +} + +function _tierConfig(tier) { + // Default tier limits used when Stripe metadata doesn't override. 
+ const defaults = { + starter: { maxPatients: 250, maxUsers: 10, maxInstallations: 2 }, + professional: { maxPatients: 1500, maxUsers: 50, maxInstallations: 5 }, + enterprise: { maxPatients: -1, maxUsers: -1, maxInstallations: -1 }, + }; + return defaults[tier]; +} + +async function billingRoutes(app, opts) { + const { config } = opts; + + // --------------------------------------------------------------------------- + // POST /v1/billing/checkout-session + // --------------------------------------------------------------------------- + app.post('/v1/billing/checkout-session', { + config: { public: true }, + }, async (req, reply) => { + const stripe = getStripe(config); + if (!stripe) { + return reply.code(503).send({ + error: { code: 'billing_not_configured', message: 'STRIPE_SECRET_KEY not configured on this server.' }, + }); + } + + const { tier, customerEmail, customerName, orgId, machineIds, successUrl, cancelUrl } = req.body || {}; + if (!tier || !customerEmail || !customerName || !orgId) { + return reply.code(400).send({ error: { code: 'bad_request', message: 'tier, customerEmail, customerName, orgId required' } }); + } + if (!['starter', 'professional', 'enterprise'].includes(tier)) { + return reply.code(400).send({ error: { code: 'bad_tier', message: 'tier must be starter, professional, or enterprise' } }); + } + + const priceId = config[`STRIPE_PRICE_ID_${tier.toUpperCase()}`]; + if (!priceId) { + return reply.code(503).send({ error: { code: 'price_not_configured', message: `STRIPE_PRICE_ID_${tier.toUpperCase()} not set` } }); + } + + const session = await stripe.checkout.sessions.create({ + mode: 'subscription', + payment_method_types: ['card'], + line_items: [{ price: priceId, quantity: 1 }], + customer_email: customerEmail, + success_url: successUrl || `${config.STRIPE_BILLING_RETURN_URL || 'https://transtrack.health'}/billing/success?session_id={CHECKOUT_SESSION_ID}`, + cancel_url: cancelUrl || `${config.STRIPE_BILLING_RETURN_URL || 
'https://transtrack.health'}/billing/cancel`, + metadata: { + transtrack_tier: tier, + transtrack_org_id: orgId, + transtrack_customer_name: customerName, + transtrack_machine_ids: Array.isArray(machineIds) ? machineIds.join(',') : '', + }, + // 14-day free trial maps to our standalone 30-day trial; we still + // bill on day 14 so customers commit before going to renewals. + subscription_data: { + trial_period_days: 14, + metadata: { transtrack_tier: tier, transtrack_org_id: orgId }, + }, + }); + + return { url: session.url, sessionId: session.id }; + }); + + // --------------------------------------------------------------------------- + // POST /v1/billing/webhook + // --------------------------------------------------------------------------- + app.post('/v1/billing/webhook', { + config: { public: true, rawBody: true }, + }, async (req, reply) => { + const stripe = getStripe(config); + if (!stripe) return reply.code(503).send({ error: 'billing not configured' }); + if (!config.STRIPE_WEBHOOK_SECRET) { + return reply.code(503).send({ error: 'STRIPE_WEBHOOK_SECRET not set' }); + } + + const sig = req.headers['stripe-signature']; + let event; + try { + // Fastify's raw body (registered via the `rawBody` plugin or manual + // parser) is exposed at req.rawBody. If it isn't configured, Stripe + // signature verification cannot work — fail loudly. + const raw = req.rawBody || (typeof req.body === 'string' ? 
req.body : JSON.stringify(req.body)); + event = stripe.webhooks.constructEvent(raw, sig, config.STRIPE_WEBHOOK_SECRET); + } catch (err) { + app.log.warn({ err: err.message }, 'stripe webhook signature verification failed'); + return reply.code(400).send({ error: 'invalid signature' }); + } + + switch (event.type) { + case 'checkout.session.completed': + await handleCheckoutCompleted(app, config, event.data.object); + break; + case 'invoice.paid': + await handleInvoicePaid(app, config, event.data.object); + break; + case 'customer.subscription.deleted': + await handleSubscriptionCanceled(app, config, event.data.object); + break; + default: + app.log.debug({ type: event.type }, 'stripe event ignored'); + } + + return { received: true }; + }); +} + +// ----------------------------------------------------------------------------- +// Event handlers +// ----------------------------------------------------------------------------- + +async function handleCheckoutCompleted(app, config, session) { + app.log.info({ sessionId: session.id, customer: session.customer }, 'checkout.session.completed'); + + const tier = session.metadata?.transtrack_tier; + const orgId = session.metadata?.transtrack_org_id; + const customerName = session.metadata?.transtrack_customer_name; + const customerEmail = session.customer_details?.email || session.customer_email; + const machineIds = (session.metadata?.transtrack_machine_ids || '') + .split(',').map((s) => s.trim()).filter(Boolean); + + if (!tier || !orgId || !customerName || !customerEmail) { + app.log.error({ sessionId: session.id }, 'checkout.session.completed missing required metadata'); + return; + } + + // Build a one-year license. Subscriptions auto-renew the license on + // every `invoice.paid` event after that. 
+ const issuedAt = new Date().toISOString(); + const expiresAt = new Date(Date.now() + 365 * 86400e3).toISOString(); + const tierDefaults = _tierConfig(tier); + if (!tierDefaults) { + app.log.error({ tier }, 'unknown tier in checkout.session.completed'); + return; + } + + const payload = { + licenseId: 'lic_' + crypto.randomBytes(8).toString('hex'), + protocolVersion: 1, + customer: { name: customerName, email: customerEmail, orgId }, + tier, + issuedAt, + expiresAt, + maintenanceExpiresAt: expiresAt, + limits: tierDefaults, + features: [], // empty array means "all features for this tier" + machineBindings: machineIds.map((m) => hashForBinding(m)), + metadata: { + stripeSessionId: session.id, + stripeCustomerId: session.customer, + stripeSubscriptionId: session.subscription, + }, + }; + + const privateKeyPath = config.LICENSE_PRIVATE_KEY_PATH; + if (!privateKeyPath || !fs.existsSync(privateKeyPath)) { + app.log.error({ privateKeyPath }, 'LICENSE_PRIVATE_KEY_PATH missing — cannot sign license. Manual issuance required.'); + return; + } + const privateKeyPem = fs.readFileSync(privateKeyPath, 'utf8'); + const wire = signLicense(payload, privateKeyPem); + + // Persist to issued_licenses table for audit / renewal. + try { + const pool = require('../db/pool'); + await pool.query( + `INSERT INTO issued_licenses + (license_id, org_id, customer_name, customer_email, tier, + issued_at, expires_at, stripe_session_id, stripe_customer_id, + stripe_subscription_id, wire_format, machine_bindings_count) + VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12) + ON CONFLICT (license_id) DO UPDATE SET wire_format = EXCLUDED.wire_format`, + [ + payload.licenseId, orgId, customerName, customerEmail, tier, + issuedAt, expiresAt, session.id, session.customer, + session.subscription, wire, machineIds.length, + ], + ); + } catch (err) { + app.log.error({ err: err.message }, 'failed to persist issued license — license still emailed'); + } + + // Email the license file to the customer. 
+ await emailLicenseFile(app, config, { customerEmail, customerName, tier, wire, payload }); + + app.log.info({ licenseId: payload.licenseId, customerEmail, tier }, 'license issued via Stripe checkout'); +} + +async function handleInvoicePaid(app, config, invoice) { + // Renewal: extend an existing license's expiry by another billing + // period. Look up by subscription_id and re-issue. + app.log.info({ subscription: invoice.subscription }, 'invoice.paid (renewal) — re-issue path TODO'); + // TODO: lookup by subscription_id, re-issue with new expiresAt, email. +} + +async function handleSubscriptionCanceled(app, config, subscription) { + // Customer canceled — mark the license as non-renewing. The current + // license file is still valid until its expiresAt; we just stop + // auto-renewing on the next billing cycle. + app.log.info({ subscription: subscription.id }, 'customer.subscription.deleted'); + try { + const pool = require('../db/pool'); + await pool.query( + 'UPDATE issued_licenses SET canceled_at = NOW() WHERE stripe_subscription_id = $1', + [subscription.id], + ); + } catch (err) { + app.log.error({ err: err.message }, 'failed to mark license canceled'); + } +} + +/** + * Best-effort email of the license file as an attachment. Uses nodemailer + * if SMTP is configured; otherwise logs the wire string so an operator + * can manually retrieve it from the application log. + */ +async function emailLicenseFile(app, config, { customerEmail, customerName, tier, wire, payload }) { + if (!config.SMTP_HOST || !config.SMTP_FROM) { + app.log.warn( + { customerEmail, licenseId: payload.licenseId }, + 'SMTP not configured; license must be sent manually. 
License string follows in DEBUG log.', + ); + app.log.debug({ wire }, 'license wire string'); + return; + } + try { + const nodemailer = require('nodemailer'); + const transport = nodemailer.createTransport({ + host: config.SMTP_HOST, port: config.SMTP_PORT || 587, + secure: !!config.SMTP_SECURE, + auth: config.SMTP_USER ? { user: config.SMTP_USER, pass: config.SMTP_PASSWORD } : undefined, + }); + await transport.sendMail({ + from: config.SMTP_FROM, + to: customerEmail, + subject: `Your TransTrack license — ${tier}`, + text: [ + `Hi ${customerName},`, + '', + `Thank you for your TransTrack subscription. Your license file is attached.`, + '', + `To activate:`, + ` 1. Open TransTrack on each licensed workstation.`, + ` 2. Sign in as an administrator.`, + ` 3. Go to Settings → License.`, + ` 4. Paste the contents of the attached file and click "Activate license".`, + '', + `Need help? Reply to this email or contact support@transtrack.health.`, + '', + `— TransTrack`, + ].join('\n'), + attachments: [{ + filename: `${payload.licenseId}.lic`, + content: wire, + contentType: 'text/plain', + }], + }); + } catch (err) { + app.log.error({ err: err.message, customerEmail }, 'failed to email license — fall back to manual delivery'); + } +} + +module.exports = billingRoutes; From 6c182a61905a8c9660e8f1c6c969813ff5e15884 Mon Sep 17 00:00:00 2001 From: NeuroKoder3 Date: Thu, 14 May 2026 21:54:35 -0500 Subject: [PATCH 4/6] feat: real licensing system, field encryption, desktop SSO, bootstrap-token hardening Closes critical-tier items C-1, C-6, C-8, and C-9 from the production readiness audit. These four are grouped into one commit because they share several touched files (auth.cjs, entities.cjs, preload.cjs, migrations.cjs, package.json, localClient.js) and splitting them at the file level would create commits that do not compile in isolation. 
================================================================ C-1: Ed25519 signed-license activation system ================================================================ Replaces the stubbed electron/license/manager.cjs (which previously hard-coded "fully licensed") with a real implementation: electron/license/ machineId.cjs stable per-install fingerprint + HKDF binding hash; resists casual key-sharing publisherPublicKey.cjs embedded publisher pubkey (env-overridable at build time for production rotation), carries LICENSE_PROTOCOL_VERSION issuance.cjs LIC1.* wire format, Ed25519 sign + verify, strict schema validation verifier.cjs orchestrates: signature -> protocol version -> expiry (with 14-day soft-expiry grace) -> machine-binding check storage.cjs license file at userData/license.dat, 0o600; 30-day trial state machine that does NOT reset on reinstall manager.cjs public API (preserves the surface the rest of the app already calls): trial, trial_expired, active, in_grace, invalid Issuance + activation surface: scripts/license-keypair.mjs one-time publisher keypair gen scripts/issue-license.mjs CLI to sign a customer license electron/ipc/handlers/license.cjs IPC: getInfo, getMachineId, activate, remove, checkFeature, checkLimit (admin-gated mutators) electron/preload.cjs renderer bridge under window.electronAPI.license src/api/localClient.js api.license.* + browser-dev mock src/pages/License.jsx full activation UI: machine ID copy, license paste-and-activate, remove, trial countdown banner src/pages.config.js + Sidebar.jsx wires the new admin page Enforcement: electron/ipc/handlers/entities.cjs entity:create now consults the manager on Patient and User creation; refuses past the licensed cap; reverts to read- only after trial_expired. 
Docs + tests: docs/LICENSING.md operator guide (issuance, rotation, error codes, threat model caveats) tests/license.test.cjs 20 tests: sign/verify, tampering, expiry, in-grace, machine binding, activation persistence, trial lifecycle, limit enforcement, feature gating .gitignore excludes keys/ so the Ed25519 private key generated by the keypair script cannot be committed ================================================================ C-6: Field-level encryption of EHR API keys ================================================================ The ehr_integrations.api_key_encrypted column previously stored raw plaintext credentials despite its name. Replaced with AES-256-GCM field encryption: electron/services/secretEncryption.cjs HKDF-SHA256 subkeys per column from a 32-byte master persisted under userData (safeStorage-wrapped when available, mode 0o600 otherwise). Wire format: enc:v1:<iv>:<ciphertext>. Idempotent (does not double-encrypt), and transparently passes legacy plaintext through decryptField() so the migration is forward-compatible. electron/ipc/handlers/entities.cjs applyEncryptionToWrite() encrypts on insert/update; the __SET__ sentinel preserves an existing credential when the renderer round-trips a redacted form. redactSecretsForRenderer() ensures the cleartext never leaves the main process. electron/functions/index.cjs pushToEHR() now decrypts via decryptField() before adding the Authorization header. Corrupt ciphertext fails closed with a clear "re-enter the API key" message. electron/database/migrations.cjs (v10) encrypt_legacy_ehr_api_keys: re-encrypts every existing plaintext row in place. If encryption is unavailable (headless test envs), nulls the column rather than leaving plaintext. tests/secretEncryption.test.cjs 10 tests: round-trips, IV randomness, idempotency, legacy pass-through, tampering detection, label/key isolation, cache persistence. 
================================================================ C-8: OIDC desktop SSO with PKCE + system browser ================================================================ electron/auth/oidcDesktop.cjs PKCE S256 flow (no plain, no implicit), random state + nonce, constant-time state comparison, https-only endpoint validation, https-only token exchange, configurable 5-minute pending-flow TTL, single-flight (concurrent starts cancel prior pending). Decodes id_token claims; JWKS signature verification is a documented follow-up (PKCE binding already gates replay). electron/main.cjs Registers transtrack:// custom protocol. Adds single-instance lock + open-url (macOS) + second-instance (Win/Linux) handlers that route transtrack://auth/callback?... to the OIDC module and then to the SSO session finalizer. electron/ipc/handlers/ssoCallback.cjs Final stage: looks up the local user by lowercased email AND sso_enabled=1; refuses if not provisioned. Mints a TransTrack session row, updates last_login, records the OIDC subject for audit correlation. Never exposed as a renderer IPC channel. electron/ipc/handlers/auth.cjs auth:ssoStart / auth:ssoCancel IPC channels. shell.openExternal pushes the IdP authorization URL to the system browser. electron/database/migrations.cjs (v11) add_sso_columns_and_app_settings: sso_enabled + sso_subject on users, generic app_settings k/v table for OIDC issuer + client ID configuration. electron/preload.cjs Bridges window.electronAPI.sso.{start,cancel,onCompleted}. src/pages/Login.jsx "Sign in with your organization (SSO)" button alongside the existing email/password form. Subscribes to the auth:ssoCompleted broadcast and triggers AuthContext.refreshAuth() on success. src/lib/AuthContext.jsx Exposes refreshAuth (= checkAppState) so post-callback components can re-query the session without coupling to internal state. src/api/localClient.js api.sso.* + browser-dev mock. 
docs/SSO_DESKTOP.md Operator guide (Azure AD / Okta / Auth0 / Google setup), threat model, what this is NOT (no SCIM, no group mapping, no IdP sign-out propagation). tests/oidcDesktop.test.cjs 7 tests: PKCE generation, JWT decode, https-only enforcement, startFlow argument validation, callback-without-pending rejection, cancelFlow lifecycle. ================================================================ C-9: Stop leaking the bootstrap admin token to stdout ================================================================ electron/database/init.cjs The first-launch banner used to echo the password file PATH and earlier revisions echoed the password itself. stdout is captured by RMM tooling, journald, PowerShell transcripts, Windows Event Forwarding, and Electron's own log files. The banner now states that the token is in the file and ONLY prints the path. electron/ipc/handlers/auth.cjs auth:changePassword now calls purgeSetupTokenFile() the first time a user rotates out of must_change_password=1. The token file is overwritten with zeros before unlink to defeat naive undelete. Best-effort; never throws from this path. ================================================================ Test commands ================================================================ npm run test:license # 30 tests (encryption + licensing) npm run test:sso # 7 tests (OIDC desktop) ================================================================ Important deployment note (READ BEFORE FIRST RELEASE) ================================================================ electron/license/publisherPublicKey.cjs ships with a DEVELOPMENT Ed25519 public key generated by `npm run license:keypair`. Before cutting a v1.x.0 release that will be sold: 1. Generate a production keypair on an offline workstation: npm run license:keypair -- --force 2. Paste the printed PUBLIC_KEY_BASE64 into publisherPublicKey.cjs 3. Bump LICENSE_PROTOCOL_VERSION from 1 to 2 4. 
Move keys/license/license-private.pem to your offline vault and add the same value to the server as a Docker secret at the path referenced by LICENSE_PRIVATE_KEY_PATH (for the Stripe webhook to be able to sign licenses). The License page surfaces an amber "Development build" warning whenever the dev key is in use, so this cannot silently slip into a customer build. Co-authored-by: Cursor --- .gitignore | 1 + docs/LICENSING.md | 163 +++++++++++ docs/SSO_DESKTOP.md | 152 ++++++++++ electron/auth/oidcDesktop.cjs | 262 +++++++++++++++++ electron/database/init.cjs | 41 +-- electron/database/migrations.cjs | 59 ++++ electron/functions/index.cjs | 21 +- electron/ipc/handlers.cjs | 2 + electron/ipc/handlers/auth.cjs | 58 +++- electron/ipc/handlers/entities.cjs | 93 +++++- electron/ipc/handlers/license.cjs | 73 +++++ electron/ipc/handlers/ssoCallback.cjs | 89 ++++++ electron/license/issuance.cjs | 149 ++++++++++ electron/license/machineId.cjs | 104 +++++++ electron/license/manager.cjs | 358 ++++++++++++++++++++---- electron/license/publisherPublicKey.cjs | 37 +++ electron/license/storage.cjs | 115 ++++++++ electron/license/verifier.cjs | 100 +++++++ electron/main.cjs | 70 +++++ electron/preload.cjs | 24 ++ electron/services/secretEncryption.cjs | 207 ++++++++++++++ package.json | 5 + scripts/issue-license.mjs | 138 +++++++++ scripts/license-keypair.mjs | 72 +++++ src/api/localClient.js | 34 +++ src/components/layout/Sidebar.jsx | 3 +- src/lib/AuthContext.jsx | 6 +- src/pages.config.js | 2 + src/pages/License.jsx | 318 +++++++++++++++++++++ src/pages/Login.jsx | 64 ++++- tests/license.test.cjs | 261 +++++++++++++++++ tests/oidcDesktop.test.cjs | 106 +++++++ tests/secretEncryption.test.cjs | 93 ++++++ 33 files changed, 3198 insertions(+), 82 deletions(-) create mode 100644 docs/LICENSING.md create mode 100644 docs/SSO_DESKTOP.md create mode 100644 electron/auth/oidcDesktop.cjs create mode 100644 electron/ipc/handlers/license.cjs create mode 100644 
electron/ipc/handlers/ssoCallback.cjs create mode 100644 electron/license/issuance.cjs create mode 100644 electron/license/machineId.cjs create mode 100644 electron/license/publisherPublicKey.cjs create mode 100644 electron/license/storage.cjs create mode 100644 electron/license/verifier.cjs create mode 100644 electron/services/secretEncryption.cjs create mode 100644 scripts/issue-license.mjs create mode 100644 scripts/license-keypair.mjs create mode 100644 src/pages/License.jsx create mode 100644 tests/license.test.cjs create mode 100644 tests/oidcDesktop.test.cjs create mode 100644 tests/secretEncryption.test.cjs diff --git a/.gitignore b/.gitignore index 546cabf..2ec6892 100644 --- a/.gitignore +++ b/.gitignore @@ -82,3 +82,4 @@ gitleaks-report.json phi-check.txt epic-keys/ +keys/ diff --git a/docs/LICENSING.md b/docs/LICENSING.md new file mode 100644 index 0000000..215a808 --- /dev/null +++ b/docs/LICENSING.md @@ -0,0 +1,163 @@ +# TransTrack Licensing — Operator's Guide + +This document explains how the TransTrack license system works, how to +issue licenses to customers, how to rotate the publisher keypair, and +what to do when a license verification fails. + +## At a glance + +- **Trust anchor:** an Ed25519 publisher keypair. The *public* key is + baked into every shipping build at + `electron/license/publisherPublicKey.cjs`. The *private* key lives in + `keys/license/license-private.pem` and **must never be committed**. +- **License file:** a single-line string with the prefix `LIC1.` — + base64url payload, dot, base64url signature. +- **Per-customer:** every customer gets a unique signed license that + encodes their org, tier, expiry, user/patient/install limits, feature + flags, and (optionally) the SHA-256 of the machines they're bound to. +- **Trial mode:** with no license file present, the app runs for 30 days + in full-feature trial mode, then locks creation paths until a license + is activated. 
+ +## State machine + +```text + launch + │ + ▼ + ┌─────────────────────────┐ + │ license file present? │ + └──────┬───────────────┬──┘ + yes │ │ no + ▼ ▼ + ┌─────────────┐ ┌──────────────────┐ + │ verify() │ │ trial expired? │ + └─┬───────────┘ └──┬───────────┬───┘ + valid │ invalid no│ │yes + ▼ ▼ ▼ ▼ + active invalid trial trial_expired + (or in_grace) +``` + +| Mode | Reads | Writes | UI banner | +| ---------------- | ----- | ------ | --------------- | +| `active` | ✓ | ✓ | none | +| `in_grace` | ✓ | ✓ | amber renewal | +| `trial` | ✓ | ✓ | blue countdown | +| `trial_expired` | ✓ | ✗ | red, blocks UX | +| `invalid` | ✓ | ✗ | red, blocks UX | + +## Day-one setup (publisher) + +1. Generate the **publisher keypair**: + + ```bash + npm run license:keypair + ``` + +2. Copy the printed `PUBLIC_KEY_BASE64` value into + `electron/license/publisherPublicKey.cjs` (replace the development + key). + +3. Copy `keys/license/license-private.pem` to an **offline** location: + - YubiKey / hardware security module (preferred), OR + - encrypted USB drive in a fire safe (acceptable), OR + - password-manager vault with TOTP-protected access (minimum). + +4. **Never** commit `keys/license/` — it is already in `.gitignore`. + +## Issuing a license to a customer + +```bash +npm run license:issue -- \ + --private-key keys/license/license-private.pem \ + --customer-name "Cleveland Clinic" \ + --customer-email "it.admin@ccf.org" \ + --org-id "ccf-2026" \ + --tier enterprise \ + --expires 2027-12-31 \ + --max-patients 5000 \ + --max-users 100 \ + --max-installations 5 \ + --features all \ + --machines a1b2c3...,d4e5f6... 
\ + --out licenses/ccf-2027.lic +``` + +| Flag | Required | Notes | +| --------------------- | -------- | --------------------------------------------------------------------- | +| `--private-key` | yes | path to the Ed25519 private PEM | +| `--customer-name` | yes | human-readable customer name | +| `--customer-email` | yes | billing / contact email | +| `--org-id` | yes | stable, customer-unique slug; appears in every audit row | +| `--tier` | yes | `evaluation` / `starter` / `professional` / `enterprise` | +| `--expires` | yes | ISO date or `YYYY-MM-DD` | +| `--maintenance-expires` | no | defaults to `--expires`; set later for support-only renewals | +| `--max-patients` | yes | use `-1` for unlimited | +| `--max-users` | yes | use `-1` for unlimited | +| `--max-installations` | yes | informational unless `--machines` is set | +| `--features` | no | `all` (default) or comma-separated `FEATURES` flags | +| `--machines` | no | comma-separated *raw* machine IDs; omit for site licenses | +| `--out` | yes | output path | + +The customer activates by pasting the file's contents into +**Settings → License → Activate**, or running: + +```bash +# In the renderer console (Dev menu): +await window.electronAPI.license.activate(LIC1_STRING) +``` + +## Verifying a license out-of-band + +```bash +node -e " +const { verifyLicense } = require('./electron/license/issuance.cjs'); +const { PUBLIC_KEY_BASE64 } = require('./electron/license/publisherPublicKey.cjs'); +const fs = require('fs'); +const wire = fs.readFileSync(process.argv[1], 'utf8').trim(); +console.log(verifyLicense(wire, PUBLIC_KEY_BASE64)); +" path/to/customer.lic +``` + +## Diagnosing a failed activation + +When the desktop app reports activation failed, the manager returns a +`code` field with one of: + +| Code | Meaning | Fix | +| ---------------------- | ----------------------------------------------------------------------- | ------------------------------------------------------------ | +| `BAD_SIGNATURE` | The 
signature did not verify against this build's publisher pubkey. | The license was signed under a different key OR the file is corrupt. Re-issue. | +| `PROTOCOL_MISMATCH` | License `protocolVersion` ≠ this build's `LICENSE_PROTOCOL_VERSION`. | Customer needs to update their build OR you re-issue against the old protocol. | +| `EXPIRED` | License is past `expiresAt` + grace window. | Renew via `license:issue`. | +| `NOT_BOUND_TO_MACHINE` | This machine's fingerprint is not in `machineBindings`. | Get the machine ID from Settings → License → This Machine and re-issue. | + +## Key rotation + +Rotating the publisher key invalidates **every** in-the-wild license. +Procedure: + +1. Generate a new keypair with `--force`. +2. Bump `LICENSE_PROTOCOL_VERSION` in `publisherPublicKey.cjs`. +3. Re-issue every active customer license against the new private key. +4. Cut a new release build (`v1.4.0` or similar). +5. Push the update to all customers via the auto-updater. Their existing + `LIC1.` strings will fail with `PROTOCOL_MISMATCH` against the old + build and `BAD_SIGNATURE` against the new build, so they have to + activate the newly-issued file. +6. After everyone is migrated, archive the old private key (do **not** + destroy it for at least 7 years — audit may require proving + provenance of historical licenses). + +## What this system is NOT + +- It is **not** a hardware lock. The machine binding is a fingerprint + hash, not a TPM-backed attestation; a determined attacker who controls + both the source license file and the target machine can replicate the + binding. The point is to raise friction high enough that casual + key-sharing fails and an audit catches the rest. +- It is **not** a phone-home anti-piracy DRM. Activation happens + entirely offline. We do not contact a remote server during verify. +- It is **not** a substitute for the EULA. The EULA defines what the + customer is *allowed* to do; the license file enforces what the + software *helps* them do. 
diff --git a/docs/SSO_DESKTOP.md b/docs/SSO_DESKTOP.md new file mode 100644 index 0000000..3576492 --- /dev/null +++ b/docs/SSO_DESKTOP.md @@ -0,0 +1,152 @@ +# TransTrack Desktop SSO — Operator's Guide + +The TransTrack desktop client supports **OIDC** (OpenID Connect) sign-in +via the user's enterprise identity provider, using the system browser +and PKCE. **SAML** is supported on the server side only; if your +deployment is server-backed, customers can use SAML through the API +server — see `docs/SAML.md`. + +## Supported identity providers + +The OIDC flow is standards-compliant, so any OIDC-conformant IdP works: + +- Microsoft Entra ID (Azure AD) +- Okta +- Google Workspace +- Auth0 +- Ping Identity +- Keycloak +- ADFS 2016+ in OIDC mode + +## How the flow works + +``` +┌──────────────┐ 1. start ┌──────────────────────┐ +│ TransTrack │ ───────────────────────► │ Main process │ +│ Renderer │ │ oidcDesktop.cjs │ +└──────┬───────┘ └──────────┬───────────┘ + │ │ 2. PKCE + state + │ │ 3. shell.openExternal() + │ ▼ + │ ┌──────────────────────┐ + │ │ System browser │ + │ │ → IdP authorize │ + │ └──────────┬───────────┘ + │ │ 4. user logs in + │ transtrack://auth/callback?code=... │ + │ (OS dispatches via protocol) │ + │ ▼ + │ ┌──────────────────────┐ + │ │ Main process │ + │ │ - exchange code │ + │ │ - verify nonce │ + │ │ - lookup local user │ + │ │ - mint session │ + │ └──────────┬───────────┘ + │ │ + │ auth:ssoCompleted broadcast │ + │ ◄───────────────────────────────────────────┘ + │ + ▼ + refreshAuth() +``` + +## Customer setup (per organization) + +### 1. 
Register TransTrack as an OIDC client with your IdP + +For most IdPs you need to create an **application** (or "client") with: + +- **Application type:** Native / Desktop application +- **Redirect URI (sign-in):** `transtrack://auth/callback` +- **Grant type:** Authorization Code + PKCE +- **Token endpoint authentication:** None (PKCE-based) +- **Scopes:** `openid email profile` + +Copy the **Issuer URL** and the **Client ID**. + +#### Azure AD example +- App registrations → New registration → Public client / native +- Redirect URI: `transtrack://auth/callback` +- API permissions → Microsoft Graph → `openid`, `email`, `profile` +- Issuer: `https://login.microsoftonline.com/<tenant-id>/v2.0` + +#### Okta example +- Applications → Create App Integration → OIDC, Native Application +- Sign-in redirect URI: `transtrack://auth/callback` +- Grant type: Authorization Code + PKCE +- Issuer: `https://<your-okta-domain>/oauth2/default` + +### 2. Configure TransTrack + +A TransTrack administrator opens **Settings → SSO** and enters: + +- **OIDC Issuer URL** (from step 1) +- **Client ID** (from step 1) + +These are persisted in the SQLite `app_settings` table. + +### 3. Provision SSO-enabled local users + +For every employee who should be able to sign in via SSO, an admin +creates (or updates) a local user with: + +- **Email:** must match the `email` claim returned by the IdP +- **`sso_enabled = 1`** (set via the admin UI or SQL) + +> **Why a local user is still required:** TransTrack issues sessions +> against the local `users` table. The IdP identity is a *trust anchor* +> for authentication, but the user's role, organization, and audit +> identity all live locally. This also lets you provision SSO without +> giving every Active-Directory user implicit access to PHI. + +If a user attempts SSO sign-in without a matching `sso_enabled = 1` +local row, the flow aborts with: + +> No SSO-enabled local account for *email*. Ask your administrator to +> provision the user with sso_enabled=1. 
+ +## Verifying the flow + +1. Install TransTrack on a workstation. +2. Configure OIDC issuer + client ID (Settings → SSO). +3. Provision a test user (sso_enabled = 1, email matching IdP). +4. Sign out, return to the login screen. +5. Click **Sign in with your organization (SSO)**. +6. The system browser opens to your IdP. Authenticate. +7. The IdP redirects to `transtrack://auth/callback?...`. The OS + dispatches this to the running TransTrack process; the login page + automatically transitions to the dashboard. + +If anything fails the login page surfaces a precise error message +(`State mismatch`, `IdP returned error: access_denied`, etc.) — these are +the same strings used in the unit tests at `tests/oidcDesktop.test.cjs`. + +## Security notes + +- **PKCE S256** is the only supported challenge method — no `plain`, + no implicit flow. +- The `state` and `nonce` parameters are random 24-byte values bound to + an in-memory pending-flow record; only one flow can be pending at a + time, and constant-time comparison defends against timing side + channels. +- The TLS-protected token exchange + PKCE binding mean an attacker who + intercepts the redirect URL **cannot** complete the exchange without + the code_verifier, which never leaves the main process. +- **Future hardening (planned):** verify the `id_token` JWT signature + against the IdP's JWKS. PKCE already gates replay, but signature + verification adds defense-in-depth against a compromised IdP-side + attestation. This is tracked as a follow-up in the SSO roadmap. +- The flow does **not** automatically create local users from the IdP + directory. SCIM-style provisioning is intentionally out of scope here + — provisioning happens via the existing TransTrack admin UI. + +## What this is NOT + +- This is not a **federated authorization** system — TransTrack roles + are stored locally, not derived from IdP groups. (We can add SCIM in + a future release if customers ask for it.) 
+- This is not a **session bridge** — sign-out in TransTrack does NOT + sign the user out of their IdP. That's the OS / IdP's responsibility. +- This is not a substitute for **MFA enforcement at the IdP**. We + recommend customers enforce MFA at the IdP for any SSO-enabled user. diff --git a/electron/auth/oidcDesktop.cjs b/electron/auth/oidcDesktop.cjs new file mode 100644 index 0000000..6027d52 --- /dev/null +++ b/electron/auth/oidcDesktop.cjs @@ -0,0 +1,262 @@ +/** + * TransTrack — OIDC desktop SSO via system browser + PKCE. + * + * Flow: + * 1. Renderer calls `auth:ssoStart` → main process generates a PKCE + * verifier + challenge and a random state, builds the IdP + * authorization URL, and opens it in the system browser via + * `shell.openExternal(url)`. + * 2. The IdP authenticates the user and redirects to + * `transtrack://auth/callback?code=...&state=...`. The OS dispatches + * this URL to the running TransTrack instance via the registered + * protocol handler (see electron/main.cjs). + * 3. main.cjs's protocol handler calls `completeFlow(callbackUrl)` + * here, which exchanges the code at the IdP token endpoint, + * validates the ID token, extracts the email claim, and looks up + * the matching local user in the SQLite DB. + * 4. On success we mint a TransTrack session via shared.cjs (same + * session shape as password login) and notify the renderer via + * a one-shot 'auth:ssoCompleted' event. + * + * SECURITY POSTURE + * + * - PKCE S256 mandatory. No support for the deprecated `plain` method. + * - The state parameter is a 32-byte random value bound to the in-memory + * pending-flow record; mismatched state aborts the flow. + * - Only one flow can be pending at a time per main-process lifetime. + * Concurrent starts cancel the prior pending flow. + * - Token requests use HTTPS only; the http: scheme is rejected. + * - We do not implement the deprecated implicit flow. 
+ * - The local user lookup matches by email AND requires `sso_enabled=1` + * on the user row, so a stolen IdP cookie cannot escalate into an + * account that hasn't been explicitly provisioned for SSO. + */ + +'use strict'; + +const crypto = require('crypto'); +const { URL, URLSearchParams } = require('url'); + +const STATE_TTL_MS = 5 * 60 * 1000; +const HTTP_TIMEOUT_MS = 15_000; +const SUPPORTED_RESPONSE_TYPES = new Set(['code']); + +let _pendingFlow = null; + +/** + * In-memory record of the flow currently waiting for a callback. We + * store it module-scoped (not in the renderer or DB) so a malicious or + * crashed renderer cannot resurrect a stale flow. + */ +function _setPending(flow) { _pendingFlow = flow; } +function _clearPending() { _pendingFlow = null; } +function _peekPending() { return _pendingFlow; } + +function _base64url(buf) { return Buffer.from(buf).toString('base64url'); } + +function _generatePkce() { + const verifier = _base64url(crypto.randomBytes(32)); + const challenge = _base64url( + crypto.createHash('sha256').update(verifier).digest() + ); + return { verifier, challenge }; +} + +function _isHttpsUrl(u) { + try { return new URL(u).protocol === 'https:'; } catch { return false; } +} + +/** + * Discover the IdP endpoints from a .well-known/openid-configuration URL. 
+ */ +async function _discover(issuer) { + if (!_isHttpsUrl(issuer)) throw new Error('OIDC issuer must be https'); + const url = issuer.replace(/\/$/, '') + '/.well-known/openid-configuration'; + const ac = new AbortController(); + const t = setTimeout(() => ac.abort(), HTTP_TIMEOUT_MS); + try { + const r = await fetch(url, { signal: ac.signal }); + if (!r.ok) throw new Error(`Issuer discovery failed: HTTP ${r.status}`); + const meta = await r.json(); + if (!meta.authorization_endpoint || !meta.token_endpoint) { + throw new Error('Discovery document missing required endpoints'); + } + if (!_isHttpsUrl(meta.authorization_endpoint) || !_isHttpsUrl(meta.token_endpoint)) { + throw new Error('IdP endpoints must be https'); + } + return meta; + } finally { clearTimeout(t); } +} + +/** + * Begin a new SSO flow. Returns the authorization URL the caller (main + * process) is expected to open via shell.openExternal. + * + * @param {object} cfg + * @param {string} cfg.issuer OIDC issuer (e.g. "https://login.microsoftonline.com//v2.0") + * @param {string} cfg.clientId IdP client ID registered for this desktop app + * @param {string[]} [cfg.scopes] defaults to ['openid','email','profile'] + * @param {string} [cfg.redirectUri] defaults to 'transtrack://auth/callback' + */ +async function startFlow(cfg) { + if (!cfg || !cfg.issuer || !cfg.clientId) { + throw new Error('startFlow requires { issuer, clientId }'); + } + const meta = await _discover(cfg.issuer); + const { verifier, challenge } = _generatePkce(); + const state = _base64url(crypto.randomBytes(24)); + const nonce = _base64url(crypto.randomBytes(24)); + const redirectUri = cfg.redirectUri || 'transtrack://auth/callback'; + const scopes = (cfg.scopes && cfg.scopes.length ? 
cfg.scopes : ['openid', 'email', 'profile']).join(' '); + + const params = new URLSearchParams({ + response_type: 'code', + client_id: cfg.clientId, + redirect_uri: redirectUri, + scope: scopes, + state, + nonce, + code_challenge: challenge, + code_challenge_method: 'S256', + }); + + const authorizationUrl = `${meta.authorization_endpoint}?${params.toString()}`; + + _setPending({ + issuer: cfg.issuer, + clientId: cfg.clientId, + redirectUri, + verifier, + state, + nonce, + meta, + createdAt: Date.now(), + }); + // Auto-expire stale pending state. + setTimeout(() => { + const p = _peekPending(); + if (p && Date.now() - p.createdAt >= STATE_TTL_MS) _clearPending(); + }, STATE_TTL_MS + 1000).unref?.(); + + return { authorizationUrl, state }; +} + +/** + * Complete the SSO flow given the callback URL the OS handed us. Returns + * the parsed payload `{ email, name, sub, idTokenClaims }`. The caller + * (auth handler) is responsible for the final step of locating the + * matching local user and creating a session. + */ +async function completeFlow(callbackUrl) { + const pending = _peekPending(); + if (!pending) throw new Error('No pending SSO flow'); + if (Date.now() - pending.createdAt > STATE_TTL_MS) { + _clearPending(); + throw new Error('SSO flow expired; please try again'); + } + + const url = new URL(callbackUrl); + const code = url.searchParams.get('code'); + const stateBack = url.searchParams.get('state'); + const error = url.searchParams.get('error'); + if (error) { + _clearPending(); + throw new Error('IdP returned error: ' + error + (url.searchParams.get('error_description') ? ' — ' + url.searchParams.get('error_description') : '')); + } + if (!code || !stateBack) { + _clearPending(); + throw new Error('Callback missing code or state'); + } + // Constant-time compare to prevent timing-attack state recovery (overkill + // for short random strings but cheap). 
+ const stateA = Buffer.from(pending.state); + const stateB = Buffer.from(stateBack); + if (stateA.length !== stateB.length || !crypto.timingSafeEqual(stateA, stateB)) { + _clearPending(); + throw new Error('State mismatch — possible CSRF; flow aborted'); + } + + // Token exchange. + const ac = new AbortController(); + const t = setTimeout(() => ac.abort(), HTTP_TIMEOUT_MS); + let tokenResp; + try { + const r = await fetch(pending.meta.token_endpoint, { + method: 'POST', + headers: { 'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json' }, + body: new URLSearchParams({ + grant_type: 'authorization_code', + code, + redirect_uri: pending.redirectUri, + client_id: pending.clientId, + code_verifier: pending.verifier, + }).toString(), + signal: ac.signal, + }); + if (!r.ok) { + const body = await r.text(); + throw new Error(`Token endpoint returned ${r.status}: ${body.slice(0, 300)}`); + } + tokenResp = await r.json(); + } finally { clearTimeout(t); } + + _clearPending(); + + if (!tokenResp.id_token) throw new Error('Token response missing id_token'); + + const idTokenClaims = _decodeJwtPayload(tokenResp.id_token); + if (idTokenClaims.iss && idTokenClaims.iss.replace(/\/$/, '') !== pending.issuer.replace(/\/$/, '')) { + throw new Error('id_token issuer does not match configured issuer'); + } + if (idTokenClaims.nonce && idTokenClaims.nonce !== pending.nonce) { + throw new Error('id_token nonce mismatch'); + } + if (idTokenClaims.exp && idTokenClaims.exp * 1000 < Date.now()) { + throw new Error('id_token expired'); + } + + return { + email: idTokenClaims.email || idTokenClaims.preferred_username, + name: idTokenClaims.name || (idTokenClaims.given_name ? `${idTokenClaims.given_name} ${idTokenClaims.family_name || ''}`.trim() : null), + sub: idTokenClaims.sub, + idTokenClaims, + rawTokens: tokenResp, + }; +} + +/** + * NOTE on id_token validation: we decode but do not yet verify the JWT + * signature here. 
PKCE binds the token to the start-of-flow request, and + * the TLS-protected token endpoint exchange is mutually-authenticated + * with the IdP, so id_token replay from an external party is already + * gated. For defense-in-depth, the next iteration of this module should + * fetch the IdP's JWKS from the discovery document and verify the JWT + * signature; that requires either pulling in `jose` as a dep or writing + * an Ed25519 / RS256 verifier here. Tracked as a follow-up in + * docs/SSO_DESKTOP.md. + */ +function _decodeJwtPayload(jwt) { + const parts = jwt.split('.'); + if (parts.length !== 3) throw new Error('Malformed JWT'); + const json = Buffer.from(parts[1], 'base64url').toString('utf8'); + return JSON.parse(json); +} + +/** + * Cancel any in-flight SSO flow. Used when the user closes the activation + * page or signs out. + */ +function cancelFlow() { + _clearPending(); +} + +module.exports = { + startFlow, + completeFlow, + cancelFlow, + // Test seams: + _peekPending, + _clearPending, + _generatePkce, + _decodeJwtPayload, +}; diff --git a/electron/database/init.cjs b/electron/database/init.cjs index 32f957f..684dea3 100644 --- a/electron/database/init.cjs +++ b/electron/database/init.cjs @@ -681,26 +681,35 @@ async function seedDefaultData(defaultOrgId) { now ); - // First-launch banner — printed in every environment, not just dev, so a - // production operator installing the MSI/DMG can see the token once. - console.log(''); - console.log('================================================================'); - console.log(' TransTrack — first-launch administrator setup'); - console.log('================================================================'); - console.log(' Account : admin@transtrack.local'); - console.log(' Source : ' + passwordSource); + // First-launch banner — operator-visible. 
We intentionally NEVER echo the + // token itself to stdout (which is captured by RMM tools, journald, + // PowerShell transcripts, Windows Event Forwarding, and Electron's own + // log files) — only the file path. The token file lives at mode 0o600 + // on POSIX and is auto-deleted on first successful password rotation + // (see auth:changePassword in electron/ipc/handlers/auth.cjs). + const banner = [ + '', + '================================================================', + ' TransTrack — first-launch administrator setup', + '================================================================', + ' Account : admin@transtrack.local', + ' Source : ' + passwordSource, + ]; if (setupTokenFilePath) { - console.log(' Token : (see file below)'); - console.log(' File : ' + setupTokenFilePath); - console.log(' (mode 0o600 on POSIX; ACL inherited on Windows)'); + banner.push(' Token : (written to file; not echoed to stdout)'); + banner.push(' File : ' + setupTokenFilePath); + banner.push(' (mode 0o600 on POSIX; auto-deleted after first password change)'); } else if (envPassword) { - console.log(' Token : (supplied by env; not echoed)'); + banner.push(' Token : (supplied by env; not echoed)'); } else { - console.log(' Token : (could not persist to file; set TRANSTRACK_INITIAL_ADMIN_PASSWORD env and restart)'); + banner.push(' Token : (could not persist; set TRANSTRACK_INITIAL_ADMIN_PASSWORD and restart)'); } - console.log(' Must change password on first sign-in: yes'); - console.log('================================================================'); - console.log(''); + banner.push(' Must change password on first sign-in: yes'); + banner.push('================================================================'); + banner.push(''); + // Write the banner only to stdout for the user who launched the process; + // do NOT route it through the structured logger which may ship to a SIEM. 
+ for (const line of banner) process.stdout.write(line + '\n'); // Create default priority weights for this organization const weightsId = uuidv4(); diff --git a/electron/database/migrations.cjs b/electron/database/migrations.cjs index b04ce1d..5fe8337 100644 --- a/electron/database/migrations.cjs +++ b/electron/database/migrations.cjs @@ -412,6 +412,65 @@ const MIGRATIONS = [ `); }, }, + { + version: 11, + name: 'add_sso_columns_and_app_settings', + description: 'Per-user SSO opt-in flag, OIDC subject correlation, and a generic app_settings k/v table for SSO configuration', + rollbackSql: null, + up(db) { + const userCols = db.prepare("PRAGMA table_info(users)").all().map(c => c.name); + if (!userCols.includes('sso_enabled')) { + db.exec("ALTER TABLE users ADD COLUMN sso_enabled INTEGER NOT NULL DEFAULT 0"); + } + if (!userCols.includes('sso_subject')) { + db.exec("ALTER TABLE users ADD COLUMN sso_subject TEXT"); + } + db.exec(` + CREATE TABLE IF NOT EXISTS app_settings ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL, + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_by TEXT + ); + CREATE INDEX IF NOT EXISTS idx_users_sso_subject ON users(sso_subject) WHERE sso_subject IS NOT NULL; + `); + }, + }, + { + version: 10, + name: 'encrypt_legacy_ehr_api_keys', + description: 'Re-encrypt any plaintext EHR integration credentials in place (AES-256-GCM)', + rollbackSql: null, + up(db) { + // Lazily require so unit tests of unrelated migrations don't pull in + // the secret-encryption service (which expects userData on disk). 
+ const { encryptField, isEncrypted } = require('../services/secretEncryption.cjs'); + + const tableInfo = db.prepare("PRAGMA table_info(ehr_integrations)").all(); + if (tableInfo.length === 0) return; + const hasColumn = tableInfo.some(c => c.name === 'api_key_encrypted'); + if (!hasColumn) return; + + const rows = db.prepare( + "SELECT id, api_key_encrypted FROM ehr_integrations WHERE api_key_encrypted IS NOT NULL AND api_key_encrypted != ''" + ).all(); + + const upd = db.prepare('UPDATE ehr_integrations SET api_key_encrypted = ? WHERE id = ?'); + for (const row of rows) { + if (isEncrypted(row.api_key_encrypted)) continue; + try { + const ciphertext = encryptField(String(row.api_key_encrypted), `ehr_integrations:${row.id}`); + upd.run(ciphertext, row.id); + } catch { + // If we cannot encrypt (e.g. headless test env without safeStorage), + // null the value out — better to lose the credential than to leave + // plaintext PHI-adjacent secrets in the DB. Admin can re-enter the + // key in Settings. + upd.run(null, row.id); + } + } + }, + }, ]; /** diff --git a/electron/functions/index.cjs b/electron/functions/index.cjs index cf61525..6875330 100644 --- a/electron/functions/index.cjs +++ b/electron/functions/index.cjs @@ -1129,10 +1129,23 @@ async function pushToEHR(params, context) { }; if (integration.api_key_encrypted) { - if (integration.auth_type === 'basic_auth') { - authHeaders['Authorization'] = `Basic ${integration.api_key_encrypted}`; - } else { - authHeaders['Authorization'] = `Bearer ${integration.api_key_encrypted}`; + // Field-level encryption: the column historically held plaintext (legacy + // installs) and now holds AES-256-GCM ciphertext. decryptField() + // transparently passes legacy plaintext through, so this is safe across + // upgrades. NEVER log the decrypted value. 
+ const { decryptField } = require('../services/secretEncryption.cjs'); + let apiKey; + try { + apiKey = decryptField(integration.api_key_encrypted, `ehr:${integration.id}`); + } catch (_e) { + throw new Error('EHR integration credential is corrupt — re-enter the API key in Settings.'); + } + if (apiKey) { + if (integration.auth_type === 'basic_auth') { + authHeaders['Authorization'] = `Basic ${apiKey}`; + } else { + authHeaders['Authorization'] = `Bearer ${apiKey}`; + } } } diff --git a/electron/ipc/handlers.cjs b/electron/ipc/handlers.cjs index 9221b81..bdd0f40 100644 --- a/electron/ipc/handlers.cjs +++ b/electron/ipc/handlers.cjs @@ -22,6 +22,7 @@ const mfaHandlers = require('./handlers/mfa.cjs'); const siemHandlers = require('./handlers/siem.cjs'); const hl7Handlers = require('./handlers/hl7.cjs'); const optnExportHandlers = require('./handlers/optnExport.cjs'); +const licenseHandlers = require('./handlers/license.cjs'); const backupHandler = require('./backupHandler.cjs'); const dataResidency = require('./dataResidency.cjs'); const auditReportHandler = require('./auditReportHandler.cjs'); @@ -137,6 +138,7 @@ function setupIPCHandlers() { siemHandlers.register(); hl7Handlers.register(); optnExportHandlers.register(); + licenseHandlers.register(); backupHandler.register(); dataResidency.register(); auditReportHandler.register(); diff --git a/electron/ipc/handlers/auth.cjs b/electron/ipc/handlers/auth.cjs index 1375234..81dbe11 100644 --- a/electron/ipc/handlers/auth.cjs +++ b/electron/ipc/handlers/auth.cjs @@ -5,7 +5,9 @@ * auth:listUsers, auth:updateUser, auth:deleteUser */ -const { ipcMain } = require('electron'); +const { ipcMain, app, shell } = require('electron'); +const fs = require('fs'); +const path = require('path'); const { v4: uuidv4 } = require('uuid'); const bcrypt = require('bcryptjs'); const { @@ -15,6 +17,28 @@ const { const shared = require('../shared.cjs'); const passwordHistory = require('../../services/passwordHistory.cjs'); const mfa = 
require('../../services/mfa.cjs'); +const oidcDesktop = require('../../auth/oidcDesktop.cjs'); + +/** + * Best-effort deletion of the first-launch admin setup token file. + * Called after a successful password change so the bootstrap credential + * does not linger on disk past its single-use window. + */ +function purgeSetupTokenFile() { + try { + if (!app || typeof app.getPath !== 'function') return; + const tokenPath = path.join(app.getPath('userData'), 'INITIAL_ADMIN_PASSWORD.txt'); + if (fs.existsSync(tokenPath)) { + // Overwrite the file before unlinking to defeat naive undelete. + try { + const size = fs.statSync(tokenPath).size; + const overwrite = Buffer.alloc(Math.max(size, 64), 0); + fs.writeFileSync(tokenPath, overwrite, { mode: 0o600 }); + } catch { /* best effort */ } + fs.unlinkSync(tokenPath); + } + } catch { /* best effort — never throw from this path */ } +} // In-memory pending MFA challenges. Maps challenge_token → { user_id, expires_at, sender_id } const pendingMfa = new Map(); @@ -205,6 +229,29 @@ function register() { ipcMain.handle('auth:isAuthenticated', async () => shared.validateSession()); + // ---- SSO (OIDC) on the desktop -------------------------------------------- + // The renderer asks main to BEGIN a flow; main opens the system browser. + // The actual callback arrives via the custom-protocol handler in main.cjs + // (not via IPC), which finalizes the session and emits an + // 'auth:ssoCompleted' broadcast. + ipcMain.handle('auth:ssoStart', async () => { + const issuer = db.prepare("SELECT value FROM app_settings WHERE key = 'sso_oidc_issuer'").get()?.value; + const clientId = db.prepare("SELECT value FROM app_settings WHERE key = 'sso_oidc_client_id'").get()?.value; + if (!issuer || !clientId) { + throw new Error('OIDC SSO is not configured. 
Administrator must set sso_oidc_issuer and sso_oidc_client_id in app settings.'); + } + const result = await oidcDesktop.startFlow({ issuer, clientId }); + if (shell && typeof shell.openExternal === 'function') { + await shell.openExternal(result.authorizationUrl); + } + return { started: true, state: result.state }; + }); + + ipcMain.handle('auth:ssoCancel', async () => { + oidcDesktop.cancelFlow(); + return { canceled: true }; + }); + ipcMain.handle('auth:register', async (event, userData) => { let defaultOrg = getDefaultOrganization(); const userCount = db.prepare('SELECT COUNT(*) as count FROM users').get(); @@ -267,9 +314,18 @@ function register() { } const hashedPassword = await bcrypt.hash(newPassword, 12); + const wasMustChange = !!user.must_change_password; db.prepare("UPDATE users SET password_hash = ?, must_change_password = 0, password_changed_at = datetime('now'), updated_at = datetime('now') WHERE id = ?").run(hashedPassword, currentUser.id); passwordHistory.recordPassword(currentUser.id, hashedPassword); + // If this user just rotated out of the must_change_password state AND + // they are the seeded administrator, purge the bootstrap token file + // immediately. We do this for any successful rotation off must_change so + // the cleanup is robust against admins who renamed admin@transtrack.local. 
+ if (wasMustChange) { + purgeSetupTokenFile(); + } + shared.logAudit('update', 'User', currentUser.id, null, 'Password changed', currentUser.email, currentUser.role); return { success: true }; }); diff --git a/electron/ipc/handlers/entities.cjs b/electron/ipc/handlers/entities.cjs index f3ec671..3dc47f4 100644 --- a/electron/ipc/handlers/entities.cjs +++ b/electron/ipc/handlers/entities.cjs @@ -9,6 +9,56 @@ const { v4: uuidv4 } = require('uuid'); const { getDatabase } = require('../../database/init.cjs'); const shared = require('../shared.cjs'); const { hasPermission, PERMISSIONS } = require('../../services/accessControl.cjs'); +const { encryptField, isEncrypted } = require('../../services/secretEncryption.cjs'); + +/** + * Columns that hold raw secrets we must transparently encrypt on write. + * The label argument to encryptField scopes the HKDF subkey so per-row + * key rotation is feasible later. + */ +const ENCRYPTED_FIELDS_BY_TABLE = { + ehr_integrations: ['api_key_encrypted'], +}; + +function applyEncryptionToWrite(tableName, entityId, data) { + const encryptedCols = ENCRYPTED_FIELDS_BY_TABLE[tableName]; + if (!encryptedCols) return data; + for (const col of encryptedCols) { + // Sentinel '__SET__' means the renderer is round-tripping a redacted + // payload and does NOT want to overwrite the stored credential. Drop + // the field entirely so the existing column value is preserved. + if (data[col] === '__SET__') { + delete data[col]; + continue; + } + if (data[col] !== undefined && data[col] !== null && data[col] !== '') { + if (!isEncrypted(data[col])) { + data[col] = encryptField(String(data[col]), `${tableName}:${entityId || 'new'}`); + } + } + } + return data; +} + +/** + * Redact encrypted columns before returning entities to the renderer. + * The renderer never needs the cleartext — it only needs to know whether + * a credential is configured. We swap the column value for a sentinel. 
+ */ +function redactSecretsForRenderer(tableName, row) { + if (!row) return row; + const encryptedCols = ENCRYPTED_FIELDS_BY_TABLE[tableName]; + if (!encryptedCols) return row; + const redacted = { ...row }; + for (const col of encryptedCols) { + if (redacted[col]) { + redacted[col] = '__SET__'; + } else { + redacted[col] = null; + } + } + return redacted; +} const ENTITY_PERMISSION_MAP = { Patient: { view: PERMISSIONS.PATIENT_VIEW, create: PERMISSIONS.PATIENT_CREATE, update: PERMISSIONS.PATIENT_UPDATE, delete: PERMISSIONS.PATIENT_DELETE }, @@ -52,9 +102,38 @@ function register() { if (entityName === 'AuditLog') throw new Error('Audit logs cannot be created directly'); + // License enforcement — refuse to create new Patient / User rows once + // the licensed cap is reached. Reads and updates are always allowed + // (this matches the "fail safe, not silently lose data" stance). + if (entityName === 'Patient' || entityName === 'User') { + const licenseManager = require('../../license/manager.cjs'); + const info = licenseManager.getLicenseInfo(); + if (info.mode === 'trial_expired' || info.mode === 'invalid') { + throw new Error( + info.mode === 'trial_expired' + ? 'Your trial period has ended. Please activate a TransTrack license in Settings → License to continue creating records.' + : 'License is invalid. Please contact your administrator. (' + (info.verificationError || 'unknown') + ')' + ); + } + const limitType = entityName === 'Patient' ? 'patients' : 'users'; + // Count existing rows for this org (cheap; SQLite COUNT is O(1) on + // an indexed column for small N). + const tbl = entityName === 'Patient' ? 
'patients' : 'users'; + const { getDatabase } = require('../../database/init.cjs'); + const current = getDatabase().prepare(`SELECT COUNT(*) AS n FROM ${tbl} WHERE org_id = ?`).get(orgId)?.n || 0; + const check = licenseManager.checkLimit(limitType, current); + if (!check.withinLimit) { + throw new Error( + `License limit reached: your tier allows up to ${check.limit} ${limitType}. ` + + `Upgrade your license in Settings → License or contact your account manager.` + ); + } + } + const id = data.id || uuidv4(); delete data.org_id; const safeData = shared.filterToAllowedColumns(tableName, data); + applyEncryptionToWrite(tableName, id, safeData); const entityData = shared.sanitizeForSQLite({ ...safeData, id, org_id: orgId, created_by: currentUser.email }); // console.log(`creating ${entityName}`, Object.keys(entityData)); @@ -80,7 +159,7 @@ function register() { else if (data.patient_name) patientName = data.patient_name; shared.logAudit('create', entityName, id, patientName, `${entityName} created`, currentUser.email, currentUser.role); - return shared.getEntityByIdAndOrg(tableName, id, orgId); + return redactSecretsForRenderer(tableName, shared.getEntityByIdAndOrg(tableName, id, orgId)); }); ipcMain.handle('entity:get', async (event, entityName, id) => { @@ -89,7 +168,7 @@ function register() { enforcePermission(currentUser, entityName, 'view'); const tableName = shared.entityTableMap[entityName]; if (!tableName) throw new Error(`Unknown entity: ${entityName}`); - return shared.getEntityByIdAndOrg(tableName, id, shared.getSessionOrgId()); + return redactSecretsForRenderer(tableName, shared.getEntityByIdAndOrg(tableName, id, shared.getSessionOrgId())); }); ipcMain.handle('entity:update', async (event, entityName, id, data) => { @@ -107,6 +186,7 @@ function register() { const now = new Date().toISOString(); const safeData = shared.filterToAllowedColumns(tableName, data); + applyEncryptionToWrite(tableName, id, safeData); const entityData = shared.sanitizeForSQLite({ 
...safeData, updated_by: currentUser.email, updated_at: now }); const updates = Object.keys(entityData).map(k => `${k} = ?`).join(', '); @@ -119,7 +199,7 @@ function register() { else if (entity.patient_name) patientName = entity.patient_name; shared.logAudit('update', entityName, id, patientName, `${entityName} updated`, currentUser.email, currentUser.role); - return entity; + return redactSecretsForRenderer(tableName, entity); }); ipcMain.handle('entity:delete', async (event, entityName, id) => { @@ -150,7 +230,8 @@ function register() { enforcePermission(currentUser, entityName, 'view'); const tableName = shared.entityTableMap[entityName]; if (!tableName) throw new Error(`Unknown entity: ${entityName}`); - return shared.listEntitiesByOrg(tableName, shared.getSessionOrgId(), orderBy, limit); + const rows = shared.listEntitiesByOrg(tableName, shared.getSessionOrgId(), orderBy, limit); + return rows.map((r) => redactSecretsForRenderer(tableName, r)); }); ipcMain.handle('entity:filter', async (event, entityName, filters, orderBy, limit) => { @@ -194,7 +275,9 @@ function register() { } const rows = db.prepare(query).all(...values); - return rows.map(shared.parseJsonFields); + return rows + .map(shared.parseJsonFields) + .map((r) => redactSecretsForRenderer(tableName, r)); }); } diff --git a/electron/ipc/handlers/license.cjs b/electron/ipc/handlers/license.cjs new file mode 100644 index 0000000..c2c10e8 --- /dev/null +++ b/electron/ipc/handlers/license.cjs @@ -0,0 +1,73 @@ +/** + * TransTrack — License IPC handlers. 
+ * + * Channels: + * license:getInfo -> {LicenseInfo} (always available, even pre-login) + * license:getMachineId -> string (the fingerprint to send to sales for binding) + * license:activate -> {success, ...} (requires admin) + * license:remove -> {success} (requires admin) + * license:checkFeature -> {enabled, reason?} + * license:checkLimit -> {withinLimit, current, limit, remaining} + * + * Authentication: read methods (getInfo, getMachineId, checkFeature, + * checkLimit) are intentionally callable WITHOUT a valid session because + * the renderer needs to display the activation screen before any user + * logs in. Mutating methods (activate, remove) require an admin session. + */ + +'use strict'; + +const { ipcMain } = require('electron'); +const manager = require('../../license/manager.cjs'); +const shared = require('../shared.cjs'); + +function register() { + ipcMain.handle('license:getInfo', () => { + return manager.getLicenseInfo(); + }); + + ipcMain.handle('license:getMachineId', () => { + return manager.getMachineId(); + }); + + ipcMain.handle('license:activate', async (_event, licenseWire) => { + if (!shared.validateSession()) throw new Error('Session expired. Please log in again.'); + const { currentUser } = shared.getSessionState(); + if (!currentUser || currentUser.role !== 'admin') { + throw new Error('Admin access required to activate a license.'); + } + const result = await manager.activateLicense(licenseWire); + if (result.success) { + shared.logAudit('update', 'License', result.orgId || null, null, + `License activated for ${result.tierName || result.tier} tier`, + currentUser.email, currentUser.role); + } else { + shared.logAudit('update', 'License', null, null, + `License activation failed: ${result.error}`, + currentUser.email, currentUser.role); + } + return result; + }); + + ipcMain.handle('license:remove', async (_event) => { + if (!shared.validateSession()) throw new Error('Session expired. 
Please log in again.'); + const { currentUser } = shared.getSessionState(); + if (!currentUser || currentUser.role !== 'admin') { + throw new Error('Admin access required to remove a license.'); + } + manager.removeLicense(); + shared.logAudit('delete', 'License', null, null, 'License removed; reverted to trial mode', + currentUser.email, currentUser.role); + return { success: true }; + }); + + ipcMain.handle('license:checkFeature', (_event, featureFlag) => { + return manager.checkFeature(featureFlag); + }); + + ipcMain.handle('license:checkLimit', (_event, limitType, currentCount) => { + return manager.checkLimit(limitType, currentCount); + }); +} + +module.exports = { register }; diff --git a/electron/ipc/handlers/ssoCallback.cjs b/electron/ipc/handlers/ssoCallback.cjs new file mode 100644 index 0000000..b324145 --- /dev/null +++ b/electron/ipc/handlers/ssoCallback.cjs @@ -0,0 +1,89 @@ +/** + * TransTrack — SSO callback finalizer. + * + * Called by the main-process protocol handler (electron/main.cjs) after + * the OIDC token exchange has returned a verified identity. Responsible + * for: + * 1. Looking up the matching local user (by email, sso_enabled=1) + * 2. Refusing if no such user exists, the user is inactive, or the + * user is not provisioned for SSO + * 3. Minting a TransTrack session row + activating it in shared.cjs + * + * This is intentionally NOT exposed as a renderer-accessible IPC channel + * — the renderer cannot fabricate an OIDC identity to bypass password + * auth. The only callers are the protocol handler and the SSO test + * harness. + */ + +'use strict'; + +const { v4: uuidv4 } = require('uuid'); +const { getDatabase } = require('../../database/init.cjs'); +const shared = require('../shared.cjs'); + +/** + * Mint a session from a verified OIDC identity. 
+ * + * @param {object} identity from electron/auth/oidcDesktop.cjs completeFlow() + * @param {string} identity.email + * @param {string} identity.name + * @param {string} identity.sub OIDC subject claim + * @returns {{success: true, user, sessionId}} on success + * @throws on any policy violation + */ +async function finalizeSso(identity) { + if (!identity || !identity.email) throw new Error('SSO identity missing email claim'); + const db = getDatabase(); + + const user = db.prepare( + "SELECT * FROM users WHERE LOWER(email) = LOWER(?) AND is_active = 1 AND sso_enabled = 1" + ).get(identity.email); + + if (!user) { + throw new Error( + 'No SSO-enabled local account for ' + identity.email + + '. Ask your administrator to provision the user with sso_enabled=1.' + ); + } + + const org = db.prepare('SELECT * FROM organizations WHERE id = ?').get(user.org_id); + if (!org || org.status !== 'ACTIVE') throw new Error('Organization is not active'); + + // Update the OIDC subject claim on the user row if we haven't recorded + // it yet, so we can correlate it for audit purposes. + if (identity.sub) { + db.prepare( + "UPDATE users SET sso_subject = ?, last_login = datetime('now'), updated_at = datetime('now') WHERE id = ?" 
+ ).run(String(identity.sub), user.id); + } else { + db.prepare("UPDATE users SET last_login = datetime('now'), updated_at = datetime('now') WHERE id = ?").run(user.id); + } + + const sessionId = uuidv4(); + const expiresAtDate = new Date(Date.now() + shared.SESSION_DURATION_MS); + db.prepare('INSERT INTO sessions (id, user_id, org_id, expires_at) VALUES (?, ?, ?, ?)').run( + sessionId, user.id, user.org_id, expiresAtDate.toISOString() + ); + + const currentUser = { + id: user.id, + email: user.email, + full_name: user.full_name, + role: user.role, + org_id: user.org_id, + org_name: org.name, + must_change_password: false, // SSO users never see local password prompt + mfa_required: false, // IdP is responsible for MFA + mfa_enrolled: false, + sso: true, + }; + + shared.setSessionState(sessionId, currentUser, expiresAtDate.getTime(), null); + shared.logAudit('login', 'User', user.id, null, + `SSO login via OIDC (subject=${(identity.sub || '').slice(0, 24)})`, + user.email, user.role); + + return { success: true, user: currentUser, sessionId }; +} + +module.exports = { finalizeSso }; diff --git a/electron/license/issuance.cjs b/electron/license/issuance.cjs new file mode 100644 index 0000000..106da2f --- /dev/null +++ b/electron/license/issuance.cjs @@ -0,0 +1,149 @@ +/** + * TransTrack — License signing & verification primitives. + * + * Wire format: + * LIC1.<base64url(payload JSON)>.<base64url(signature)> + * + * The payload schema is documented in docs/LICENSING.md and is validated + * by `validatePayloadShape()` below — every field is type-checked and + * required. 
+ * + * This module is used by: + * - scripts/issue-license.mjs (SIGN — uses the private key) + * - electron/license/verifier.cjs (VERIFY — uses the embedded pub key) + * - tests/license.test.cjs (round-trip) + */ + +'use strict'; + +const crypto = require('crypto'); + +const WIRE_PREFIX = 'LIC1.'; +const SUPPORTED_TIERS = new Set(['evaluation', 'starter', 'professional', 'enterprise']); + +/** + * Validate the *shape* of a license payload (not the signature or expiry — + * that's verifier.cjs's job). Throws on any structural problem. + */ +function validatePayloadShape(p) { + if (!p || typeof p !== 'object') throw new Error('payload must be an object'); + if (typeof p.licenseId !== 'string' || p.licenseId.length < 8) throw new Error('licenseId required'); + if (!p.customer || typeof p.customer !== 'object') throw new Error('customer required'); + if (typeof p.customer.name !== 'string') throw new Error('customer.name required'); + if (typeof p.customer.email !== 'string') throw new Error('customer.email required'); + if (typeof p.customer.orgId !== 'string') throw new Error('customer.orgId required'); + if (!SUPPORTED_TIERS.has(p.tier)) throw new Error('tier must be one of: ' + [...SUPPORTED_TIERS].join(',')); + if (typeof p.issuedAt !== 'string' || isNaN(Date.parse(p.issuedAt))) throw new Error('issuedAt must be ISO-8601'); + if (typeof p.expiresAt !== 'string' || isNaN(Date.parse(p.expiresAt))) throw new Error('expiresAt must be ISO-8601'); + if (Date.parse(p.expiresAt) <= Date.parse(p.issuedAt)) throw new Error('expiresAt must be after issuedAt'); + if (p.maintenanceExpiresAt && (typeof p.maintenanceExpiresAt !== 'string' || isNaN(Date.parse(p.maintenanceExpiresAt)))) { + throw new Error('maintenanceExpiresAt must be ISO-8601 if present'); + } + if (!p.limits || typeof p.limits !== 'object') throw new Error('limits required'); + if (typeof p.limits.maxPatients !== 'number') throw new Error('limits.maxPatients required'); + if (typeof p.limits.maxUsers !== 
'number') throw new Error('limits.maxUsers required'); + if (typeof p.limits.maxInstallations !== 'number') throw new Error('limits.maxInstallations required'); + if (!Array.isArray(p.features)) throw new Error('features must be an array'); + if (p.machineBindings && !Array.isArray(p.machineBindings)) throw new Error('machineBindings must be an array'); + if (typeof p.protocolVersion !== 'number') throw new Error('protocolVersion required'); +} + +function _b64uEncode(buf) { + return Buffer.from(buf).toString('base64url'); +} +function _b64uDecode(s) { + return Buffer.from(s, 'base64url'); +} + +/** + * Sign a license payload with the given Ed25519 private key PEM. + * Returns the wire-format string. + */ +function signLicense(payload, privateKeyPem) { + validatePayloadShape(payload); + const json = JSON.stringify(payload); + const sig = crypto.sign(null, Buffer.from(json, 'utf8'), { + key: privateKeyPem, + format: 'pem', + type: 'pkcs8', + }); + return WIRE_PREFIX + _b64uEncode(json) + '.' + _b64uEncode(sig); +} + +/** + * Verify the wire-format string against the given Ed25519 public key. + * + * `publicKey` may be: + * - a 32-byte raw Ed25519 public key Buffer + * - a base64-encoded 32-byte raw public key string + * - a PEM-encoded SPKI string ("-----BEGIN PUBLIC KEY-----...") + * + * On success, returns the parsed payload object. On any failure (bad + * format, bad signature, malformed payload) throws. 
+ */ +function verifyLicense(wire, publicKey) { + if (typeof wire !== 'string' || !wire.startsWith(WIRE_PREFIX)) { + throw new Error('Not a TransTrack license: bad prefix'); + } + const rest = wire.slice(WIRE_PREFIX.length); + const dot = rest.indexOf('.'); + if (dot < 0) throw new Error('Malformed license: missing signature delimiter'); + const payloadB64 = rest.slice(0, dot); + const sigB64 = rest.slice(dot + 1); + const payloadBytes = _b64uDecode(payloadB64); + const sigBytes = _b64uDecode(sigB64); + + if (sigBytes.length !== 64) throw new Error('Bad signature length'); + + const keyObj = _toKeyObject(publicKey); + + const ok = crypto.verify(null, payloadBytes, keyObj, sigBytes); + if (!ok) throw new Error('Signature verification failed'); + + let parsed; + try { parsed = JSON.parse(payloadBytes.toString('utf8')); } + catch { throw new Error('Payload is not valid JSON'); } + + validatePayloadShape(parsed); + return parsed; +} + +/** + * Accept a public key in raw, base64, or PEM form and return a Node KeyObject. 
+ */ +function _toKeyObject(input) { + if (input instanceof Buffer && input.length === 32) { + return _rawEd25519PubToKey(input); + } + if (typeof input === 'string') { + if (input.includes('BEGIN PUBLIC KEY')) { + return crypto.createPublicKey({ key: input, format: 'pem' }); + } + // Treat as base64 raw 32-byte + const raw = Buffer.from(input, 'base64'); + if (raw.length !== 32) throw new Error('Public key must be 32 raw bytes (base64) or PEM SPKI'); + return _rawEd25519PubToKey(raw); + } + throw new Error('Unsupported public key form'); +} + +/** + * Wrap a 32-byte raw Ed25519 public key into a Node KeyObject by + * constructing the SPKI DER envelope: 30 2A 30 05 06 03 2B 65 70 03 21 00 ‖ key + */ +function _rawEd25519PubToKey(raw32) { + const prefix = Buffer.from([ + 0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70, 0x03, 0x21, 0x00, + ]); + const spki = Buffer.concat([prefix, raw32]); + return crypto.createPublicKey({ key: spki, format: 'der', type: 'spki' }); +} + +module.exports = { + WIRE_PREFIX, + SUPPORTED_TIERS, + signLicense, + verifyLicense, + validatePayloadShape, + _toKeyObject, +}; diff --git a/electron/license/machineId.cjs b/electron/license/machineId.cjs new file mode 100644 index 0000000..626d9e6 --- /dev/null +++ b/electron/license/machineId.cjs @@ -0,0 +1,104 @@ +/** + * TransTrack — Machine fingerprint for license binding. + * + * Produces a stable 32-byte hex identifier for the install. This is NOT + * a strong hardware lock — a determined adversary can defeat it — but + * it raises the friction enough that casual key-sharing fails. 
+ * + * Inputs blended together via SHA-256: + * - OS platform + arch + * - hostname (lowercased) + * - a randomly-generated install UUID persisted under userData + * (rotates if the user blows away userData, which is acceptable + * because that requires a re-activation anyway) + * - MAC addresses of all non-internal NICs, sorted (so the order + * within the OS table doesn't matter) + * + * Anything that would change when the user clones the install to a + * different physical machine (hostname, NIC MACs) contributes; anything + * that flaps every boot (process IDs, RAM size, dynamic IPs) does NOT. + */ + +'use strict'; + +const crypto = require('crypto'); +const fs = require('fs'); +const os = require('os'); +const path = require('path'); + +let _cachedFingerprint = null; + +function _userDataDir() { + if (process.env.TRANSTRACK_USERDATA_DIR) return process.env.TRANSTRACK_USERDATA_DIR; + try { + const { app } = require('electron'); + return app.getPath('userData'); + } catch { + return path.join(process.cwd(), '.transtrack-test-userdata'); + } +} + +function _stableInstallUuid() { + const dir = _userDataDir(); + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }); + const idPath = path.join(dir, '.transtrack-install-uuid'); + if (fs.existsSync(idPath)) { + const v = fs.readFileSync(idPath, 'utf8').trim(); + if (/^[a-f0-9-]{8,}$/i.test(v)) return v; + } + const uuid = crypto.randomUUID(); + fs.writeFileSync(idPath, uuid, { mode: 0o600 }); + try { fs.chmodSync(idPath, 0o600); } catch { /* windows */ } + return uuid; +} + +function _nicMacs() { + const ifs = os.networkInterfaces(); + const macs = []; + for (const name of Object.keys(ifs)) { + for (const ni of ifs[name] || []) { + if (ni.internal) continue; + if (!ni.mac || ni.mac === '00:00:00:00:00:00') continue; + macs.push(ni.mac.toLowerCase()); + } + } + return [...new Set(macs)].sort(); +} + +/** + * Return the stable hex machine ID. 
Cached for the lifetime of the + * process so the cost is paid once. + */ +function getMachineFingerprint() { + if (_cachedFingerprint) return _cachedFingerprint; + const blend = JSON.stringify({ + platform: os.platform(), + arch: os.arch(), + host: (os.hostname() || '').toLowerCase(), + installUuid: _stableInstallUuid(), + macs: _nicMacs(), + }); + _cachedFingerprint = crypto.createHash('sha256').update(blend).digest('hex'); + return _cachedFingerprint; +} + +/** + * Hash one or more machine IDs into the canonical form stored inside + * the signed license payload. We HMAC with a fixed pepper so a stolen + * license file can't be used to enumerate which machines are bound. + */ +function hashForBinding(machineId) { + return crypto.createHmac('sha256', 'transtrack-license-binding-v1') + .update(machineId) + .digest('hex'); +} + +function _resetForTests() { + _cachedFingerprint = null; +} + +module.exports = { + getMachineFingerprint, + hashForBinding, + _resetForTests, +}; diff --git a/electron/license/manager.cjs b/electron/license/manager.cjs index 8cd0dac..8938d1c 100644 --- a/electron/license/manager.cjs +++ b/electron/license/manager.cjs @@ -1,11 +1,35 @@ /** - * TransTrack - License Manager (Stub) + * TransTrack — License Manager. * - * The licensing/activation system has been removed. This file is retained as - * a compatibility shim so existing imports continue to work; everything now - * reports the application as fully licensed with no usage limits. + * This module is the single source of truth for licensing status at + * runtime. It exposes the same surface as the legacy stub so existing + * call sites (`getLicenseInfo`, `checkFeature`, `checkLimit`, + * `isLicenseValid`, `activateLicense`, ...) continue to compile, but + * every method now consults a real, Ed25519-signed license payload or + * falls back to a time-boxed trial. 
+ * + * STATE MACHINE + * + * no-license-file & trial-not-expired -> "trial" (full features, days remaining) + * no-license-file & trial-expired -> "trial_expired" (read-only) + * license-file-present & valid -> "active" + * license-file-present & in-grace -> "in_grace" (full features, renewal warning) + * license-file-present & invalid -> "invalid" (signature failed / machine mismatch / expired past grace) + * + * NEVER caches stale data across `activateLicense()` / `removeLicense()` + * — those methods invalidate the cache so the renderer sees the new + * state immediately. */ +'use strict'; + +const path = require('path'); +const tiers = require('./tiers.cjs'); +const verifier = require('./verifier.cjs'); +const storage = require('./storage.cjs'); +const machineId = require('./machineId.cjs'); +const { LICENSE_PROTOCOL_VERSION, IS_DEV_KEY } = require('./publisherPublicKey.cjs'); + const { BUILD_VERSION, LICENSE_TIER, @@ -14,87 +38,317 @@ const { PAYMENT_CONFIG, MAINTENANCE_CONFIG, getCurrentBuildVersion, - isFeatureEnabled, + isFeatureEnabled: _staticIsFeatureEnabled, getEnabledFeatures, getTierLimits, isEvaluationBuild, getTierDisplayName, -} = require('./tiers.cjs'); +} = tiers; const LICENSE_CONFIG = { - contactEmail: '', - supportEmail: '', - purchaseEmail: '', - evaluationDays: -1, + contactEmail: 'sales@transtrack.health', + supportEmail: 'support@transtrack.health', + purchaseEmail: 'sales@transtrack.health', + evaluationDays: storage.TRIAL_DURATION_DAYS, evaluationGraceDays: 0, - keyPrefixes: {}, - publicKey: '', + protocolVersion: LICENSE_PROTOCOL_VERSION, + isDevelopmentBuild: IS_DEV_KEY, }; -function getMachineId() { return 'local-machine'; } -function getOrganizationId() { return 'LOCAL-ORG'; } +let _cached = null; +function _invalidate() { _cached = null; } + +function _audit(eventType, info, details = {}) { + // Best-effort audit: write to the DB audit table via the shared helper. 
+ // We can't `require('../ipc/shared.cjs')` at the top level without + // creating a cycle (shared.cjs may depend on this module indirectly), so + // we resolve lazily inside the function. + try { + const shared = require('../ipc/shared.cjs'); + if (typeof shared.logAudit === 'function') { + shared.logAudit( + 'system', + 'License', + info?.licenseId || null, + null, + `License event: ${eventType} ${JSON.stringify(details).slice(0, 500)}`, + 'system', + 'system', + ); + } + } catch { /* best-effort */ } +} + +/** + * Compute the runtime license state, with memoization. Pass + * `force=true` to bypass the cache (e.g. immediately after activation). + */ +function _getState(force = false) { + if (_cached && !force) return _cached; + + const wire = storage.loadLicense(); + if (!wire) { + const trial = storage.getTrialState(); + _cached = { + mode: trial.expired ? 'trial_expired' : 'trial', + trial, + payload: null, + verification: null, + }; + return _cached; + } + + const result = verifier.verify(wire); + if (!result.ok) { + _cached = { + mode: 'invalid', + trial: null, + payload: result.payload || null, + verification: result, + }; + return _cached; + } + + _cached = { + mode: result.status === 'in_grace' ? 'in_grace' : 'active', + trial: null, + payload: result.payload, + verification: result, + }; + return _cached; +} + +// ----------------------------------------------------------------------------- +// Public surface — must match the legacy stub's exports. 
+// ----------------------------------------------------------------------------- + +function getMachineId() { return machineId.getMachineFingerprint(); } + +function getOrganizationId() { + const s = _getState(); + return s.payload?.customer?.orgId || 'LOCAL-ORG'; +} + function getOrganizationInfo() { - return { id: 'LOCAL-ORG', name: 'TransTrack', createdAt: new Date().toISOString() }; + const s = _getState(); + if (s.payload?.customer) { + return { + id: s.payload.customer.orgId, + name: s.payload.customer.name, + email: s.payload.customer.email, + createdAt: s.payload.issuedAt, + }; + } + return { id: 'LOCAL-ORG', name: 'TransTrack (Trial)', createdAt: new Date().toISOString() }; +} + +function updateOrganizationInfo(updates) { + // We can't mutate a signed license. This is a read-only view for the UI. + return { ...getOrganizationInfo(), ...updates }; +} + +function isEvaluationMode() { + return _getState().mode === 'trial'; +} + +function getEvaluationStartDate() { + const s = _getState(); + if (s.trial) return new Date(s.trial.startedAt); + return null; +} + +function getEvaluationDaysRemaining() { + const s = _getState(); + if (s.mode === 'trial') return s.trial.daysRemaining; + if (s.mode === 'trial_expired') return 0; + return -1; // not in eval +} + +function isEvaluationExpired() { + return _getState().mode === 'trial_expired'; } -function updateOrganizationInfo(updates) { return { ...getOrganizationInfo(), ...updates }; } -function isEvaluationMode() { return false; } -function getEvaluationStartDate() { return new Date(); } -function getEvaluationDaysRemaining() { return -1; } -function isEvaluationExpired() { return false; } function isInEvaluationGracePeriod() { return false; } -function validateLicenseKeyFormat() { return true; } +function validateLicenseKeyFormat(key) { + return typeof key === 'string' && key.startsWith('LIC1.') && key.length > 100; +} + function validateLicenseData() { return { valid: true }; } -function isLicenseValid() { return 
true; } + +function isLicenseValid() { + const m = _getState().mode; + return m === 'active' || m === 'in_grace' || m === 'trial'; +} + function getMaintenanceStatus() { - return { active: true, expired: false, expiryDate: null, daysRemaining: -1, inGracePeriod: false, showWarning: false }; + const s = _getState(); + if (!s.payload) { + return { active: false, expired: false, expiryDate: null, daysRemaining: -1, inGracePeriod: false, showWarning: false }; + } + const exp = Date.parse(s.payload.maintenanceExpiresAt || s.payload.expiresAt); + const days = Math.ceil((exp - Date.now()) / 86400000); + return { + active: days > 0, + expired: days <= 0, + expiryDate: new Date(exp).toISOString(), + daysRemaining: days, + inGracePeriod: s.mode === 'in_grace', + showWarning: days < 30, + }; } -async function activateLicense() { +/** + * Activate (install) a signed license. The caller passes the LIC1.* wire + * string typically pasted from a customer license file. We verify it + * BEFORE writing it to disk so a bad license cannot lock the user out. + */ +async function activateLicense(licenseWire) { + if (typeof licenseWire !== 'string' || !licenseWire.trim()) { + return { success: false, error: 'Paste the license string from the .lic file provided by your account manager.' 
}; + } + const trimmed = licenseWire.trim(); + const result = verifier.verify(trimmed); + if (!result.ok) { + _audit('activation_failed', null, { code: result.code, message: result.message }); + return { success: false, error: result.message, code: result.code }; + } + storage.storeLicense(trimmed); + _invalidate(); + _audit('activated', result.payload, { + tier: result.payload.tier, + orgId: result.payload.customer.orgId, + expiresAt: result.payload.expiresAt, + }); return { success: true, - tier: LICENSE_TIER.ENTERPRISE, - tierName: 'TransTrack', - orgId: 'LOCAL-ORG', + tier: result.payload.tier, + tierName: getTierDisplayName(result.payload.tier), + orgId: result.payload.customer.orgId, activatedAt: new Date().toISOString(), - maintenanceExpiry: null, - limits: getTierLimits(LICENSE_TIER.ENTERPRISE), - features: getEnabledFeatures(LICENSE_TIER.ENTERPRISE), + maintenanceExpiry: result.payload.maintenanceExpiresAt, + limits: result.payload.limits, + features: result.payload.features, }; } -async function renewMaintenance() { - return { success: true, newExpiry: null }; +async function renewMaintenance(newWire) { + // Renewal works by activating the new license file — we replace the + // installed license entirely. 
+ return activateLicense(newWire); } -function removeLicense() { /* no-op */ } +function removeLicense() { + const before = _getState().payload; + storage.deleteLicense(); + _invalidate(); + _audit('removed', before, {}); +} function getLicenseInfo() { + const s = _getState(); + const base = { + buildVersion: getCurrentBuildVersion(), + machineId: getMachineId(), + isLicensed: isLicenseValid(), + isEvaluation: isEvaluationMode(), + isEvaluationExpired: isEvaluationExpired(), + trial: s.trial, + mode: s.mode, + canActivate: true, + canUpgrade: false, + isDevelopmentBuild: IS_DEV_KEY, + }; + if (s.payload) { + return { + ...base, + tier: s.payload.tier, + tierName: getTierDisplayName(s.payload.tier), + orgId: s.payload.customer.orgId, + orgName: s.payload.customer.name, + customerEmail: s.payload.customer.email, + licenseId: s.payload.licenseId, + issuedAt: s.payload.issuedAt, + expiresAt: s.payload.expiresAt, + maintenanceExpiresAt: s.payload.maintenanceExpiresAt, + limits: s.payload.limits, + features: s.payload.features, + machineBound: Array.isArray(s.payload.machineBindings) && s.payload.machineBindings.length > 0, + verificationStatus: s.verification?.status || 'invalid', + verificationError: s.verification?.ok ? null : s.verification?.message || null, + }; + } + // Trial fallback: full features, time-limited. 
+ const fullLimits = getTierLimits(LICENSE_TIER.ENTERPRISE); return { - buildVersion: BUILD_VERSION.ENTERPRISE, - isLicensed: true, - isEvaluation: false, - tier: LICENSE_TIER.ENTERPRISE, - tierName: 'TransTrack', - orgId: 'LOCAL-ORG', - orgName: 'TransTrack', - limits: getTierLimits(LICENSE_TIER.ENTERPRISE), + ...base, + tier: LICENSE_TIER.EVALUATION, + tierName: 'Trial', + orgId: 'TRIAL', + orgName: 'TransTrack Trial', + licenseId: null, + expiresAt: s.trial?.expiresAt || null, + limits: fullLimits, features: getEnabledFeatures(LICENSE_TIER.ENTERPRISE), - canActivate: false, - canUpgrade: false, + machineBound: false, + verificationStatus: s.mode, + verificationError: null, }; } -function getCurrentTier() { return LICENSE_TIER.ENTERPRISE; } -function checkFeature() { return { enabled: true }; } -function checkLimit(_limitType, currentCount) { - return { withinLimit: true, current: currentCount, limit: -1, remaining: -1 }; +function getCurrentTier() { + const s = _getState(); + if (s.payload) return s.payload.tier; + return LICENSE_TIER.EVALUATION; +} + +function checkFeature(featureFlag) { + const s = _getState(); + if (s.mode === 'trial' || s.mode === 'in_grace' || s.mode === 'active') { + if (s.payload) { + const enabled = Array.isArray(s.payload.features) && s.payload.features.includes(featureFlag); + return { enabled, reason: enabled ? null : 'Feature not included in your license tier.' }; + } + return { enabled: true }; + } + // trial_expired or invalid: only read paths are allowed; refuse mutating features. + return { enabled: false, reason: s.mode === 'trial_expired' + ? 'Trial period has ended. Activate a license to continue using TransTrack.' 
+ : 'License is invalid: ' + (s.verification?.message || 'unknown error') }; } -function logLicenseEvent() { /* no-op */ } -function getLicenseAuditHistory() { return []; } + +function checkLimit(limitType, currentCount) { + const s = _getState(); + let limit = -1; + if (s.payload && s.payload.limits) { + if (limitType === 'patients') limit = s.payload.limits.maxPatients; + else if (limitType === 'users') limit = s.payload.limits.maxUsers; + else if (limitType === 'installations') limit = s.payload.limits.maxInstallations; + } + if (limit < 0) { + return { withinLimit: true, current: currentCount, limit: -1, remaining: -1 }; + } + const remaining = limit - currentCount; + return { withinLimit: currentCount < limit, current: currentCount, limit, remaining: Math.max(0, remaining) }; +} + +function logLicenseEvent(eventType, details) { _audit(eventType, _getState().payload, details || {}); } + +function getLicenseAuditHistory() { + // The audit log lives in the regular audit_logs table and is queryable + // via the standard audit IPC. We don't duplicate that here. 
+ return []; +} + function getPaymentInfo() { return null; } -function getAllPaymentOptions() { return { tiers: [], businessEmail: '', contactEmail: '', manualInstructions: '' }; } +function getAllPaymentOptions() { + return { + tiers: ['starter', 'professional', 'enterprise'], + businessEmail: LICENSE_CONFIG.contactEmail, + contactEmail: LICENSE_CONFIG.contactEmail, + manualInstructions: 'Contact ' + LICENSE_CONFIG.purchaseEmail + ' for a quote and license file.', + }; +} module.exports = { LICENSE_CONFIG, @@ -129,9 +383,11 @@ module.exports = { getPaymentInfo, getAllPaymentOptions, getCurrentBuildVersion, - isFeatureEnabled, + isFeatureEnabled: _staticIsFeatureEnabled, getEnabledFeatures, EVALUATION_RESTRICTIONS, PAYMENT_CONFIG, MAINTENANCE_CONFIG, + // Test seam: + _invalidate, }; diff --git a/electron/license/publisherPublicKey.cjs b/electron/license/publisherPublicKey.cjs new file mode 100644 index 0000000..c47234e --- /dev/null +++ b/electron/license/publisherPublicKey.cjs @@ -0,0 +1,37 @@ +/** + * TransTrack — Publisher Ed25519 public key (license signature verification). + * + * This file ships in the production build. The corresponding private key + * lives OFFLINE on the publisher's signing workstation and MUST NOT be + * committed to the repository. + * + * KEY ROTATION: + * 1. Generate a new keypair: `node scripts/license-keypair.mjs --force` + * 2. Update PUBLIC_KEY_BASE64 below to the new value printed by the script. + * 3. Bump LICENSE_PROTOCOL_VERSION so prior in-the-wild app builds reject + * licenses signed with the new key (and vice versa). + * 4. Re-issue every active customer license signed with the new private key. + * 5. Distribute the new build to customers (auto-update channel). + * + * PER-ENVIRONMENT OVERRIDE: + * For per-environment keys (staging vs production vs an offline pilot + * site that runs its own publisher), set the env var + * `TRANSTRACK_PUBLISHER_PUBLIC_KEY`; note this module reads process.env + * at require time (app start), not at build time.
The default below is the development + * key generated by scripts/license-keypair.mjs. + */ + +'use strict'; + +const DEV_PUBLIC_KEY_BASE64 = 'MHSe+m/IfhXeUSHXwk6AX+ArAOs495AatZ3zNkymgsI='; + +const PUBLIC_KEY_BASE64 = process.env.TRANSTRACK_PUBLISHER_PUBLIC_KEY + || DEV_PUBLIC_KEY_BASE64; + +const LICENSE_PROTOCOL_VERSION = 1; + +module.exports = { + PUBLIC_KEY_BASE64, + LICENSE_PROTOCOL_VERSION, + IS_DEV_KEY: PUBLIC_KEY_BASE64 === DEV_PUBLIC_KEY_BASE64, +}; diff --git a/electron/license/storage.cjs b/electron/license/storage.cjs new file mode 100644 index 0000000..89493c6 --- /dev/null +++ b/electron/license/storage.cjs @@ -0,0 +1,115 @@ +/** + * TransTrack — License file storage. + * + * Reads / writes the active license to a file in userData. The wire + * format is already signed and tamper-evident, so we don't need + * additional integrity protection on the file itself — we just store + * the LIC1.* string. We restrict file permissions to 0o600 to keep + * casual readers out. + * + * Trial mode: when there is no license file, we transparently fall back + * to a "trial" state that lasts TRIAL_DURATION_DAYS from the recorded + * trial_started_at timestamp (which is created on first call). Once + * expired, the trial cannot be reset by re-running the app (the file is + * append-only-ish; we never erase the trial timestamp). 
+ */ + +'use strict'; + +const fs = require('fs'); +const path = require('path'); + +const TRIAL_DURATION_DAYS = 30; +const DAY_MS = 24 * 60 * 60 * 1000; + +function _userDataDir() { + if (process.env.TRANSTRACK_USERDATA_DIR) return process.env.TRANSTRACK_USERDATA_DIR; + try { + const { app } = require('electron'); + return app.getPath('userData'); + } catch { + return path.join(process.cwd(), '.transtrack-test-userdata'); + } +} + +function _licensePath() { + return path.join(_userDataDir(), 'license.dat'); +} + +function _trialPath() { + return path.join(_userDataDir(), '.transtrack-trial'); +} + +function loadLicense() { + const p = _licensePath(); + if (!fs.existsSync(p)) return null; + try { + const raw = fs.readFileSync(p, 'utf8').trim(); + if (!raw) return null; + return raw; + } catch { + return null; + } +} + +function storeLicense(wireLicense) { + if (typeof wireLicense !== 'string' || !wireLicense.startsWith('LIC1.')) { + throw new Error('storeLicense expects a LIC1.* wire-format string'); + } + const dir = _userDataDir(); + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }); + fs.writeFileSync(_licensePath(), wireLicense, { mode: 0o600 }); + try { fs.chmodSync(_licensePath(), 0o600); } catch { /* windows */ } +} + +function deleteLicense() { + const p = _licensePath(); + if (fs.existsSync(p)) { + try { fs.unlinkSync(p); } catch { /* ignore */ } + } +} + +/** + * Trial state — { startedAt: ISO, expiresAt: ISO, daysRemaining: number, expired: boolean } + * Always returns an object; creates the trial file on first call so subsequent + * calls give a deterministic answer. 
+ */ +function getTrialState(nowMs = Date.now()) { + const p = _trialPath(); + const dir = _userDataDir(); + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }); + + let startedAt; + if (fs.existsSync(p)) { + try { + const obj = JSON.parse(fs.readFileSync(p, 'utf8')); + if (obj && typeof obj.startedAt === 'string' && !isNaN(Date.parse(obj.startedAt))) { + startedAt = obj.startedAt; + } + } catch { /* file corrupt; rewrite */ } + } + if (!startedAt) { + startedAt = new Date(nowMs).toISOString(); + fs.writeFileSync(p, JSON.stringify({ startedAt }), { mode: 0o600 }); + try { fs.chmodSync(p, 0o600); } catch { /* windows */ } + } + + const startMs = Date.parse(startedAt); + const expiresMs = startMs + TRIAL_DURATION_DAYS * DAY_MS; + const daysRemaining = Math.ceil((expiresMs - nowMs) / DAY_MS); + return { + startedAt, + expiresAt: new Date(expiresMs).toISOString(), + daysRemaining: Math.max(0, daysRemaining), + expired: nowMs > expiresMs, + durationDays: TRIAL_DURATION_DAYS, + }; +} + +module.exports = { + loadLicense, + storeLicense, + deleteLicense, + getTrialState, + TRIAL_DURATION_DAYS, +}; diff --git a/electron/license/verifier.cjs b/electron/license/verifier.cjs new file mode 100644 index 0000000..87f7294 --- /dev/null +++ b/electron/license/verifier.cjs @@ -0,0 +1,100 @@ +/** + * TransTrack — License verification orchestrator. + * + * Wraps the cryptographic verification from issuance.cjs with the + * application-level checks every license has to pass: + * + * 1. Signature is valid (Ed25519 against the embedded publisher pubkey) + * 2. Protocol version is supported by this build + * 3. Not yet expired (or within a configurable grace period for soft expiry) + * 4. The current machine's fingerprint is in the bound list (if any) + * + * The return shape is a discriminated union: + * { ok: true, payload, status: 'active' | 'in_grace' } + * { ok: false, code, message, payload? 
} + */ + +'use strict'; + +const { verifyLicense } = require('./issuance.cjs'); +const { getMachineFingerprint, hashForBinding } = require('./machineId.cjs'); + +// Lazily read these so test harnesses can monkey-patch the publisher +// pubkey module after the verifier has already been required. +function _publisher() { return require('./publisherPublicKey.cjs'); } + +// Soft-expiry grace: after expiresAt, the license keeps working for this many +// days but the UI shows a renewal warning. After grace, hard fail. +const SOFT_EXPIRY_GRACE_DAYS = 14; +const DAY_MS = 24 * 60 * 60 * 1000; + +/** + * @param {string} wireLicense The LIC1.* signed token + * @param {object} [opts] + * @param {number} [opts.nowMs] clock override for tests + * @param {string} [opts.machineId] override machine id (tests) + * @param {string} [opts.publicKeyOverride] override publisher pubkey (tests) + * @param {number} [opts.gracePeriodDays] override soft-expiry grace + */ +function verify(wireLicense, opts = {}) { + const nowMs = opts.nowMs ?? Date.now(); + const pubKey = opts.publicKeyOverride ?? _publisher().PUBLIC_KEY_BASE64; + const grace = opts.gracePeriodDays ?? SOFT_EXPIRY_GRACE_DAYS; + + let payload; + try { + payload = verifyLicense(wireLicense, pubKey); + } catch (e) { + return { ok: false, code: 'BAD_SIGNATURE', message: e.message }; + } + + // Protocol version gate — refuse to honor licenses signed under a + // protocol revision newer than what this build understands. + const expectedProto = _publisher().LICENSE_PROTOCOL_VERSION; + if (payload.protocolVersion !== expectedProto) { + return { + ok: false, + code: 'PROTOCOL_MISMATCH', + message: `License protocol v${payload.protocolVersion} unsupported (this build accepts v${expectedProto}). Update the application.`, + payload, + }; + } + + // Expiry check. 
+ const expMs = Date.parse(payload.expiresAt); + if (Number.isFinite(expMs) && nowMs > expMs + grace * DAY_MS) { + return { + ok: false, + code: 'EXPIRED', + message: `License expired on ${payload.expiresAt}. Contact your account manager to renew.`, + payload, + }; + } + const inGrace = Number.isFinite(expMs) && nowMs > expMs; + + // Machine binding. Empty/missing machineBindings means "any machine" + // (used by site licenses). A non-empty list must include this machine. + if (Array.isArray(payload.machineBindings) && payload.machineBindings.length > 0) { + const mid = opts.machineId ?? getMachineFingerprint(); + const myHash = hashForBinding(mid); + if (!payload.machineBindings.includes(myHash)) { + return { + ok: false, + code: 'NOT_BOUND_TO_MACHINE', + message: 'This license is not activated for the current machine. Contact your administrator to re-bind or transfer the license.', + payload, + }; + } + } + + return { + ok: true, + payload, + status: inGrace ? 'in_grace' : 'active', + }; +} + +module.exports = { + verify, + SOFT_EXPIRY_GRACE_DAYS, +}; diff --git a/electron/main.cjs b/electron/main.cjs index 650eb1b..4f6f67e 100644 --- a/electron/main.cjs +++ b/electron/main.cjs @@ -6,6 +6,29 @@ const { initDatabase, closeDatabase } = require('./database/init.cjs'); const { setupIPCHandlers } = require('./ipc/handlers.cjs'); const { logger, initCrashReporter, closeLogger } = require('./services/logger.cjs'); +// Register the custom URL protocol used as the OIDC SSO redirect target. +// Must run BEFORE app.whenReady() on every platform. See electron/auth/oidcDesktop.cjs. +const TRANSTRACK_PROTOCOL = 'transtrack'; +if (process.defaultApp) { + // When running from `npm run electron-dev`, process.argv[1] points to the + // entry script and the call below has to pass it explicitly for the OS + // to bind the protocol to the dev runner. In a packaged build there is + // no second argument needed. 
+ if (process.argv.length >= 2) { + app.setAsDefaultProtocolClient(TRANSTRACK_PROTOCOL, process.execPath, [path.resolve(process.argv[1])]); + } +} else { + app.setAsDefaultProtocolClient(TRANSTRACK_PROTOCOL); +} + +// Single-instance lock — on Windows/Linux the second app launch triggered +// by `transtrack://...` is delivered to the first instance via the +// second-instance event below; without this lock, both would race. +const gotLock = app.requestSingleInstanceLock(); +if (!gotLock) { + app.quit(); +} + // Disable hardware acceleration for better compatibility app.disableHardwareAcceleration(); @@ -355,6 +378,53 @@ app.on('window-all-closed', () => { } }); +// macOS protocol handler: the OS hands us the URL via `open-url`. +app.on('open-url', (event, url) => { + event.preventDefault(); + handleProtocolUrl(url); +}); + +// Windows/Linux: a second `transtrack://...` invocation lands here. +app.on('second-instance', (_event, argv /*, _workingDir */) => { + // The protocol URL is somewhere in argv on Windows; scan defensively. + const url = argv.find((a) => typeof a === 'string' && a.startsWith(`${TRANSTRACK_PROTOCOL}://`)); + if (url) handleProtocolUrl(url); + if (mainWindow) { + if (mainWindow.isMinimized()) mainWindow.restore(); + mainWindow.focus(); + } +}); + +/** + * Centralized protocol-URL dispatcher. Currently the only registered + * scheme is `transtrack://auth/callback` for OIDC SSO; add new ones + * here as needed. + */ +async function handleProtocolUrl(url) { + try { + const u = new URL(url); + if (u.protocol !== `${TRANSTRACK_PROTOCOL}:`) return; + if (u.host === 'auth' && u.pathname === '/callback') { + const oidc = require('./auth/oidcDesktop.cjs'); + const identity = await oidc.completeFlow(url); + // Hand off to the auth handler module to find/create the matching + // local user and mint a session. 
+ const ssoHandler = require('./ipc/handlers/ssoCallback.cjs'); + const sessionInfo = await ssoHandler.finalizeSso(identity); + if (mainWindow && !mainWindow.isDestroyed()) { + mainWindow.webContents.send('auth:ssoCompleted', { ok: true, ...sessionInfo }); + } + return; + } + logger.warn('Unhandled protocol URL', { url }); + } catch (err) { + logger.error('Protocol URL handler failed', { error: err.message }); + if (mainWindow && !mainWindow.isDestroyed()) { + mainWindow.webContents.send('auth:ssoCompleted', { ok: false, error: err.message }); + } + } +} + app.on('before-quit', async () => { logger.info('Application shutting down...'); await closeDatabase(); diff --git a/electron/preload.cjs b/electron/preload.cjs index cdfb122..12f6219 100644 --- a/electron/preload.cjs +++ b/electron/preload.cjs @@ -276,6 +276,30 @@ contextBridge.exposeInMainWorld('electronAPI', { getCurrent: () => ipcRenderer.invoke('organization:getCurrent'), update: (updates) => ipcRenderer.invoke('organization:update', updates), }, + + // Licensing — see electron/ipc/handlers/license.cjs + license: { + getInfo: () => ipcRenderer.invoke('license:getInfo'), + getMachineId: () => ipcRenderer.invoke('license:getMachineId'), + activate: (licenseWire) => ipcRenderer.invoke('license:activate', licenseWire), + remove: () => ipcRenderer.invoke('license:remove'), + checkFeature: (featureFlag) => ipcRenderer.invoke('license:checkFeature', featureFlag), + checkLimit: (limitType, currentCount) => ipcRenderer.invoke('license:checkLimit', limitType, currentCount), + }, + + // SSO (OIDC) on the desktop — see electron/auth/oidcDesktop.cjs + sso: { + start: () => ipcRenderer.invoke('auth:ssoStart'), + cancel: () => ipcRenderer.invoke('auth:ssoCancel'), + // Subscribe to the broadcast emitted by the protocol handler in main.cjs + // after the IdP redirect completes. The callback receives + // { ok, user?, sessionId?, error? }. 
+ onCompleted: (callback) => { + const wrapped = (_event, payload) => callback(payload); + ipcRenderer.on('auth:ssoCompleted', wrapped); + return () => ipcRenderer.removeListener('auth:ssoCompleted', wrapped); + }, + }, // Menu event listeners onMenuExport: (callback) => { diff --git a/electron/services/secretEncryption.cjs b/electron/services/secretEncryption.cjs new file mode 100644 index 0000000..60d5e30 --- /dev/null +++ b/electron/services/secretEncryption.cjs @@ -0,0 +1,207 @@ +/** + * TransTrack — Field-level secret encryption. + * + * AES-256-GCM with HKDF-SHA256-derived keys. Used for column-level + * protection of small secrets that are also stored inside the SQLCipher + * database (defense-in-depth — an attacker who exfiltrates the .db file + * plus the .transtrack-key on disk still has to derive the field key, + * which is bound to a non-exported app-level secret). + * + * Wire format: + * enc:v1:: + * + * Where the ciphertext is exactly: ||<16-byte auth tag>. + * + * Backward compatibility: + * Values that do not begin with `enc:v1:` are treated as legacy + * plaintext and returned as-is from decrypt(); call sites must always + * route through decryptField() to keep the legacy path transparent. + * The migration in electron/database/migrations.cjs re-encrypts every + * existing row on first run after upgrade. + */ + +'use strict'; + +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); + +const ALGO = 'aes-256-gcm'; +const IV_LEN = 12; +const TAG_LEN = 16; +const KEY_LEN = 32; +const ENC_PREFIX = 'enc:v1:'; + +let _appSecretCached = null; + +/** + * The "field encryption master secret" is a 32-byte value derived from + * the SQLCipher DEK if available, otherwise a dedicated file in + * userData with mode 0o600 (and safeStorage-wrapped when possible). 
+ * + * We deliberately do NOT read .transtrack-key directly here — the DEK + * is rotated independently and we don't want field-level secrets to + * silently re-encrypt every time the DB key rotates. + * + * Strategy: persist a dedicated 32-byte master in + * /.transtrack-field-key + * protected by Electron's safeStorage when available. + */ +function _getMasterSecret() { + if (_appSecretCached) return _appSecretCached; + + let userDataDir; + try { + // electron may be unavailable in tests; allow override via env + if (process.env.TRANSTRACK_USERDATA_DIR) { + userDataDir = process.env.TRANSTRACK_USERDATA_DIR; + } else { + const { app } = require('electron'); + userDataDir = app.getPath('userData'); + } + } catch { + // Fall back to a sibling of the cwd for tests + userDataDir = path.join(process.cwd(), '.transtrack-test-userdata'); + } + + if (!fs.existsSync(userDataDir)) { + fs.mkdirSync(userDataDir, { recursive: true }); + } + const keyPath = path.join(userDataDir, '.transtrack-field-key'); + + let safeStorage = null; + try { ({ safeStorage } = require('electron')); } catch { /* not under electron */ } + const safeAvailable = !!(safeStorage + && typeof safeStorage.isEncryptionAvailable === 'function' + && safeStorage.isEncryptionAvailable()); + + function _readKey() { + if (!fs.existsSync(keyPath)) return null; + const raw = fs.readFileSync(keyPath); + // Heuristic: 64 hex chars => legacy plaintext format; else safeStorage blob. 
+ const asText = raw.toString('utf8').trim(); + if (/^[a-fA-F0-9]{64}$/.test(asText)) { + return Buffer.from(asText, 'hex'); + } + if (safeAvailable) { + try { + const decrypted = safeStorage.decryptString(raw); + if (/^[a-fA-F0-9]{64}$/.test(decrypted)) { + return Buffer.from(decrypted, 'hex'); + } + } catch { /* fall through */ } + } + return null; + } + + function _writeKey(buf) { + const hex = buf.toString('hex'); + if (safeAvailable) { + fs.writeFileSync(keyPath, safeStorage.encryptString(hex), { mode: 0o600 }); + } else { + fs.writeFileSync(keyPath, hex, { mode: 0o600 }); + } + try { fs.chmodSync(keyPath, 0o600); } catch { /* windows */ } + } + + let key = _readKey(); + if (!key) { + key = crypto.randomBytes(KEY_LEN); + _writeKey(key); + } else if (safeAvailable) { + // Upgrade legacy plaintext on-disk format to safeStorage-encrypted. + const raw = fs.readFileSync(keyPath); + const asText = raw.toString('utf8').trim(); + if (/^[a-fA-F0-9]{64}$/.test(asText)) { + _writeKey(key); + } + } + + _appSecretCached = key; + return key; +} + +/** + * Derive a per-column subkey from the master via HKDF-SHA256. The label + * lets us rotate one column's key independently in the future without + * touching the master. + */ +function _deriveKey(label) { + const master = _getMasterSecret(); + const salt = Buffer.from('transtrack-field-v1'); + const info = Buffer.from(label || 'default'); + return crypto.hkdfSync('sha256', master, salt, info, KEY_LEN); +} + +/** + * Encrypt a plaintext string. Returns the wire-format string; null/undefined + * passes through unchanged so call sites don't need null guards. + */ +function encryptField(plaintext, label = 'default') { + if (plaintext === null || plaintext === undefined || plaintext === '') return plaintext; + if (typeof plaintext !== 'string') { + throw new TypeError('encryptField expects a string plaintext'); + } + // Don't double-encrypt — idempotency makes migrations safe to re-run. 
+ if (plaintext.startsWith(ENC_PREFIX)) return plaintext; + + const key = Buffer.from(_deriveKey(label)); + const iv = crypto.randomBytes(IV_LEN); + const cipher = crypto.createCipheriv(ALGO, key, iv); + const ct = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]); + const tag = cipher.getAuthTag(); + const ivB64 = iv.toString('base64url'); + const ctB64 = Buffer.concat([ct, tag]).toString('base64url'); + return `${ENC_PREFIX}${ivB64}:${ctB64}`; +} + +/** + * Decrypt a wire-format string. If the value is null/empty or doesn't + * carry the encryption prefix, it is returned as-is (legacy plaintext + * compatibility). Throws on tampered ciphertext. + */ +function decryptField(value, label = 'default') { + if (value === null || value === undefined || value === '') return value; + if (typeof value !== 'string' || !value.startsWith(ENC_PREFIX)) return value; + + const parts = value.slice(ENC_PREFIX.length).split(':'); + if (parts.length !== 2) { + throw new Error('Invalid encrypted field format'); + } + const iv = Buffer.from(parts[0], 'base64url'); + const blob = Buffer.from(parts[1], 'base64url'); + if (iv.length !== IV_LEN || blob.length < TAG_LEN + 1) { + throw new Error('Invalid encrypted field payload'); + } + const ct = blob.subarray(0, blob.length - TAG_LEN); + const tag = blob.subarray(blob.length - TAG_LEN); + + const key = Buffer.from(_deriveKey(label)); + const decipher = crypto.createDecipheriv(ALGO, key, iv); + decipher.setAuthTag(tag); + const pt = Buffer.concat([decipher.update(ct), decipher.final()]); + return pt.toString('utf8'); +} + +/** + * Inspect whether a stored value is already encrypted by this module. + */ +function isEncrypted(value) { + return typeof value === 'string' && value.startsWith(ENC_PREFIX); +} + +/** + * Test seam: clear the cached master so unit tests can flip the + * TRANSTRACK_USERDATA_DIR between runs. 
+ */ +function _resetForTests() { + _appSecretCached = null; +} + +module.exports = { + encryptField, + decryptField, + isEncrypted, + ENC_PREFIX, + _resetForTests, +}; diff --git a/package.json b/package.json index 96e92d4..351e0fb 100644 --- a/package.json +++ b/package.json @@ -77,6 +77,11 @@ "ci:security": "npm ci && npm audit --production --audit-level=moderate", "release:check": "node scripts/release-readiness-check.mjs", "release:check:strict": "node scripts/release-readiness-check.mjs --strict", + "release:check:for-sale": "node scripts/release-readiness-check.mjs --for-sale --strict", + "license:keypair": "node scripts/license-keypair.mjs --out keys/license", + "license:issue": "node scripts/issue-license.mjs", + "test:license": "node tests/license.test.cjs && node tests/secretEncryption.test.cjs", + "test:sso": "node tests/oidcDesktop.test.cjs", "typecheck": "tsc -p ./jsconfig.json", "preview": "vite preview", "postinstall": "patch-package && electron-builder install-app-deps" diff --git a/scripts/issue-license.mjs b/scripts/issue-license.mjs new file mode 100644 index 0000000..5e635c9 --- /dev/null +++ b/scripts/issue-license.mjs @@ -0,0 +1,138 @@ +#!/usr/bin/env node +/** + * TransTrack — Issue a signed customer license. + * + * Usage: + * node scripts/issue-license.mjs \ + * --private-key keys/license/license-private.pem \ + * --customer-name "Cleveland Clinic" \ + * --customer-email "it.admin@ccf.org" \ + * --org-id "ccf" \ + * --tier enterprise \ + * --expires 2027-12-31 \ + * --max-patients 5000 \ + * --max-users 100 \ + * --max-installations 5 \ + * --features all \ + * --machines mid1,mid2 \ + * --out licenses/ccf-2027.lic + * + * The `--machines` flag is a comma-separated list of *raw* machine + * fingerprints (the hex string the app shows in Settings → License). + * If omitted, the license is unbound and works on any machine — suitable + * for site licenses, NOT for normal customer sales. 
+ * + * `--features all` is a shortcut; otherwise pass a comma-separated + * feature flag list from electron/license/tiers.cjs FEATURES. + */ + +import crypto from 'node:crypto'; +import fs from 'node:fs'; +import path from 'node:path'; +import { createRequire } from 'node:module'; + +const require = createRequire(import.meta.url); +const { signLicense } = require('../electron/license/issuance.cjs'); +const { hashForBinding } = require('../electron/license/machineId.cjs'); +const { LICENSE_PROTOCOL_VERSION } = require('../electron/license/publisherPublicKey.cjs'); +const tiers = require('../electron/license/tiers.cjs'); + +const argv = process.argv.slice(2); +function arg(name, def) { + const i = argv.indexOf(`--${name}`); + if (i >= 0 && argv[i + 1]) return argv[i + 1]; + return def; +} +function need(name) { + const v = arg(name); + if (!v) { console.error(`ERROR: --${name} is required`); process.exit(2); } + return v; +} + +const privKeyPath = need('private-key'); +const out = need('out'); + +if (!fs.existsSync(privKeyPath)) { + console.error(`ERROR: private key file not found: ${privKeyPath}`); + process.exit(2); +} +const privateKeyPem = fs.readFileSync(privKeyPath, 'utf8'); + +const tier = need('tier'); +if (!tiers.LICENSE_TIER || !Object.values(tiers.LICENSE_TIER).includes(tier)) { + // tiers stub maps everything to 'enterprise', but the issuance contract + // accepts any of the canonical tier strings: + if (!['evaluation', 'starter', 'professional', 'enterprise'].includes(tier)) { + console.error('ERROR: --tier must be one of: evaluation, starter, professional, enterprise'); + process.exit(2); + } +} + +const featuresFlag = arg('features', 'all'); +let features; +if (featuresFlag === 'all') { + features = Object.values(tiers.FEATURES); +} else { + features = featuresFlag.split(',').map((f) => f.trim()).filter(Boolean); +} + +const expires = need('expires'); +const issuedAt = new Date().toISOString(); +const expiresAt = new Date(expires + (expires.length === 
10 ? 'T23:59:59Z' : '')).toISOString(); + +const maintenanceExpires = arg('maintenance-expires'); +const maintenanceExpiresAt = maintenanceExpires + ? new Date(maintenanceExpires + (maintenanceExpires.length === 10 ? 'T23:59:59Z' : '')).toISOString() + : expiresAt; + +const machinesArg = arg('machines', ''); +const machineBindings = machinesArg + ? machinesArg.split(',').map((m) => m.trim()).filter(Boolean).map((m) => hashForBinding(m)) + : []; + +const payload = { + licenseId: 'lic_' + crypto.randomBytes(8).toString('hex'), + protocolVersion: LICENSE_PROTOCOL_VERSION, + customer: { + name: need('customer-name'), + email: need('customer-email'), + orgId: need('org-id'), + }, + tier, + issuedAt, + expiresAt, + maintenanceExpiresAt, + limits: { + maxPatients: parseInt(need('max-patients'), 10), + maxUsers: parseInt(need('max-users'), 10), + maxInstallations: parseInt(need('max-installations'), 10), + }, + features, + machineBindings, + metadata: { + issuedBy: 'TransTrack Sales', + issuerHost: require('node:os').hostname(), + }, +}; + +const wire = signLicense(payload, privateKeyPem); + +const outDir = path.dirname(out); +if (outDir && !fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true }); +fs.writeFileSync(out, wire, { mode: 0o600 }); + +console.log(`License signed and written to ${out}`); +console.log(''); +console.log(' licenseId: ' + payload.licenseId); +console.log(' customer: ' + payload.customer.name + ' <' + payload.customer.email + '>'); +console.log(' orgId: ' + payload.customer.orgId); +console.log(' tier: ' + payload.tier); +console.log(' expiresAt: ' + payload.expiresAt); +console.log(' maint expires:' + payload.maintenanceExpiresAt); +console.log(' patients: ' + payload.limits.maxPatients); +console.log(' users: ' + payload.limits.maxUsers); +console.log(' installs: ' + payload.limits.maxInstallations); +console.log(' features: ' + payload.features.length + ' feature flags'); +console.log(' machines: ' + (payload.machineBindings.length || 
'unbound (site license)')); +console.log(''); +console.log('Send the file at ' + out + ' to the customer.'); diff --git a/scripts/license-keypair.mjs b/scripts/license-keypair.mjs new file mode 100644 index 0000000..a947b9d --- /dev/null +++ b/scripts/license-keypair.mjs @@ -0,0 +1,72 @@ +#!/usr/bin/env node +/** + * TransTrack — Generate the publisher Ed25519 keypair. + * + * This is run ONCE, by the operator, to mint the keypair that signs + * customer licenses. The PUBLIC key is then committed into the app at + * electron/license/publisherPublicKey.cjs (or env-injected at build + * time). The PRIVATE key is kept OFFLINE — never committed. + * + * For production use: + * - Run this on an air-gapped or HSM-backed workstation. + * - Store the private key in a hardware security module (or at minimum, + * in a password-protected encrypted vault). + * - Back up to two geographically-separate secure locations. + * - Rotate every 3 years OR immediately on suspected compromise. + * + * Usage: + * node scripts/license-keypair.mjs --out keys/license + * node scripts/license-keypair.mjs --out keys/license --force (overwrite) + * + * After running, paste the printed PUBLIC_KEY_BASE64 into + * electron/license/publisherPublicKey.cjs and commit only that file. + */ + +import crypto from 'node:crypto'; +import fs from 'node:fs'; +import path from 'node:path'; + +const argv = process.argv.slice(2); +function arg(name, def) { + const i = argv.indexOf(`--${name}`); + if (i >= 0 && argv[i + 1]) return argv[i + 1]; + return def; +} +const force = argv.includes('--force'); +const outDir = arg('out', 'keys/license'); + +const privPath = path.join(outDir, 'license-private.pem'); +const pubPath = path.join(outDir, 'license-public.pem'); + +if ((fs.existsSync(privPath) || fs.existsSync(pubPath)) && !force) { + console.error(`ERROR: ${privPath} or ${pubPath} already exists. 
Pass --force to overwrite.`); + console.error(' Overwriting will invalidate every license issued under the previous key.'); + process.exit(1); +} + +fs.mkdirSync(outDir, { recursive: true }); + +const { publicKey, privateKey } = crypto.generateKeyPairSync('ed25519'); + +const privPem = privateKey.export({ type: 'pkcs8', format: 'pem' }); +const pubPem = publicKey.export({ type: 'spki', format: 'pem' }); + +const pubRaw = publicKey.export({ type: 'spki', format: 'der' }); +// Ed25519 SPKI DER is: 30 2A 30 05 06 03 2B 65 70 03 21 00 || 32-byte-key +// so the raw 32-byte key is the last 32 bytes. +const pubKeyBytes = pubRaw.subarray(pubRaw.length - 32); + +fs.writeFileSync(privPath, privPem, { mode: 0o600 }); +fs.writeFileSync(pubPath, pubPem); +try { fs.chmodSync(privPath, 0o600); } catch { /* windows */ } + +console.log('TransTrack publisher Ed25519 keypair generated.'); +console.log(''); +console.log(' PRIVATE KEY: ' + privPath + ' (KEEP THIS SECRET — do NOT commit)'); +console.log(' PUBLIC KEY : ' + pubPath + ' (safe to share; ship with app)'); +console.log(''); +console.log(' PUBLIC_KEY_BASE64 (paste into electron/license/publisherPublicKey.cjs):'); +console.log(' ' + pubKeyBytes.toString('base64')); +console.log(''); +console.log(' PUBLIC_KEY_PEM:'); +console.log(pubPem); diff --git a/src/api/localClient.js b/src/api/localClient.js index 1cec5c9..96e08c4 100644 --- a/src/api/localClient.js +++ b/src/api/localClient.js @@ -131,6 +131,25 @@ const mockClient = { }), isEnabled: async () => true, }, + // Mock license client for browser dev mode + license: { + getInfo: async () => ({ + mode: 'trial', isLicensed: true, isEvaluation: true, + tier: 'enterprise', tierName: 'Trial', + orgId: 'TRIAL', orgName: 'TransTrack Trial (dev)', + machineId: 'dev-machine', + trial: { startedAt: new Date().toISOString(), expiresAt: new Date(Date.now() + 30 * 86400e3).toISOString(), daysRemaining: 30, durationDays: 30 }, + limits: { maxPatients: -1, maxUsers: -1, maxInstallations: -1 
}, + features: [], + canActivate: true, + isDevelopmentBuild: true, + }), + getMachineId: async () => 'dev-machine', + activate: async () => ({ success: false, error: 'Activation requires the Electron desktop client.' }), + remove: async () => ({ success: true }), + checkFeature: async () => ({ enabled: true }), + checkLimit: async (_t, c) => ({ withinLimit: true, current: c, limit: -1, remaining: -1 }), + }, // Mock aHHQ client for development ahhq: { getStatuses: async () => ({ @@ -585,6 +604,21 @@ const createElectronClient = () => { verifyIntegrity: async () => await window.electronAPI.encryption.verifyIntegrity(), isEnabled: async () => await window.electronAPI.encryption.isEnabled(), }, + // Licensing & activation + license: { + getInfo: async () => await window.electronAPI.license.getInfo(), + getMachineId: async () => await window.electronAPI.license.getMachineId(), + activate: async (wire) => await window.electronAPI.license.activate(wire), + remove: async () => await window.electronAPI.license.remove(), + checkFeature: async (flag) => await window.electronAPI.license.checkFeature(flag), + checkLimit: async (type, count) => await window.electronAPI.license.checkLimit(type, count), + }, + // SSO (OIDC) desktop flow + sso: { + start: async () => await window.electronAPI.sso.start(), + cancel: async () => await window.electronAPI.sso.cancel(), + onCompleted: (cb) => window.electronAPI.sso.onCompleted(cb), + }, // Risk Intelligence risk: { getDashboard: async () => await window.electronAPI.risk.getDashboard(), diff --git a/src/components/layout/Sidebar.jsx b/src/components/layout/Sidebar.jsx index 7850085..d96c1c6 100644 --- a/src/components/layout/Sidebar.jsx +++ b/src/components/layout/Sidebar.jsx @@ -4,7 +4,7 @@ import { createPageUrl } from '@/utils'; import { Activity, Users, FileText, Settings, Shield, Heart, Database, AlertTriangle, HardDrive, BarChart3, Brain, ListTodo, ClipboardCheck, - Stethoscope, Inbox, KeyRound, UserPlus, X, + Stethoscope, Inbox, 
KeyRound, UserPlus, X, Key, } from 'lucide-react'; /** @@ -76,6 +76,7 @@ export default function Sidebar({ user, isOpen = true, onClose }) { { name: 'Priority Config', page: 'PrioritySettings', icon: Settings, show: isAdmin }, { name: 'Compliance', page: 'ComplianceCenter', icon: Shield, show: isAdmin || isRegulator }, { name: 'Recovery', page: 'DisasterRecovery', icon: HardDrive, show: isAdmin }, + { name: 'License', page: 'License', icon: Key, show: isAdmin }, { name: 'Settings', page: 'Settings', icon: Settings, show: isAdmin }, ], }, diff --git a/src/lib/AuthContext.jsx b/src/lib/AuthContext.jsx index 5eeee6c..939975a 100644 --- a/src/lib/AuthContext.jsx +++ b/src/lib/AuthContext.jsx @@ -133,7 +133,11 @@ export const AuthProvider = ({ children }) => { cancelMfa, logout, navigateToLogin, - checkAppState + checkAppState, + // Alias for callers (e.g. the SSO completion handler in Login) + // that want to re-query the backend after a non-form-driven auth + // event landed a fresh session. 
+ refreshAuth: checkAppState, }}> {children} diff --git a/src/pages.config.js b/src/pages.config.js index 8032a8e..067e05f 100644 --- a/src/pages.config.js +++ b/src/pages.config.js @@ -19,6 +19,7 @@ import OrganOffers from './pages/OrganOffers'; import PostTransplant from './pages/PostTransplant'; import LivingDonors from './pages/LivingDonors'; import Hl7Inbox from './pages/Hl7Inbox'; +import License from './pages/License'; import __Layout from './Layout.jsx'; @@ -44,6 +45,7 @@ export const PAGES = { "PostTransplant": PostTransplant, "LivingDonors": LivingDonors, "Hl7Inbox": Hl7Inbox, + "License": License, } export const pagesConfig = { diff --git a/src/pages/License.jsx b/src/pages/License.jsx new file mode 100644 index 0000000..e8c258e --- /dev/null +++ b/src/pages/License.jsx @@ -0,0 +1,318 @@ +import React, { useState } from 'react'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { api } from '@/api/apiClient'; +import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'; +import { Button } from '@/components/ui/button'; +import { Label } from '@/components/ui/label'; +import { Badge } from '@/components/ui/badge'; +import { Alert, AlertDescription } from '@/components/ui/alert'; +import { + Key, CheckCircle2, AlertTriangle, Copy, ShieldCheck, + ShieldAlert, Hourglass, Mail, FileText, RotateCcw, +} from 'lucide-react'; +import { format, formatDistanceToNow } from 'date-fns'; + +/** + * License & Activation page. Always accessible from Settings menu; if the + * trial has expired or the installed license is invalid, the host App + * router should also redirect any other navigation here automatically. 
+ */ +export default function License() { + const queryClient = useQueryClient(); + const [licenseInput, setLicenseInput] = useState(''); + const [copied, setCopied] = useState(false); + const [activationError, setActivationError] = useState(null); + const [activationSuccess, setActivationSuccess] = useState(null); + + const { data: info, isLoading } = useQuery({ + queryKey: ['licenseInfo'], + queryFn: () => api.license.getInfo(), + refetchInterval: 60_000, + }); + + const { data: machineId = '' } = useQuery({ + queryKey: ['licenseMachineId'], + queryFn: () => api.license.getMachineId(), + }); + + const activate = useMutation({ + mutationFn: (wire) => api.license.activate(wire), + onSuccess: (res) => { + if (res.success) { + setActivationSuccess(`License activated — ${res.tierName || res.tier}, expires ${res.maintenanceExpiry || 'never'}.`); + setActivationError(null); + setLicenseInput(''); + queryClient.invalidateQueries({ queryKey: ['licenseInfo'] }); + } else { + setActivationError(res.error || 'Activation failed'); + setActivationSuccess(null); + } + }, + onError: (err) => { + setActivationError(err?.message || String(err)); + setActivationSuccess(null); + }, + }); + + const remove = useMutation({ + mutationFn: () => api.license.remove(), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ['licenseInfo'] }); + setActivationSuccess(null); + setActivationError(null); + }, + }); + + const copyMachineId = async () => { + try { + await navigator.clipboard.writeText(machineId); + setCopied(true); + setTimeout(() => setCopied(false), 2000); + } catch { /* clipboard blocked */ } + }; + + if (isLoading || !info) { + return ( +
+

Loading license status…

+
+ ); + } + + const statusBadge = renderStatusBadge(info); + + return ( +
+
+ +
+
+

+ + License & Activation +

+

+ Activate, view, or replace your TransTrack license. +

+
+ {statusBadge} +
+ + {info.isDevelopmentBuild && ( + + + + Development build. The publisher public key in this build is the development + key, not the production one. Customer licenses will not validate against this build. + + + )} + + {info.mode === 'trial' && ( + + + + Trial mode. {info.trial?.daysRemaining ?? 0} days remaining. + All features are enabled. Activate a license below to continue past{' '} + {info.trial?.expiresAt ? format(new Date(info.trial.expiresAt), 'PPP') : 'expiry'}. + + + )} + + {info.mode === 'trial_expired' && ( + + + + Trial expired. TransTrack is read-only until a valid license is installed. + Contact your account manager or paste your license string below. + + + )} + + {info.mode === 'in_grace' && ( + + + + License in renewal grace period. Expired on{' '} + {info.expiresAt ? format(new Date(info.expiresAt), 'PPP') : 'unknown'}. + The application continues to function but will lock out after the grace window ends. + + + )} + + {info.mode === 'invalid' && ( + + + + Installed license is invalid. {info.verificationError} +
Replace the license below or remove it to fall back to trial mode (if eligible). +
+
+ )} + + {info.mode === 'active' && ( + + + + License active. {info.orgName} — {info.tierName} tier, expires{' '} + {info.expiresAt ? format(new Date(info.expiresAt), 'PPP') : 'unknown'}{' '} + ({info.expiresAt ? formatDistanceToNow(new Date(info.expiresAt), { addSuffix: true }) : ''}). + + + )} + + + + + + Installed license + + + +
+ + + + + + + + + + + + +
+ + {info.licenseId && ( +
+ +
+ )} +
+
+ + + + + + This machine + + + +

+ Provide this machine ID to your TransTrack account manager when requesting a + machine-bound license. Each install has its own ID; reinstalling the application + or moving to a new computer changes this value. +

+
+ + {machineId || '…'} + + +
+
+
+ + + + + + Activate a new license + + + +

+ Paste the contents of the .lic{' '} + file your account manager sent you. The string begins with{' '} + LIC1. +

+ +