From 6f14b8b15d7371a0037c5b8bafbda061a8d8ed6c Mon Sep 17 00:00:00 2001 From: Jordan Tomkinson Date: Thu, 22 Jan 2026 21:40:48 +0000 Subject: [PATCH] Feature: backup and restore --- backend/internal/backup.js | 799 ++++++++++++++++++ backend/logger.js | 3 +- backend/package.json | 2 + backend/routes/backup.js | 63 ++ backend/routes/main.js | 2 + frontend/src/api/backend/backup.ts | 29 + frontend/src/api/backend/index.ts | 1 + .../Table/Formatter/EventFormatter.tsx | 7 +- frontend/src/hooks/index.ts | 1 + frontend/src/hooks/useBackup.ts | 33 + frontend/src/locale/src/en.json | 75 ++ frontend/src/pages/Settings/Backup.tsx | 299 +++++++ frontend/src/pages/Settings/Layout.tsx | 23 +- test/cypress/e2e/api/Backup.cy.js | 432 ++++++++++ test/cypress/plugins/backendApi/client.js | 59 ++ test/cypress/plugins/backendApi/task.js | 34 + 16 files changed, 1857 insertions(+), 5 deletions(-) create mode 100644 backend/internal/backup.js create mode 100644 backend/routes/backup.js create mode 100644 frontend/src/api/backend/backup.ts create mode 100644 frontend/src/hooks/useBackup.ts create mode 100644 frontend/src/pages/Settings/Backup.tsx create mode 100644 test/cypress/e2e/api/Backup.cy.js diff --git a/backend/internal/backup.js b/backend/internal/backup.js new file mode 100644 index 0000000000..c9ba93d945 --- /dev/null +++ b/backend/internal/backup.js @@ -0,0 +1,799 @@ +import fs from "node:fs"; +import path from "node:path"; +import archiver from "archiver"; +import archiverZipEncrypted from "archiver-zip-encrypted"; +import unzipper from "unzipper"; + +// Register encrypted zip format +archiver.registerFormat("zip-encrypted", archiverZipEncrypted); +import db from "../db.js"; +import errs from "../lib/error.js"; +import { debug, backup as logger } from "../logger.js"; + +import settingModel from "../models/setting.js"; +import userModel from "../models/user.js"; +import authModel from "../models/auth.js"; +import userPermissionModel from "../models/user_permission.js"; +import certificateModel from "../models/certificate.js"; +import accessListModel from "../models/access_list.js"; +import accessListAuthModel from "../models/access_list_auth.js"; +import accessListClientModel from "../models/access_list_client.js"; +import proxyHostModel from "../models/proxy_host.js"; +import redirectionHostModel from "../models/redirection_host.js"; +import deadHostModel from "../models/dead_host.js"; +import streamModel from "../models/stream.js"; +import auditLogModel from "../models/audit-log.js"; + +import internalNginx from "./nginx.js"; +import internalAccessList from "./access-list.js"; + +// Model lookup map for table operations +const models = { + setting: settingModel, + user: userModel, + auth: authModel, + user_permission: userPermissionModel, + certificate: certificateModel, + access_list: accessListModel, + access_list_auth: accessListAuthModel, + access_list_client: accessListClientModel, + proxy_host: proxyHostModel, + redirection_host: redirectionHostModel, + dead_host: deadHostModel, + stream: streamModel, + audit_log: auditLogModel, +}; + +const BACKUP_VERSION = 1; + +// Table configuration for export/import operations +// - softDelete: if true, filter by is_deleted=0 on export +// - useModel: if true, use model.insert() instead of raw knex (for settings with upsert) +const TABLE_CONFIG = [ + { table: "setting", useModel: true }, + { table: "user", softDelete: true }, + { table: "auth", softDelete: true }, + { table: "user_permission" }, + { table: "certificate", softDelete: true }, + { table: 
"access_list", softDelete: true }, + { table: "access_list_auth" }, + { table: "access_list_client" }, + { table: "proxy_host", softDelete: true }, + { table: "redirection_host", softDelete: true }, + { table: "dead_host", softDelete: true }, + { table: "stream", softDelete: true }, +]; + +// Delete order: reverse of TABLE_CONFIG with audit_log first (respects FK dependencies) +const DELETE_TABLE_ORDER = ["audit_log", ...TABLE_CONFIG.map((t) => t.table).reverse()]; + +// Host types that have nginx configs +const NGINX_HOST_TYPES = [ + { type: "proxy_host", graph: "[certificate, access_list.[items,clients]]" }, + { type: "redirection_host", graph: "[certificate]" }, + { type: "dead_host", graph: "[certificate]" }, + { type: "stream", graph: "[certificate]" }, +]; + +// JSON fields per table that need to be stringified for raw knex inserts +const JSON_FIELDS = { + user: ["roles"], + auth: ["meta"], + setting: ["meta"], + certificate: ["domain_names", "meta"], + access_list: ["meta"], + access_list_auth: ["meta"], + access_list_client: ["meta"], + proxy_host: ["domain_names", "meta", "locations"], + redirection_host: ["domain_names", "meta"], + dead_host: ["domain_names", "meta"], + stream: ["meta"], +}; + +// Boolean fields per table that need to be converted to integers for database compatibility +// SQLite/MySQL store booleans as 0/1 integers, PostgreSQL has native booleans but our schema uses integers +// Converting to 0/1 works for all supported databases (SQLite, MySQL, and PostgreSQL) +const BOOLEAN_FIELDS = { + user: ["is_deleted", "is_disabled"], + auth: ["is_deleted"], + certificate: ["is_deleted"], + access_list: ["is_deleted", "satisfy_any", "pass_auth"], + access_list_auth: [], + access_list_client: [], + proxy_host: ["is_deleted", "enabled", "ssl_forced", "hsts_enabled", "hsts_subdomains", "http2_support", "block_exploits", "caching_enabled", "allow_websocket_upgrade"], + redirection_host: ["is_deleted", "enabled", "ssl_forced", "hsts_enabled", "hsts_subdomains", "http2_support", "block_exploits", "preserve_path"], + dead_host: ["is_deleted", "enabled", "ssl_forced", "hsts_enabled", "hsts_subdomains", "http2_support"], + stream: ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"], +}; + +// Datetime fields per table that need to be converted for database compatibility +// Backups store dates in ISO 8601 format (e.g., '2026-01-22T06:49:44.000Z') +// MySQL doesn't accept ISO 8601, so we convert to 'YYYY-MM-DD HH:MM:SS' which works +// for all supported databases (SQLite, MySQL, and PostgreSQL) +const DATETIME_FIELDS = { + user: ["created_on", "modified_on"], + auth: ["created_on", "modified_on", "expires_on"], + user_permission: ["created_on", "modified_on"], + certificate: ["created_on", "modified_on", "expires_on"], + access_list: ["created_on", "modified_on"], + access_list_auth: ["created_on", "modified_on"], + access_list_client: ["created_on", "modified_on"], + proxy_host: ["created_on", "modified_on"], + redirection_host: ["created_on", "modified_on"], + dead_host: ["created_on", "modified_on"], + stream: ["created_on", "modified_on"], +}; + +/** + * Converts an ISO 8601 datetime string to database-compatible format + * @param {string} isoString - ISO 8601 datetime (e.g., '2026-01-22T06:49:44.000Z') + * @returns {string} - Database-compatible datetime (e.g., '2026-01-22 06:49:44') + */ +const convertDatetimeForDb = (isoString) => { + if (!isoString || typeof isoString !== "string") return isoString; + // Replace 'T' with space and remove milliseconds and 'Z' 
suffix + return isoString.replace("T", " ").replace(/\.\d{3}Z$/, "").replace(/Z$/, ""); +}; + +/** + * Prepares a row for raw knex insert by converting fields to database-compatible formats + * @param {string} table - Table name + * @param {Object} row - Row data + * @returns {Object} - Row with fields converted for database compatibility + */ +const prepareRowForInsert = (table, row) => { + const prepared = { ...row }; + + // Stringify JSON fields + const jsonFields = JSON_FIELDS[table]; + if (jsonFields) { + for (const field of jsonFields) { + if (prepared[field] !== undefined && prepared[field] !== null && typeof prepared[field] === "object") { + prepared[field] = JSON.stringify(prepared[field]); + } + } + } + + // Convert boolean fields to integers (0/1) + const booleanFields = BOOLEAN_FIELDS[table]; + if (booleanFields) { + for (const field of booleanFields) { + if (prepared[field] !== undefined && prepared[field] !== null) { + prepared[field] = prepared[field] ? 1 : 0; + } + } + } + + // Convert datetime fields from ISO 8601 to database-compatible format + const datetimeFields = DATETIME_FIELDS[table]; + if (datetimeFields) { + for (const field of datetimeFields) { + if (prepared[field]) { + prepared[field] = convertDatetimeForDb(prepared[field]); + } + } + } + + return prepared; +}; + +/** + * Safely deletes a file or directory, logging errors instead of throwing + * @param {string} targetPath - Path to delete + * @param {boolean} recursive - Whether to delete recursively (for directories) + */ +const safeDelete = (targetPath, recursive = false) => { + try { + if (fs.existsSync(targetPath)) { + if (recursive) { + fs.rmSync(targetPath, { recursive: true, force: true }); + } else { + fs.unlinkSync(targetPath); + } + debug(logger, `Deleted: ${targetPath}`); + } + } catch (err) { + debug(logger, `Could not delete ${targetPath}:`, err.message); + } +}; + +/** + * Safely copies a file, creating parent directories as needed + * @param {string} source - Source file path + * @param {string} target - Target file path + * @param {Object} options - Options: { mode, recursive } + */ +const safeCopy = (source, target, options = {}) => { + if (!fs.existsSync(source)) return false; + + fs.mkdirSync(path.dirname(target), { recursive: true }); + if (options.recursive) { + fs.cpSync(source, target, { recursive: true }); + } else { + fs.copyFileSync(source, target); + if (options.mode !== undefined) { + fs.chmodSync(target, options.mode); + } + } + debug(logger, `Copied: ${source} -> ${target}`); + return true; +}; + +/** + * Builds the table counts object for audit log meta + * @param {Object} tables - Tables object from backup data + * @returns {Object} - Table counts + */ +const buildTableCounts = (tables) => ({ + users: tables.user?.length || 0, + certificates: tables.certificate?.length || 0, + access_lists: tables.access_list?.length || 0, + proxy_hosts: tables.proxy_host?.length || 0, + redirection_hosts: tables.redirection_host?.length || 0, + dead_hosts: tables.dead_host?.length || 0, + streams: tables.stream?.length || 0, +}); + +// File paths +const LETSENCRYPT_PATH = "/etc/letsencrypt"; +const LETSENCRYPT_LIVE_PATH = `${LETSENCRYPT_PATH}/live`; +const LETSENCRYPT_ARCHIVE_PATH = `${LETSENCRYPT_PATH}/archive`; +const LETSENCRYPT_RENEWAL_PATH = `${LETSENCRYPT_PATH}/renewal`; +const LETSENCRYPT_ACCOUNTS_PATH = `${LETSENCRYPT_PATH}/accounts`; +const LETSENCRYPT_CREDENTIALS_PATH = `${LETSENCRYPT_PATH}/credentials`; +const LETSENCRYPT_RENEWAL_HOOKS_PATH = `${LETSENCRYPT_PATH}/renewal-hooks`; 
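+// For reference, the certbot layout mirrored below: archive/ holds the real cert
+// files (fullchain1.pem, privkey1.pem, ...), live/ holds symlinks into archive/,
+// and renewal/ holds one .conf per certificate. Only live/ is skipped on export
+// and rebuilt on restore (see createLiveSymlinks).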
+const CUSTOM_SSL_PATH = "/data/custom_ssl"; +const ACCESS_LIST_PATH = "/data/access"; + +const internalBackup = { + /** + * Export all configuration data and certificate files to a ZIP + * @param {Access} access + * @param {string|null} password - Optional password for ZIP encryption + * @returns {Promise<{fileName: string}>} + */ + exportAll: async (access, password = null) => { + await access.can("settings:update"); + + logger.info("Starting backup export..."); + + // Collect all database data + const data = { + version: BACKUP_VERSION, + exportedAt: new Date().toISOString(), + tables: {}, + }; + + // Export tables (excluding soft-deleted records where applicable) + for (const { table, softDelete } of TABLE_CONFIG) { + const query = models[table].query(); + data.tables[table] = softDelete ? await query.where("is_deleted", 0) : await query; + } + + // Create ZIP file + const downloadName = `npm-backup-${Date.now()}.zip`; + const zipPath = `/tmp/${downloadName}`; + + await internalBackup.createBackupZip(data, zipPath, password); + + logger.info("Backup export completed:", zipPath); + + // Add to audit log + await auditLogModel.query().insert({ + user_id: access.token.getUserId(1), + action: "exported", + object_type: "backup", + object_id: 0, + meta: { + exportedAt: data.exportedAt, + tables: buildTableCounts(data.tables), + }, + }); + + return { + fileName: zipPath, + }; + }, + + /** + * Creates the backup ZIP file with database JSON and certificate files + * @param {Object} data - Database export data + * @param {string} outputPath - Path to write ZIP file + * @param {string|null} password - Optional password for ZIP encryption + * @returns {Promise} + */ + createBackupZip: async (data, outputPath, password = null) => { + return new Promise((resolve, reject) => { + // Note: Using zip20 (ZipCrypto) instead of aes256 because unzipper only supports + // legacy zip encryption. AES-256 is more secure but incompatible with unzipper. + const archive = password + ? archiver.create("zip-encrypted", { zlib: { level: 9 }, encryptionMethod: "zip20", password }) + : archiver("zip", { zlib: { level: 9 } }); + const stream = fs.createWriteStream(outputPath); + + archive.on("error", (err) => reject(err)); + stream.on("close", () => resolve()); + + archive.pipe(stream); + + // Add database export as JSON + archive.append(JSON.stringify(data, null, 2), { name: "database.json" }); + + // Add Let's Encrypt certificate directories + // Note: We don't backup the 'live' directory because it contains symlinks to 'archive'. + // Instead, we backup 'archive' and recreate the symlinks on restore. 
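+			// e.g. live/npm-1/fullchain.pem -> ../../archive/npm-1/fullchain2.pem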
+ if (data.tables.certificate) { + for (const cert of data.tables.certificate) { + if (cert.provider === "letsencrypt") { + // Archive directory (actual cert files) + const archivePath = `${LETSENCRYPT_ARCHIVE_PATH}/npm-${cert.id}`; + if (fs.existsSync(archivePath)) { + debug(logger, `Adding Let's Encrypt archive directory: ${archivePath}`); + archive.directory(archivePath, `letsencrypt/archive/npm-${cert.id}`); + } + + // README file from live directory (certbot creates this in live, not archive) + const readmePath = `${LETSENCRYPT_LIVE_PATH}/npm-${cert.id}/README`; + if (fs.existsSync(readmePath)) { + debug(logger, `Adding README file: ${readmePath}`); + archive.file(readmePath, { name: `letsencrypt/live/npm-${cert.id}/README` }); + } + + // Renewal configuration + const renewalPath = `${LETSENCRYPT_RENEWAL_PATH}/npm-${cert.id}.conf`; + if (fs.existsSync(renewalPath)) { + debug(logger, `Adding renewal config: ${renewalPath}`); + archive.file(renewalPath, { name: `letsencrypt/renewal/npm-${cert.id}.conf` }); + } + + // DNS challenge credentials + const credPath = `${LETSENCRYPT_CREDENTIALS_PATH}/credentials-${cert.id}`; + if (fs.existsSync(credPath)) { + debug(logger, `Adding credentials file: ${credPath}`); + archive.file(credPath, { name: `letsencrypt/credentials/credentials-${cert.id}` }); + } + } else if (cert.provider === "other") { + // Custom SSL certificates + const customPath = `${CUSTOM_SSL_PATH}/npm-${cert.id}`; + if (fs.existsSync(customPath)) { + debug(logger, `Adding custom SSL cert directory: ${customPath}`); + archive.directory(customPath, `custom_ssl/npm-${cert.id}`); + } + } + } + } + + // Add Let's Encrypt accounts directory (shared across all certs) + if (fs.existsSync(LETSENCRYPT_ACCOUNTS_PATH)) { + debug(logger, `Adding Let's Encrypt accounts directory: ${LETSENCRYPT_ACCOUNTS_PATH}`); + archive.directory(LETSENCRYPT_ACCOUNTS_PATH, "letsencrypt/accounts"); + } + + // Add Let's Encrypt renewal-hooks directory (shared across all certs) + if (fs.existsSync(LETSENCRYPT_RENEWAL_HOOKS_PATH)) { + debug(logger, `Adding Let's Encrypt renewal-hooks directory: ${LETSENCRYPT_RENEWAL_HOOKS_PATH}`); + archive.directory(LETSENCRYPT_RENEWAL_HOOKS_PATH, "letsencrypt/renewal-hooks"); + } + + // Add access list htpasswd files + if (data.tables.access_list) { + for (const list of data.tables.access_list) { + const htpasswdPath = `${ACCESS_LIST_PATH}/${list.id}`; + if (fs.existsSync(htpasswdPath)) { + debug(logger, `Adding htpasswd file: ${htpasswdPath}`); + archive.file(htpasswdPath, { name: `access/${list.id}` }); + } + } + } + + archive.finalize(); + }); + }, + + /** + * Extracts a ZIP directory to disk, supporting password-protected files + * @param {Object} directory - unzipper directory object from Open.file() + * @param {string} extractDir - Directory to extract files to + * @param {string|null} password - Optional password for encrypted files + * @returns {Promise} + */ + extractZipDirectory: async (directory, extractDir, password = null) => { + for (const file of directory.files) { + const filePath = path.join(extractDir, file.path); + + if (file.type === "Directory") { + fs.mkdirSync(filePath, { recursive: true }); + continue; + } + + // Ensure parent directory exists + fs.mkdirSync(path.dirname(filePath), { recursive: true }); + + // Extract file content using stream with optional password + await new Promise((resolve, reject) => { + const writeStream = fs.createWriteStream(filePath); + const readStream = file.stream(password || undefined); + + // Must listen for errors on BOTH 
streams - unzipper emits password errors on the read stream + readStream.on("error", reject); + writeStream.on("error", reject); + writeStream.on("finish", resolve); + + readStream.pipe(writeStream); + }); + } + }, + + /** + * Import configuration from a backup ZIP file + * @param {Access} access + * @param {Object} file - Uploaded file object from express-fileupload + * @param {string|null} password - Optional password for encrypted ZIPs + * @returns {Promise<{success: boolean, message: string}>} + */ + importAll: async (access, file, password = null) => { + await access.can("settings:update"); + + if (!file || !file.data) { + throw new errs.ValidationError("No backup file provided"); + } + + logger.info("Starting backup import..."); + + // Write uploaded file to temp location + const tempZipPath = `/tmp/npm-backup-upload-${Date.now()}.zip`; + const extractDir = `/tmp/npm-backup-extract-${Date.now()}`; + + try { + // Write the uploaded file to disk + fs.writeFileSync(tempZipPath, file.data); + + // Extract ZIP (with optional password for encrypted backups) + fs.mkdirSync(extractDir, { recursive: true }); + try { + const directory = await unzipper.Open.file(tempZipPath); + await internalBackup.extractZipDirectory(directory, extractDir, password); + } catch (extractErr) { + // Check if this is a password-related error from unzipper + const errMsg = extractErr.message || ""; + debug(logger, `Zip extraction error: ${errMsg}`); + + if (errMsg === "MISSING_PASSWORD") { + throw new errs.ValidationError("This backup is password-protected. Please provide the password."); + } + if (errMsg === "BAD_PASSWORD") { + throw new errs.ValidationError("Incorrect password. Please check and try again."); + } + throw new errs.ValidationError(`Failed to extract backup file: ${errMsg}`); + } + + // Read and validate database.json + const dbJsonPath = path.join(extractDir, "database.json"); + if (!fs.existsSync(dbJsonPath)) { + throw new errs.ValidationError("Invalid backup file: missing database.json"); + } + + const data = JSON.parse(fs.readFileSync(dbJsonPath, "utf8")); + + // Validate backup version + if (!data.version || data.version > BACKUP_VERSION) { + throw new errs.ValidationError( + `Unsupported backup version: ${data.version}. 
Maximum supported: ${BACKUP_VERSION}`, + ); + } + + // Validate required tables exist + const requiredTables = ["user", "setting"]; + for (const table of requiredTables) { + if (!data.tables[table]) { + throw new errs.ValidationError(`Invalid backup: missing ${table} table`); + } + } + + // Purge existing files before we delete DB rows + await internalBackup.purgeNginxConfigs(); + await internalBackup.purgeAccessListFiles(); + await internalBackup.purgeCertificateFiles(); + + // Perform the import + await internalBackup.performImport(data, extractDir); + + // Regenerate all nginx configs + await internalBackup.regenerateNginxConfigs(); + + logger.info("Backup import completed successfully"); + + // Add audit log entry for the import (user_id=0 since original user no longer exists) + try { + await models.audit_log.query().insert({ + user_id: 0, + action: "imported", + object_type: "backup", + object_id: 0, + meta: { + importedAt: new Date().toISOString(), + backupVersion: data.version, + backupExportedAt: data.exportedAt, + tables: buildTableCounts(data.tables), + }, + }); + } catch (auditErr) { + // Don't fail the restore if audit logging fails + logger.warn("Could not add audit log entry:", auditErr.message); + } + + return { + success: true, + message: "Backup restored successfully. Please log in again.", + }; + } finally { + // Cleanup temp files + try { + if (fs.existsSync(tempZipPath)) { + fs.unlinkSync(tempZipPath); + } + if (fs.existsSync(extractDir)) { + fs.rmSync(extractDir, { recursive: true, force: true }); + } + } catch (e) { + logger.warn("Failed to cleanup temp files:", e.message); + } + } + }, + + /** + * Performs the actual database import in correct FK order + * @param {Object} data - Parsed backup data + * @param {string} extractDir - Path to extracted backup files + * @returns {Promise} + */ + performImport: async (data, extractDir) => { + const tables = data.tables; + const knex = db(); + + // Clear ALL existing data in reverse dependency order + // This includes users - restore replaces everything + logger.info("Clearing existing data..."); + for (const table of DELETE_TABLE_ORDER) { + await models[table].query().delete(); + } + + // Restore all files FIRST (before DB imports) + logger.info("Restoring certificate files..."); + await internalBackup.restoreCertificateFiles(tables.certificate || [], extractDir); + + logger.info("Restoring access list files..."); + await internalBackup.restoreAccessListFiles(tables.access_list || [], extractDir); + + // Import all tables in dependency order + // Uses raw knex to bypass Objection's $beforeInsert hooks (preserves IDs, hashed passwords, etc.) 
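+		// For example, an auth row is inserted with its original `id` and its already
+		// bcrypt-hashed `secret`; inserting through the model could hash it again.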
+ for (const { table, useModel } of TABLE_CONFIG) { + if (!tables[table]) continue; + + logger.info(`Importing ${table}...`); + for (const row of tables[table]) { + if (useModel) { + await models[table].query().insert(row).onConflict("id").merge(); + } else { + await knex(table).insert(prepareRowForInsert(table, row)); + } + } + } + }, + + /** + * Restores certificate files from backup to their target locations + * @param {Array} certificates - Array of certificate records + * @param {string} extractDir - Path to extracted backup + * @returns {Promise} + */ + restoreCertificateFiles: async (certificates, extractDir) => { + // Restore shared Let's Encrypt directories first + safeCopy(path.join(extractDir, "letsencrypt/accounts"), LETSENCRYPT_ACCOUNTS_PATH, { recursive: true }); + safeCopy(path.join(extractDir, "letsencrypt/renewal-hooks"), LETSENCRYPT_RENEWAL_HOOKS_PATH, { recursive: true }); + + // Restore per-certificate files + for (const cert of certificates) { + if (cert.provider === "letsencrypt") { + // Archive directory (actual cert files) + const archiveSource = path.join(extractDir, `letsencrypt/archive/npm-${cert.id}`); + const archiveTarget = `${LETSENCRYPT_ARCHIVE_PATH}/npm-${cert.id}`; + if (safeCopy(archiveSource, archiveTarget, { recursive: true })) { + // Create live directory with symlinks to archive + await internalBackup.createLiveSymlinks(cert.id); + } + + // Renewal config + safeCopy( + path.join(extractDir, `letsencrypt/renewal/npm-${cert.id}.conf`), + `${LETSENCRYPT_RENEWAL_PATH}/npm-${cert.id}.conf`, + ); + + // Credentials (with restricted permissions) + safeCopy( + path.join(extractDir, `letsencrypt/credentials/credentials-${cert.id}`), + `${LETSENCRYPT_CREDENTIALS_PATH}/credentials-${cert.id}`, + { mode: 0o600 }, + ); + + // README file (certbot creates this in live directory) + safeCopy( + path.join(extractDir, `letsencrypt/live/npm-${cert.id}/README`), + `${LETSENCRYPT_LIVE_PATH}/npm-${cert.id}/README`, + ); + } else if (cert.provider === "other") { + safeCopy( + path.join(extractDir, `custom_ssl/npm-${cert.id}`), + `${CUSTOM_SSL_PATH}/npm-${cert.id}`, + { recursive: true }, + ); + } + } + }, + + /** + * Creates the live directory symlinks pointing to the latest files in archive + * Certbot uses symlinks like: live/npm-X/fullchain.pem -> ../../archive/npm-X/fullchainN.pem + * @param {number} certId - Certificate ID + * @returns {Promise} + */ + createLiveSymlinks: async (certId) => { + const archiveDir = `${LETSENCRYPT_ARCHIVE_PATH}/npm-${certId}`; + const liveDir = `${LETSENCRYPT_LIVE_PATH}/npm-${certId}`; + + if (!fs.existsSync(archiveDir)) { + debug(logger, `Archive directory does not exist: ${archiveDir}`); + return; + } + + // Create live directory + fs.mkdirSync(liveDir, { recursive: true }); + + // Standard certbot certificate files + const certFiles = ["cert", "chain", "fullchain", "privkey"]; + + for (const baseName of certFiles) { + // Find the highest version number for this file in archive + const files = fs.readdirSync(archiveDir).filter((f) => f.startsWith(`${baseName}`) && f.endsWith(".pem")); + if (files.length === 0) continue; + + // Sort to get the highest version (e.g., fullchain2.pem > fullchain1.pem) + files.sort((a, b) => { + const numA = Number.parseInt(a.replace(`${baseName}`, "").replace(".pem", ""), 10) || 0; + const numB = Number.parseInt(b.replace(`${baseName}`, "").replace(".pem", ""), 10) || 0; + return numB - numA; + }); + + const latestFile = files[0]; + const symlinkPath = path.join(liveDir, `${baseName}.pem`); + const 
targetPath = `../../archive/npm-${certId}/${latestFile}`; + + try { + // Remove existing symlink if present + if (fs.existsSync(symlinkPath)) { + fs.unlinkSync(symlinkPath); + } + fs.symlinkSync(targetPath, symlinkPath); + debug(logger, `Created symlink: ${symlinkPath} -> ${targetPath}`); + } catch (err) { + debug(logger, `Could not create symlink ${symlinkPath}:`, err.message); + } + } + debug(logger, `Created live symlinks for certificate ${certId}`); + }, + + /** + * Restores access list htpasswd files from backup + * @param {Array} accessLists - Array of access list records + * @param {string} extractDir - Path to extracted backup + * @returns {Promise} + */ + restoreAccessListFiles: async (accessLists, extractDir) => { + for (const list of accessLists) { + safeCopy(path.join(extractDir, `access/${list.id}`), `${ACCESS_LIST_PATH}/${list.id}`); + } + }, + + /** + * Purges all access list (htpasswd) files before import + * @returns {Promise} + */ + purgeAccessListFiles: async () => { + logger.info("Purging existing access list files..."); + + const accessLists = await models.access_list.query().where("is_deleted", 0); + + for (const list of accessLists) { + safeDelete(internalAccessList.getFilename(list)); + } + + logger.info(`Access list files purged (${accessLists.length} files)`); + }, + + /** + * Purges all certificate files before import + * This removes Let's Encrypt certs, credentials, and custom SSL certs + * @returns {Promise} + */ + purgeCertificateFiles: async () => { + logger.info("Purging existing certificate files..."); + + // Purge shared Let's Encrypt directories + safeDelete(LETSENCRYPT_ACCOUNTS_PATH, true); + safeDelete(LETSENCRYPT_RENEWAL_HOOKS_PATH, true); + + // Purge per-certificate files + const certificates = await models.certificate.query().where("is_deleted", 0); + + for (const cert of certificates) { + if (cert.provider === "letsencrypt") { + safeDelete(`${LETSENCRYPT_LIVE_PATH}/npm-${cert.id}`, true); + safeDelete(`${LETSENCRYPT_ARCHIVE_PATH}/npm-${cert.id}`, true); + safeDelete(`${LETSENCRYPT_RENEWAL_PATH}/npm-${cert.id}.conf`); + safeDelete(`${LETSENCRYPT_CREDENTIALS_PATH}/credentials-${cert.id}`); + } else if (cert.provider === "other") { + safeDelete(`${CUSTOM_SSL_PATH}/npm-${cert.id}`, true); + } + } + + logger.info(`Certificate files purged (${certificates.length} certificates)`); + }, + + /** + * Purges all nginx configuration files before import + * This must be called before database rows are deleted to avoid orphaned configs + * @returns {Promise} + */ + purgeNginxConfigs: async () => { + logger.info("Purging existing nginx configs..."); + + for (const { type } of NGINX_HOST_TYPES) { + const hosts = await models[type].query().where("is_deleted", 0); + if (hosts.length) { + logger.info(`Deleting ${hosts.length} ${type} configs...`); + await internalNginx.bulkDeleteConfigs(type, hosts); + } + } + + logger.info("Nginx configs purged"); + }, + + /** + * Regenerates all nginx configuration files after import + * @returns {Promise} + */ + regenerateNginxConfigs: async () => { + logger.info("Regenerating nginx configs..."); + + // Regenerate configs for all host types + for (const { type, graph } of NGINX_HOST_TYPES) { + const hosts = await models[type] + .query() + .where("is_deleted", 0) + .andWhere("enabled", 1) + .withGraphFetched(graph); + + if (hosts.length) { + logger.info(`Regenerating ${hosts.length} ${type} configs...`); + await internalNginx.bulkGenerateConfigs(type, hosts); + } + } + + // Regenerate default site config + const defaultSiteSetting 
= await models.setting.query().where("id", "default-site").first(); + if (defaultSiteSetting) { + logger.info("Regenerating default site config..."); + await internalNginx.generateConfig("default", defaultSiteSetting); + } + + // Test and reload nginx + logger.info("Testing nginx configuration..."); + await internalNginx.test(); + logger.info("Reloading nginx..."); + await internalNginx.reload(); + + logger.info("Nginx configs regenerated successfully"); + }, +}; + +export default internalBackup; diff --git a/backend/logger.js b/backend/logger.js index 2b60dbff7b..a00e97ad40 100644 --- a/backend/logger.js +++ b/backend/logger.js @@ -16,6 +16,7 @@ const importer = new signale.Signale({ scope: "Importer ", ...opts }); const setup = new signale.Signale({ scope: "Setup ", ...opts }); const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts }); const remoteVersion = new signale.Signale({ scope: "Remote Version", ...opts }); +const backup = new signale.Signale({ scope: "Backup ", ...opts }); const debug = (logger, ...args) => { if (isDebugMode()) { @@ -23,4 +24,4 @@ const debug = (logger, ...args) => { } }; -export { debug, global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges, remoteVersion }; +export { debug, global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges, remoteVersion, backup }; diff --git a/backend/package.json b/backend/package.json index ed143c6816..56334b6f08 100644 --- a/backend/package.json +++ b/backend/package.json @@ -15,12 +15,14 @@ "@apidevtools/json-schema-ref-parser": "^11.7.0", "ajv": "^8.17.1", "archiver": "^5.3.0", + "archiver-zip-encrypted": "^2.0.0", "batchflow": "^0.4.0", "bcrypt": "^5.0.0", "body-parser": "^1.20.3", "compression": "^1.7.4", "express": "^4.22.0", "express-fileupload": "^1.5.2", + "unzipper": "^0.12.3", "gravatar": "^1.8.2", "jsonwebtoken": "^9.0.2", "knex": "2.4.2", diff --git a/backend/routes/backup.js b/backend/routes/backup.js new file mode 100644 index 0000000000..0843d4c06c --- /dev/null +++ b/backend/routes/backup.js @@ -0,0 +1,63 @@ +import express from "express"; +import internalBackup from "../internal/backup.js"; +import jwtdecode from "../lib/express/jwt-decode.js"; +import { debug, backup as logger } from "../logger.js"; + +const router = express.Router({ + caseSensitive: true, + strict: true, + mergeParams: true, +}); + +/** + * Export Configuration + * + * GET /api/backup/export + */ +router + .route("/export") + .options((_req, res) => { + res.sendStatus(204); + }) + .all(jwtdecode()) + .get(async (req, res, next) => { + try { + req.setTimeout(300000); // 5 minutes timeout for large exports + const password = req.query.password || null; + const result = await internalBackup.exportAll(res.locals.access, password); + res.status(200).download(result.fileName); + } catch (err) { + debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`); + next(err); + } + }); + +/** + * Import Configuration + * + * POST /api/backup/import + */ +router + .route("/import") + .options((_req, res) => { + res.sendStatus(204); + }) + .all(jwtdecode()) + .post(async (req, res, next) => { + if (!req.files || !req.files.backup) { + res.status(400).send({ error: { message: "No backup file uploaded" } }); + return; + } + + try { + req.setTimeout(600000); // 10 minutes timeout for large imports + const password = req.body.password || null; + const result = await internalBackup.importAll(res.locals.access, req.files.backup, password); + res.status(200).send(result); + } catch (err) { + 
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+export default router;
diff --git a/backend/routes/main.js b/backend/routes/main.js
index 94682cfba4..94546dd32a 100644
--- a/backend/routes/main.js
+++ b/backend/routes/main.js
@@ -3,6 +3,7 @@ import errs from "../lib/error.js";
 import pjson from "../package.json" with { type: "json" };
 import { isSetup } from "../setup.js";
 import auditLogRoutes from "./audit-log.js";
+import backupRoutes from "./backup.js";
 import accessListsRoutes from "./nginx/access_lists.js";
 import certificatesHostsRoutes from "./nginx/certificates.js";
 import deadHostsRoutes from "./nginx/dead_hosts.js";
@@ -48,6 +49,7 @@ router.use("/audit-log", auditLogRoutes);
 router.use("/reports", reportsRoutes);
 router.use("/settings", settingsRoutes);
 router.use("/version", versionRoutes);
+router.use("/backup", backupRoutes);
 router.use("/nginx/proxy-hosts", proxyHostsRoutes);
 router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
 router.use("/nginx/dead-hosts", deadHostsRoutes);
diff --git a/frontend/src/api/backend/backup.ts b/frontend/src/api/backend/backup.ts
new file mode 100644
index 0000000000..bbf07b0e54
--- /dev/null
+++ b/frontend/src/api/backend/backup.ts
@@ -0,0 +1,29 @@
+import * as api from "./base";
+
+export interface ImportResult {
+	success: boolean;
+	message: string;
+}
+
+export async function exportBackup(password?: string): Promise<void> {
+	const params = password ? `?password=${encodeURIComponent(password)}` : "";
+	await api.download(
+		{
+			url: `/backup/export${params}`,
+		},
+		`npm-backup-${Date.now()}.zip`,
+	);
+}
+
+export async function importBackup(file: File, password?: string): Promise<ImportResult> {
+	const formData = new FormData();
+	formData.append("backup", file);
+	if (password) {
+		formData.append("password", password);
+	}
+
+	return await api.post({
+		url: "/backup/import",
+		data: formData,
+	});
+}
diff --git a/frontend/src/api/backend/index.ts b/frontend/src/api/backend/index.ts
index 40cb4142fc..b7dd6b8cc1 100644
--- a/frontend/src/api/backend/index.ts
+++ b/frontend/src/api/backend/index.ts
@@ -1,3 +1,4 @@
+export * from "./backup";
 export * from "./checkVersion";
 export * from "./createAccessList";
 export * from "./createCertificate";
diff --git a/frontend/src/components/Table/Formatter/EventFormatter.tsx b/frontend/src/components/Table/Formatter/EventFormatter.tsx
index 1220fa0961..2de31cbd2e 100644
--- a/frontend/src/components/Table/Formatter/EventFormatter.tsx
+++ b/frontend/src/components/Table/Formatter/EventFormatter.tsx
@@ -1,4 +1,4 @@
-import { IconArrowsCross, IconBolt, IconBoltOff, IconDisc, IconLock, IconShield, IconUser } from "@tabler/icons-react";
+import { IconArchive, IconArrowsCross, IconBolt, IconBoltOff, IconDisc, IconLock, IconShield, IconUser } from "@tabler/icons-react";
 import cn from "classnames";
 import type { AuditLog } from "src/api/backend";
 import { useLocaleState } from "src/context";
@@ -17,6 +17,8 @@ const getEventValue = (event: AuditLog) => {
 		return event.meta?.incomingPort || "N/A";
 	case "certificate":
 		return event.meta?.domainNames?.join(", ") || event.meta?.niceName || "N/A";
+	case "backup":
+		return event.meta?.exportedAt || event.meta?.importedAt || "N/A";
 	default:
 		return `UNKNOWN EVENT TYPE: ${event.objectType}`;
@@ -58,6 +60,9 @@ const getIcon = (row: AuditLog) => {
 		case "certificate":
 			ico = <IconLock />;
 			break;
+		case "backup":
+			ico = <IconArchive />;
+			break;
 	}
 
 	return ico;
diff --git a/frontend/src/hooks/index.ts b/frontend/src/hooks/index.ts
index 744190ade1..b3531aec3e 
100644 --- a/frontend/src/hooks/index.ts +++ b/frontend/src/hooks/index.ts @@ -1,4 +1,5 @@ export * from "./useAccessList"; +export * from "./useBackup"; export * from "./useAccessLists"; export * from "./useAuditLog"; export * from "./useAuditLogs"; diff --git a/frontend/src/hooks/useBackup.ts b/frontend/src/hooks/useBackup.ts new file mode 100644 index 0000000000..0d8a7c3722 --- /dev/null +++ b/frontend/src/hooks/useBackup.ts @@ -0,0 +1,33 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; +import { exportBackup, importBackup, type ImportResult } from "src/api/backend"; +import AuthStore from "src/modules/AuthStore"; + +const useExportBackup = () => { + return useMutation({ + mutationFn: (password?: string) => exportBackup(password), + }); +}; + +interface ImportBackupParams { + file: File; + password?: string; +} + +const useImportBackup = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: ({ file, password }: ImportBackupParams) => importBackup(file, password), + onSuccess: () => { + // Force logout user and do a full navigation to ensure fresh state + AuthStore.clear(); + queryClient.clear(); + // Small delay to ensure backend has fully completed + setTimeout(() => { + window.location.href = "/"; + }, 1000); + }, + }); +}; + +export { useExportBackup, useImportBackup }; diff --git a/frontend/src/locale/src/en.json b/frontend/src/locale/src/en.json index ae02605e98..f0df575ac7 100644 --- a/frontend/src/locale/src/en.json +++ b/frontend/src/locale/src/en.json @@ -542,6 +542,12 @@ "object.event.enabled": { "defaultMessage": "Enabled {object}" }, + "object.event.exported": { + "defaultMessage": "Exported {object}" + }, + "object.event.imported": { + "defaultMessage": "Imported {object}" + }, "object.event.renewed": { "defaultMessage": "Renewed {object}" }, @@ -650,6 +656,75 @@ "settings": { "defaultMessage": "Settings" }, + "settings.backup": { + "defaultMessage": "Backup & Restore" + }, + "settings.backup.export.button": { + "defaultMessage": "Export Backup" + }, + "settings.backup.export.description": { + "defaultMessage": "Download a backup of all your configuration including hosts, access lists, certificates, users, and settings." + }, + "settings.backup.export.password.confirm": { + "defaultMessage": "Confirm password" + }, + "settings.backup.export.password.enable": { + "defaultMessage": "Protect with password" + }, + "settings.backup.export.password.label": { + "defaultMessage": "Password" + }, + "settings.backup.export.password.mismatch": { + "defaultMessage": "Passwords do not match" + }, + "settings.backup.export.secrets-warning": { + "defaultMessage": "This backup may contain sensitive data including SSL certificates, private keys, DNS provider credentials, and htpasswd files. Consider using password protection." + }, + "settings.backup.export.success": { + "defaultMessage": "Backup exported successfully" + }, + "settings.backup.export.title": { + "defaultMessage": "Export Configuration" + }, + "settings.backup.import.confirm.button": { + "defaultMessage": "Import Backup" + }, + "settings.backup.import.confirm.file": { + "defaultMessage": "File" + }, + "settings.backup.import.confirm.logout": { + "defaultMessage": "You will be logged out and required to log in again after the import is complete." + }, + "settings.backup.import.confirm.message": { + "defaultMessage": "Are you sure you want to import this backup? All existing hosts, access lists, certificates, users, and settings will be replaced." 
+	},
+	"settings.backup.import.confirm.title": {
+		"defaultMessage": "Confirm Import"
+	},
+	"settings.backup.import.confirm.warning": {
+		"defaultMessage": "This will permanently delete all existing configuration!"
+	},
+	"settings.backup.import.description": {
+		"defaultMessage": "Restore configuration from a previously exported backup file."
+	},
+	"settings.backup.import.password.hint": {
+		"defaultMessage": "Enter password if the backup is encrypted"
+	},
+	"settings.backup.import.password.label": {
+		"defaultMessage": "Password (if encrypted)"
+	},
+	"settings.backup.import.progress": {
+		"defaultMessage": "Importing backup... This may take a few minutes."
+	},
+	"settings.backup.import.success": {
+		"defaultMessage": "Backup imported successfully. You may need to refresh the page."
+	},
+	"settings.backup.import.title": {
+		"defaultMessage": "Import Configuration"
+	},
+	"settings.backup.import.warning": {
+		"defaultMessage": "Warning: Importing a backup will replace ALL existing configuration data. This action cannot be undone."
+	},
 	"settings.default-site": {
 		"defaultMessage": "Default Site"
 	},
diff --git a/frontend/src/pages/Settings/Backup.tsx b/frontend/src/pages/Settings/Backup.tsx
new file mode 100644
index 0000000000..82478e5b46
--- /dev/null
+++ b/frontend/src/pages/Settings/Backup.tsx
@@ -0,0 +1,299 @@
+import { type ChangeEvent, useRef, useState } from "react";
+import { IconEye, IconEyeOff } from "@tabler/icons-react";
+import { Alert, Modal } from "react-bootstrap";
+import { Button, Loading } from "src/components";
+import { useExportBackup, useImportBackup } from "src/hooks";
+import { intl, T } from "src/locale";
+import { showError, showSuccess } from "src/notifications";
+
+export default function Backup() {
+	const fileInputRef = useRef<HTMLInputElement>(null);
+	const [selectedFile, setSelectedFile] = useState<File | null>(null);
+	const [showConfirmModal, setShowConfirmModal] = useState(false);
+	const [errorMsg, setErrorMsg] = useState<string | null>(null);
+
+	// Export password state
+	const [useExportPassword, setUseExportPassword] = useState(false);
+	const [exportPassword, setExportPassword] = useState("");
+	const [exportPasswordConfirm, setExportPasswordConfirm] = useState("");
+	const [showExportPassword, setShowExportPassword] = useState(false);
+
+	// Import password state
+	const [importPassword, setImportPassword] = useState("");
+	const [showImportPassword, setShowImportPassword] = useState(false);
+
+	const { mutate: doExport, isPending: isExporting } = useExportBackup();
+	const { mutate: doImport, isPending: isImporting } = useImportBackup();
+
+	const exportPasswordsMatch = !useExportPassword || exportPassword === exportPasswordConfirm;
+	const canExport = !useExportPassword || (exportPassword.length > 0 && exportPasswordsMatch);
+
+	const handleExport = () => {
+		setErrorMsg(null);
+		const password = useExportPassword && exportPassword ? 
exportPassword : undefined;
+		doExport(password, {
+			onSuccess: () => {
+				showSuccess(intl.formatMessage({ id: "settings.backup.export.success" }));
+				// Reset password fields after successful export
+				setExportPassword("");
+				setExportPasswordConfirm("");
+			},
+			onError: (err: Error) => {
+				setErrorMsg(err.message);
+				showError(err.message);
+			},
+		});
+	};
+
+	const handleFileSelect = (e: ChangeEvent<HTMLInputElement>) => {
+		const file = e.target.files?.[0];
+		if (file) {
+			setSelectedFile(file);
+			setShowConfirmModal(true);
+		}
+	};
+
+	const handleImportConfirm = () => {
+		if (!selectedFile) return;
+
+		setErrorMsg(null);
+		setShowConfirmModal(false);
+
+		doImport(
+			{ file: selectedFile, password: importPassword || undefined },
+			{
+				onSuccess: () => {
+					showSuccess(intl.formatMessage({ id: "settings.backup.import.success" }));
+					setSelectedFile(null);
+					setImportPassword("");
+					if (fileInputRef.current) {
+						fileInputRef.current.value = "";
+					}
+				},
+				onError: (err: Error) => {
+					setErrorMsg(err.message);
+					showError(err.message);
+					// Re-open modal so user can retry with different password or cancel
+					setShowConfirmModal(true);
+				},
+			},
+		);
+	};
+
+	const handleImportCancel = () => {
+		setShowConfirmModal(false);
+		setSelectedFile(null);
+		setImportPassword("");
+		if (fileInputRef.current) {
+			fileInputRef.current.value = "";
+		}
+	};
+
+	return (
+		<>
+			<Alert variant="danger" show={!!errorMsg} onClose={() => setErrorMsg(null)} dismissible>
+				{errorMsg}
+			</Alert>
+
+			{/* Export Section */}
+			<div className="card mb-3">
+				<div className="card-header">
+					<h3 className="card-title">
+						<T id="settings.backup.export.title" />
+					</h3>
+				</div>
+				<div className="card-body">
+					<p>
+						<T id="settings.backup.export.description" />
+					</p>
+					<Alert variant="warning">
+						<T id="settings.backup.export.secrets-warning" />
+					</Alert>
+					<label className="form-check">
+						<input
+							className="form-check-input"
+							type="checkbox"
+							checked={useExportPassword}
+							onChange={(e) => setUseExportPassword(e.target.checked)}
+							disabled={isExporting || isImporting}
+						/>
+						<span className="form-check-label">
+							<T id="settings.backup.export.password.enable" />
+						</span>
+					</label>
+
+					{useExportPassword && (
+						<div className="row mb-3">
+							<div className="col-md-6">
+								<label className="form-label" htmlFor="export-password">
+									<T id="settings.backup.export.password.label" />
+								</label>
+								<div className="input-group">
+									<input
+										id="export-password"
+										className="form-control"
+										type={showExportPassword ? "text" : "password"}
+										value={exportPassword}
+										onChange={(e) => setExportPassword(e.target.value)}
+										disabled={isExporting || isImporting}
+									/>
+									<button
+										type="button"
+										className="btn btn-icon"
+										onClick={() => setShowExportPassword(!showExportPassword)}>
+										{showExportPassword ? <IconEyeOff size={16} /> : <IconEye size={16} />}
+									</button>
+								</div>
+							</div>
+							<div className="col-md-6">
+								<label className="form-label" htmlFor="export-password-confirm">
+									<T id="settings.backup.export.password.confirm" />
+								</label>
+								<input
+									id="export-password-confirm"
+									className="form-control"
+									type={showExportPassword ? "text" : "password"}
+									value={exportPasswordConfirm}
+									onChange={(e) => setExportPasswordConfirm(e.target.value)}
+									disabled={isExporting || isImporting}
+								/>
+								{exportPassword && !exportPasswordsMatch && (
+									<div className="invalid-feedback d-block">
+										<T id="settings.backup.export.password.mismatch" />
+									</div>
+								)}
+							</div>
+						</div>
+					)}
+
+					<Button
+						className="btn-primary"
+						onClick={handleExport}
+						disabled={isExporting || isImporting || !canExport}>
+						<T id="settings.backup.export.button" />
+					</Button>
+				</div>
+			</div>
+
+			{/* Import Section */}
+			<div className="card">
+				<div className="card-header">
+					<h3 className="card-title">
+						<T id="settings.backup.import.title" />
+					</h3>
+				</div>
+				<div className="card-body">
+					<p>
+						<T id="settings.backup.import.description" />
+					</p>
+					<Alert variant="danger">
+						<T id="settings.backup.import.warning" />
+					</Alert>
+
+					<input
+						ref={fileInputRef}
+						type="file"
+						accept=".zip"
+						className="form-control"
+						onChange={handleFileSelect}
+						disabled={isExporting || isImporting}
+					/>
+
+					{isImporting && (
+						<div className="mt-3">
+							<Loading />
+							<p>
+								<T id="settings.backup.import.progress" />
+							</p>
+						</div>
+					)}
+				</div>
+			</div>
+
+			{/* Confirmation Modal */}
+			<Modal show={showConfirmModal} onHide={handleImportCancel}>
+				<Modal.Header closeButton>
+					<Modal.Title>
+						<T id="settings.backup.import.confirm.title" />
+					</Modal.Title>
+				</Modal.Header>
+				<Modal.Body>
+					<Alert variant="danger">
+						<T id="settings.backup.import.confirm.warning" />
+					</Alert>
+					<p>
+						<T id="settings.backup.import.confirm.message" />
+					</p>
+					<p>
+						<T id="settings.backup.import.confirm.logout" />
+					</p>
+					<p>
+						<strong>
+							<T id="settings.backup.import.confirm.file" />:
+						</strong>
+						{" "}
+						{selectedFile?.name}
+					</p>
+
+					<label className="form-label" htmlFor="import-password">
+						<T id="settings.backup.import.password.label" />
+					</label>
+					<div className="input-group">
+						<input
+							id="import-password"
+							className="form-control"
+							type={showImportPassword ? "text" : "password"}
+							value={importPassword}
+							onChange={(e) => setImportPassword(e.target.value)}
+							placeholder={intl.formatMessage({ id: "settings.backup.import.password.hint" })}
+						/>
+						<button
+							type="button"
+							className="btn btn-icon"
+							onClick={() => setShowImportPassword(!showImportPassword)}>
+							{showImportPassword ? <IconEyeOff size={16} /> : <IconEye size={16} />}
+						</button>
+					</div>
+				</Modal.Body>
+				<Modal.Footer>
+					<Button onClick={handleImportCancel}>
+						<T id="cancel" />
+					</Button>
+					<Button className="btn-danger" onClick={handleImportConfirm}>
+						<T id="settings.backup.import.confirm.button" />
+					</Button>
+				</Modal.Footer>
+			</Modal>
+		</>
+	);
+}
diff --git a/frontend/src/pages/Settings/Layout.tsx b/frontend/src/pages/Settings/Layout.tsx
index a0a77db29e..bce8256c68 100644
--- a/frontend/src/pages/Settings/Layout.tsx
+++ b/frontend/src/pages/Settings/Layout.tsx
@@ -1,9 +1,12 @@
+import { useState } from "react";
 import { T } from "src/locale";
+import Backup from "./Backup";
 import DefaultSite from "./DefaultSite";
 
 export default function Layout() {
 	// Taken from https://preview.tabler.io/settings.html
 	// Refer to that when updating this content
+	const [activeTab, setActiveTab] = useState("default-site");
 
 	return (
-						<DefaultSite />
+						{activeTab === "default-site" && <DefaultSite />}
+						{activeTab === "backup" && <Backup />}
diff --git a/test/cypress/e2e/api/Backup.cy.js b/test/cypress/e2e/api/Backup.cy.js
new file mode 100644
index 0000000000..c2f23b550b
--- /dev/null
+++ b/test/cypress/e2e/api/Backup.cy.js
@@ -0,0 +1,432 @@
+/// <reference types="cypress" />
+
+describe('Backup endpoints', () => {
+	let token;
+	let backupData;
+
+	// Track created resource IDs for verification
+	let createdResources = {};
+
+	before(() => {
+		cy.resetUsers();
+		cy.getToken().then((tok) => {
+			token = tok;
+		});
+	});
+
+	after(() => {
+		// The import test restores all data from the backup, which affects subsequent test suites.
+		// Clean up by deleting all restored resources and resetting users.
+		// Re-authenticate first since the import replaced all users.
+		// Deletion order matters: hosts first, then the access lists and certificates they reference.
+		const cleanupPaths = [
+			'/api/nginx/streams',
+			'/api/nginx/dead-hosts',
+			'/api/nginx/redirection-hosts',
+			'/api/nginx/proxy-hosts',
+			'/api/nginx/access-lists',
+			'/api/nginx/certificates',
+		];
+
+		cy.getToken().then((newToken) => {
+			cleanupPaths.forEach((path) => {
+				cy.task('backendApiGet', { token: newToken, path }).then((items) => {
+					items.forEach((item) => {
+						cy.task('backendApiDelete', { token: newToken, path: `${path}/${item.id}` });
+					});
+				});
+			});
+			// Finally reset users to put the system back in setup mode
+			cy.resetUsers();
+		});
+	});
+
+	it('Full backup/restore cycle with all resource types', () => {
+		// =====================================================
+		// STEP 1: Create one resource of every type
+		// =====================================================
+
+		// 1a. 
Create a non-admin user + cy.task('backendApiPost', { + token: token, + path: '/api/users', + data: { + name: 'Backup Test User', + nickname: 'BackupUser', + email: 'backupuser@example.com', + roles: [], + auth: { + type: 'password', + secret: 'testpassword123' + } + } + }).then((user) => { + expect(user).to.have.property('id'); + createdResources.nonAdminUser = user; + + // 1b. Create a custom certificate + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/certificates', + data: { + provider: 'other', + nice_name: 'Backup Test Certificate' + } + }).then((cert) => { + expect(cert).to.have.property('id'); + createdResources.certificate = cert; + + // Upload certificate files + cy.task('backendApiPostFiles', { + token: token, + path: `/api/nginx/certificates/${cert.id}/upload`, + files: { + certificate: 'test.example.com.pem', + certificate_key: 'test.example.com-key.pem' + } + }).then(() => { + + // 1c. Create an access list + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/access-lists', + data: { + name: 'Backup Test Access List', + satisfy_any: true, + pass_auth: false, + items: [ + { + username: 'testuser', + password: 'testpass' + } + ], + clients: [ + { + directive: 'allow', + address: '192.168.1.0/24' + } + ] + } + }).then((accessList) => { + expect(accessList).to.have.property('id'); + createdResources.accessList = accessList; + + // 1d. Create a proxy host (with certificate and access list) + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/proxy-hosts', + data: { + domain_names: ['backup-proxy.example.com'], + forward_scheme: 'http', + forward_host: '192.168.1.100', + forward_port: 8080, + access_list_id: accessList.id, + certificate_id: cert.id, + meta: {}, + advanced_config: '', + locations: [], + block_exploits: false, + caching_enabled: false, + allow_websocket_upgrade: true, + http2_support: false, + hsts_enabled: false, + hsts_subdomains: false, + ssl_forced: false + } + }).then((proxyHost) => { + expect(proxyHost).to.have.property('id'); + createdResources.proxyHost = proxyHost; + + // 1e. Create a redirection host + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/redirection-hosts', + data: { + domain_names: ['backup-redirect.example.com'], + forward_scheme: 'https', + forward_http_code: 301, + forward_domain_name: 'target.example.com', + preserve_path: true, + certificate_id: 0, + ssl_forced: false, + http2_support: false, + hsts_enabled: false, + hsts_subdomains: false, + block_exploits: false, + advanced_config: '', + meta: {} + } + }).then((redirectionHost) => { + expect(redirectionHost).to.have.property('id'); + createdResources.redirectionHost = redirectionHost; + + // 1f. Create a 404 host (dead host) + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/dead-hosts', + data: { + domain_names: ['backup-404.example.com'], + certificate_id: 0, + ssl_forced: false, + http2_support: false, + hsts_enabled: false, + hsts_subdomains: false, + advanced_config: '', + meta: {} + } + }).then((deadHost) => { + expect(deadHost).to.have.property('id'); + createdResources.deadHost = deadHost; + + // 1g. 
Create a stream + cy.task('backendApiPost', { + token: token, + path: '/api/nginx/streams', + data: { + incoming_port: 19999, + forwarding_host: '192.168.1.200', + forwarding_port: 3306, + certificate_id: 0, + meta: {}, + tcp_forwarding: true, + udp_forwarding: false + } + }).then((stream) => { + expect(stream).to.have.property('id'); + createdResources.stream = stream; + + // ===================================================== + // STEP 2: Export backup + // ===================================================== + cy.task('backendApiGetBuffer', { + token: token, + path: '/api/backup/export' + }).then((result) => { + expect(result).to.have.property('length'); + expect(result.length).to.be.greaterThan(0); + backupData = result.data; + + // ===================================================== + // STEP 3: Delete all created resources + // ===================================================== + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/streams/${createdResources.stream.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/dead-hosts/${createdResources.deadHost.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/redirection-hosts/${createdResources.redirectionHost.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/proxy-hosts/${createdResources.proxyHost.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/access-lists/${createdResources.accessList.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/nginx/certificates/${createdResources.certificate.id}` + }).then(() => { + cy.task('backendApiDelete', { + token: token, + path: `/api/users/${createdResources.nonAdminUser.id}` + }).then(() => { + + // Verify resources are deleted + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/proxy-hosts' + }).then((hosts) => { + const found = hosts.find(h => h.domain_names && h.domain_names.includes('backup-proxy.example.com')); + expect(found).to.be.undefined; + + // ===================================================== + // STEP 4: Import the backup + // ===================================================== + cy.task('backendApiPostBuffer', { + token: token, + path: '/api/backup/import', + buffer: backupData, + fieldName: 'backup', + fileName: 'backup.zip' + }).then((importResult) => { + expect(importResult).to.have.property('success', true); + + // ===================================================== + // STEP 5: Re-authenticate (import replaces all users) + // ===================================================== + cy.getToken().then((newToken) => { + token = newToken; + + // ===================================================== + // STEP 6: Verify all resources were restored + // ===================================================== + + // Verify non-admin user + cy.task('backendApiGet', { + token: token, + path: '/api/users' + }).then((users) => { + const restoredUser = users.find(u => u.email === 'backupuser@example.com'); + expect(restoredUser).to.not.be.undefined; + expect(restoredUser.name).to.equal('Backup Test User'); + expect(restoredUser.roles).to.deep.equal([]); + + // Verify certificate + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/certificates' + }).then((certs) => { + const restoredCert = certs.find(c => c.nice_name === 'Backup Test Certificate'); + expect(restoredCert).to.not.be.undefined; + + // Verify access list + cy.task('backendApiGet', { + 
token: token, + path: '/api/nginx/access-lists' + }).then((accessLists) => { + const restoredAccessList = accessLists.find(a => a.name === 'Backup Test Access List'); + expect(restoredAccessList).to.not.be.undefined; + expect(restoredAccessList.satisfy_any).to.equal(true); + + // Verify proxy host + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/proxy-hosts' + }).then((proxyHosts) => { + const restoredProxy = proxyHosts.find(h => h.domain_names && h.domain_names.includes('backup-proxy.example.com')); + expect(restoredProxy).to.not.be.undefined; + expect(restoredProxy.forward_host).to.equal('192.168.1.100'); + expect(restoredProxy.forward_port).to.equal(8080); + expect(restoredProxy.allow_websocket_upgrade).to.equal(true); + + // Verify redirection host + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/redirection-hosts' + }).then((redirectionHosts) => { + const restoredRedirect = redirectionHosts.find(h => h.domain_names && h.domain_names.includes('backup-redirect.example.com')); + expect(restoredRedirect).to.not.be.undefined; + expect(restoredRedirect.forward_domain_name).to.equal('target.example.com'); + expect(restoredRedirect.forward_http_code).to.equal(301); + expect(restoredRedirect.preserve_path).to.equal(true); + + // Verify 404 host + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/dead-hosts' + }).then((deadHosts) => { + const restoredDead = deadHosts.find(h => h.domain_names && h.domain_names.includes('backup-404.example.com')); + expect(restoredDead).to.not.be.undefined; + + // Verify stream + cy.task('backendApiGet', { + token: token, + path: '/api/nginx/streams' + }).then((streams) => { + const restoredStream = streams.find(s => s.incoming_port === 19999); + expect(restoredStream).to.not.be.undefined; + expect(restoredStream.forwarding_host).to.equal('192.168.1.200'); + expect(restoredStream.forwarding_port).to.equal(3306); + expect(restoredStream.tcp_forwarding).to.equal(true); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); +}); diff --git a/test/cypress/plugins/backendApi/client.js b/test/cypress/plugins/backendApi/client.js index 6f5f7661ea..8c26e32c8e 100644 --- a/test/cypress/plugins/backendApi/client.js +++ b/test/cypress/plugins/backendApi/client.js @@ -129,4 +129,63 @@ BackendApi.prototype.postForm = function (path, form, returnOnError) { }); }; +/** + * GET request that returns raw buffer (for file downloads) + * @param {string} path + * @returns {Promise} + */ +BackendApi.prototype.getBuffer = function (path) { + logger('GET (buffer)', this.config.baseUrl + path); + const options = this._prepareOptions(false); + + return new Promise((resolve, reject) => { + this.axios({ + method: 'get', + url: path, + responseType: 'arraybuffer', + ...options + }) + .then((response) => { + logger('Response buffer length:', response.data.length); + resolve(Buffer.from(response.data)); + }) + .catch((err) => { + this._handleError(err, null, reject, false); + }); + }); +}; + +/** + * POST request with buffer as file upload + * @param {string} path + * @param {Buffer} buffer + * @param {string} fieldName + * @param {string} fileName + * @returns {Promise} + */ +BackendApi.prototype.postBuffer = function (path, buffer, fieldName, fileName) { + logger('POST (buffer)', this.config.baseUrl + path); + const options = this._prepareOptions(false); + const FormData = require('form-data'); + + const form = new FormData(); + 
form.append(fieldName, buffer, { filename: fileName }); + + return new Promise((resolve, reject) => { + this.axios.post(path, form, { + ...options, + headers: { + ...options.headers, + ...form.getHeaders(), + } + }) + .then((response) => { + this._handleResponse(response, resolve, reject, false); + }) + .catch((err) => { + this._handleError(err, null, reject, false); + }); + }); +}; + module.exports = BackendApi; diff --git a/test/cypress/plugins/backendApi/task.js b/test/cypress/plugins/backendApi/task.js index ab9704f4a6..0f6fe98634 100644 --- a/test/cypress/plugins/backendApi/task.js +++ b/test/cypress/plugins/backendApi/task.js @@ -79,6 +79,40 @@ module.exports = function (config) { const api = new Client(config); api.setToken(options.token); return api.request('delete', options.path, options.returnOnError || false); + }, + + /** + * GET request that returns raw buffer (for file downloads like backup export) + * @param {object} options + * @param {string} options.token JWT + * @param {string} options.path API path + * @returns {Promise<{data: number[], length: number}>} Buffer data as array (Cypress serialization) + */ + backendApiGetBuffer: (options) => { + const api = new Client(config); + api.setToken(options.token); + return api.getBuffer(options.path).then((buffer) => { + // Convert Buffer to array for Cypress task serialization + return { data: Array.from(buffer), length: buffer.length }; + }); + }, + + /** + * POST request with buffer as file upload (for backup import) + * @param {object} options + * @param {string} options.token JWT + * @param {string} options.path API path + * @param {number[]} options.buffer Buffer data as array + * @param {string} options.fieldName Form field name + * @param {string} options.fileName File name for upload + * @returns {Promise} + */ + backendApiPostBuffer: (options) => { + const api = new Client(config); + api.setToken(options.token); + // Convert array back to Buffer + const buffer = Buffer.from(options.buffer); + return api.postBuffer(options.path, buffer, options.fieldName, options.fileName); } }; };