From c6c0028135e7e28d7b27b793155aa839c72355df Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 16:48:05 +0300 Subject: [PATCH 01/10] fix: use Prisma Accelerate in production --- .env.example | 1 + README.md | 7 ++- package.json | 1 + pnpm-lock.yaml | 13 ++++++ prisma.config.ts | 5 ++- scripts/migrate.ts | 7 ++- server.ts | 56 ++++++++++++++++++------ src/app.ts | 14 +++--- src/config.ts | 17 ++++++++ src/db/prisma.ts | 79 +++++++++++++++++++++++++++++++--- src/middleware/errorHandler.ts | 22 +++++++++- tests/locations-api.test.ts | 10 +++++ 12 files changed, 204 insertions(+), 28 deletions(-) diff --git a/.env.example b/.env.example index 92c795e..5fe6381 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,4 @@ DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" +DIRECT_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" PORT="8080" PAGE_SIZE="10" diff --git a/README.md b/README.md index a666012..220bbb9 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,11 @@ Compatibility-first REST API for Tanzania location data backed by PostgreSQL and cp .env.example .env ``` -3. Start PostgreSQL and update `DATABASE_URL` if needed. +3. Start PostgreSQL and update your connection strings if needed. + + - Local and test environments use a direct PostgreSQL `DATABASE_URL`. + - Production uses a Prisma Accelerate `DATABASE_URL`. + - If you run `pnpm db:migrate` against an Accelerate-backed environment, also provide `DIRECT_DATABASE_URL` so the migration bootstrap can talk to Postgres directly. 4. Apply the checked-in schema and seed deterministic fixture data. 
@@ -66,6 +70,7 @@ pnpm openapi:json - On a fresh database it bootstraps the historical `init` migration, marks that baseline as applied, and then deploys later migrations - On an existing database that already has the older Prisma migration history, it only applies the new additive migrations - Prefer `pnpm db:migrate` over calling `prisma migrate deploy` directly +- `DATABASE_URL` may point at Prisma Accelerate in production, but `pnpm db:migrate` still requires a direct Postgres URL in `DIRECT_DATABASE_URL` ## Testing diff --git a/package.json b/package.json index f78259c..2eb519b 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "dependencies": { "@prisma/adapter-pg": "^7.5.0", "@prisma/client": "^7.5.0", + "@prisma/extension-accelerate": "^3.0.1", "cors": "^2.8.6", "dotenv": "^17.3.1", "express": "^5.2.1", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7ba510e..97fd3e3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,6 +14,9 @@ importers: '@prisma/client': specifier: ^7.5.0 version: 7.5.0(prisma@7.5.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3))(typescript@5.9.3) + '@prisma/extension-accelerate': + specifier: ^3.0.1 + version: 3.0.1(@prisma/client@7.5.0(prisma@7.5.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3))(typescript@5.9.3)) cors: specifier: ^2.8.6 version: 2.8.6 @@ -703,6 +706,12 @@ packages: '@prisma/engines@7.5.0': resolution: {integrity: sha512-ondGRhzoaVpRWvFaQ5wH5zS1BIbhzbKqczKjCn6j3L0Zfe/LInjcEg8+xtB49AuZBX30qyx1ZtGoootUohz2pw==} + '@prisma/extension-accelerate@3.0.1': + resolution: {integrity: sha512-xc+kn4AjjTzS9jsdD1JWCebB09y0Aj+C8GjjG7oUm81PF9psvmJOw5rxpl7tOEBz/8hmuNX996XL28ys/OLxVA==} + engines: {node: '>=22'} + peerDependencies: + '@prisma/client': '>=4.16.1' + '@prisma/fetch-engine@7.5.0': resolution: {integrity: sha512-kZCl2FV54qnyrVdnII8MI6qvt7HfU6Cbiz8dZ8PXz4f4lbSw45jEB9/gEMK2SGdiNhBKyk/Wv95uthoLhGMLYA==} @@ -3515,6 +3524,10 @@ 
snapshots: '@prisma/fetch-engine': 7.5.0 '@prisma/get-platform': 7.5.0 + '@prisma/extension-accelerate@3.0.1(@prisma/client@7.5.0(prisma@7.5.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3))(typescript@5.9.3))': + dependencies: + '@prisma/client': 7.5.0(prisma@7.5.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3))(typescript@5.9.3) + '@prisma/fetch-engine@7.5.0': dependencies: '@prisma/debug': 7.5.0 diff --git a/prisma.config.ts b/prisma.config.ts index e5e83d7..91c27ae 100644 --- a/prisma.config.ts +++ b/prisma.config.ts @@ -8,6 +8,9 @@ export default defineConfig({ seed: 'tsx prisma/seed.ts', }, datasource: { - url: process.env.DATABASE_URL ?? 'postgresql://postgres:postgres@localhost:5432/locations_api', + url: + process.env.DIRECT_DATABASE_URL ?? + process.env.DATABASE_URL ?? + 'postgresql://postgres:postgres@localhost:5432/locations_api', }, }); diff --git a/scripts/migrate.ts b/scripts/migrate.ts index a5a25c7..ce5db50 100644 --- a/scripts/migrate.ts +++ b/scripts/migrate.ts @@ -3,6 +3,11 @@ import { Pool } from 'pg'; import config from '../src/config.js'; const pnpmCommand = process.platform === 'win32' ? 
'pnpm.cmd' : 'pnpm'; +const directDatabaseUrl = config.directDatabaseUrl; + +if (!directDatabaseUrl) { + throw new Error('db:migrate requires DIRECT_DATABASE_URL when DATABASE_URL uses Prisma Accelerate.'); +} function runPrisma(args: string[]) { const result = spawnSync( @@ -21,7 +26,7 @@ function runPrisma(args: string[]) { async function bootstrapIfNeeded() { const pool = new Pool({ - connectionString: config.databaseUrl, + connectionString: directDatabaseUrl, }); try { diff --git a/server.ts b/server.ts index 3c6e6e3..59f585b 100644 --- a/server.ts +++ b/server.ts @@ -1,22 +1,50 @@ import app from './src/app.js'; import config from './src/config.js'; -import { disconnectPrisma } from './src/db/prisma.js'; - -const server = app.listen(config.port, () => { - console.log( - JSON.stringify({ - environment: config.nodeEnv, - message: 'Server started', - openApiUrl: `http://localhost:${config.port}/openapi.json`, - port: config.port, - swaggerUrl: `http://localhost:${config.port}/api-docs`, - }), - ); -}); +import { checkDatabaseConnection, disconnectPrisma } from './src/db/prisma.js'; + +let server: ReturnType | undefined; + +async function startServer() { + const database = await checkDatabaseConnection(); + + if (!database.ok) { + console.error( + JSON.stringify({ + error: database.error, + message: 'Database readiness check failed. 
Refusing to start server.', + }), + ); + process.exit(1); + } + + server = app.listen(config.port, () => { + console.log( + JSON.stringify({ + environment: config.nodeEnv, + message: 'Server started', + openApiUrl: `http://localhost:${config.port}/openapi.json`, + port: config.port, + swaggerUrl: `http://localhost:${config.port}/api-docs`, + }), + ); + }); +} async function shutdown(signal: NodeJS.Signals) { console.log(JSON.stringify({ message: 'Graceful shutdown requested', signal })); + if (!server) { + void disconnectPrisma() + .then(() => { + process.exit(0); + }) + .catch((error: unknown) => { + console.error(JSON.stringify({ error, message: 'Failed to disconnect Prisma cleanly' })); + process.exit(1); + }); + return; + } + server.close(() => { void disconnectPrisma() .then(() => { @@ -36,3 +64,5 @@ process.on('SIGINT', () => { process.on('SIGTERM', () => { void shutdown('SIGTERM'); }); + +await startServer(); diff --git a/src/app.ts b/src/app.ts index e61cf92..1f64960 100644 --- a/src/app.ts +++ b/src/app.ts @@ -4,6 +4,7 @@ import helmet from 'helmet'; import morgan from 'morgan'; import type { Request, Response } from 'express'; import config from './config.js'; +import { checkDatabaseConnection } from './db/prisma.js'; import { setupSwagger } from './docs/swagger.js'; import { errorHandler } from './middleware/errorHandler.js'; import { @@ -36,12 +37,15 @@ app.use(morgan(logFormatter)); app.use(express.json()); app.use(express.urlencoded({ extended: true })); -app.get('/health', (_: Request, res: Response) => { - res.status(200).json({ - status: 'UP', - timestamp: new Date().toISOString(), +app.get('/health', async (_: Request, res: Response) => { + const database = await checkDatabaseConnection({ logErrors: false }); + + res.status(database.ok ? 200 : 503).json({ + database: database.ok ? 'UP' : 'DOWN', environment: config.nodeEnv, - version: process.env.npm_package_version || '1.0.0' + status: database.ok ? 
'UP' : 'DEGRADED', + timestamp: new Date().toISOString(), + version: process.env.npm_package_version || '1.0.0', }); }); diff --git a/src/config.ts b/src/config.ts index 3d4abe1..7166270 100644 --- a/src/config.ts +++ b/src/config.ts @@ -3,20 +3,37 @@ import { z } from 'zod'; dotenv.config(); +function isAccelerateUrl(url: string) { + return url.startsWith('prisma://') || url.startsWith('prisma+postgres://'); +} + const envSchema = z.object({ DATABASE_URL: z.string().min(1, 'DATABASE_URL is required'), + DIRECT_DATABASE_URL: z.string().min(1, 'DIRECT_DATABASE_URL cannot be empty').optional(), NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PAGE_SIZE: z.coerce.number().int().positive().max(100).default(10), PORT: z.coerce.number().int().positive().default(8080), }); const env = envSchema.parse(process.env); +const usesAccelerate = isAccelerateUrl(env.DATABASE_URL); +const directDatabaseUrl = env.DIRECT_DATABASE_URL ?? (usesAccelerate ? undefined : env.DATABASE_URL); + +if (env.NODE_ENV === 'production' && !usesAccelerate) { + throw new Error('Production requires DATABASE_URL to be a Prisma Accelerate URL.'); +} + +if (env.NODE_ENV !== 'production' && !directDatabaseUrl) { + throw new Error('Non-production requires a direct PostgreSQL URL via DIRECT_DATABASE_URL or DATABASE_URL.'); +} const config = { databaseUrl: env.DATABASE_URL, + directDatabaseUrl, nodeEnv: env.NODE_ENV, pageSize: env.PAGE_SIZE, port: env.PORT, + usesAccelerate, }; export default config; diff --git a/src/db/prisma.ts b/src/db/prisma.ts index 5ca1c0f..f9a1a16 100644 --- a/src/db/prisma.ts +++ b/src/db/prisma.ts @@ -1,4 +1,5 @@ import { PrismaPg } from '@prisma/adapter-pg'; +import { withAccelerate } from '@prisma/extension-accelerate'; import { Pool } from 'pg'; import { PrismaClient } from '../generated/prisma/client.js'; import config from '../config.js'; @@ -11,12 +12,48 @@ const globalForPrisma = globalThis as typeof globalThis & { let pool = 
globalForPrisma.pgPool; let prismaClient = globalForPrisma.prismaClient; +function databaseHost() { + try { + return new URL(config.usesAccelerate ? config.databaseUrl : (config.directDatabaseUrl ?? config.databaseUrl)).hostname; + } catch { + return 'unknown'; + } +} + +function serializeError(error: unknown) { + if (error instanceof Error) { + const errorWithCode = error as Error & { code?: string }; + + return { + code: errorWithCode.code, + message: error.message, + name: error.name, + stack: error.stack, + }; + } + + return { + message: String(error), + name: 'UnknownError', + }; +} + function createPool() { + if (!config.directDatabaseUrl) { + throw new Error('DIRECT_DATABASE_URL is required for direct PostgreSQL connections.'); + } + return new Pool({ - connectionString: config.databaseUrl, + connectionString: config.directDatabaseUrl, }); } +function createAcceleratedPrismaClient() { + return new PrismaClient({ + accelerateUrl: config.databaseUrl, + }).$extends(withAccelerate()) as unknown as PrismaClient; +} + function createPrismaClient(nextPool: Pool) { return new PrismaClient({ adapter: new PrismaPg(nextPool as unknown as ConstructorParameters[0]), @@ -31,12 +68,17 @@ function cacheInstances() { } function ensurePrismaClient(): PrismaClient { - if (!pool) { - pool = createPool(); - } - if (!prismaClient) { - prismaClient = createPrismaClient(pool); + if (config.usesAccelerate) { + prismaClient = createAcceleratedPrismaClient(); + } else { + if (!pool) { + pool = createPool(); + } + + prismaClient = createPrismaClient(pool); + } + cacheInstances(); } @@ -56,6 +98,31 @@ if (pool && prismaClient && config.nodeEnv !== 'production') { cacheInstances(); } +export async function checkDatabaseConnection(options: { logErrors?: boolean } = {}) { + const { logErrors = true } = options; + + try { + await ensurePrismaClient().$queryRawUnsafe('SELECT 1'); + return { ok: true } as const; + } catch (error) { + if (logErrors) { + console.error( + JSON.stringify({ + 
databaseHost: databaseHost(), + error: serializeError(error), + level: 'error', + message: 'Database connectivity check failed', + }), + ); + } + + return { + error: serializeError(error), + ok: false, + } as const; + } +} + export async function disconnectPrisma() { if (prismaClient) { await prismaClient.$disconnect(); diff --git a/src/middleware/errorHandler.ts b/src/middleware/errorHandler.ts index abc878d..a287aec 100644 --- a/src/middleware/errorHandler.ts +++ b/src/middleware/errorHandler.ts @@ -1,4 +1,4 @@ -import type { Request, Response } from 'express'; +import type { NextFunction, Request, Response } from 'express'; import { ZodError } from 'zod'; import type { ErrorResponse } from '../types.js'; @@ -18,6 +18,7 @@ export const errorHandler = ( err: Error | ApiError | ZodError, req: Request, res: Response, + _: NextFunction, ) => { console.error( JSON.stringify({ @@ -31,6 +32,16 @@ export const errorHandler = ( let statusCode = 500; let message = 'Something went wrong'; + const errorWithCode = err as Error & { code?: string }; + const databaseUnavailableCodes = new Set(['P1000', 'P1001', 'P1002', 'P1017']); + const databaseUnavailablePatterns = [ + /Unable to connect to the Accelerate API/i, + /Connection terminated due to connection timeout/i, + /connect ECONN/i, + /ECONNREFUSED/i, + /ENOTFOUND/i, + /timeout/i, + ]; if (err instanceof ApiError) { statusCode = err.statusCode; @@ -47,6 +58,15 @@ export const errorHandler = ( message = 'Requested resource not found'; } + if ( + errorWithCode.name === 'PrismaClientInitializationError' || + databaseUnavailableCodes.has(errorWithCode.code ?? 
'') || + databaseUnavailablePatterns.some((pattern) => pattern.test(err.message)) + ) { + statusCode = 503; + message = 'Database unavailable'; + } + if (err instanceof SyntaxError || err instanceof TypeError) { statusCode = 400; message = 'Invalid request data'; diff --git a/tests/locations-api.test.ts b/tests/locations-api.test.ts index 36554d7..77a0cc6 100644 --- a/tests/locations-api.test.ts +++ b/tests/locations-api.test.ts @@ -81,6 +81,16 @@ describe.each(['/v1', '/api'])('Tanzania Locations API (%s)', (basePath) => { }); describe('Shared API behavior', () => { + it('reports database readiness on the health endpoint', async () => { + const res = await request(app).get('/health'); + + expect(res.statusCode).toBe(200); + expect(res.body).toMatchObject({ + database: 'UP', + status: 'UP', + }); + }); + it('keeps the /api alias active', async () => { const res = await request(app).get('/api/countries'); From 9f24e30fa45d56c673c9295647e958e96caa8403 Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 16:54:40 +0300 Subject: [PATCH 02/10] chore: align Node support with Accelerate --- .github/workflows/ci.yml | 3 +-- README.md | 4 ++-- package.json | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 16fc097..db7ad94 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,8 +14,7 @@ jobs: fail-fast: false matrix: node-version: - - '20.19.0' - - '22' + - '22.13.0' services: postgres: image: postgres:16 diff --git a/README.md b/README.md index 220bbb9..9c63225 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Compatibility-first REST API for Tanzania location data backed by PostgreSQL and ## Requirements -- Node.js `>=20.19.0` +- Node.js `22.13.0+` - pnpm `10.7.0+` - PostgreSQL `16+` recommended @@ -151,7 +151,7 @@ Additional filters: ## Dependency Automation - `.github/dependabot.yml` opens weekly update PRs for npm packages and GitHub Actions -- 
`.github/workflows/ci.yml` validates every PR against Postgres on Node `20.19.0` and `22` +- `.github/workflows/ci.yml` validates every PR against Postgres on Node `22.13.0` ## License diff --git a/package.json b/package.json index 2eb519b..7d208d7 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "type": "module", "main": "./dist/server.js", "engines": { - "node": ">=20.19.0" + "node": "^22.13.0 || >=24.0.0" }, "scripts": { "dev": "tsx watch server.ts", From 2b39218eb79813609c71670bf7ad2c4df6f9032f Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 17:01:02 +0300 Subject: [PATCH 03/10] fix: unblock CI lint and migration flow --- .../20250411175910_cleanup/migration.sql | 20 +++++++++---------- src/middleware/errorHandler.ts | 4 +++- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/prisma/migrations/20250411175910_cleanup/migration.sql b/prisma/migrations/20250411175910_cleanup/migration.sql index 7841308..377c166 100644 --- a/prisma/migrations/20250411175910_cleanup/migration.sql +++ b/prisma/migrations/20250411175910_cleanup/migration.sql @@ -14,19 +14,19 @@ */ -- AlterTable -ALTER TABLE "districts" DROP COLUMN "properties_count", -DROP COLUMN "view_count", -DROP COLUMN "watcher_count"; +ALTER TABLE "districts" DROP COLUMN IF EXISTS "properties_count", +DROP COLUMN IF EXISTS "view_count", +DROP COLUMN IF EXISTS "watcher_count"; -- AlterTable -ALTER TABLE "places" DROP COLUMN "properties_count", -DROP COLUMN "view_count"; +ALTER TABLE "places" DROP COLUMN IF EXISTS "properties_count", +DROP COLUMN IF EXISTS "view_count"; -- AlterTable -ALTER TABLE "regions" DROP COLUMN "properties_count", -DROP COLUMN "view_count", -DROP COLUMN "watcher_count"; +ALTER TABLE "regions" DROP COLUMN IF EXISTS "properties_count", +DROP COLUMN IF EXISTS "view_count", +DROP COLUMN IF EXISTS "watcher_count"; -- AlterTable -ALTER TABLE "wards" DROP COLUMN "properties_count", -DROP COLUMN "view_count"; +ALTER TABLE "wards" DROP COLUMN IF EXISTS 
"properties_count", +DROP COLUMN IF EXISTS "view_count"; diff --git a/src/middleware/errorHandler.ts b/src/middleware/errorHandler.ts index a287aec..3e1d727 100644 --- a/src/middleware/errorHandler.ts +++ b/src/middleware/errorHandler.ts @@ -18,8 +18,10 @@ export const errorHandler = ( err: Error | ApiError | ZodError, req: Request, res: Response, - _: NextFunction, + next: NextFunction, ) => { + void next; + console.error( JSON.stringify({ level: 'error', From 361c3538a5b8269a520fee14f92ab3013ee7efd6 Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 23:01:08 +0300 Subject: [PATCH 04/10] chore: add repo git hooks --- .githooks/pre-commit | 8 +++++ .githooks/pre-push | 8 +++++ README.md | 6 ++++ package.json | 4 +++ scripts/install-git-hooks.ts | 17 +++++++++ scripts/run-pre-push-checks.ts | 64 ++++++++++++++++++++++++++++++++++ 6 files changed, 107 insertions(+) create mode 100755 .githooks/pre-commit create mode 100755 .githooks/pre-push create mode 100644 scripts/install-git-hooks.ts create mode 100644 scripts/run-pre-push-checks.ts diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 0000000..0e5c964 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/bin/sh +set -eu + +if [ "${SKIP_GIT_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +pnpm hooks:pre-commit diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100755 index 0000000..c1b24ca --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,8 @@ +#!/bin/sh +set -eu + +if [ "${SKIP_GIT_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +pnpm hooks:pre-push diff --git a/README.md b/README.md index 9c63225..dfb89ff 100644 --- a/README.md +++ b/README.md @@ -153,6 +153,12 @@ Additional filters: - `.github/dependabot.yml` opens weekly update PRs for npm packages and GitHub Actions - `.github/workflows/ci.yml` validates every PR against Postgres on Node `22.13.0` +## Git Hooks + +- `pnpm prepare` and `pnpm hooks:install` configure `core.hooksPath` to `.githooks` +- Pre-commit 
runs `pnpm hooks:pre-commit` (`lint` + `typecheck`) +- Pre-push runs `pnpm hooks:pre-push`, which creates a temporary Postgres database and runs `pnpm test:ci` +- Pre-push requires `DIRECT_DATABASE_URL` to be a direct PostgreSQL URL ## License This project is licensed under the CopyLeft License. See [LICENSE](./LICENSE). diff --git a/package.json b/package.json index 7d208d7..1906f78 100644 --- a/package.json +++ b/package.json @@ -9,9 +9,13 @@ }, "scripts": { "dev": "tsx watch server.ts", + "prepare": "tsx scripts/install-git-hooks.ts", "generate": "prisma generate", "db:migrate": "tsx scripts/migrate.ts", "db:seed": "prisma db seed", + "hooks:install": "tsx scripts/install-git-hooks.ts", + "hooks:pre-commit": "pnpm lint && pnpm typecheck", + "hooks:pre-push": "tsx scripts/run-pre-push-checks.ts", "lint": "pnpm generate && eslint server.ts \"src/**/*.ts\" \"tests/**/*.ts\" \"scripts/**/*.ts\" \"prisma/**/*.ts\"", "typecheck": "pnpm generate && tsc --noEmit", "build:ci": "pnpm generate && pnpm lint && pnpm typecheck && pnpm build", diff --git a/scripts/install-git-hooks.ts b/scripts/install-git-hooks.ts new file mode 100644 index 0000000..1db3205 --- /dev/null +++ b/scripts/install-git-hooks.ts @@ -0,0 +1,17 @@ +import { execFileSync } from 'node:child_process'; +import { existsSync } from 'node:fs'; +import path from 'node:path'; + +const repoRoot = process.cwd(); +const gitDir = path.join(repoRoot, '.git'); + +if (!existsSync(gitDir)) { + process.exit(0); +} + +execFileSync('git', ['config', 'core.hooksPath', '.githooks'], { + cwd: repoRoot, + stdio: 'inherit', +}); + +console.log('Configured git hooks path to .githooks'); diff --git a/scripts/run-pre-push-checks.ts b/scripts/run-pre-push-checks.ts new file mode 100644 index 0000000..6afaea2 --- /dev/null +++ b/scripts/run-pre-push-checks.ts @@ -0,0 +1,64 @@ +import { execFileSync } from 'node:child_process'; +import { randomUUID } from 'node:crypto'; + +function resolveDirectDatabaseUrl() { + const candidate = 
process.env.DIRECT_DATABASE_URL ?? process.env.DATABASE_URL; + + if (!candidate) { + throw new Error('Set DIRECT_DATABASE_URL to a direct PostgreSQL connection before pushing.'); + } + + if (candidate.startsWith('prisma://') || candidate.startsWith('prisma+postgres://')) { + throw new Error('Pre-push checks require DIRECT_DATABASE_URL to point at direct PostgreSQL, not Prisma Accelerate.'); + } + + return new URL(candidate); +} + +function toMaintenanceEnv(url: URL) { + return { + PGDATABASE: 'postgres', + PGHOST: url.hostname, + PGPASSWORD: decodeURIComponent(url.password), + PGPORT: url.port || '5432', + PGUSER: decodeURIComponent(url.username), + }; +} + +function tempDatabaseUrl(baseUrl: URL, databaseName: string) { + const next = new URL(baseUrl.toString()); + next.pathname = `/${databaseName}`; + + return next.toString(); +} + +const directUrl = resolveDirectDatabaseUrl(); +const originalDatabase = directUrl.pathname.replace(/^\//, '') || 'locations_api'; +const tempDatabaseName = `${originalDatabase}_prepush_${randomUUID().replace(/-/g, '').slice(0, 8)}`; +const maintenanceEnv = { + ...process.env, + ...toMaintenanceEnv(directUrl), +}; +const isolatedDatabaseUrl = tempDatabaseUrl(directUrl, tempDatabaseName); + +try { + execFileSync('createdb', [tempDatabaseName], { + env: maintenanceEnv, + stdio: 'inherit', + }); + + execFileSync('pnpm', ['test:ci'], { + env: { + ...process.env, + DATABASE_URL: isolatedDatabaseUrl, + DIRECT_DATABASE_URL: isolatedDatabaseUrl, + NODE_ENV: 'test', + }, + stdio: 'inherit', + }); +} finally { + execFileSync('dropdb', ['--if-exists', tempDatabaseName], { + env: maintenanceEnv, + stdio: 'inherit', + }); +} From f0fd41549ed7013deb1ae134aadd07792f8a4f2e Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 23:09:34 +0300 Subject: [PATCH 05/10] chore: check build before push --- README.md | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index dfb89ff..7168ec5 
100644 --- a/README.md +++ b/README.md @@ -157,7 +157,7 @@ Additional filters: - `pnpm prepare` and `pnpm hooks:install` configure `core.hooksPath` to `.githooks` - Pre-commit runs `pnpm hooks:pre-commit` (`lint` + `typecheck`) -- Pre-push runs `pnpm hooks:pre-push`, which creates a temporary Postgres database and runs `pnpm test:ci` +- Pre-push runs `pnpm hooks:pre-push`, which first builds the app, then creates a temporary Postgres database and runs `pnpm test:ci` - Pre-push requires `DIRECT_DATABASE_URL` to be a direct PostgreSQL URL ## License diff --git a/package.json b/package.json index 1906f78..2f1c5f4 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,7 @@ "db:seed": "prisma db seed", "hooks:install": "tsx scripts/install-git-hooks.ts", "hooks:pre-commit": "pnpm lint && pnpm typecheck", - "hooks:pre-push": "tsx scripts/run-pre-push-checks.ts", + "hooks:pre-push": "pnpm build && tsx scripts/run-pre-push-checks.ts", "lint": "pnpm generate && eslint server.ts \"src/**/*.ts\" \"tests/**/*.ts\" \"scripts/**/*.ts\" \"prisma/**/*.ts\"", "typecheck": "pnpm generate && tsc --noEmit", "build:ci": "pnpm generate && pnpm lint && pnpm typecheck && pnpm build", From 81dccd648a05111bca0fd984871707f76bdcc99a Mon Sep 17 00:00:00 2001 From: maotora Date: Thu, 19 Mar 2026 23:43:30 +0300 Subject: [PATCH 06/10] fix: harden pre-push temp database cleanup --- scripts/run-pre-push-checks.ts | 107 +++++++++++++++++++++++---------- 1 file changed, 75 insertions(+), 32 deletions(-) diff --git a/scripts/run-pre-push-checks.ts b/scripts/run-pre-push-checks.ts index 6afaea2..159f62e 100644 --- a/scripts/run-pre-push-checks.ts +++ b/scripts/run-pre-push-checks.ts @@ -1,5 +1,8 @@ import { execFileSync } from 'node:child_process'; import { randomUUID } from 'node:crypto'; +import { Pool } from 'pg'; + +const pnpmCommand = process.platform === 'win32' ? 'pnpm.cmd' : 'pnpm'; function resolveDirectDatabaseUrl() { const candidate = process.env.DIRECT_DATABASE_URL ?? 
process.env.DATABASE_URL; @@ -15,16 +18,6 @@ function resolveDirectDatabaseUrl() { return new URL(candidate); } -function toMaintenanceEnv(url: URL) { - return { - PGDATABASE: 'postgres', - PGHOST: url.hostname, - PGPASSWORD: decodeURIComponent(url.password), - PGPORT: url.port || '5432', - PGUSER: decodeURIComponent(url.username), - }; -} - function tempDatabaseUrl(baseUrl: URL, databaseName: string) { const next = new URL(baseUrl.toString()); next.pathname = `/${databaseName}`; @@ -32,33 +25,83 @@ function tempDatabaseUrl(baseUrl: URL, databaseName: string) { return next.toString(); } -const directUrl = resolveDirectDatabaseUrl(); -const originalDatabase = directUrl.pathname.replace(/^\//, '') || 'locations_api'; -const tempDatabaseName = `${originalDatabase}_prepush_${randomUUID().replace(/-/g, '').slice(0, 8)}`; -const maintenanceEnv = { - ...process.env, - ...toMaintenanceEnv(directUrl), -}; -const isolatedDatabaseUrl = tempDatabaseUrl(directUrl, tempDatabaseName); - -try { - execFileSync('createdb', [tempDatabaseName], { - env: maintenanceEnv, +function adminDatabaseUrl(baseUrl: URL) { + const next = new URL(baseUrl.toString()); + next.pathname = '/postgres'; + + return next.toString(); +} + +function quoteIdentifier(value: string) { + return `"${value.replaceAll('"', '""')}"`; +} + +function toError(error: unknown) { + if (error instanceof Error) { + return error; + } + + return new Error(String(error)); +} + +function runPnpm(args: string[], env: NodeJS.ProcessEnv) { + execFileSync(pnpmCommand, args, { + env, stdio: 'inherit', }); +} - execFileSync('pnpm', ['test:ci'], { - env: { +async function dropTemporaryDatabase(pool: Pool, databaseName: string) { + await pool.query( + `SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = $1 + AND pid <> pg_backend_pid()`, + [databaseName], + ); + await pool.query(`DROP DATABASE IF EXISTS ${quoteIdentifier(databaseName)}`); +} + +async function main() { + const directUrl = 
resolveDirectDatabaseUrl(); + const originalDatabase = directUrl.pathname.replace(/^\//, '') || 'locations_api'; + const tempDatabaseName = `${originalDatabase}_prepush_${randomUUID().replace(/-/g, '').slice(0, 8)}`; + const isolatedDatabaseUrl = tempDatabaseUrl(directUrl, tempDatabaseName); + const adminPool = new Pool({ + connectionString: adminDatabaseUrl(directUrl), + }); + + let primaryError: unknown; + + try { + await adminPool.query(`CREATE DATABASE ${quoteIdentifier(tempDatabaseName)}`); + + runPnpm(['test:ci'], { ...process.env, DATABASE_URL: isolatedDatabaseUrl, DIRECT_DATABASE_URL: isolatedDatabaseUrl, NODE_ENV: 'test', - }, - stdio: 'inherit', - }); -} finally { - execFileSync('dropdb', ['--if-exists', tempDatabaseName], { - env: maintenanceEnv, - stdio: 'inherit', - }); + }); + } catch (error) { + primaryError = error; + } + + try { + await dropTemporaryDatabase(adminPool, tempDatabaseName); + } catch (cleanupError) { + if (primaryError) { + console.error('Failed to drop temporary pre-push database after the primary failure.'); + console.error(cleanupError); + } else { + throw cleanupError; + } + } finally { + await adminPool.end(); + } + + if (primaryError) { + throw toError(primaryError); + } } + +await main(); From 672669d6042d8b58114ee58be00981b17e48b0cf Mon Sep 17 00:00:00 2001 From: maotora Date: Tue, 24 Mar 2026 08:05:11 +0300 Subject: [PATCH 07/10] fix: allow direct postgres in production --- README.md | 7 ++++--- src/config.ts | 4 ---- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 7168ec5..75ebff4 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,8 @@ Compatibility-first REST API for Tanzania location data backed by PostgreSQL and 3. Start PostgreSQL and update your connection strings if needed. - Local and test environments use a direct PostgreSQL `DATABASE_URL`. - - Production uses a Prisma Accelerate `DATABASE_URL`. 
- - If you run `pnpm db:migrate` against an Accelerate-backed environment, also provide `DIRECT_DATABASE_URL` so the migration bootstrap can talk to Postgres directly. + - Production can use either a direct PostgreSQL `DATABASE_URL` or a Prisma Accelerate `DATABASE_URL`. + - If `DATABASE_URL` points at Prisma Accelerate, also provide `DIRECT_DATABASE_URL` so migrations can talk to Postgres directly. 4. Apply the checked-in schema and seed deterministic fixture data. @@ -70,7 +70,8 @@ pnpm openapi:json - On a fresh database it bootstraps the historical `init` migration, marks that baseline as applied, and then deploys later migrations - On an existing database that already has the older Prisma migration history, it only applies the new additive migrations - Prefer `pnpm db:migrate` over calling `prisma migrate deploy` directly -- `DATABASE_URL` may point at Prisma Accelerate in production, but `pnpm db:migrate` still requires a direct Postgres URL in `DIRECT_DATABASE_URL` +- `DATABASE_URL` may point at direct Postgres or Prisma Accelerate +- If `DATABASE_URL` points at Prisma Accelerate, `pnpm db:migrate` still requires a direct Postgres URL in `DIRECT_DATABASE_URL` ## Testing diff --git a/src/config.ts b/src/config.ts index 7166270..7f3fb8e 100644 --- a/src/config.ts +++ b/src/config.ts @@ -19,10 +19,6 @@ const env = envSchema.parse(process.env); const usesAccelerate = isAccelerateUrl(env.DATABASE_URL); const directDatabaseUrl = env.DIRECT_DATABASE_URL ?? (usesAccelerate ? 
undefined : env.DATABASE_URL); -if (env.NODE_ENV === 'production' && !usesAccelerate) { - throw new Error('Production requires DATABASE_URL to be a Prisma Accelerate URL.'); -} - if (env.NODE_ENV !== 'production' && !directDatabaseUrl) { throw new Error('Non-production requires a direct PostgreSQL URL via DIRECT_DATABASE_URL or DATABASE_URL.'); } From 90b2ff327320d3432d3ca1cb4601bd528e6eb529 Mon Sep 17 00:00:00 2001 From: maotora Date: Tue, 24 Mar 2026 08:20:29 +0300 Subject: [PATCH 08/10] feat: add api safety gates --- .env.example | 9 +++ README.md | 19 ++++++ src/app.ts | 16 ++++- src/config.ts | 22 ++++++ src/middleware/rateLimit.ts | 133 ++++++++++++++++++++++++++++++++++++ tests/rate-limit.test.ts | 85 +++++++++++++++++++++++ 6 files changed, 282 insertions(+), 2 deletions(-) create mode 100644 src/middleware/rateLimit.ts create mode 100644 tests/rate-limit.test.ts diff --git a/.env.example b/.env.example index 5fe6381..a0cf114 100644 --- a/.env.example +++ b/.env.example @@ -2,3 +2,12 @@ DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" DIRECT_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" PORT="8080" PAGE_SIZE="10" +REQUEST_BODY_LIMIT="16kb" +RATE_LIMIT_WINDOW_MS="60000" +RATE_LIMIT_MAX_REQUESTS="120" +RATE_LIMIT_BURST_WINDOW_MS="10000" +RATE_LIMIT_BURST_MAX_REQUESTS="30" +SEARCH_RATE_LIMIT_WINDOW_MS="60000" +SEARCH_RATE_LIMIT_MAX_REQUESTS="30" +SEARCH_RATE_LIMIT_BURST_WINDOW_MS="10000" +SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS="10" diff --git a/README.md b/README.md index 75ebff4..edd26ff 100644 --- a/README.md +++ b/README.md @@ -64,6 +64,25 @@ pnpm test:ci pnpm openapi:json ``` +## Runtime Protection + +- API routes are protected by per-IP rate limits with both sustained and burst thresholds +- `/search` has a stricter limit than the rest of the API because it is the easiest expensive endpoint to abuse +- Request bodies are capped with `REQUEST_BODY_LIMIT`, even though the public API is mostly read-only +- 
All limits are configurable with environment variables: + + ```bash + REQUEST_BODY_LIMIT=16kb + RATE_LIMIT_WINDOW_MS=60000 + RATE_LIMIT_MAX_REQUESTS=120 + RATE_LIMIT_BURST_WINDOW_MS=10000 + RATE_LIMIT_BURST_MAX_REQUESTS=30 + SEARCH_RATE_LIMIT_WINDOW_MS=60000 + SEARCH_RATE_LIMIT_MAX_REQUESTS=30 + SEARCH_RATE_LIMIT_BURST_WINDOW_MS=10000 + SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS=10 + ``` + ## Migration Behavior - `pnpm db:migrate` is the supported entrypoint for schema changes in this repo diff --git a/src/app.ts b/src/app.ts index 1f64960..4118824 100644 --- a/src/app.ts +++ b/src/app.ts @@ -7,6 +7,7 @@ import config from './config.js'; import { checkDatabaseConnection } from './db/prisma.js'; import { setupSwagger } from './docs/swagger.js'; import { errorHandler } from './middleware/errorHandler.js'; +import { createRateLimiter } from './middleware/rateLimit.js'; import { apiCompatibilityHeaders, attachRequestContext, @@ -14,6 +15,14 @@ import { import routes from './routes.js'; const app = express(); +const apiRateLimiter = createRateLimiter({ + ...config.rateLimit, + name: 'api', +}); +const searchRateLimiter = createRateLimiter({ + ...config.searchRateLimit, + name: 'search', +}); morgan.token('request-id', (req) => (req as Request).requestId ?? 
'-'); @@ -34,8 +43,11 @@ app.disable('x-powered-by'); app.use(attachRequestContext); app.use(morgan(logFormatter)); -app.use(express.json()); -app.use(express.urlencoded({ extended: true })); +app.use(express.json({ limit: config.requestBodyLimit })); +app.use(express.urlencoded({ extended: true, limit: config.requestBodyLimit })); + +app.use(['/v1', '/api', '/openapi.json', '/api-docs'], apiRateLimiter); +app.use(['/v1/search', '/api/search'], searchRateLimiter); app.get('/health', async (_: Request, res: Response) => { const database = await checkDatabaseConnection({ logErrors: false }); diff --git a/src/config.ts b/src/config.ts index 7f3fb8e..d35f519 100644 --- a/src/config.ts +++ b/src/config.ts @@ -13,6 +13,15 @@ const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PAGE_SIZE: z.coerce.number().int().positive().max(100).default(10), PORT: z.coerce.number().int().positive().default(8080), + REQUEST_BODY_LIMIT: z.string().trim().min(1).default('16kb'), + RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000), + RATE_LIMIT_MAX_REQUESTS: z.coerce.number().int().positive().default(120), + RATE_LIMIT_BURST_WINDOW_MS: z.coerce.number().int().positive().default(10_000), + RATE_LIMIT_BURST_MAX_REQUESTS: z.coerce.number().int().positive().default(30), + SEARCH_RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000), + SEARCH_RATE_LIMIT_MAX_REQUESTS: z.coerce.number().int().positive().default(30), + SEARCH_RATE_LIMIT_BURST_WINDOW_MS: z.coerce.number().int().positive().default(10_000), + SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS: z.coerce.number().int().positive().default(10), }); const env = envSchema.parse(process.env); @@ -29,6 +38,19 @@ const config = { nodeEnv: env.NODE_ENV, pageSize: env.PAGE_SIZE, port: env.PORT, + requestBodyLimit: env.REQUEST_BODY_LIMIT, + rateLimit: { + burstMaxRequests: env.RATE_LIMIT_BURST_MAX_REQUESTS, + burstWindowMs: env.RATE_LIMIT_BURST_WINDOW_MS, + 
maxRequests: env.RATE_LIMIT_MAX_REQUESTS,
+    windowMs: env.RATE_LIMIT_WINDOW_MS,
+  },
+  searchRateLimit: {
+    burstMaxRequests: env.SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS,
+    burstWindowMs: env.SEARCH_RATE_LIMIT_BURST_WINDOW_MS,
+    maxRequests: env.SEARCH_RATE_LIMIT_MAX_REQUESTS,
+    windowMs: env.SEARCH_RATE_LIMIT_WINDOW_MS,
+  },
   usesAccelerate,
 };
diff --git a/src/middleware/rateLimit.ts b/src/middleware/rateLimit.ts
new file mode 100644
index 0000000..49b0f24
--- /dev/null
+++ b/src/middleware/rateLimit.ts
@@ -0,0 +1,133 @@
+import type { Request, RequestHandler, Response } from 'express';
+
+const CLEANUP_INTERVAL = 200;
+
+interface ClientState {
+  burstTimestamps: number[];
+  count: number;
+  lastSeenAt: number;
+  resetAt: number;
+}
+
+interface RateLimitOptions {
+  burstMaxRequests?: number;
+  burstWindowMs?: number;
+  maxRequests: number;
+  name: string;
+  skip?: (req: Request) => boolean;
+  windowMs: number;
+}
+
+function clientAddress(req: Request) {
+  const cloudflareIp = req.header('cf-connecting-ip')?.trim();
+  const forwardedFor = req.header('x-forwarded-for')?.split(',')[0]?.trim();
+
+  if (cloudflareIp) {
+    return cloudflareIp;
+  }
+
+  if (forwardedFor) {
+    return forwardedFor;
+  }
+
+  return req.ip || req.socket.remoteAddress || 'unknown';
+}
+
+function cleanupStaleEntries(entries: Map<string, ClientState>, ttlMs: number, now: number) {
+  for (const [key, state] of entries.entries()) {
+    if (state.lastSeenAt + ttlMs < now) {
+      entries.delete(key);
+    }
+  }
+}
+
+function setRateLimitHeaders(res: Response, options: RateLimitOptions, remaining: number, resetAt: number) {
+  const windowSeconds = Math.ceil(options.windowMs / 1000);
+  const burstPolicy = options.burstWindowMs && options.burstMaxRequests
+    ?
`, burst=${options.burstMaxRequests}/${Math.ceil(options.burstWindowMs / 1000)}s` + : ''; + + res.setHeader('X-RateLimit-Limit', String(options.maxRequests)); + res.setHeader('X-RateLimit-Remaining', String(remaining)); + res.setHeader('X-RateLimit-Reset', String(Math.ceil(resetAt / 1000))); + res.setHeader('X-RateLimit-Policy', `${options.name}; window=${windowSeconds}s; limit=${options.maxRequests}${burstPolicy}`); +} + +export function createRateLimiter(options: RateLimitOptions): RequestHandler { + const entries = new Map(); + const ttlMs = Math.max(options.windowMs, options.burstWindowMs ?? 0) * 2; + let requestCounter = 0; + + return (req, res, next) => { + if (req.method === 'OPTIONS' || options.skip?.(req)) { + next(); + return; + } + + const now = Date.now(); + requestCounter += 1; + + if (requestCounter % CLEANUP_INTERVAL === 0) { + cleanupStaleEntries(entries, ttlMs, now); + } + + const key = clientAddress(req); + const state = entries.get(key) ?? { + burstTimestamps: [], + count: 0, + lastSeenAt: now, + resetAt: now + options.windowMs, + }; + + if (now >= state.resetAt) { + state.count = 0; + state.resetAt = now + options.windowMs; + } + + state.count += 1; + state.lastSeenAt = now; + + let remaining = Math.max(0, options.maxRequests - state.count); + const windowLimited = state.count > options.maxRequests; + const windowRetryAfterMs = state.resetAt - now; + + let burstLimited = false; + let burstRetryAfterMs = 0; + + if (options.burstWindowMs && options.burstMaxRequests) { + const burstWindowStart = now - options.burstWindowMs; + state.burstTimestamps = state.burstTimestamps.filter((timestamp) => timestamp > burstWindowStart); + state.burstTimestamps.push(now); + + remaining = Math.min(remaining, Math.max(0, options.burstMaxRequests - state.burstTimestamps.length)); + burstLimited = state.burstTimestamps.length > options.burstMaxRequests; + + if (burstLimited) { + const oldestTimestamp = state.burstTimestamps[0]; + burstRetryAfterMs = oldestTimestamp + 
options.burstWindowMs - now; + } + } + + entries.set(key, state); + const resetAt = windowLimited || burstLimited + ? now + Math.max(windowLimited ? windowRetryAfterMs : 0, burstLimited ? burstRetryAfterMs : 0) + : state.resetAt; + + setRateLimitHeaders(res, options, remaining, resetAt); + + if (windowLimited || burstLimited) { + const retryAfterMs = Math.max(windowLimited ? windowRetryAfterMs : 0, burstLimited ? burstRetryAfterMs : 0); + const retryAfterSeconds = Math.max(1, Math.ceil(retryAfterMs / 1000)); + + res.setHeader('Retry-After', String(retryAfterSeconds)); + res.status(429).json({ + error: { + message: 'Rate limit exceeded. Please slow down and try again later.', + }, + }); + return; + } + + next(); + }; +} diff --git a/tests/rate-limit.test.ts b/tests/rate-limit.test.ts new file mode 100644 index 0000000..c625b16 --- /dev/null +++ b/tests/rate-limit.test.ts @@ -0,0 +1,85 @@ +import express from 'express'; +import request from 'supertest'; +import { createRateLimiter } from '../src/middleware/rateLimit.js'; + +function createTestApp(limiter = createRateLimiter({ + burstMaxRequests: 2, + burstWindowMs: 1_000, + maxRequests: 3, + name: 'test', + windowMs: 60_000, +})) { + const app = express(); + + app.use(limiter); + app.get('/limited', (_req, res) => { + res.json({ ok: true }); + }); + app.get('/health', (_req, res) => { + res.json({ ok: true }); + }); + + return app; +} + +describe('rate limiting middleware', () => { + it('returns rate limit headers for allowed requests', async () => { + const app = createTestApp(); + const res = await request(app).get('/limited'); + + expect(res.statusCode).toBe(200); + expect(res.headers['x-ratelimit-limit']).toBe('3'); + expect(res.headers['x-ratelimit-remaining']).toBe('1'); + expect(res.headers['x-ratelimit-policy']).toContain('test'); + }); + + it('blocks bursts before the sustained limit is reached', async () => { + const app = createTestApp(createRateLimiter({ + burstMaxRequests: 2, + burstWindowMs: 60_000, + 
maxRequests: 10, + name: 'burst-test', + windowMs: 60_000, + })); + + await request(app).get('/limited'); + await request(app).get('/limited'); + const res = await request(app).get('/limited'); + + expect(res.statusCode).toBe(429); + expect(res.headers['retry-after']).toBeDefined(); + expect(res.body.error.message).toMatch(/Rate limit exceeded/i); + }); + + it('blocks requests after the sustained limit is reached', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 2, + name: 'window-test', + windowMs: 60_000, + })); + + await request(app).get('/limited'); + await request(app).get('/limited'); + const res = await request(app).get('/limited'); + + expect(res.statusCode).toBe(429); + expect(res.headers['retry-after']).toBeDefined(); + }); + + it('supports skipping selected routes', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 1, + name: 'skip-test', + skip: (req) => req.path === '/health', + windowMs: 60_000, + })); + + await request(app).get('/health'); + const healthRes = await request(app).get('/health'); + await request(app).get('/limited'); + const limitedRes = await request(app).get('/limited'); + + expect(healthRes.statusCode).toBe(200); + expect(limitedRes.statusCode).toBe(429); + }); +}); From 6fe9a653f9a54ab4a1624a61d069ea3a0a3d0bfd Mon Sep 17 00:00:00 2001 From: maotora Date: Wed, 25 Mar 2026 10:22:56 +0300 Subject: [PATCH 09/10] fix: harden rate limiting and hooks --- .env.example | 1 + README.md | 2 ++ scripts/run-pre-push-checks.ts | 7 +++++-- src/app.ts | 2 ++ src/config.ts | 31 +++++++++++++++++++++++++++++++ src/middleware/rateLimit.ts | 11 ----------- tests/rate-limit.test.ts | 33 +++++++++++++++++++++++++++++++++ 7 files changed, 74 insertions(+), 13 deletions(-) diff --git a/.env.example b/.env.example index a0cf114..7de2835 100644 --- a/.env.example +++ b/.env.example @@ -3,6 +3,7 @@ DIRECT_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api PORT="8080" PAGE_SIZE="10" 
REQUEST_BODY_LIMIT="16kb" +# TRUST_PROXY="loopback, linklocal, uniquelocal" RATE_LIMIT_WINDOW_MS="60000" RATE_LIMIT_MAX_REQUESTS="120" RATE_LIMIT_BURST_WINDOW_MS="10000" diff --git a/README.md b/README.md index edd26ff..146c46a 100644 --- a/README.md +++ b/README.md @@ -69,10 +69,12 @@ pnpm openapi:json - API routes are protected by per-IP rate limits with both sustained and burst thresholds - `/search` has a stricter limit than the rest of the API because it is the easiest expensive endpoint to abuse - Request bodies are capped with `REQUEST_BODY_LIMIT`, even though the public API is mostly read-only +- Rate limiting keys off Express `req.ip`; if you deploy behind a trusted proxy/load balancer, set `TRUST_PROXY` so Express resolves the real client IP correctly - All limits are configurable with environment variables: ```bash REQUEST_BODY_LIMIT=16kb + TRUST_PROXY="loopback, linklocal, uniquelocal" RATE_LIMIT_WINDOW_MS=60000 RATE_LIMIT_MAX_REQUESTS=120 RATE_LIMIT_BURST_WINDOW_MS=10000 diff --git a/scripts/run-pre-push-checks.ts b/scripts/run-pre-push-checks.ts index 159f62e..fd85c3b 100644 --- a/scripts/run-pre-push-checks.ts +++ b/scripts/run-pre-push-checks.ts @@ -1,14 +1,17 @@ import { execFileSync } from 'node:child_process'; import { randomUUID } from 'node:crypto'; +import dotenv from 'dotenv'; import { Pool } from 'pg'; const pnpmCommand = process.platform === 'win32' ? 'pnpm.cmd' : 'pnpm'; +dotenv.config(); + function resolveDirectDatabaseUrl() { - const candidate = process.env.DIRECT_DATABASE_URL ?? 
process.env.DATABASE_URL; + const candidate = process.env.DIRECT_DATABASE_URL; if (!candidate) { - throw new Error('Set DIRECT_DATABASE_URL to a direct PostgreSQL connection before pushing.'); + throw new Error('Set DIRECT_DATABASE_URL in your shell or .env before pushing.'); } if (candidate.startsWith('prisma://') || candidate.startsWith('prisma+postgres://')) { diff --git a/src/app.ts b/src/app.ts index 4118824..5abbd5f 100644 --- a/src/app.ts +++ b/src/app.ts @@ -24,6 +24,8 @@ const searchRateLimiter = createRateLimiter({ name: 'search', }); +app.set('trust proxy', config.trustProxy); + morgan.token('request-id', (req) => (req as Request).requestId ?? '-'); const logFormatter: morgan.FormatFn = (tokens, req, res) => { diff --git a/src/config.ts b/src/config.ts index d35f519..3ad9834 100644 --- a/src/config.ts +++ b/src/config.ts @@ -7,6 +7,35 @@ function isAccelerateUrl(url: string) { return url.startsWith('prisma://') || url.startsWith('prisma+postgres://'); } +function parseTrustProxy(value?: string) { + if (!value) { + return false; + } + + const trimmed = value.trim(); + + if (trimmed === 'true') { + return true; + } + + if (trimmed === 'false') { + return false; + } + + if (/^\d+$/.test(trimmed)) { + return Number(trimmed); + } + + if (trimmed.includes(',')) { + return trimmed + .split(',') + .map((entry) => entry.trim()) + .filter(Boolean); + } + + return trimmed; +} + const envSchema = z.object({ DATABASE_URL: z.string().min(1, 'DATABASE_URL is required'), DIRECT_DATABASE_URL: z.string().min(1, 'DIRECT_DATABASE_URL cannot be empty').optional(), @@ -14,6 +43,7 @@ const envSchema = z.object({ PAGE_SIZE: z.coerce.number().int().positive().max(100).default(10), PORT: z.coerce.number().int().positive().default(8080), REQUEST_BODY_LIMIT: z.string().trim().min(1).default('16kb'), + TRUST_PROXY: z.string().trim().min(1).optional(), RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000), RATE_LIMIT_MAX_REQUESTS: 
z.coerce.number().int().positive().default(120), RATE_LIMIT_BURST_WINDOW_MS: z.coerce.number().int().positive().default(10_000), @@ -39,6 +69,7 @@ const config = { pageSize: env.PAGE_SIZE, port: env.PORT, requestBodyLimit: env.REQUEST_BODY_LIMIT, + trustProxy: parseTrustProxy(env.TRUST_PROXY), rateLimit: { burstMaxRequests: env.RATE_LIMIT_BURST_MAX_REQUESTS, burstWindowMs: env.RATE_LIMIT_BURST_WINDOW_MS, diff --git a/src/middleware/rateLimit.ts b/src/middleware/rateLimit.ts index 49b0f24..4c8b3b5 100644 --- a/src/middleware/rateLimit.ts +++ b/src/middleware/rateLimit.ts @@ -19,17 +19,6 @@ interface RateLimitOptions { } function clientAddress(req: Request) { - const cloudflareIp = req.header('cf-connecting-ip')?.trim(); - const forwardedFor = req.header('x-forwarded-for')?.split(',')[0]?.trim(); - - if (cloudflareIp) { - return cloudflareIp; - } - - if (forwardedFor) { - return forwardedFor; - } - return req.ip || req.socket.remoteAddress || 'unknown'; } diff --git a/tests/rate-limit.test.ts b/tests/rate-limit.test.ts index c625b16..b6bc428 100644 --- a/tests/rate-limit.test.ts +++ b/tests/rate-limit.test.ts @@ -82,4 +82,37 @@ describe('rate limiting middleware', () => { expect(healthRes.statusCode).toBe(200); expect(limitedRes.statusCode).toBe(429); }); + + it('does not trust spoofed forwarded headers by default', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 1, + name: 'spoof-test', + windowMs: 60_000, + })); + + await request(app).get('/limited').set('x-forwarded-for', '203.0.113.10'); + const res = await request(app).get('/limited').set('x-forwarded-for', '198.51.100.25'); + + expect(res.statusCode).toBe(429); + }); + + it('uses forwarded client IPs when Express trust proxy is enabled', async () => { + const app = express(); + + app.set('trust proxy', true); + app.use(createRateLimiter({ + maxRequests: 1, + name: 'trusted-proxy-test', + windowMs: 60_000, + })); + app.get('/limited', (_req, res) => { + res.json({ ok: true }); + }); 
+ + const first = await request(app).get('/limited').set('x-forwarded-for', '203.0.113.10'); + const second = await request(app).get('/limited').set('x-forwarded-for', '198.51.100.25'); + + expect(first.statusCode).toBe(200); + expect(second.statusCode).toBe(200); + }); }); From d9453f457eb367417e71425cd970e17c0d891252 Mon Sep 17 00:00:00 2001 From: maotora Date: Wed, 25 Mar 2026 10:37:58 +0300 Subject: [PATCH 10/10] fix: guard pre-push against remote databases --- README.md | 1 + scripts/run-pre-push-checks.ts | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/README.md b/README.md index fc69638..8a9d5ef 100644 --- a/README.md +++ b/README.md @@ -183,6 +183,7 @@ Additional filters: - Pre-commit runs `pnpm hooks:pre-commit` (`lint` + `typecheck`) - Pre-push runs `pnpm hooks:pre-push`, which first builds the app, then creates a temporary Postgres database and runs `pnpm test:ci` - Pre-push requires `DIRECT_DATABASE_URL` or legacy `DIRECT_URL` to be a direct PostgreSQL URL +- Pre-push refuses non-local databases by default; set `ALLOW_REMOTE_PREPUSH_DB=1` only if you intentionally want hook verification against a remote direct Postgres instance ## License This project is licensed under the CopyLeft License. See [LICENSE](./LICENSE). 
diff --git a/scripts/run-pre-push-checks.ts b/scripts/run-pre-push-checks.ts index 7f4be09..9be06fb 100644 --- a/scripts/run-pre-push-checks.ts +++ b/scripts/run-pre-push-checks.ts @@ -21,6 +21,10 @@ function resolveDirectDatabaseUrl() { return new URL(candidate); } +function isLocalDatabaseHost(hostname: string) { + return hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1'; +} + function tempDatabaseUrl(baseUrl: URL, databaseName: string) { const next = new URL(baseUrl.toString()); next.pathname = `/${databaseName}`; @@ -67,6 +71,11 @@ async function dropTemporaryDatabase(pool: Pool, databaseName: string) { async function main() { const directUrl = resolveDirectDatabaseUrl(); + + if (!isLocalDatabaseHost(directUrl.hostname) && process.env.ALLOW_REMOTE_PREPUSH_DB !== '1') { + throw new Error('Pre-push checks refuse to use non-local databases by default. Set ALLOW_REMOTE_PREPUSH_DB=1 if you really want that.'); + } + const originalDatabase = directUrl.pathname.replace(/^\//, '') || 'locations_api'; const tempDatabaseName = `${originalDatabase}_prepush_${randomUUID().replace(/-/g, '').slice(0, 8)}`; const isolatedDatabaseUrl = tempDatabaseUrl(directUrl, tempDatabaseName);