diff --git a/.env.template b/.env.template index 69e2590a1..9f135763f 100644 --- a/.env.template +++ b/.env.template @@ -1,9 +1,12 @@ -ENVIRONMENT=$ENV_NAME -API_KEY= -HEADERAUTH= PR_NUMBER=prxx # remove if needs to run against main -NHSD_APIM_TOKEN= -PROXY_NAME= +GITHUB_TOKEN= # Your github Personal Access Token (PAT) + + +# The variables below are used for End to End tests +PROXY_NAME= # information about the proxy name can be found in the tests/e2e-tests/README.md + + + # * nhs-notify-supplier--internal-dev--nhs-notify-supplier # * nhs-notify-supplier--internal-dev--nhs-notify-supplier-PR-XX # * nhs-notify-supplier--ref--nhs-notify-supplier -- ref env diff --git a/README.md b/README.md index 8e61610ca..d8b7cf781 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,4 @@ + # NHS Notify Supplier API [![1. CI/CD pull request](https://github.com/NHSDigital/nhs-notify-supplier-api/actions/workflows/cicd-1-pull-request.yaml/badge.svg)](https://github.com/NHSDigital/nhs-notify-supplier-api/actions/workflows/cicd-1-pull-request.yaml) @@ -75,9 +76,49 @@ New developers of the NHS Notify Supplier API should understand the below. #### Prerequisites and Configuration -- Utilised the devcontainer, for pre reqs and configuration. -- You should open in a devcontainer or a Github workspaces. 
-- By default it will run `make config` when the container is first setup +- Create the file `~/.aws/config` with the following contents: + + ```dsconfig + [profile ] + region = eu-west-2 + output = json + + [profile supplier-dev] + sso_start_url = https://d-9c67018f89.awsapps.com/start#/ + sso_region = eu-west-2 + sso_account_id = 820178564574 + sso_role_name = nhs-notify-bc-developer + region = eu-west-2 + output = json + + [profile supplier-nonprod] + sso_start_url = https://d-9c67018f89.awsapps.com/start#/ + sso_region = eu-west-2 + sso_account_id = 885964308133 + sso_role_name = nhs-notify-bc-developer + region = eu-west-2 + output = json + ``` + +- In your `~/.bashrc` or `~/.zshrc` add the export `export AWS_PROFILE=supplier-dev`, or whichever profile you need +- In the project's root directory create an `.env` file based on the `.env.template` file and fill variables as needed. +- Create the file `~/.npmrc` with the contents: + + ```dsconfig + # Authenticate to GitHub Packages for github.com + //npm.pkg.github.com/:_authToken= + + + # Package is scoped under @org, set registry for that scope + @nhsdigital:registry=https://npm.pkg.github.com + ``` + +- Install `node` (to run `npm install` and build the project) +- Install `aws cli` to be able to connect to AWS (needed for some tests) +- If AWS CLI calls are blocked by a firewall (e.g. Zscaler), you need to add the custom certificates in the location `/scripts/devcontainer/custom-ca-certs` +- Install `docker` or `Rancher` for containerisation + - You should open in a devcontainer or a Github workspaces. 
(In VSCode -> Open Command Palette -> "Dev containers: rebuild without cache and reopen in container") + - By default it will run `make config` when the container is first setup ##### SDKs @@ -151,3 +192,5 @@ Import the files into postman Select a target environment in postman Run the collection The collections must be kept in sync manually + + diff --git a/internal/helpers/src/group-id.ts b/internal/helpers/src/group-id.ts new file mode 100644 index 000000000..a5a6cff2a --- /dev/null +++ b/internal/helpers/src/group-id.ts @@ -0,0 +1,7 @@ +export default function formatGroupId( + clientId: string, + campaignId = "unknown", + safeTemplateId = "unknown", +): string { + return `${clientId}_${campaignId}_${safeTemplateId}`; +} diff --git a/internal/helpers/src/index.ts b/internal/helpers/src/index.ts index 9132c898c..7162fc99f 100644 --- a/internal/helpers/src/index.ts +++ b/internal/helpers/src/index.ts @@ -9,3 +9,4 @@ export { default as $Environment } from "./environment"; export * from "./id-ref"; export * from "./logger"; export * from "./metrics"; +export { default as formatGroupId } from "./group-id"; diff --git a/lambdas/supplier-allocator/package.json b/lambdas/supplier-allocator/package.json index 22485a858..73c2c0113 100644 --- a/lambdas/supplier-allocator/package.json +++ b/lambdas/supplier-allocator/package.json @@ -26,4 +26,4 @@ "typecheck": "tsc --noEmit" }, "version": "0.0.1" -} +} \ No newline at end of file diff --git a/lambdas/supplier-allocator/src/handler/__tests__/allocate-handler.test.ts b/lambdas/supplier-allocator/src/handler/__tests__/allocate-handler.test.ts index eb1a3bfdb..3e6bbf593 100644 --- a/lambdas/supplier-allocator/src/handler/__tests__/allocate-handler.test.ts +++ b/lambdas/supplier-allocator/src/handler/__tests__/allocate-handler.test.ts @@ -69,8 +69,8 @@ function createPreparedV1Event( requestItemId: "requestItem1", requestItemPlanId: "requestItemPlan1", clientId: "client1", - campaignId: "campaign1", - templateId: "template1", + 
campaignId: overrides.campaignId ?? "campaign1", + templateId: overrides.templateId ?? "template1", url: overrides.url ?? "s3://letterDataBucket/letter1.pdf", sha256Hash: "3a7bd3e2360a3d29eea436fcfb7e44c735d117c8f2f1d2d1e4f6e8f7e6e8f7e6", @@ -230,6 +230,40 @@ describe("createSupplierAllocatorHandler", () => { }); }); + test("parses SNS notification and sends message to SQS queue for v2 event without a campaignId and templateId", async () => { + const preparedEvent = createPreparedV2Event({ + campaignId: "", + templateId: "", + }); + const evt: SQSEvent = createSQSEvent([ + createSqsRecord("msg1", JSON.stringify(preparedEvent)), + ]); + + setupDefaultMocks(); + process.env.UPSERT_LETTERS_QUEUE_URL = "https://sqs.test.queue"; + + const handler = createSupplierAllocatorHandler(mockedDeps); + const result = await handler(evt, {} as any, {} as any); + + expect(result).toBeDefined(); + if (!result) throw new Error("expected BatchResponse, got void"); + + expect(result.batchItemFailures).toHaveLength(0); + + expect(mockSqsClient.send).toHaveBeenCalledTimes(1); + const sendCall = (mockSqsClient.send as jest.Mock).mock.calls[0][0]; + expect(sendCall).toBeInstanceOf(SendMessageCommand); + + const messageBody = JSON.parse(sendCall.input.MessageBody); + expect(messageBody.letterEvent).toEqual(preparedEvent); + expect(messageBody.supplierSpec).toEqual({ + supplierId: "supplier1", + specId: "spec1", + priority: 1, + billingId: "billing1", + }); + }); + test("parses SNS notification and sends message to SQS queue for v1 event", async () => { const preparedEvent = createPreparedV1Event(); diff --git a/lambdas/supplier-allocator/src/handler/allocate-handler.ts b/lambdas/supplier-allocator/src/handler/allocate-handler.ts index 2288fae12..da7ef900d 100644 --- a/lambdas/supplier-allocator/src/handler/allocate-handler.ts +++ b/lambdas/supplier-allocator/src/handler/allocate-handler.ts @@ -10,7 +10,12 @@ import { import { LetterRequestPreparedEventV2 } from 
"@nhsdigital/nhs-notify-event-schemas-letter-rendering"; import z from "zod"; import { Unit } from "aws-embedded-metrics"; -import { MetricEntry, MetricStatus, buildEMFObject } from "@internal/helpers"; +import { + MetricEntry, + MetricStatus, + buildEMFObject, + formatGroupId, +} from "@internal/helpers"; import { getSupplierAllocationsForVolumeGroup, getSupplierDetails, @@ -144,6 +149,29 @@ function emitMetrics( } } +function emitDataMetrics( + letterEvent: PreparedEvents, + supplier: string, + metricKey: string, + deps: Deps, +) { + const namespace = "supplier-allocator"; + const { campaignId, clientId, templateId } = letterEvent.data; + const dimensions: Record<string, string> = { + Supplier: supplier, + ClientId: clientId, + CampaignId: campaignId || "unknown", + TemplateId: templateId || "unknown", + GroupId: formatGroupId(clientId, campaignId, templateId), + }; + const metric: MetricEntry = { + key: metricKey, + value: 1, + unit: Unit.Count, + }; + deps.logger.info(buildEMFObject(namespace, dimensions, metric)); +} + export default function createSupplierAllocatorHandler(deps: Deps): SQSHandler { return async (event: SQSEvent) => { const batchItemFailures: SQSBatchItemFailure[] = []; @@ -154,7 +182,9 @@ export default function createSupplierAllocatorHandler(deps: Deps): SQSHandler { let supplier = "unknown"; let priority = "unknown"; try { - const letterEvent: unknown = JSON.parse(record.body); + const letterEvent: PreparedEvents = JSON.parse( + record.body, + ) as PreparedEvents; deps.logger.info({ description: "Extracted letter event", @@ -163,8 +193,8 @@ export default function createSupplierAllocatorHandler(deps: Deps): SQSHandler { validateType(letterEvent); - const supplierSpec = getSupplier(letterEvent as PreparedEvents, deps); - await getSupplierFromConfig(letterEvent as PreparedEvents, deps); + const supplierSpec = getSupplier(letterEvent, deps); + await getSupplierFromConfig(letterEvent, deps); supplier = supplierSpec.supplierId; priority = 
String(supplierSpec.priority); @@ -199,6 +229,7 @@ export default function createSupplierAllocatorHandler(deps: Deps): SQSHandler { ); incrementMetric(perAllocationSuccess, supplier, priority); + emitDataMetrics(letterEvent, supplier, "extra_data_dimensions", deps); } catch (error) { deps.logger.error({ description: "Error processing allocation of record", diff --git a/lambdas/upsert-letter/src/handler/__tests__/upsert-handler.test.ts b/lambdas/upsert-letter/src/handler/__tests__/upsert-handler.test.ts index 201f29999..9a7e244be 100644 --- a/lambdas/upsert-letter/src/handler/__tests__/upsert-handler.test.ts +++ b/lambdas/upsert-letter/src/handler/__tests__/upsert-handler.test.ts @@ -11,6 +11,8 @@ import { $LetterStatusChangeEvent, LetterStatusChangeEvent, } from "@nhsdigital/nhs-notify-event-schemas-supplier-api/src/events/letter-events"; +import { MetricStatus, buildEMFObject } from "@internal/helpers"; +import { Unit } from "aws-embedded-metrics"; import createUpsertLetterHandler from "../upsert-handler"; import { Deps } from "../../config/deps"; import { EnvVars } from "../../config/env"; @@ -24,6 +26,15 @@ jest.mock("@aws-lambda-powertools/idempotency", () => { }; }); +// mock the buildEMFObject function to just return its input so we can assert on it +jest.mock("@internal/helpers", () => { + const original = jest.requireActual("@internal/helpers"); + return { + ...original, + buildEMFObject: jest.fn(original.buildEMFObject), + }; +}); + const renderingSchemaVersion: string = packageJson.dependencies[ "@nhsdigital/nhs-notify-event-schemas-letter-rendering" @@ -190,26 +201,6 @@ function createSupplierStatusChangeEvent( }); } -// Mock aws-embedded-metrics -let mockMetrics: any; -jest.mock("aws-embedded-metrics", () => ({ - metricScope: ( - handler: (metrics: any) => (event: SQSEvent) => Promise, - ) => { - return async (event: SQSEvent) => { - mockMetrics = { - setNamespace: jest.fn(), - putDimensions: jest.fn(), - putMetric: jest.fn(), - }; - return 
handler(mockMetrics)(event); - }; - }, - Unit: { - Count: "Count", - }, -})); - describe("createUpsertLetterHandler", () => { const mockedDeps: jest.Mocked = { letterRepo: { @@ -280,7 +271,7 @@ describe("createUpsertLetterHandler", () => { expect(insertedV2Letter.billingRef).toBe("spec1"); expect(insertedV2Letter.url).toBe("s3://letterDataBucket/letter1.pdf"); expect(insertedV2Letter.status).toBe("PENDING"); - expect(insertedV2Letter.groupId).toBe("client1campaign1template1"); + expect(insertedV2Letter.groupId).toBe("client1_campaign1_template1"); expect(insertedV2Letter.source).toBe("/data-plane/letter-rendering/test"); expect(insertedV2Letter.specificationBillingId).toBe("billing1"); expect(insertedV2Letter.priority).toBe(10); @@ -293,7 +284,7 @@ describe("createUpsertLetterHandler", () => { expect(insertedV1Letter.billingRef).toBe("spec2"); expect(insertedV1Letter.url).toBe("s3://letterDataBucket/letter1.pdf"); expect(insertedV1Letter.status).toBe("PENDING"); - expect(insertedV1Letter.groupId).toBe("client1campaign1template1"); + expect(insertedV1Letter.groupId).toBe("client1_campaign1_template1"); expect(insertedV1Letter.source).toBe("/data-plane/letter-rendering/test"); expect(insertedV1Letter.specificationBillingId).toBe("billing2"); expect(insertedV1Letter.priority).toBe(10); @@ -306,19 +297,17 @@ describe("createUpsertLetterHandler", () => { expect(updatedLetter.reasonCode).toBe("R07"); expect(updatedLetter.reasonText).toBe("No such address"); expect(updatedLetter.supplierId).toBe("supplier1"); - expect(mockMetrics.setNamespace).toHaveBeenCalledWith("upsertLetter"); - expect(mockMetrics.putDimensions).toHaveBeenCalledWith({ - Supplier: "supplier1", - }); - expect(mockMetrics.putMetric).toHaveBeenCalledWith( - "MessagesProcessed", - 2, - "Count", - ); - expect(mockMetrics.putMetric).toHaveBeenCalledWith( - "MessagesProcessed", - 1, - "Count", + expect(buildEMFObject as jest.Mock).toHaveBeenCalledWith( + "upsertLetter", + { + Supplier: "supplier1", + GroupId: 
"client1_campaign1_template1", + }, + expect.objectContaining({ + key: MetricStatus.Success, + value: 1, + unit: Unit.Count, + }), ); }); @@ -376,14 +365,17 @@ describe("createUpsertLetterHandler", () => { await createUpsertLetterHandler(mockedDeps)(evt, {} as any, {} as any); - expect(mockMetrics.setNamespace).toHaveBeenCalledWith("upsertLetter"); - expect(mockMetrics.putDimensions).toHaveBeenCalledWith({ - Supplier: "unknown", - }); - expect(mockMetrics.putMetric).toHaveBeenCalledWith( - "MessagesProcessed", - 1, - "Count", + expect(buildEMFObject as jest.Mock).toHaveBeenCalledWith( + "upsertLetter", + { + Supplier: "unknown", + GroupId: "unknown", + }, + expect.objectContaining({ + key: MetricStatus.Success, + value: 1, + unit: Unit.Count, + }), ); }); @@ -410,14 +402,17 @@ describe("createUpsertLetterHandler", () => { }), ); expect(mockedDeps.letterRepo.putLetter).not.toHaveBeenCalled(); - expect(mockMetrics.setNamespace).toHaveBeenCalledWith("upsertLetter"); - expect(mockMetrics.putDimensions).toHaveBeenCalledWith({ - Supplier: "unknown", - }); - expect(mockMetrics.putMetric).toHaveBeenCalledWith( - "MessageFailed", - 1, - "Count", + expect(buildEMFObject as jest.Mock).toHaveBeenCalledWith( + "upsertLetter", + { + Supplier: "unknown", + GroupId: "unknown", + }, + expect.objectContaining({ + key: MetricStatus.Failure, + value: 1, + unit: Unit.Count, + }), ); }); @@ -506,14 +501,17 @@ describe("createUpsertLetterHandler", () => { messageId: "bad-event", }), ); - expect(mockMetrics.setNamespace).toHaveBeenCalledWith("upsertLetter"); - expect(mockMetrics.putDimensions).toHaveBeenCalledWith({ - Supplier: "unknown", - }); - expect(mockMetrics.putMetric).toHaveBeenCalledWith( - "MessageFailed", - 1, - "Count", + expect(buildEMFObject as jest.Mock).toHaveBeenCalledWith( + "upsertLetter", + { + Supplier: "unknown", + GroupId: "unknown", + }, + expect.objectContaining({ + key: MetricStatus.Failure, + value: 1, + unit: Unit.Count, + }), ); }); diff --git 
a/lambdas/upsert-letter/src/handler/upsert-handler.ts b/lambdas/upsert-letter/src/handler/upsert-handler.ts index 61b6feb0d..7b8dad3af 100644 --- a/lambdas/upsert-letter/src/handler/upsert-handler.ts +++ b/lambdas/upsert-letter/src/handler/upsert-handler.ts @@ -10,11 +10,18 @@ import { LetterStatusChangeEvent, } from "@nhsdigital/nhs-notify-event-schemas-supplier-api/src/events/letter-events"; import { $LetterRequestPreparedEventV2 } from "@nhsdigital/nhs-notify-event-schemas-letter-rendering"; -import { MetricsLogger, Unit, metricScope } from "aws-embedded-metrics"; +import { Unit } from "aws-embedded-metrics"; import { IdempotencyConfig, makeIdempotent, } from "@aws-lambda-powertools/idempotency"; +import { + MetricEntry, + MetricStatus, + buildEMFObject, + formatGroupId, +} from "@internal/helpers"; +import { Logger } from "pino"; import { Deps } from "../config/deps"; import { PreparedEvents, @@ -54,7 +61,21 @@ function getOperationFromType(type: string): UpsertOperation { letterId: letterToInsert.id, supplierId: letterToInsert.supplierId, }); + // emit success metric + emitIndividualMetric( + deps.logger, + letterToInsert.supplierId, + MetricStatus.Success, + letterToInsert.groupId, + ); } catch (error) { + // emit failure metric + emitIndividualMetric( + deps.logger, + letterToInsert.supplierId, + MetricStatus.Failure, + letterToInsert.groupId, + ); if (error instanceof LetterAlreadyExistsError) { deps.logger.warn({ description: "Letter already exists", @@ -83,6 +104,11 @@ function getOperationFromType(type: string): UpsertOperation { letterId: letterToUpdate.id, supplierId: letterToUpdate.supplierId, }); + emitIndividualMetric( + deps.logger, + letterToUpdate.supplierId, + MetricStatus.Success, + ); }, }; } @@ -103,10 +129,11 @@ function mapToInsertLetter( status: "PENDING", specificationId: spec, priority, - groupId: - upsertRequest.data.clientId + - upsertRequest.data.campaignId + + groupId: formatGroupId( + upsertRequest.data.clientId, + 
upsertRequest.data.campaignId, upsertRequest.data.templateId, + ), url: upsertRequest.data.url, source: upsertRequest.source, subject: upsertRequest.subject, @@ -145,26 +172,24 @@ async function runUpsert( } } -async function emitMetrics( - metrics: MetricsLogger, - successMetrics: Map<string, number>, - failedMetrics: Map<string, number>, +async function emitIndividualMetric( + logger: Logger, + supplier: string, + metricKey: MetricStatus, + groupId?: string, ) { - metrics.setNamespace(process.env.AWS_LAMBDA_FUNCTION_NAME || `upsertLetter`); - // emit success metrics - for (const [supplier, count] of successMetrics) { - metrics.putDimensions({ - Supplier: supplier, - }); - metrics.putMetric("MessagesProcessed", count, Unit.Count); - } - // emit failure metrics - for (const [supplier, count] of failedMetrics) { - metrics.putDimensions({ - Supplier: supplier, - }); - metrics.putMetric("MessageFailed", count, Unit.Count); - } + const namespace = process.env.AWS_LAMBDA_FUNCTION_NAME || "upsertLetter"; + const dimensions: Record<string, string> = { + Supplier: supplier || "unknown", + GroupId: groupId || "unknown", + }; + + const metric: MetricEntry = { + key: metricKey, + value: 1, + unit: Unit.Count, + }; + logger.info(buildEMFObject(namespace, dimensions, metric)); } function getSupplierIdFromEvent(letterEvent: any): string { @@ -193,76 +218,64 @@ export default function createUpsertLetterHandler(deps: Deps): SQSHandler { config: idempotencyConfig, }); - return metricScope((metrics: MetricsLogger) => { - return async (event: SQSEvent, context: Context) => { - const batchItemFailures: SQSBatchItemFailure[] = []; - const perSupplierSuccess: Map<string, number> = new Map(); - const perSupplierFailure: Map<string, number> = new Map(); + return async (event: SQSEvent, context: Context) => { + const batchItemFailures: SQSBatchItemFailure[] = []; - const tasks = event.Records.map(async (record) => { - let supplier = "unknown"; - try { - deps.logger.info({ - description: "Processing record", - messageId: record.messageId, - message: record.body, - }); 
- const sqsMessage = JSON.parse(record.body); + const tasks = event.Records.map(async (record) => { + try { + deps.logger.info({ + description: "Processing record", + messageId: record.messageId, + message: record.body, + }); + const sqsMessage = JSON.parse(record.body); - const queueMessage: QueueMessage = parseQueueMessage(sqsMessage); + const queueMessage: QueueMessage = parseQueueMessage(sqsMessage); - let letterEvent: LetterStatusChangeEvent | PreparedEvents; - let supplierSpec: SupplierSpec | undefined; + let letterEvent: LetterStatusChangeEvent | PreparedEvents; + let supplierSpec: SupplierSpec | undefined; - if ("letterEvent" in queueMessage) { - letterEvent = queueMessage.letterEvent; - supplierSpec = queueMessage.supplierSpec; - } else { - letterEvent = queueMessage; - supplierSpec = undefined; - } - - deps.logger.info({ - description: "Extracted letter event", - messageId: record.messageId, - type: letterEvent.type, - supplier: supplierSpec, - }); - - idempotencyConfig.registerLambdaContext(context); - supplier = await processRecordIdempotently( - letterEvent, - supplierSpec, - perSupplierSuccess, - deps, - ); - } catch (error) { - deps.logger.error({ - description: "Error processing upsert of record", - err: error, - messageId: record.messageId, - message: record.body, - }); - perSupplierFailure.set( - supplier, - (perSupplierFailure.get(supplier) || 0) + 1, - ); - batchItemFailures.push({ itemIdentifier: record.messageId }); + if ("letterEvent" in queueMessage) { + letterEvent = queueMessage.letterEvent; + supplierSpec = queueMessage.supplierSpec; + } else { + letterEvent = queueMessage; + supplierSpec = undefined; } - }); - await Promise.all(tasks); + deps.logger.info({ + description: "Extracted letter event", + messageId: record.messageId, + type: letterEvent.type, + supplier: supplierSpec, + }); - await emitMetrics(metrics, perSupplierSuccess, perSupplierFailure); - return { batchItemFailures }; - }; - }); + 
idempotencyConfig.registerLambdaContext(context); + await processRecordIdempotently(letterEvent, supplierSpec, deps); + } catch (error) { + deps.logger.error({ + description: "Error processing upsert of record", + err: error, + messageId: record.messageId, + message: record.body, + }); + await emitIndividualMetric( + deps.logger, + "unknown", + MetricStatus.Failure, + ); + batchItemFailures.push({ itemIdentifier: record.messageId }); + } + }); + await Promise.all(tasks); + + return { batchItemFailures }; + }; } async function processRecord( letterEvent: LetterStatusChangeEvent | PreparedEvents, supplierSpec: SupplierSpec | undefined, - perSupplierSuccess: Map, deps: Deps, ) { const supplier = @@ -284,6 +297,5 @@ async function processRecord( deps, ); - perSupplierSuccess.set(supplier, (perSupplierSuccess.get(supplier) || 0) + 1); return supplier; } diff --git a/package-lock.json b/package-lock.json index f3b0abe38..6053d5922 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5802,9 +5802,9 @@ } }, "node_modules/@pact-foundation/pact/node_modules/content-disposition": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", - "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.1.0.tgz", + "integrity": "sha512-5jRCH9Z/+DRP7rkvY83B+yGIGX96OYdJmzngqnw2SBSxqCFPd0w2km3s5iawpGX8krnwSGmF0FW5Nhr0Hfai3g==", "license": "MIT", "engines": { "node": ">=18"