diff --git a/infrastructure/terraform/components/api/ddb_table_letter_queue.tf b/infrastructure/terraform/components/api/ddb_table_letter_queue.tf index 64a6c34b..b6952ab4 100644 --- a/infrastructure/terraform/components/api/ddb_table_letter_queue.tf +++ b/infrastructure/terraform/components/api/ddb_table_letter_queue.tf @@ -3,7 +3,7 @@ resource "aws_dynamodb_table" "letter_queue" { billing_mode = "PAY_PER_REQUEST" hash_key = "supplierId" - range_key = "queueTimestamp" + range_key = "letterId" ttl { attribute_name = "ttl" @@ -11,8 +11,8 @@ resource "aws_dynamodb_table" "letter_queue" { } local_secondary_index { - name = "letterId-index" - range_key = "letterId" + name = "queueSortOrder-index" + range_key = "queueTimestamp" projection_type = "ALL" } diff --git a/infrastructure/terraform/components/api/module_lambda_update_letter_queue.tf b/infrastructure/terraform/components/api/module_lambda_update_letter_queue.tf index cbd1d0ad..95862ff7 100644 --- a/infrastructure/terraform/components/api/module_lambda_update_letter_queue.tf +++ b/infrastructure/terraform/components/api/module_lambda_update_letter_queue.tf @@ -35,7 +35,7 @@ module "update_letter_queue" { log_subscription_role_arn = local.acct.log_subscription_role_arn lambda_env_vars = merge(local.common_lambda_env_vars, { - LETTER_QUEUE_TABLE_NAME = aws_dynamodb_table.letter_queue.name, + LETTER_QUEUE_TABLE_NAME = "${local.csi}-letter-queue", LETTER_QUEUE_TTL_HOURS = 168 # 7 days }) } @@ -47,11 +47,12 @@ data "aws_iam_policy_document" "update_letter_queue_lambda" { actions = [ "dynamodb:PutItem", + "dynamodb:DeleteItem", ] resources = [ - aws_dynamodb_table.letter_queue.arn, - "${aws_dynamodb_table.letter_queue.arn}/index/*" + "arn:aws:dynamodb:${var.region}:${var.aws_account_id}:table/${local.csi}-letter-queue", + "arn:aws:dynamodb:${var.region}:${var.aws_account_id}:table/${local.csi}-letter-queue/index/*" ] } diff --git a/internal/datastore/src/__test__/db.ts b/internal/datastore/src/__test__/db.ts index 
f382add6..ae652ad1 100644 --- a/internal/datastore/src/__test__/db.ts +++ b/internal/datastore/src/__test__/db.ts @@ -129,7 +129,7 @@ const createLetterQueueTableCommand = new CreateTableCommand({ ], LocalSecondaryIndexes: [ { - IndexName: "timestamp-index", + IndexName: "queueSortOrder-index", KeySchema: [ { AttributeName: "supplierId", KeyType: "HASH" }, // Partition key for LSI { AttributeName: "queueTimestamp", KeyType: "RANGE" }, // Sort key for LSI diff --git a/internal/datastore/src/__test__/letter-queue-repository.test.ts b/internal/datastore/src/__test__/letter-queue-repository.test.ts index fdba8e81..04e8d57c 100644 --- a/internal/datastore/src/__test__/letter-queue-repository.test.ts +++ b/internal/datastore/src/__test__/letter-queue-repository.test.ts @@ -1,3 +1,4 @@ +import { GetCommand } from "@aws-sdk/lib-dynamodb"; import { Logger } from "pino"; import { DBContext, @@ -7,8 +8,9 @@ import { } from "./db"; import LetterQueueRepository from "../letter-queue-repository"; import { InsertPendingLetter } from "../types"; -import { LetterAlreadyExistsError } from "../errors"; +import { LetterAlreadyExistsError } from "../letter-already-exists-error"; import { createTestLogger } from "./logs"; +import { LetterDoesNotExistError } from "../letter-does-not-exist-error"; function createLetter(letterId = "letter1"): InsertPendingLetter { return { @@ -51,32 +53,19 @@ describe("LetterQueueRepository", () => { await db.container.stop(); }); - function assertTtl(ttl: number, before: number, after: number) { - const expectedLower = Math.floor( - before / 1000 + 60 * 60 * db.config.letterQueueTtlHours, - ); - const expectedUpper = Math.floor( - after / 1000 + 60 * 60 * db.config.lettersTtlHours, - ); - expect(ttl).toBeGreaterThanOrEqual(expectedLower); - expect(ttl).toBeLessThanOrEqual(expectedUpper); - } - describe("putLetter", () => { it("adds a letter to the database", async () => { - const before = Date.now(); + jest.useFakeTimers().setSystemTime(new 
Date("2026-03-04T13:15:45.000Z")); const pendingLetter = await letterQueueRepository.putLetter(createLetter()); - const after = Date.now(); - - const timestampInMillis = new Date( - pendingLetter.queueTimestamp, - ).valueOf(); - expect(timestampInMillis).toBeGreaterThanOrEqual(before); - expect(timestampInMillis).toBeLessThanOrEqual(after); - assertTtl(pendingLetter.ttl, before, after); + expect(pendingLetter.queueTimestamp).toBe("2026-03-04T13:15:45.000Z"); + expect(pendingLetter.visibilityTimestamp).toBe( + "2026-03-04T13:15:45.000Z", + ); + expect(pendingLetter.ttl).toBe(1_772_633_745); + expect(await letterExists(db, "supplier1", "letter1")).toBe(true); }); it("throws LetterAlreadyExistsError when creating a letter which already exists", async () => { @@ -101,4 +90,48 @@ describe("LetterQueueRepository", () => { ).rejects.toThrow("Cannot do operations on a non-existent table"); }); }); + + describe("deleteLetter", () => { + it("deletes a letter from the database", async () => { + await letterQueueRepository.putLetter(createLetter()); + + await letterQueueRepository.deleteLetter("supplier1", "letter1"); + + expect(await letterExists(db, "supplier1", "letter1")).toBe(false); + }); + + it("throws an error when the letter does not exist", async () => { + await expect( + letterQueueRepository.deleteLetter("supplier1", "letter1"), + ).rejects.toThrow(LetterDoesNotExistError); + }); + + it("rethrows errors from DynamoDB when deleting a letter", async () => { + const misconfiguredRepository = new LetterQueueRepository( + db.docClient, + logger, + { + ...db.config, + letterQueueTableName: "nonexistent-table", + }, + ); + await expect( + misconfiguredRepository.deleteLetter("supplier1", "letter1"), + ).rejects.toThrow("Cannot do operations on a non-existent table"); + }); + }); }); + +async function letterExists( + db: DBContext, + supplierId: string, + letterId: string, +): Promise<boolean> { + const result = await db.docClient.send( + new GetCommand({ + TableName: 
db.config.letterQueueTableName, + Key: { supplierId, letterId }, + }), + ); + return result.Item !== undefined; +} diff --git a/internal/datastore/src/index.ts b/internal/datastore/src/index.ts index 7ee912c2..72fd95d8 100644 --- a/internal/datastore/src/index.ts +++ b/internal/datastore/src/index.ts @@ -1,5 +1,6 @@ export * from "./types"; -export * from "./errors"; +export * from "./letter-already-exists-error"; +export * from "./letter-does-not-exist-error"; export * from "./mi-repository"; export * from "./letter-repository"; export * from "./supplier-repository"; diff --git a/internal/datastore/src/errors.ts b/internal/datastore/src/letter-already-exists-error.ts similarity index 100% rename from internal/datastore/src/errors.ts rename to internal/datastore/src/letter-already-exists-error.ts diff --git a/internal/datastore/src/letter-does-not-exist-error.ts b/internal/datastore/src/letter-does-not-exist-error.ts new file mode 100644 index 00000000..ab5410b9 --- /dev/null +++ b/internal/datastore/src/letter-does-not-exist-error.ts @@ -0,0 +1,15 @@ +/** + * Error thrown when attempting to delete a letter that does not exist in the database. 
+ */ +// eslint-disable-next-line import-x/prefer-default-export +export class LetterDoesNotExistError extends Error { + constructor( + public readonly supplierId: string, + public readonly letterId: string, + ) { + super( + `Letter does not exist: supplierId=${supplierId}, letterId=${letterId}`, + ); + this.name = "LetterDoesNotExistError"; + } +} diff --git a/internal/datastore/src/letter-queue-repository.ts b/internal/datastore/src/letter-queue-repository.ts index 5e1da7dd..70592db2 100644 --- a/internal/datastore/src/letter-queue-repository.ts +++ b/internal/datastore/src/letter-queue-repository.ts @@ -1,11 +1,16 @@ -import { DynamoDBDocumentClient, PutCommand } from "@aws-sdk/lib-dynamodb"; +import { + DeleteCommand, + DynamoDBDocumentClient, + PutCommand, +} from "@aws-sdk/lib-dynamodb"; import { Logger } from "pino"; import { InsertPendingLetter, PendingLetter, PendingLetterSchema, } from "./types"; -import { LetterAlreadyExistsError } from "./errors"; +import { LetterAlreadyExistsError } from "./letter-already-exists-error"; +import { LetterDoesNotExistError } from "./letter-does-not-exist-error"; type LetterQueueRepositoryConfig = { letterQueueTableName: string; @@ -22,10 +27,13 @@ export default class LetterQueueRepository { async putLetter( insertPendingLetter: InsertPendingLetter, ): Promise<PendingLetter> { + // needs to be an ISO timestamp as Db sorts alphabetically + const now = new Date().toISOString(); + const pendingLetter: PendingLetter = { ...insertPendingLetter, - // needs to be an ISO timestamp as Db sorts alphabetically - queueTimestamp: new Date().toISOString(), + queueTimestamp: now, + visibilityTimestamp: now, ttl: Math.floor( Date.now() / 1000 + 60 * 60 * this.config.letterQueueTtlHours, ), @@ -52,4 +60,24 @@ } return PendingLetterSchema.parse(pendingLetter); } + + async deleteLetter(supplierId: string, letterId: string): Promise<void> { + try { + await this.ddbClient.send( + new DeleteCommand({ + TableName: 
this.config.letterQueueTableName, + Key: { supplierId, letterId }, + ConditionExpression: "attribute_exists(letterId)", + }), + ); + } catch (error) { + if ( + error instanceof Error && + error.name === "ConditionalCheckFailedException" + ) { + throw new LetterDoesNotExistError(supplierId, letterId); + } + throw error; + } + } } diff --git a/internal/datastore/src/types.ts b/internal/datastore/src/types.ts index bb0843f8..107a6c8b 100644 --- a/internal/datastore/src/types.ts +++ b/internal/datastore/src/types.ts @@ -80,6 +80,7 @@ export const PendingLetterSchema = z.object({ supplierId: idRef(SupplierSchema, "id"), letterId: idRef(LetterSchema, "id"), queueTimestamp: z.string().describe("Secondary index SK"), + visibilityTimestamp: z.string(), specificationId: z.string(), groupId: z.string(), ttl: z.int(), @@ -87,7 +88,10 @@ export type PendingLetter = z.infer<typeof PendingLetterSchema>; -export type InsertPendingLetter = Omit<PendingLetter, "ttl" | "queueTimestamp">; +export type InsertPendingLetter = Omit< + PendingLetter, + "ttl" | "queueTimestamp" | "visibilityTimestamp" +>; export const MISchemaBase = z.object({ id: z.string(), diff --git a/lambdas/update-letter-queue/src/__tests__/update-letter-queue.test.ts b/lambdas/update-letter-queue/src/__tests__/update-letter-queue.test.ts index 03f9ff72..801b7917 100644 --- a/lambdas/update-letter-queue/src/__tests__/update-letter-queue.test.ts +++ b/lambdas/update-letter-queue/src/__tests__/update-letter-queue.test.ts @@ -1,6 +1,7 @@ import { Letter, LetterAlreadyExistsError, + LetterDoesNotExistError, LetterQueueRepository, } from "@internal/datastore"; import { mockDeep } from "jest-mock-extended"; @@ -21,6 +22,7 @@ import { LetterStatus } from "../../../api-handler/src/contracts/letters"; const mockedDeps: jest.Mocked<Deps> = { letterQueueRepository: { putLetter: jest.fn(), + deleteLetter: jest.fn(), } as unknown as LetterQueueRepository, logger: { info: jest.fn(), @@ -50,7 +52,7 @@ function generateLetter(status: LetterStatus, id?: string): 
Letter { } beforeEach(() => { - jest.clearAllMocks(); + jest.resetAllMocks(); }); describe("update-letter-queue Lambda", () => { @@ -74,23 +76,25 @@ describe("update-letter-queue Lambda", () => { expect(result.batchItemFailures).toEqual([]); }); - it("does not publish updates", async () => { + it("deletes letters that are no longer pending", async () => { const handler = createHandler(mockedDeps); const oldLetter = generateLetter("PENDING"); - const newLetter = generateLetter("PENDING"); + const newLetter = generateLetter("ACCEPTED"); const testData = generateKinesisEvent([ generateModifyRecord(oldLetter, newLetter), ]); const result = await handler(testData, mockDeep(), jest.fn()); - expect(mockedDeps.letterQueueRepository.putLetter).not.toHaveBeenCalled(); + expect( + mockedDeps.letterQueueRepository.deleteLetter, + ).toHaveBeenCalledWith("supplier1", "1"); expect(result.batchItemFailures).toEqual([]); }); it("does not publish non-PENDING letters", async () => { const handler = createHandler(mockedDeps); - const newLetter = generateLetter("PRINTED"); + const newLetter = generateLetter("ACCEPTED"); const testData = generateKinesisEvent([generateInsertRecord(newLetter)]); const result = await handler(testData, mockDeep(), jest.fn()); @@ -99,6 +103,22 @@ describe("update-letter-queue Lambda", () => { expect(result.batchItemFailures).toEqual([]); }); + it("does not delete letters that are still PENDING", async () => { + const handler = createHandler(mockedDeps); + const oldLetter = generateLetter("PENDING"); + const newLetter = generateLetter("PENDING"); + + const testData = generateKinesisEvent([ + generateModifyRecord(oldLetter, newLetter), + ]); + const result = await handler(testData, mockDeep(), jest.fn()); + + expect( + mockedDeps.letterQueueRepository.deleteLetter, + ).not.toHaveBeenCalled(); + expect(result.batchItemFailures).toEqual([]); + }); + it("handles empty Records array", async () => { const handler = createHandler(mockedDeps); const testData = { 
Records: [] } as unknown as KinesisStreamEvent; @@ -116,11 +136,10 @@ describe("update-letter-queue Lambda", () => { const newLetter = { id: "1", status: "PENDING" } as Letter; const testData = generateKinesisEvent([generateInsertRecord(newLetter)]); - await expect( - handler(testData, mockDeep(), jest.fn()), - ).rejects.toThrow(); + const result = await handler(testData, mockDeep(), jest.fn()); expect(mockedDeps.letterQueueRepository.putLetter).not.toHaveBeenCalled(); + expect(result.batchItemFailures).toEqual([{ itemIdentifier: "seq-0" }]); }); it("returns on the first failure", async () => { @@ -143,7 +162,7 @@ describe("update-letter-queue Lambda", () => { expect(result.batchItemFailures).toEqual([{ itemIdentifier: "seq-0" }]); }); - it("does not treat a replayed event as a failure", async () => { + it("does not treat a replayed insert as a failure", async () => { const handler = createHandler(mockedDeps); const newLetter1 = generateLetter("PENDING", "1"); const newLetter2 = generateLetter("PENDING", "2"); @@ -160,6 +179,25 @@ describe("update-letter-queue Lambda", () => { expect(result.batchItemFailures).toEqual([]); }); + it("does not treat a replayed delete as a failure", async () => { + const handler = createHandler(mockedDeps); + const oldLetter1 = generateLetter("PENDING", "1"); + const oldLetter2 = generateLetter("PENDING", "2"); + const newLetter1 = generateLetter("ACCEPTED", "1"); + const newLetter2 = generateLetter("ACCEPTED", "2"); + (mockedDeps.letterQueueRepository.deleteLetter as jest.Mock) + .mockRejectedValueOnce(new LetterDoesNotExistError("supplier1", "1")) + .mockResolvedValueOnce({}); + + const testData = generateKinesisEvent([ + generateModifyRecord(oldLetter1, newLetter1), + generateModifyRecord(oldLetter2, newLetter2), + ]); + const result = await handler(testData, mockDeep(), jest.fn()); + + expect(result.batchItemFailures).toEqual([]); + }); + it("throws error when Kinesis payload cannot be parsed as JSON", async () => { const handler = 
createHandler(mockedDeps); const invalidJsonPayload = "not valid json {{{"; @@ -191,11 +229,12 @@ describe("update-letter-queue Lambda", () => { describe("Metrics", () => { it("emits success metrics when all letters are processed successfully", async () => { const handler = createHandler(mockedDeps); - const newLetter1 = generateLetter("PENDING", "1"); + const oldLetter1 = generateLetter("PENDING", "1"); + const newLetter1 = generateLetter("ACCEPTED", "1"); const newLetter2 = generateLetter("PENDING", "2"); const testData = generateKinesisEvent([ - generateInsertRecord(newLetter1), + generateModifyRecord(oldLetter1, newLetter1), generateInsertRecord(newLetter2), ]); await handler(testData, mockDeep(), jest.fn()); @@ -204,7 +243,7 @@ describe("update-letter-queue Lambda", () => { assertFailureMetricLogged(0); }); - it("emits failure metrics when a letter fails to process", async () => { + it("emits failure metrics when a letter fails to be inserted", async () => { const handler = createHandler(mockedDeps); const newLetter1 = generateLetter("PENDING", "1"); const newLetter2 = generateLetter("PENDING", "2"); @@ -222,10 +261,31 @@ describe("update-letter-queue Lambda", () => { assertFailureMetricLogged(1); }); - it("does not count a reprocessed event as a success or failure", async () => { + it("emits failure metrics when a letter fails to be deleted", async () => { + const handler = createHandler(mockedDeps); + const oldLetter1 = generateLetter("PENDING", "1"); + const oldLetter2 = generateLetter("PENDING", "2"); + const newLetter1 = generateLetter("ACCEPTED", "1"); + const newLetter2 = generateLetter("ACCEPTED", "2"); + (mockedDeps.letterQueueRepository.deleteLetter as jest.Mock) + .mockResolvedValueOnce({}) + .mockRejectedValueOnce(new Error("DynamoDB error")); + + const testData = generateKinesisEvent([ + generateModifyRecord(oldLetter1, newLetter1), + generateModifyRecord(oldLetter2, newLetter2), + ]); + await handler(testData, mockDeep(), jest.fn()); + + 
assertSuccessMetricLogged(1); + assertFailureMetricLogged(1); + }); + + it("does not count a replayed insert as a success or failure", async () => { const handler = createHandler(mockedDeps); const newLetter1 = generateLetter("PENDING", "1"); const newLetter2 = generateLetter("PENDING", "2"); + (mockedDeps.letterQueueRepository.putLetter as jest.Mock) .mockRejectedValueOnce(new LetterAlreadyExistsError("supplier1", "1")) .mockResolvedValueOnce({}); @@ -240,6 +300,26 @@ describe("update-letter-queue Lambda", () => { assertFailureMetricLogged(0); }); + it("does not count a replayed delete as a success or failure", async () => { + const handler = createHandler(mockedDeps); + const oldLetter1 = generateLetter("PENDING", "1"); + const oldLetter2 = generateLetter("PENDING", "2"); + const newLetter1 = generateLetter("ACCEPTED", "1"); + const newLetter2 = generateLetter("ACCEPTED", "2"); + (mockedDeps.letterQueueRepository.deleteLetter as jest.Mock) + .mockRejectedValueOnce(new LetterDoesNotExistError("supplier1", "1")) + .mockResolvedValueOnce({}); + + const testData = generateKinesisEvent([ + generateModifyRecord(oldLetter1, newLetter1), + generateModifyRecord(oldLetter2, newLetter2), + ]); + await handler(testData, mockDeep(), jest.fn()); + + assertSuccessMetricLogged(1); + assertFailureMetricLogged(0); + }); + it("emits zero success metrics when no pending letters are in the batch", async () => { const handler = createHandler(mockedDeps); const newLetter = generateLetter("PRINTED"); diff --git a/lambdas/update-letter-queue/src/update-letter-queue.ts b/lambdas/update-letter-queue/src/update-letter-queue.ts index 5e124624..392336fb 100644 --- a/lambdas/update-letter-queue/src/update-letter-queue.ts +++ b/lambdas/update-letter-queue/src/update-letter-queue.ts @@ -11,6 +11,7 @@ import { InsertPendingLetter, Letter, LetterAlreadyExistsError, + LetterDoesNotExistError, LetterSchema, } from "@internal/datastore"; import { Deps } from "./deps"; @@ -28,49 +29,89 @@ export 
default function createHandler(deps: Deps): Handler { for (const record of streamEvent.Records) { const ddbRecord = extractPayload(record, deps); - if (isNewPendingLetter(ddbRecord)) { - const letter = extractNewLetter(ddbRecord); - const pendingLetter = mapLetterToPendingLetter(letter); - - try { - deps.logger.info({ - description: "Persisting pending letter", - pendingLetter, - }); - await deps.letterQueueRepository.putLetter(pendingLetter); - successCount += 1; - } catch (error) { - if (error instanceof LetterAlreadyExistsError) { - deps.logger.warn({ - description: "Letter already exists", - supplierId: pendingLetter.supplierId, - letterId: pendingLetter.letterId, - }); - } else { - deps.logger.error({ - description: "Error persisting pending letter", - error, - pendingLetter, - }); - recordProcessing(deps, successCount, 1); - // If we get a failure, return immediately without processing the remaining records. Since we are - // working with a Kinesis stream, AWS will retry from the point of failure and no records will be lost. - // See https://docs.aws.amazon.com/lambda/latest/dg/example_serverless_Kinesis_Lambda_batch_item_failures_section.html - return { - batchItemFailures: [ - { itemIdentifier: record.kinesis.sequenceNumber }, - ], - }; - } + try { + if (isNewPendingLetter(ddbRecord)) { + const added = await addPendingLetterToQueue(ddbRecord, deps); + successCount += added ? 1 : 0; + } else if (isNoLongerPending(ddbRecord)) { + const deleted = await deletePendingLetterFromQueue(ddbRecord, deps); + successCount += deleted ? 1 : 0; } + } catch (error) { + deps.logger.error({ + description: "Error processing ddbRecord", + error, + ddbRecord, + }); + recordProcessing(deps, successCount, 1); + // If we get a failure, return immediately without processing the remaining records. Since we are + // working with a Kinesis stream, AWS will retry from the point of failure and no records will be lost. 
+ // See https://docs.aws.amazon.com/lambda/latest/dg/example_serverless_Kinesis_Lambda_batch_item_failures_section.html + return { + batchItemFailures: [ + { itemIdentifier: record.kinesis.sequenceNumber }, + ], + }; } } - recordProcessing(deps, successCount, 0); return { batchItemFailures: [] }; }; } +async function addPendingLetterToQueue( + ddbRecord: DynamoDBRecord, + deps: Deps, +): Promise<boolean> { + const letter = extractNewLetter(ddbRecord); + const pendingLetter = mapLetterToPendingLetter(letter); + + try { + deps.logger.info({ + description: "Persisting pending letter", + pendingLetter, + }); + await deps.letterQueueRepository.putLetter(pendingLetter); + return true; + } catch (error) { + if (error instanceof LetterAlreadyExistsError) { + deps.logger.warn({ + description: "Letter already exists", + supplierId: pendingLetter.supplierId, + letterId: pendingLetter.letterId, + }); + return false; + } + throw error; + } +} + +async function deletePendingLetterFromQueue( + ddbRecord: DynamoDBRecord, + deps: Deps, +): Promise<boolean> { + const letter = extractNewLetter(ddbRecord); + try { + deps.logger.info({ + description: "Deleting pending letter", + supplierId: letter.supplierId, + letterId: letter.id, + }); + await deps.letterQueueRepository.deleteLetter(letter.supplierId, letter.id); + return true; + } catch (error) { + if (error instanceof LetterDoesNotExistError) { + deps.logger.warn({ + description: "Letter does not exist", + supplierId: letter.supplierId, + letterId: letter.id, + }); + return false; + } + throw error; + } +} + function recordProcessing( deps: Deps, successCount: number, @@ -95,6 +136,15 @@ function isNewPendingLetter(record: DynamoDBRecord): boolean { return isInsert && isPending; } +function isNoLongerPending(record: DynamoDBRecord): boolean { + const isUpdate = record.eventName === "MODIFY"; + const oldImage = record.dynamodb?.OldImage; + const newImage = record.dynamodb?.NewImage; + const noLongerPending = + oldImage?.status?.S === "PENDING" && 
newImage?.status?.S !== "PENDING"; + return isUpdate && noLongerPending; +} + function extractPayload( record: KinesisStreamRecord, deps: Deps, diff --git a/package-lock.json b/package-lock.json index 84cfc4ff..d42cb9c5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14565,21 +14565,24 @@ "license": "BSD-3-Clause" }, "node_modules/fast-xml-builder": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.0.0.tgz", - "integrity": "sha512-fpZuDogrAgnyt9oDDz+5DBz0zgPdPZz6D4IR7iESxRXElrlGTRkHJ9eEt+SACRJwT0FNFrt71DFQIUFBJfX/uQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz", + "integrity": "sha512-f2jhpN4Eccy0/Uz9csxh3Nu6q4ErKxf0XIsasomfOihuSUa3/xw6w8dnOtCDgEItQFJG8KyXPzQXzcODDrrbOg==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/NaturalIntelligence" } ], - "license": "MIT" + "license": "MIT", + "dependencies": { + "path-expression-matcher": "^1.1.3" + } }, "node_modules/fast-xml-parser": { - "version": "5.4.2", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.4.2.tgz", - "integrity": "sha512-pw/6pIl4k0CSpElPEJhDppLzaixDEuWui2CUQQBH/ECDf7+y6YwA4Gf7Tyb0Rfe4DIMuZipYj4AEL0nACKglvQ==", + "version": "5.5.6", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.5.6.tgz", + "integrity": "sha512-3+fdZyBRVg29n4rXP0joHthhcHdPUHaIC16cuyyd1iLsuaO6Vea36MPrxgAzbZna8lhvZeRL8Bc9GP56/J9xEw==", "funding": [ { "type": "github", @@ -14588,7 +14591,8 @@ ], "license": "MIT", "dependencies": { - "fast-xml-builder": "^1.0.0", + "fast-xml-builder": "^1.1.4", + "path-expression-matcher": "^1.1.3", "strnum": "^2.1.2" }, "bin": { @@ -14772,9 +14776,9 @@ } }, "node_modules/flatted": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.1.tgz", - "integrity": "sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==", + 
"version": "3.4.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz", + "integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==", "dev": true, "license": "ISC" }, @@ -19555,6 +19559,21 @@ "node": ">=8" } }, + "node_modules/path-expression-matcher": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/path-expression-matcher/-/path-expression-matcher-1.1.3.tgz", + "integrity": "sha512-qdVgY8KXmVdJZRSS1JdEPOKPdTiEK/pi0RkcT2sw1RhXxohdujUlJFPuS1TSkevZ9vzd3ZlL7ULl1MHGTApKzQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", diff --git a/package.json b/package.json index db5a21ca..77ea9d07 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "name": "nhs-notify-supplier-api", "overrides": { "axios": "^1.13.5", - "fast-xml-parser": "^5.3.6", + "fast-xml-parser": "^5.5.6", "@isaacs/brace-expansion": "^5.0.1", "flatted": "^3.4.0", "undici": "^7.24.0",