diff --git a/.github/workflows/automated-pr-validator.yml b/.github/workflows/automated-pr-validator.yml index ba656d5869..a26b69bcb0 100644 --- a/.github/workflows/automated-pr-validator.yml +++ b/.github/workflows/automated-pr-validator.yml @@ -7,25 +7,6 @@ on: types: [opened, edited, synchronize] jobs: - changes: - runs-on: ubuntu-latest - permissions: - pull-requests: read - outputs: - frontend: ${{ steps.filter.outputs.frontend }} - backend: ${{ steps.filter.outputs.backend }} - steps: - - uses: dorny/paths-filter@v3 - id: filter - with: - filters: | - frontend: - - 'app/src/**' - backend: - - '*.py' - - 'lambdas/*.py' - - 'lambdas/**/*.py' - checklist_validator: runs-on: ubuntu-latest permissions: @@ -160,82 +141,3 @@ jobs: BRANCH_NAME=${{ github.event.repository.default_branch }} chmod +x scripts/markdown-validator.sh scripts/markdown-validator.sh - - react_lint_and_build: - name: React Lint and Build - runs-on: ubuntu-latest - permissions: - contents: read - needs: changes - if: needs.changes.outputs.frontend == 'true' - steps: - - name: Checkout - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Install packages - id: install - run: | - make install - - - name: Run Lint - id: lint - working-directory: app - run: | - npm run lint - - - name: Run Build - id: build - working-directory: app - if: always() - run: | - npm run build - - python_lint: - name: Python Lint - runs-on: ubuntu-latest - permissions: - contents: read - needs: changes - if: needs.changes.outputs.backend == 'true' - steps: - - name: Checkout - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Set up Python 3.11 - uses: actions/setup-python@v6 - with: - python-version: 3.11 - - - name: Setup env - run: | - make env - - - name: Get changed files - id: changed-files - run: | - git remote set-branches origin main && git fetch --depth 1 origin main && git branch main origin/main - echo "CHANGED_FILES=$(git diff main --name-status | grep -E '^[^D].*\.py$' | cut -f2 | tr 
'\n' ' ')" >> $GITHUB_OUTPUT - - - name: Run black - id: black - run: | - if [ -z "${{ steps.changed-files.outputs.CHANGED_FILES }}" ]; then echo "No changed Python files to format"; exit 0; fi; \ - ./lambdas/venv/bin/python3 -m black --check --diff --color ${{ steps.changed-files.outputs.CHANGED_FILES }} - - - name: Run ruff - id: ruff - if: always() - run: | - if [ -z "${{ steps.changed-files.outputs.CHANGED_FILES }}" ]; then echo "No changed Python files to lint"; exit 0; fi; \ - ./lambdas/venv/bin/ruff check ${{ steps.changed-files.outputs.CHANGED_FILES }} - - - name: Run isort with black - id: isort - if: always() - run: | - if [ -z "${{ steps.changed-files.outputs.CHANGED_FILES }}" ]; then echo "No changed Python files to sort imports"; exit 0; fi; \ - ./lambdas/venv/bin/python3 -m isort --profile black --check-only ${{ steps.changed-files.outputs.CHANGED_FILES }} \ No newline at end of file diff --git a/.github/workflows/base-deploy-ui.yml b/.github/workflows/base-deploy-ui.yml index 6cd85e3864..fecd8dc01f 100644 --- a/.github/workflows/base-deploy-ui.yml +++ b/.github/workflows/base-deploy-ui.yml @@ -60,6 +60,12 @@ jobs: echo "::add-mask::$container_port" echo "CONTAINER_PORT=$container_port" >> $GITHUB_ENV + - name: Get CloudFront domain name + id: cloudfront-domain-name + run: | + cloudfront_domain_name=$(aws cloudfront list-distributions --query "DistributionList.Items[?starts_with(Origins.Items[0].DomainName, '${{ inputs.sandbox }}')].DomainName" --output text) + echo "Cloudfront domain name found for environment: $cloudfront_domain_name" + echo "CLOUDFRONT_DOMAIN_NAME=$cloudfront_domain_name" >> $GITHUB_ENV - name: Login to Amazon ECR id: login-ecr @@ -106,6 +112,7 @@ jobs: run: | docker build \ --build-arg="CONTAINER_PORT=$CONTAINER_PORT" \ + --build-arg="CLOUDFRONT_DOMAIN_NAME=$CLOUDFRONT_DOMAIN_NAME" \ --build-arg="BUILD_ENV=$BUILD_ENV" \ -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG_SHA . 
diff --git a/.github/workflows/base-lambda-layer-reusable-publish-all.yml b/.github/workflows/base-lambda-layer-reusable-publish-all.yml index a7e49b91b9..be97edb37f 100644 --- a/.github/workflows/base-lambda-layer-reusable-publish-all.yml +++ b/.github/workflows/base-lambda-layer-reusable-publish-all.yml @@ -87,15 +87,3 @@ jobs: lambda_layer_name: alerting_lambda_layer secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} - - deploy_files_lambda_layer: - name: Deploy files_lambda_layer - uses: ./.github/workflows/base-lambda-layer-reusable-publish.yml - with: - environment: ${{ inputs.environment}} - python_version: ${{ inputs.python_version }} - build_branch: ${{ inputs.build_branch }} - sandbox: ${{ inputs.sandbox }} - lambda_layer_name: files_lambda_layer - secrets: - AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} \ No newline at end of file diff --git a/.github/workflows/base-lambdas-reusable-deploy-all.yml b/.github/workflows/base-lambdas-reusable-deploy-all.yml index 32d347ae45..7ae4922e09 100644 --- a/.github/workflows/base-lambdas-reusable-deploy-all.yml +++ b/.github/workflows/base-lambdas-reusable-deploy-all.yml @@ -695,7 +695,7 @@ jobs: sandbox: ${{ inputs.sandbox }} lambda_handler_name: document_reference_virus_scan_handler lambda_aws_name: DocumentReferenceVirusScanCheck - lambda_layer_names: "core_lambda_layer,files_lambda_layer" + lambda_layer_names: "core_lambda_layer" secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} @@ -838,3 +838,17 @@ jobs: lambda_layer_names: "core_lambda_layer" secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + deploy_report_s3_content_lambda: + name: Deploy Report S3 Content Lambda + uses: ./.github/workflows/base-lambdas-reusable-deploy.yml + with: + environment: ${{ inputs.environment }} + python_version: ${{ inputs.python_version }} + build_branch: ${{ inputs.build_branch }} + sandbox: ${{ inputs.sandbox }} + lambda_handler_name: report_s3_content_handler + lambda_aws_name: ReportS3Content + 
lambda_layer_names: "core_lambda_layer" + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} diff --git a/.github/workflows/base-lambdas-reusable-test.yml b/.github/workflows/base-lambdas-reusable-test.yml index abe270808d..ca5f980f64 100644 --- a/.github/workflows/base-lambdas-reusable-test.yml +++ b/.github/workflows/base-lambdas-reusable-test.yml @@ -53,6 +53,10 @@ jobs: source ./lambdas/venv/bin/activate echo PATH=$PATH >> $GITHUB_ENV + - name: Format Code and ruff check + run: | + make format + - name: Test with pytest run: | make test-unit diff --git a/.lintstagedrc b/.lintstagedrc index 0cfe966e6a..9567680eb1 100644 --- a/.lintstagedrc +++ b/.lintstagedrc @@ -7,8 +7,8 @@ "./app/node_modules/prettier/bin/prettier.cjs --write" ], "*.py": [ - "./lambdas/venv/bin/ruff check --fix", - "./lambdas/venv/bin/python3 -m black", "./lambdas/venv/bin/python3 -m isort --profile black", + "./lambdas/venv/bin/python3 -m black", + "./lambdas/venv/bin/ruff check ./lambdas" ] } \ No newline at end of file diff --git a/Makefile b/Makefile index aedadecb40..25017ef8d4 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,6 @@ GITHUB_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_github_runner.txt TEST_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_test.txt CORE_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_core_lambda_layer.txt DATA_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_data_lambda_layer.txt -FILES_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_files_lambda_layer.txt REPORTS_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_reports_lambda_layer.txt ALERTING_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_alerting_lambda_layer.txt EDGE_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_edge_lambda.txt @@ -18,8 +17,6 @@ LAMBDA_LAYER_PYTHON_PATH=python/lib/python$(PYTHON_VERSION)/site-packages ZIP_BASE_PATH = ./$(LAMBDAS_BUILD_PATH)/$(lambda_name)/tmp ZIP_COMMON_FILES = lambdas/utils lambdas/models 
lambdas/services lambdas/repositories lambdas/enums lambdas/scripts CONTAINER ?= false -VENV_PATH_PREFIX := $(if $(filter true,$(CONTAINER)),./.venv,./lambdas/venv) -FORMAT_ALL ?= false .PHONY: \ install clean help format list requirements ruff build-and-deploy-sandbox \ @@ -84,22 +81,21 @@ clean-test: find . -name '.cache' -exec rm -fr {} + format: - @if [ $(FORMAT_ALL) = true ]; then \ - CHANGED_FILES=''; \ - else \ - CHANGED_FILES=$$(git diff main --name-status | grep -E '^[^D].*\.py$$' | cut -f2 | xargs); \ - echo $$CHANGED_FILES; \ - if [ -z "$$CHANGED_FILES" ]; then echo "No changed files to format"; exit 0; fi; \ - fi; \ - $(VENV_PATH_PREFIX)/bin/ruff check $$CHANGED_FILES --fix; \ - $(VENV_PATH_PREFIX)/bin/python3 -m black $$CHANGED_FILES; \ - $(VENV_PATH_PREFIX)/bin/python3 -m isort --profile black $$CHANGED_FILES +ifeq ($(CONTAINER), true) + ./.venv/bin/python3 -m isort --profile black lambdas/ + ./.venv/bin/python3 -m black lambdas/ + ./.venv/bin/ruff check lambdas/ --fix +else + ./lambdas/venv/bin/python3 -m isort --profile black lambdas/ + ./lambdas/venv/bin/python3 -m black lambdas/ + ./lambdas/venv/bin/ruff check lambdas/ --fix +endif + sort-requirements: sort -o $(TEST_REQUIREMENTS) $(TEST_REQUIREMENTS) sort -o $(CORE_REQUIREMENTS) $(CORE_REQUIREMENTS) sort -o $(DATA_REQUIREMENTS) $(DATA_REQUIREMENTS) - sort -o $(FILES_REQUIREMENTS) $(FILES_REQUIREMENTS) sort -o $(REPORTS_REQUIREMENTS) $(REPORTS_REQUIREMENTS) sort -o $(ALERTING_REQUIREMENTS) $(ALERTING_REQUIREMENTS) @@ -108,7 +104,6 @@ check-packages: ./lambdas/venv/bin/pip-audit -r $(TEST_REQUIREMENTS) ./lambdas/venv/bin/pip-audit -r $(CORE_REQUIREMENTS) ./lambdas/venv/bin/pip-audit -r $(DATA_REQUIREMENTS) - ./lambdas/venv/bin/pip-audit -r $(FILES_REQUIREMENTS) ./lambdas/venv/bin/pip-audit -r $(REPORTS_REQUIREMENTS) ./lambdas/venv/bin/pip-audit -r $(ALERTING_REQUIREMENTS) @@ -209,7 +204,6 @@ env: @./lambdas/venv/bin/pip3 install -r $(TEST_REQUIREMENTS) --no-cache-dir @./lambdas/venv/bin/pip3 
install -r $(CORE_REQUIREMENTS) --no-cache-dir @./lambdas/venv/bin/pip3 install -r $(DATA_REQUIREMENTS) --no-cache-dir - @./lambdas/venv/bin/pip3 install -r $(FILES_REQUIREMENTS) --no-cache-dir @./lambdas/venv/bin/pip3 install -r $(REPORTS_REQUIREMENTS) --no-cache-dir @./lambdas/venv/bin/pip3 install -r $(ALERTING_REQUIREMENTS) --no-cache-dir @echo " " @@ -309,25 +303,13 @@ docker-down: docker-compose -f ./app/docker-compose.yml down cypress-open: -ifeq ($(CONTAINER), true) xvfb-run -- npm --prefix ./app run cypress -else - npm --prefix ./app run cypress -endif cypress-run: -ifeq ($(CONTAINER), true) xvfb-run -- npm --prefix ./app run cypress-run -else - npm --prefix ./app run cypress-run -endif cypress-report: -ifeq ($(CONTAINER), true) xvfb-run -- npm --prefix ./app run cypress-report -else - npm --prefix ./app run cypress-report -endif install-cypress: npm install --save-dev cypress diff --git a/app/.eslintrc b/app/.eslintrc index 0853cd6b61..ce1616713e 100644 --- a/app/.eslintrc +++ b/app/.eslintrc @@ -84,8 +84,7 @@ "import/no-extraneous-dependencies": [ "error", { "devDependencies": true } - ], - "@typescript-eslint/explicit-function-return-type": "off" + ] } }, { diff --git a/app/Dockerfile b/app/Dockerfile index 95e520af65..444eafe554 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -22,6 +22,7 @@ RUN npm run build -- --mode $BUILD_ENV FROM nginx:latest RUN apt update && apt list --upgradable && apt upgrade -y && rm -rf /var/lib/apt/lists/* ARG CONTAINER_PORT +ARG CLOUDFRONT_DOMAIN_NAME WORKDIR /usr/share/nginx/html RUN rm -rf ./* COPY --from=builder /app/dist . 
@@ -30,7 +31,7 @@ WORKDIR /etc/nginx COPY --from=builder ./app/docker/nginx.conf ./nginx.conf.template RUN sed -i "s/\$CONTAINER_PORT/${CONTAINER_PORT}/g" ./nginx.conf.template -RUN cp ./nginx.conf.template ./nginx.conf +RUN sed "s/\$CLOUDFRONT_DOMAIN_NAME/${CLOUDFRONT_DOMAIN_NAME}/g" ./nginx.conf.template > ./nginx.conf EXPOSE ${CONTAINER_PORT} ENTRYPOINT ["nginx", "-g", "daemon off;"] diff --git a/app/cypress/e2e/0-ndr-core-tests/gp_user_workflows/download_lloyd_george_workflow.cy.js b/app/cypress/e2e/0-ndr-core-tests/gp_user_workflows/download_lloyd_george_workflow.cy.js index 87ad495249..11e4411005 100644 --- a/app/cypress/e2e/0-ndr-core-tests/gp_user_workflows/download_lloyd_george_workflow.cy.js +++ b/app/cypress/e2e/0-ndr-core-tests/gp_user_workflows/download_lloyd_george_workflow.cy.js @@ -122,10 +122,7 @@ describe('GP Workflow: View Lloyd George record', () => { cy.intercept('GET', '/SearchDocumentReferences*', { statusCode: 200, - body: { - references: testFiles, - nextPageToken: 'abc', - }, + body: testFiles, }).as('searchDocumentReferences'); cy.get('#verify-submit').click(); @@ -145,10 +142,7 @@ describe('GP Workflow: View Lloyd George record', () => { cy.intercept('GET', '/SearchDocumentReferences*', { statusCode: 200, - body: { - references: testFiles, - nextPageToken: 'abc', - }, + body: testFiles, }).as('searchDocumentReferences'); setUpDownloadManifestIntercepts(); @@ -265,10 +259,7 @@ describe('GP Workflow: View Lloyd George record', () => { cy.intercept('GET', '/SearchDocumentReferences*', { statusCode: 200, - body: { - references: singleTestFile, - nextPageToken: 'abc', - }, + body: singleTestFile, }).as('searchDocumentReferences'); setUpDownloadManifestIntercepts(); @@ -360,7 +351,7 @@ describe('GP Workflow: View Lloyd George record', () => { statusCode: 200, body: { jobStatus: 'Pending' }, }); - if (pendingCounts >= 10) { + if (pendingCounts >= 3) { req.alias = 'documentManifestThirdTimePending'; } }); @@ -372,7 +363,7 @@ describe('GP 
Workflow: View Lloyd George record', () => { cy.getByTestId('toggle-selection-btn').click(); cy.getByTestId('download-selected-files-btn').click(); - cy.wait('@documentManifestThirdTimePending', { timeout: 20000 }); + cy.wait('@documentManifestThirdTimePending'); cy.title().should('have.string', 'Service error'); cy.url().should('have.string', '/server-error?encodedError='); diff --git a/app/cypress/e2e/0-ndr-core-tests/pcse_user_workflows/download_patient_files_workflow.cy.js b/app/cypress/e2e/0-ndr-core-tests/pcse_user_workflows/download_patient_files_workflow.cy.js index eeca32ea22..bc7337aa09 100644 --- a/app/cypress/e2e/0-ndr-core-tests/pcse_user_workflows/download_patient_files_workflow.cy.js +++ b/app/cypress/e2e/0-ndr-core-tests/pcse_user_workflows/download_patient_files_workflow.cy.js @@ -40,10 +40,7 @@ describe('PCSE Workflow: Access and download found files', () => { cy.intercept('GET', '/SearchDocumentReferences*', { statusCode: 200, - body: { - references: searchDocumentReferencesResponse, - nextPageToken: 'abc', - }, + body: searchDocumentReferencesResponse, }).as('documentSearch'); cy.get('#verify-submit').click(); diff --git a/app/docker/nginx.conf b/app/docker/nginx.conf index afc01494a6..84f2aef3a9 100644 --- a/app/docker/nginx.conf +++ b/app/docker/nginx.conf @@ -18,7 +18,7 @@ http { add_header Cache-Control "no-store, no-cache, must-revalidate" always; add_header Pragma "no-cache" always; add_header Strict-Transport-Security "max-age=63072000" always; - add_header Content-Security-Policy "frame-ancestors 'self'; img-src 'self' blob:; script-src 'self'; style-src 'self' 'unsafe-inline'; object-src 'self' blob:;" always; + add_header Content-Security-Policy "frame-ancestors 'self'; img-src 'self' blob:; script-src 'self'; style-src 'self' 'unsafe-inline'; object-src 'self' blob: https://$CLOUDFRONT_DOMAIN_NAME;" always; add_header Referrer-Policy "no-referrer" always; add_header Permissions-Policy "accelerometer=(self), autoplay=(self), 
camera=(self), cross-origin-isolated=(self), display-capture=(self), encrypted-media=(self), fullscreen=(self), geolocation=(self), gyroscope=(self), keyboard-map=(self), magnetometer=(self), microphone=(self), midi=(self), payment=(self), picture-in-picture=(self), publickey-credentials-get=(self), screen-wake-lock=(self), sync-xhr=(self), usb=(self), xr-spatial-tracking=(self), clipboard-read=(self), clipboard-write=(self), gamepad=(self), hid=(self), idle-detection=(self), interest-cohort=(self), serial=(self), unload=(self) " always; add_header X-Content-Type-Options "nosniff" always; diff --git a/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.test.tsx b/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.test.tsx index a5e6d4ded8..fdb159e703 100644 --- a/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.test.tsx @@ -1,18 +1,11 @@ import { render, screen, waitFor } from '@testing-library/react'; import userEvent from '@testing-library/user-event'; -import { afterEach, beforeEach, describe, expect, it, Mock, vi } from 'vitest'; -import { runAxeTest } from '../../../../helpers/test/axeTestHelper'; -import * as documentTypeModule from '../../../../helpers/utils/documentType'; -import { ReviewDetails } from '../../../../types/generic/reviews'; +import { afterEach, beforeEach, describe, expect, it, vi, Mock } from 'vitest'; import ReviewDetailsAddMoreChoiceStage from './ReviewDetailsAddMoreChoiceStage'; -import { DOCUMENT_TYPE } from '../../../../helpers/utils/documentType'; +import { runAxeTest } from '../../../../helpers/test/axeTestHelper'; const mockNavigate = vi.fn(); const mockReviewId = 'test-review-123'; -const testData = { - yesText: 'Yes, I have more scanned paper notes to add for this patient', - noText: 
"No, I don't have anymore scanned paper notes to add for this patient", -}; vi.mock('react-router-dom', async (): Promise => { const actual = await vi.importActual('react-router-dom'); @@ -23,23 +16,10 @@ vi.mock('react-router-dom', async (): Promise => { }; }); -describe('ReviewDetailsAddMoreChoiceStage', () => { - const mockReviewData = { - snomedCode: DOCUMENT_TYPE.LLOYD_GEORGE, - } as ReviewDetails; - +describe('ReviewDetailsAddMoreChoicePage', () => { beforeEach(() => { vi.clearAllMocks(); import.meta.env.VITE_ENVIRONMENT = 'vitest'; - const mockGetConfig = vi.spyOn(documentTypeModule, 'getConfigForDocType'); - mockGetConfig.mockReturnValue({ - ...documentTypeModule.getConfigForDocType(DOCUMENT_TYPE.LLOYD_GEORGE), - multifileZipped: true, - content: { - addMoreFilesRadioNoText: testData.noText, - addMoreFilesRadioYesText: testData.yesText, - }, - } as any); }); afterEach(() => { @@ -48,29 +28,29 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { describe('Rendering', () => { it('renders the page heading correctly', () => { - render(); + render(); expect( screen.getByRole('heading', { - name: "Do you want to add more files to this patient's record?", + name: 'Do you want to add more files to this patients record?', }), ).toBeInTheDocument(); }); it('renders back button with correct text', () => { - render(); + render(); expect(screen.getByText('Go back')).toBeInTheDocument(); }); it('renders both radio button options', () => { - render(); + render(); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); expect(yesRadio).toBeInTheDocument(); @@ -80,13 +60,13 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('renders continue button', () => { - render(); + render(); expect(screen.getByRole('button', { 
name: 'Continue' })).toBeInTheDocument(); }); it('does not show error message initially', () => { - render(); + render(); expect(screen.queryByText('Select an option')).not.toBeInTheDocument(); }); @@ -94,7 +74,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { describe('Error Handling', () => { it('displays error message when continue is clicked without selection', async () => { - render(); + render(); const continueButton = screen.getByRole('button', { name: 'Continue' }); await userEvent.click(continueButton); @@ -105,7 +85,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('does not navigate when no selection is made', async () => { - render(); + render(); const continueButton = screen.getByRole('button', { name: 'Continue' }); await userEvent.click(continueButton); @@ -117,7 +97,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('clears error message when yes radio button is selected', async () => { - render(); + render(); const continueButton = screen.getByRole('button', { name: 'Continue' }); await userEvent.click(continueButton); @@ -127,7 +107,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); await userEvent.click(yesRadio); @@ -137,7 +117,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('clears error message when no radio button is selected', async () => { - render(); + render(); const continueButton = screen.getByRole('button', { name: 'Continue' }); await userEvent.click(continueButton); @@ -147,7 +127,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); await userEvent.click(noRadio); @@ -159,10 +139,10 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { describe('User Interactions', 
() => { it('allows selecting the yes radio button', async () => { - render(); + render(); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); await userEvent.click(yesRadio); @@ -172,10 +152,10 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('allows selecting the no radio button', async () => { - render(); + render(); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); await userEvent.click(noRadio); @@ -185,13 +165,13 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('allows changing selection from yes to no', async () => { - render(); + render(); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); await userEvent.click(yesRadio); @@ -207,7 +187,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('prevents default form submission', async () => { - render(); + render(); const form = screen.getByRole('button', { name: 'Continue' }).closest('form'); const submitHandler = vi.fn((e: Event) => e.preventDefault()); @@ -222,10 +202,10 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { describe('Navigation', () => { it('navigates to add more files when yes is selected', async () => { - render(); + render(); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); await userEvent.click(yesRadio); @@ -249,7 +229,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { render(); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper 
records to add for this patient/i, }); await userEvent.click(noRadio); @@ -273,7 +253,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { render(); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); await userEvent.click(noRadio); @@ -297,7 +277,7 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { render(); const noRadio = screen.getByRole('radio', { - name: testData.noText, + name: /No, I don't have anymore scanned paper records to add for this patient/i, }); await userEvent.click(noRadio); @@ -315,18 +295,14 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { describe('Accessibility', () => { it('passes axe accessibility tests in initial state', async () => { - const { container } = render( - , - ); + const { container } = render(); const results = await runAxeTest(container); expect(results).toHaveNoViolations(); }); it('passes axe accessibility tests in error state', async () => { - const { container } = render( - , - ); + const { container } = render(); const continueButton = screen.getByRole('button', { name: 'Continue' }); await userEvent.click(continueButton); @@ -340,12 +316,10 @@ describe('ReviewDetailsAddMoreChoiceStage', () => { }); it('passes axe accessibility tests with radio button selected', async () => { - const { container } = render( - , - ); + const { container } = render(); const yesRadio = screen.getByRole('radio', { - name: testData.yesText, + name: /Yes I have more scanned paper records to add for this patient/i, }); await userEvent.click(yesRadio); diff --git a/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.tsx b/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.tsx index 28925082b7..ebc1bd609e 100644 --- a/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.tsx +++ 
b/app/src/components/blocks/_admin/reviewDetailsAddMoreChoiceStage/ReviewDetailsAddMoreChoiceStage.tsx @@ -4,7 +4,6 @@ import { useNavigate, useParams } from 'react-router-dom'; import { navigateUrlParam, routeChildren } from '../../../../types/generic/routes'; import BackButton from '../../../generic/backButton/BackButton'; import { ReviewDetails } from '../../../../types/generic/reviews'; -import { getConfigForDocType } from '../../../../helpers/utils/documentType'; type ReviewDetailsAddMoreChoicePageProps = { reviewData: ReviewDetails | null; @@ -20,13 +19,6 @@ const ReviewDetailsAddMoreChoiceStage: React.FC(); - if (!reviewData) { - navigate(routeChildren.ADMIN_REVIEW); - return <>; - } - - const reviewConfig = getConfigForDocType(reviewData?.snomedCode || ''); - const handleContinue = (): void => { if (!addMoreChoice || !reviewId) { setShowError(true); @@ -41,7 +33,7 @@ const ReviewDetailsAddMoreChoiceStage: React.FC 1 + reviewData!.files!.length > 1 ? routeChildren.ADMIN_REVIEW_UPLOAD_FILE_ORDER : routeChildren.ADMIN_REVIEW_UPLOAD, { reviewId }, @@ -62,7 +54,7 @@ const ReviewDetailsAddMoreChoiceStage: React.FC
- Do you want to add more files to this patient's record? + Do you want to add more files to this patients record? - {reviewConfig.content.addMoreFilesRadioYesText} + Yes I have more scanned paper records to add for this patient - {reviewConfig.content.addMoreFilesRadioNoText} + No, I don't have anymore scanned paper records to add for this + patient
diff --git a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.test.tsx b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.test.tsx index 07db8d1a50..d0376f8e98 100644 --- a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.test.tsx @@ -14,7 +14,6 @@ describe('ExistingRecordTable', () => { fileName: 'existing-file-1.pdf', id: 'file-id-1', created: '2024-01-01', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 1024, version: '1', @@ -25,7 +24,6 @@ describe('ExistingRecordTable', () => { fileName: 'existing-file-2.pdf', id: 'file-id-2', created: '2024-01-02', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 2048, version: '1', @@ -36,7 +34,6 @@ describe('ExistingRecordTable', () => { fileName: 'existing-file-3.pdf', id: 'file-id-3', created: '2024-01-03', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 3072, version: '1', @@ -240,7 +237,6 @@ describe('ExistingRecordTable', () => { fileName: 'file-with-special-chars_123.pdf', id: 'special-id', created: '2024-01-01', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 1024, version: '1', @@ -263,7 +259,6 @@ describe('ExistingRecordTable', () => { 'this-is-a-very-long-filename-that-might-cause-layout-issues-in-the-table.pdf', id: 'long-id', created: '2024-01-01', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 1024, version: '1', @@ -292,7 +287,6 @@ describe('ExistingRecordTable', () => { fileName: 'file-with-params.pdf', id: 'file-id-with-special-chars-abc123', created: '2024-01-01', - author: 'Y1234', virusScannerResult: 'Clean', fileSize: 1024, version: '1', diff --git a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.tsx b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.tsx index 86bd6fea06..84dadd7d70 100644 --- 
a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ExistingRecordTable.tsx @@ -14,7 +14,7 @@ const ExistingRecordTable = ({ return (

Existing files

- +
Filename diff --git a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.test.tsx b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.test.tsx index c5c1f4c1d3..af9ed6b839 100644 --- a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.test.tsx @@ -1,5 +1,6 @@ import { render, screen, waitFor } from '@testing-library/react'; import userEvent from '@testing-library/user-event'; +import { act } from 'react'; import { describe, expect, it, Mock, vi } from 'vitest'; import ReviewDetailsAssessmentStage from './ReviewDetailsAssessmentStage'; import { DOWNLOAD_STAGE } from '../../../../types/generic/downloadStage'; @@ -135,7 +136,6 @@ const createMockReviewData = ( created: '2023-12-01T10:00:00Z', virusScannerResult: 'Clean', id: 'existing-1', - author: 'Y1234', fileSize: 1024, version: '1', documentSnomedCodeType: snomedCode, @@ -175,14 +175,15 @@ const createMockUploadDocuments = (): ReviewUploadDocument[] => [ }, ]; -describe('ReviewDetailsAssessmentStage', () => { +describe('ReviewDetailsAssessmentPage', () => { beforeEach(() => { vi.clearAllMocks(); - mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); }); describe('Rendering', () => { it('displays spinner when reviewData is null', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { }); it('displays spinner only when uploadDocuments is null/undefined or reviewData is null', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + const { rerender } = render( { />, ); - expect(screen.getByText(/Review the new Scanned paper notes/i)).toBeInTheDocument(); + expect( + screen.getByText(/Review the new and existing Scanned paper notes/i), + ).toBeInTheDocument(); }); - it('renders page title for 
review the new scanned paper notes', () => { + it('renders page title for review with existing and new files', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { />, ); - expect(screen.getByText(/Review the new scanned paper notes/)).toBeInTheDocument(); + expect( + screen.getByText(/Review the new and existing Scanned paper notes/i), + ).toBeInTheDocument(); }); it('renders accept/reject radio buttons when only canBeDiscarded is true', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { uploadDocuments={createMockUploadDocuments()} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -261,7 +272,30 @@ describe('ReviewDetailsAssessmentStage', () => { expect(screen.getByRole('radio', { name: 'Reject record' })).toBeInTheDocument(); }); + it('renders add-all and choose-files radio buttons when no existing record', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + + render( + , + ); + + expect(screen.getByLabelText('Add all these files')).toBeInTheDocument(); + expect(screen.getByLabelText('Choose which files to add')).toBeInTheDocument(); + expect( + screen.queryByText(/I don't need these files, they are duplicates/), + ).not.toBeInTheDocument(); + }); + it('renders all radio options when has existing record in storage', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { name: /Add all files to the existing Scanned paper notes/i, }), ).toBeInTheDocument(); + expect( + screen.getByRole('radio', { name: /Choose which files to add to the existing/i }), + ).toBeInTheDocument(); expect( screen.getByRole('radio', { name: /I don't need these files, they are duplicates/i, @@ -286,6 +323,8 @@ describe('ReviewDetailsAssessmentStage', () => { }); it('displays existing files table when 
available', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { }); it('displays new files table', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { }); it('displays "all files" viewing message by default', () => { + mockUsePatientDetailsContext.mockReturnValue([null, mockSetPatientDetails]); + render( { uploadDocuments={createMockUploadDocuments()} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -374,7 +417,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = screen.getAllByRole('button', { name: /View/i }); - await user.click(viewButtons[1]); + await act(async () => { + await user.click(viewButtons[1]); + }); await waitFor(() => { expect(mockSetDownloadStage).toHaveBeenCalledWith(DOWNLOAD_STAGE.PENDING); @@ -399,7 +444,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = screen.getAllByRole('button', { name: /View/i }); - await user.click(viewButtons[1]); + await act(async () => { + await user.click(viewButtons[1]); + }); await waitFor(() => { expect(mockGetReviewById).toHaveBeenCalled(); @@ -424,7 +471,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = screen.getAllByRole('button', { name: /View/i }); - await user.click(viewButtons[1]); + await act(async () => { + await user.click(viewButtons[1]); + }); await waitFor(() => { expect( @@ -457,7 +506,9 @@ describe('ReviewDetailsAssessmentStage', () => { const existingFileViewButton = screen.getByTestId('existing-record-table'); const viewButton = existingFileViewButton.querySelector('button'); - await user.click(viewButton!); + await act(async () => { + await user.click(viewButton!); + }); await waitFor(() => { expect( @@ -522,7 +573,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = 
screen.getAllByRole('button', { name: /View duplicate.pdf/i }); - await user.click(viewButtons[0]); // Click first duplicate + await act(async () => { + await user.click(viewButtons[0]); // Click first duplicate + }); await waitFor(() => { expect(screen.getByText(/\(new files\)/)).toBeInTheDocument(); @@ -572,7 +625,6 @@ describe('ReviewDetailsAssessmentStage', () => { created: '2023-12-01T10:00:00Z', virusScannerResult: 'Clean', id: 'existing-1', - author: 'Y1234', fileSize: 1024, version: '1', documentSnomedCodeType: '16521000000101' as DOCUMENT_TYPE, @@ -596,7 +648,9 @@ describe('ReviewDetailsAssessmentStage', () => { const existingFileViewButton = screen.getByTestId('existing-record-table'); const viewButton = existingFileViewButton.querySelector('button'); - await user.click(viewButton!); + await act(async () => { + await user.click(viewButton!); + }); await waitFor(() => { expect(screen.getByText(/\(existing files\)/)).toBeInTheDocument(); @@ -636,6 +690,28 @@ describe('ReviewDetailsAssessmentStage', () => { expect(addAllRadio).toBeChecked(); }); + it('allows selecting choose-files option', async () => { + const user = userEvent.setup(); + + render( + , + ); + + const chooseFilesRadio = screen.getByLabelText( + /Choose which files to add to the existing/i, + ); + await user.click(chooseFilesRadio); + + expect(chooseFilesRadio).toBeChecked(); + }); + it('allows selecting duplicate option', async () => { const user = userEvent.setup(); @@ -673,7 +749,7 @@ describe('ReviewDetailsAssessmentStage', () => { uploadDocuments={createMockUploadDocuments()} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -698,7 +774,7 @@ describe('ReviewDetailsAssessmentStage', () => { uploadDocuments={createMockUploadDocuments()} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + 
hasExistingRecordInStorage={false} />, ); @@ -761,6 +837,34 @@ describe('ReviewDetailsAssessmentStage', () => { ); }); + it('navigates to choose which files when choose-files is selected', async () => { + const user = userEvent.setup(); + + render( + , + ); + + const chooseFilesRadio = screen.getByRole('radio', { + name: /Choose which files to add to the existing/i, + }); + await user.click(chooseFilesRadio); + + const continueButton = screen.getByRole('button', { name: 'Continue' }); + await user.click(continueButton); + + expect(mockedUseNavigate).toHaveBeenCalledWith( + '/admin/reviews/test-review-id.v1/files', + undefined, + ); + }); + it('navigates to no files choice when duplicate is selected', async () => { const user = userEvent.setup(); @@ -799,7 +903,7 @@ describe('ReviewDetailsAssessmentStage', () => { uploadDocuments={createMockUploadDocuments()} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -904,7 +1008,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = screen.getAllByRole('button', { name: /View/i }); - await user.click(viewButtons[1]); + await act(async () => { + await user.click(viewButtons[1]); + }); await waitFor(() => { expect(mockedUseNavigate).toHaveBeenCalledWith('/session-expired'); @@ -928,7 +1034,9 @@ describe('ReviewDetailsAssessmentStage', () => { ); const viewButtons = screen.getAllByRole('button', { name: /View/i }); - await user.click(viewButtons[1]); + await act(async () => { + await user.click(viewButtons[1]); + }); await waitFor(() => { expect(mockedUseNavigate).toHaveBeenCalledWith( @@ -975,7 +1083,7 @@ describe('ReviewDetailsAssessmentStage', () => { uploadDocuments={singleUploadDoc} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -1040,7 +1148,7 @@ 
describe('ReviewDetailsAssessmentStage', () => { uploadDocuments={multiUploadDocs} downloadStage={DOWNLOAD_STAGE.SUCCEEDED} setDownloadStage={mockSetDownloadStage} - hasExistingRecordInStorage={true} + hasExistingRecordInStorage={false} />, ); @@ -1075,47 +1183,4 @@ describe('ReviewDetailsAssessmentStage', () => { expect(screen.getByTestId('back-button')).toBeInTheDocument(); }); }); - - describe('Redirect behavior when no existing record in storage', () => { - it('redirects to add more choice page with replace option when hasExistingRecordInStorage is false', async () => { - vi.useFakeTimers(); - - render( - , - ); - - // Fast-forward timers to trigger the setTimeout - await vi.advanceTimersByTimeAsync(0); - - expect(mockedUseNavigate).toHaveBeenCalledWith( - '/admin/reviews/test-review-id.v1/add-more-choice', - { replace: true }, - ); - - vi.useRealTimers(); - }); - - it('renders empty fragment when redirecting', () => { - const { container } = render( - , - ); - - // Should render empty fragment (no content) - expect(container.firstChild).toBeNull(); - }); - }); }); diff --git a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.tsx b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.tsx index a0267f8ae9..340e70abbb 100644 --- a/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsAssessmentStage/ReviewDetailsAssessmentStage.tsx @@ -12,10 +12,7 @@ import Spinner from '../../../generic/spinner/Spinner'; import ExistingRecordTable from './ExistingRecordTable'; import useBaseAPIHeaders from '../../../../helpers/hooks/useBaseAPIHeaders'; import useBaseAPIUrl from '../../../../helpers/hooks/useBaseAPIUrl'; -import { - getFormattedDateFromString, - getFormattedDateTimeFromString, -} from '../../../../helpers/utils/formatDate'; +import { getFormattedDateFromString } from 
'../../../../helpers/utils/formatDate'; import { GetDocumentReviewDto, ReviewDetails, @@ -31,7 +28,6 @@ import DocumentUploadLloydGeorgePreview from '../../_documentUpload/documentUplo import { AxiosError } from 'axios'; import { errorToParams } from '../../../../helpers/utils/errorToParams'; import PatientSummary, { PatientInfo } from '../../../generic/patientSummary/PatientSummary'; -import { CreatedByText } from '../../../generic/createdBy/createdBy'; type FileAction = 'add-all' | 'choose-files' | 'duplicate' | 'accept' | 'reject' | ''; @@ -66,20 +62,10 @@ const ReviewDetailsAssessmentStage = ({ const baseUrl = useBaseAPIUrl(); const baseHeaders = useBaseAPIHeaders(); - if (!hasExistingRecordInStorage && reviewId !== undefined) { - setTimeout(() => { - navigateUrlParam(routeChildren.ADMIN_REVIEW_ADD_MORE_CHOICE, { reviewId }, navigate, { - replace: true, - }); - }, 0); - return <>; - } - const handleExistingFileView = async (filename: string, id: string): Promise => { if (!reviewData) { return; } - if (isLocal) { const file = reviewData.existingFiles?.find((f) => f.fileName === filename); if (!file) { @@ -223,7 +209,8 @@ const ReviewDetailsAssessmentStage = ({ if (reviewConfig.canBeUpdated === false && reviewConfig.canBeDiscarded) { pageTitle = 'Do you want to accept these records?'; } else if (reviewConfig.canBeUpdated && reviewConfig.canBeDiscarded) { - pageTitle = reviewConfig.content.reviewAssessmentPageTitle as string; + const andExisting = reviewData.existingFiles!.length > 0 ? 
' and existing ' : ' '; + pageTitle = `Review the new${andExisting}${reviewTypeLabel.toSentenceCase()}`; } else { pageTitle = `Review the ${reviewTypeLabel.toSentenceCase()}`; } @@ -270,6 +257,15 @@ const ReviewDetailsAssessmentStage = ({ > Add all files to the existing {reviewTypeLabel.toSentenceCase()} + { + setFileAction(e.currentTarget.value as FileAction); + }} + > + Choose which files to add to the existing {reviewTypeLabel.toSentenceCase()} + You are currently viewing: all files

+ f.file.name.endsWith('.pdf'))} setMergedPdfBlob={(): void => {}} stitchedBlobLoaded={(): void => {}} - isReview={true} documentConfig={reviewConfig} - > - - + /> )} @@ -448,17 +438,8 @@ const ReviewDetailsAssessmentStage = ({ )} setMergedPdfBlob={(): void => {}} stitchedBlobLoaded={(): void => {}} - isReview={true} documentConfig={reviewConfig} - > - - + /> )} )} diff --git a/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.test.tsx b/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.test.tsx index 7821587ee8..414e649b2a 100644 --- a/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.test.tsx @@ -105,7 +105,7 @@ describe('ReviewDetailsCompletePage', () => { ); expect( - screen.getByRole('button', { name: 'Go to documents to review' }), + screen.getByRole('button', { name: 'Go to Documents to Review' }), ).toBeInTheDocument(); }); }); @@ -579,7 +579,7 @@ describe('ReviewDetailsCompletePage', () => { />, ); - const button = screen.getByRole('button', { name: 'Go to documents to review' }); + const button = screen.getByRole('button', { name: 'Go to Documents to Review' }); await user.click(button); expect(mockSetPatientDetails).toHaveBeenCalledWith(null); @@ -596,7 +596,7 @@ describe('ReviewDetailsCompletePage', () => { />, ); - const button = screen.getByRole('button', { name: 'Go to documents to review' }); + const button = screen.getByRole('button', { name: 'Go to Documents to Review' }); await user.click(button); expect(mockNavigate).toHaveBeenCalledWith(routeChildren.ADMIN_REVIEW, { @@ -615,7 +615,7 @@ describe('ReviewDetailsCompletePage', () => { />, ); - const button = screen.getByRole('button', { name: 'Go to documents to review' }); + const button = screen.getByRole('button', { name: 'Go to Documents to Review' }); await user.click(button); 
expect(mockSetPatientDetails).toHaveBeenCalledBefore(mockNavigate as Mock); diff --git a/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.tsx b/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.tsx index e48e7c1f29..88b5f24d95 100644 --- a/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsCompleteStage/ReviewDetailsCompleteStage.tsx @@ -109,17 +109,10 @@ const ReviewDetailsCompleteStage = ({ const getDefaultPrmEmailSupportMessage = (): JSX.Element => { return ( - <> -

- This document has been matched to the patient whose NHS number you entered. If - this patient is registered at your practice, you will see this document on the - list of documents to review again. -

-

- If you think you've made a mistake, contact the Patient Record Management team - at england.prmteam@nhs.net. -

- +

+ If you think you've made a mistake, contact the Patient Record Management team at{' '} + england.prmteam@nhs.net. +

); }; @@ -140,6 +133,16 @@ const ReviewDetailsCompleteStage = ({ }; const getPanelBody = (): JSX.Element => { + if (completeState === CompleteState.PATIENT_MATCHED) { + return ( +

+ This document has been matched to the patient whose NHS number you entered. If + this patient is registered at your practice, you will see this document on the + list of documents to review again. +

+ ); + } + if (completeState === CompleteState.PATIENT_UNKNOWN) { return (

@@ -269,7 +272,7 @@ const ReviewDetailsCompleteStage = ({ {getBody()} ); diff --git a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectOrderStage/ReviewDetailsDocumentSelectOrderStage.tsx b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectOrderStage/ReviewDetailsDocumentSelectOrderStage.tsx index 6bca6a24ab..2ca7a98cad 100644 --- a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectOrderStage/ReviewDetailsDocumentSelectOrderStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectOrderStage/ReviewDetailsDocumentSelectOrderStage.tsx @@ -65,7 +65,6 @@ const ReviewDetailsDocumentSelectOrderStage = ({ confirmFiles={(): void => {}} onSuccess={onSuccess} isReview={true} - reviewData={reviewData} /> ); }; diff --git a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.test.tsx b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.test.tsx index a66c96d61e..b585a35326 100644 --- a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.test.tsx @@ -1,6 +1,4 @@ -// need to use happy-dom for this test file as jsdom doesn't support DOMMatrix and scrollIntoView -// @vitest-environment happy-dom -import { render, screen, waitFor, fireEvent, RenderResult } from '@testing-library/react'; +import { render, screen, waitFor } from '@testing-library/react'; import { describe, expect, it, vi, beforeEach } from 'vitest'; import userEvent from '@testing-library/user-event'; import ReviewDetailsDocumentSelectStage from './ReviewDetailsDocumentSelectStage'; @@ -12,8 +10,6 @@ import { UploadDocument, } from '../../../../types/pages/UploadDocumentsPage/types'; import { routeChildren } from '../../../../types/generic/routes'; -import { getDocument } from 'pdfjs-dist'; -import { 
PDF_PARSING_ERROR_TYPE } from '../../../../helpers/utils/fileUploadErrorMessages'; const mockNavigate = vi.fn(); @@ -53,7 +49,7 @@ vi.mock('react-router-dom', () => ({ })); describe('ReviewDetailsDocumentSelectStage', () => { - const testReviewSnomed: DOCUMENT_TYPE = DOCUMENT_TYPE.LLOYD_GEORGE; + const testReviewSnoMed: DOCUMENT_TYPE = DOCUMENT_TYPE.LLOYD_GEORGE; let mockReviewData: ReviewDetails; let mockDocuments: UploadDocument[]; @@ -64,7 +60,7 @@ describe('ReviewDetailsDocumentSelectStage', () => { mockReviewData = new ReviewDetails( 'test-review-id', - testReviewSnomed, + testReviewSnoMed, '2024-01-01T12:00:00Z', 'Test Uploader', '2024-01-01T12:00:00Z', @@ -78,35 +74,39 @@ describe('ReviewDetailsDocumentSelectStage', () => { mockSetDocuments = vi.fn() as SetUploadDocuments; }); - const renderApp = (props?: { - reviewData?: ReviewDetails | null; - documents?: UploadDocument[]; - setDocuments?: SetUploadDocuments; - }): RenderResult => { - const defaultProps = { - reviewData: mockReviewData, - documents: mockDocuments, - setDocuments: mockSetDocuments, - }; - - return render(); - }; - describe('Rendering', () => { it('shows spinner when reviewData is null', () => { - renderApp({ reviewData: null }); + render( + , + ); expect(screen.getByTestId('mock-spinner')).toBeInTheDocument(); }); it('shows spinner when files is null', () => { - renderApp({ reviewData: { ...mockReviewData, files: null } as any }); + render( + , + ); expect(screen.getByTestId('mock-spinner')).toBeInTheDocument(); }); it('shows spinner when documents are not initialised', () => { - renderApp(); + render( + , + ); expect(screen.getByTestId('mock-spinner')).toBeInTheDocument(); }); @@ -120,14 +120,22 @@ describe('ReviewDetailsDocumentSelectStage', () => { file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), state: DOCUMENT_UPLOAD_STATE.SELECTED, progress: 0, - docType: testReviewSnomed, + docType: testReviewSnoMed, attempts: 0, numPages: 1, validated: false, }, ]; - render( + 
const { rerender } = render( + , + ); + + rerender( { // Check that the actual DocumentSelectStage component is rendered // by looking for the page title - expect(screen.getByText('Choose scanned paper notes to upload')).toBeInTheDocument(); + expect( + screen.getByText('Choose scanned paper notes files to upload'), + ).toBeInTheDocument(); }); it('displays document information correctly', async () => { @@ -153,14 +163,28 @@ describe('ReviewDetailsDocumentSelectStage', () => { }), state: DOCUMENT_UPLOAD_STATE.SELECTED, progress: 0, - docType: testReviewSnomed, + docType: testReviewSnoMed, attempts: 0, numPages: 1, validated: false, }, ]; - renderApp({ documents: testDocuments }); + const { rerender } = render( + , + ); + + rerender( + , + ); await waitFor(() => { expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); @@ -170,7 +194,7 @@ describe('ReviewDetailsDocumentSelectStage', () => { expect(screen.getByText('test-document.pdf')).toBeInTheDocument(); }); - it('navigates to previous page on back clicked', async () => { + it('provides correct back link based on review ID', async () => { const user = userEvent.setup(); const testDocuments: UploadDocument[] = [ { @@ -178,23 +202,42 @@ describe('ReviewDetailsDocumentSelectStage', () => { file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), state: DOCUMENT_UPLOAD_STATE.SELECTED, progress: 0, - docType: testReviewSnomed, + docType: testReviewSnoMed, attempts: 0, numPages: 1, validated: false, }, ]; - renderApp({ documents: testDocuments }); + const { rerender } = render( + , + ); + + rerender( + , + ); await waitFor(() => { expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); }); + const expectedBackLink = routeChildren.ADMIN_REVIEW_ADD_MORE_CHOICE.replaceAll( + ':reviewId', + 'test-review-id.1', + ); + const backButton = screen.getByTestId('back-button'); await user.click(backButton); - expect(mockNavigate).toHaveBeenCalledWith(-1); + 
expect(mockNavigate).toHaveBeenCalledWith(expectedBackLink); }); }); @@ -207,14 +250,28 @@ describe('ReviewDetailsDocumentSelectStage', () => { file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), state: DOCUMENT_UPLOAD_STATE.SELECTED, progress: 0, - docType: testReviewSnomed, + docType: testReviewSnoMed, attempts: 0, numPages: 1, validated: false, }, ]; - renderApp({ documents: testDocuments }); + const { rerender } = render( + , + ); + + rerender( + , + ); await waitFor(() => { expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); @@ -237,7 +294,7 @@ describe('ReviewDetailsDocumentSelectStage', () => { const user = userEvent.setup(); const customReviewData = new ReviewDetails( 'custom-id', - testReviewSnomed, + testReviewSnoMed, '2024-01-01T12:00:00Z', 'Test Uploader', '2024-01-01T12:00:00Z', @@ -253,14 +310,28 @@ describe('ReviewDetailsDocumentSelectStage', () => { file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), state: DOCUMENT_UPLOAD_STATE.SELECTED, progress: 0, - docType: testReviewSnomed, + docType: testReviewSnoMed, attempts: 0, numPages: 1, validated: false, }, ]; - renderApp({ reviewData: customReviewData, documents: testDocuments }); + const { rerender } = render( + , + ); + + rerender( + , + ); await waitFor(() => { expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); @@ -274,92 +345,4 @@ describe('ReviewDetailsDocumentSelectStage', () => { }); }); }); - - describe('Error handling', () => { - const errorCases = [ - ['password protected file', PDF_PARSING_ERROR_TYPE.PASSWORD_MISSING], - ['invalid PDF structure', PDF_PARSING_ERROR_TYPE.INVALID_PDF_STRUCTURE], - ['empty PDF', PDF_PARSING_ERROR_TYPE.EMPTY_PDF], - ]; - - it.each(errorCases)( - 'navigates to admin file errors page when user selects a %s', - async (_description, errorType) => { - const testDocuments: UploadDocument[] = [ - { - id: 'test-id', - file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), - state: 
DOCUMENT_UPLOAD_STATE.SELECTED, - progress: 0, - docType: testReviewSnomed, - attempts: 0, - numPages: 1, - validated: false, - }, - ]; - - renderApp({ documents: testDocuments }); - - await waitFor(() => { - expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); - }); - - // Set up mock to throw error AFTER component is ready - vi.mocked(getDocument).mockImplementationOnce(() => { - throw new Error(errorType as string); - }); - - const errorFile = new File(['test'], 'error-file.pdf', { type: 'application/pdf' }); - const dropzone = screen.getByTestId('dropzone'); - fireEvent.drop(dropzone, { - dataTransfer: { files: [errorFile] }, - }); - - await waitFor(() => { - expect(mockNavigate).toHaveBeenCalledWith( - routeChildren.ADMIN_REVIEW_FILE_ERRORS.replaceAll( - ':reviewId', - 'test-review-id.1', - ), - ); - }); - }, - ); - - it('navigates to admin file errors page when user selects a non-PDF file', async () => { - const testDocuments: UploadDocument[] = [ - { - id: 'test-id', - file: new File(['test'], 'test.pdf', { type: 'application/pdf' }), - state: DOCUMENT_UPLOAD_STATE.SELECTED, - progress: 0, - docType: testReviewSnomed, - attempts: 0, - numPages: 1, - validated: false, - }, - ]; - - renderApp({ documents: testDocuments }); - - await waitFor(() => { - expect(screen.queryByTestId('mock-spinner')).not.toBeInTheDocument(); - }); - - const nonPdfFile = new File(['test'], 'nonPdfFile.txt', { type: 'text/plain' }); - const dropzone = screen.getByTestId('dropzone'); - fireEvent.drop(dropzone, { - dataTransfer: { files: [nonPdfFile] }, - }); - - await waitFor(() => { - expect(mockNavigate).toHaveBeenCalledWith( - routeChildren.ADMIN_REVIEW_FILE_ERRORS.replaceAll( - ':reviewId', - 'test-review-id.1', - ), - ); - }); - }); - }); }); diff --git a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.tsx b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.tsx 
index 3dfa3bd040..df5845b547 100644 --- a/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsDocumentSelectStage/ReviewDetailsDocumentSelectStage.tsx @@ -55,15 +55,6 @@ const ReviewDetailsDocumentSelectStage = ({ ), ); }; - - const onError = (): void => { - navigate( - routeChildren.ADMIN_REVIEW_FILE_ERRORS.replaceAll( - ':reviewId', - `${reviewData?.id}.${reviewData?.version}`, - ), - ); - }; if (!reviewData?.snomedCode) { return ; } @@ -76,10 +67,10 @@ const ReviewDetailsDocumentSelectStage = ({ filesErrorRef={filesErrorRef} documentConfig={getConfigForDocType(reviewData.snomedCode)} onSuccessOverride={onSuccess} - onErrorOverride={onError} - backLinkOverride={(): void => { - navigate(-1); - }} + backLinkOverride={routeChildren.ADMIN_REVIEW_ADD_MORE_CHOICE.replaceAll( + ':reviewId', + `${reviewData?.id}.${reviewData?.version}`, + )} removeAllFilesLinkOverride={routeChildren.ADMIN_REVIEW_REMOVE_ALL.replaceAll( ':reviewId', `${reviewData?.id}.${reviewData?.version}`, diff --git a/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.test.tsx b/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.test.tsx index e836fd4d4b..933dddbab5 100644 --- a/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.test.tsx @@ -90,16 +90,6 @@ describe('ReviewDetailsDontKnowNHSNumberPage', () => { screen.getByText(/following their process for record transfers/i), ).toBeInTheDocument(); }); - - it('renders the go back button with correct text and data-testid', () => { - render( - , - ); - - const backButton = screen.getByTestId('back-button'); - expect(backButton).toBeInTheDocument(); - 
expect(backButton).toHaveTextContent('Go back'); - }); }); describe('User Interactions', () => { diff --git a/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.tsx b/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.tsx index f4fde1a499..1427a071f8 100644 --- a/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsDontKnowNHSNumberStage/ReviewDetailsDontKnowNHSNumberStage.tsx @@ -5,7 +5,6 @@ import { ReviewDetails } from '../../../../types/generic/reviews'; import { navigateUrlParam, routeChildren } from '../../../../types/generic/routes'; import { ReviewUploadDocument } from '../../../../types/pages/UploadDocumentsPage/types'; import Spinner from '../../../generic/spinner/Spinner'; -import BackButton from '../../../generic/backButton/BackButton'; type ReviewDetailsDontKnowNHSNumberStageProps = { reviewData: ReviewDetails | null; @@ -39,7 +38,6 @@ const ReviewDetailsDontKnowNHSNumberStage = ({

-

Download this document

diff --git a/app/src/components/blocks/_admin/reviewDetailsPatientSearchStage/ReviewDetailsPatientSearchStage.tsx b/app/src/components/blocks/_admin/reviewDetailsPatientSearchStage/ReviewDetailsPatientSearchStage.tsx index 8bfe9e4440..ecd22965ab 100644 --- a/app/src/components/blocks/_admin/reviewDetailsPatientSearchStage/ReviewDetailsPatientSearchStage.tsx +++ b/app/src/components/blocks/_admin/reviewDetailsPatientSearchStage/ReviewDetailsPatientSearchStage.tsx @@ -29,9 +29,8 @@ import { RecordLayout } from '../../../generic/recordCard/RecordCard'; import { RecordLoader, RecordLoaderProps } from '../../../generic/recordLoader/RecordLoader'; import { getConfigForDocType } from '../../../../helpers/utils/documentType'; import { DOWNLOAD_STAGE } from '../../../../types/generic/downloadStage'; +import { getFormattedDateFromString } from '../../../../helpers/utils/formatDate'; import { ReviewUploadDocument } from '../../../../types/pages/UploadDocumentsPage/types'; -import { getFormattedDateTimeFromString } from '../../../../helpers/utils/formatDate'; -import { CreatedByCard } from '../../../generic/createdBy/createdBy'; export const incorrectFormatMessage = "Enter patient's 10 digit NHS number"; @@ -138,6 +137,7 @@ const ReviewDetailsPatientSearchStage = ({ const recordDetailsProps: RecordLoaderProps = { downloadStage: DOWNLOAD_STAGE.SUCCEEDED, + lastUpdated: getFormattedDateFromString(reviewData.lastUpdated), childrenIfFailiure:

Failure: failed to load documents

, fileName: !reviewConfig.multifileReview && reviewData.files?.length === 1 @@ -236,13 +236,7 @@ const ReviewDetailsPatientSearchStage = ({ setMergedPdfBlob={(): void => {}} documentConfig={reviewConfig} isReview={true} - > - - + /> ); diff --git a/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.test.tsx b/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.test.tsx index dbfe647f16..8b6eede4e6 100644 --- a/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.test.tsx +++ b/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.test.tsx @@ -1,6 +1,6 @@ import { render, screen, waitFor } from '@testing-library/react'; import userEvent from '@testing-library/user-event'; -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi, Mock } from 'vitest'; import ReviewsDetailsPageComponent from './ReviewsDetailsStage'; import { runAxeTest } from '../../../../helpers/test/axeTestHelper'; import { buildPatientDetails } from '../../../../helpers/test/testBuilders'; @@ -214,7 +214,7 @@ describe('ReviewDetailsStage', () => { await waitFor(() => { expect( screen.getByText( - 'Check the patient details in the document shown matches these patient demographics:', + 'Check the patient details in this document match these patient demographics:', ), ).toBeInTheDocument(); }); diff --git a/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.tsx b/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.tsx index 619d31e5b5..61c232acc1 100644 --- a/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.tsx +++ b/app/src/components/blocks/_admin/reviewsDetailsStage/ReviewsDetailsStage.tsx @@ -7,7 +7,7 @@ import useConfig from '../../../../helpers/hooks/useConfig'; import useRole from '../../../../helpers/hooks/useRole'; import useTitle from '../../../../helpers/hooks/useTitle'; import { 
getConfigForDocType } from '../../../../helpers/utils/documentType'; -import { getFormattedDateTimeFromString } from '../../../../helpers/utils/formatDate'; +import { getFormattedDateFromString } from '../../../../helpers/utils/formatDate'; import { setFullScreen } from '../../../../helpers/utils/fullscreen'; import { handleSearch as handlePatientSearch } from '../../../../helpers/utils/handlePatientSearch'; import { usePatientDetailsContext } from '../../../../providers/patientProvider/PatientProvider'; @@ -35,7 +35,6 @@ import { errorToParams } from '../../../../helpers/utils/errorToParams'; import waitForSeconds from '../../../../helpers/utils/waitForSeconds'; import DocumentUploadLloydGeorgePreview from '../../_documentUpload/documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview'; import { NHS_NUMBER_UNKNOWN } from '../../../../helpers/constants/numbers'; -import { CreatedByCard } from '../../../generic/createdBy/createdBy'; export type ReviewsDetailsStageProps = { reviewData: ReviewDetails; @@ -107,9 +106,9 @@ const ReviewsDetailsStage = ({ anchor.remove(); } }; - const recordDetailsProps: RecordLoaderProps = { downloadStage, + lastUpdated: getFormattedDateFromString(reviewData.lastUpdated), childrenIfFailiure:

Failure: failed to load documents

, fileName: !reviewConfig.multifileReview && reviewData.files && reviewData.files.length === 1 @@ -296,9 +295,7 @@ const ReviewsDetailsStage = ({

Check this document is for the correct patient

-

- Check the patient details in the document shown matches these patient demographics: -

+

Check the patient details in this document match these patient demographics:

@@ -335,15 +332,7 @@ const ReviewsDetailsStage = ({ setMergedPdfBlob={(): void => {}} documentConfig={reviewConfig} isReview={true} - > - - + />
diff --git a/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.test.tsx b/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.test.tsx index fee2c1102b..3a3dad3d35 100644 --- a/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.test.tsx +++ b/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.test.tsx @@ -388,19 +388,6 @@ describe('DeleteSubmitStage', () => { expect(mockedUseNavigate).toHaveBeenCalledWith(routes.SESSION_EXPIRED); }); }); - - it('navigates to previous route when back link is clicked', async () => { - renderComponent(DOCUMENT_TYPE.LLOYD_GEORGE, history); - - const backLink = screen.getByRole('link', { name: 'Go back' }); - expect(backLink).toBeInTheDocument(); - - userEvent.click(backLink); - - await waitFor(() => { - expect(mockedUseNavigate).toHaveBeenCalledWith(-1); - }); - }); }); }); diff --git a/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.tsx b/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.tsx index 8f852865ec..35d690946a 100644 --- a/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.tsx +++ b/app/src/components/blocks/_delete/deleteSubmitStage/DeleteSubmitStage.tsx @@ -151,7 +151,14 @@ export const DeleteSubmitStageIndexView = ({ return ( <> - + {deletionStage === SUBMISSION_STATE.FAILED && } {showNoOptionSelectedMessage && ( { ).toBeInTheDocument(); }); - expect(screen.getByText(searchResults.references[0].fileName)).toBeInTheDocument(); - expect(screen.getByText(searchResults.references[1].fileName)).toBeInTheDocument(); + expect(screen.getByText(searchResults[0].fileName)).toBeInTheDocument(); + expect(screen.getByText(searchResults[1].fileName)).toBeInTheDocument(); }); }); diff --git a/app/src/components/blocks/_documentUpload/documentSelectOrderStage/DocumentSelectOrderStage.tsx b/app/src/components/blocks/_documentUpload/documentSelectOrderStage/DocumentSelectOrderStage.tsx index 
db09b81626..e209f9be97 100644 --- a/app/src/components/blocks/_documentUpload/documentSelectOrderStage/DocumentSelectOrderStage.tsx +++ b/app/src/components/blocks/_documentUpload/documentSelectOrderStage/DocumentSelectOrderStage.tsx @@ -24,9 +24,6 @@ import ErrorBox from '../../../layout/errorBox/ErrorBox'; import DocumentUploadLloydGeorgePreview from '../documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview'; import SpinnerButton from '../../../generic/spinnerButton/SpinnerButton'; import { DOCUMENT_TYPE_CONFIG } from '../../../../helpers/utils/documentType'; -import { CreatedByText } from '../../../generic/createdBy/createdBy'; -import { getFormattedDateTimeFromString } from '../../../../helpers/utils/formatDate'; -import { ReviewDetails } from '../../../../types/generic/reviews'; type Props = { documents: UploadDocument[] | ReviewUploadDocument[]; @@ -37,7 +34,6 @@ type Props = { confirmFiles: () => void; onSuccess?: () => void; isReview?: boolean; - reviewData?: ReviewDetails; }; type FormData = { @@ -55,7 +51,6 @@ const DocumentSelectOrderStage = ({ confirmFiles, onSuccess, isReview = false, - reviewData, }: Readonly): JSX.Element => { const navigate = useEnhancedNavigate(); const journey = getJourney(); @@ -477,18 +472,7 @@ const DocumentSelectOrderStage = ({ setStitchedBlobLoaded(loaded); }} documentConfig={documentConfig} - isReview={isReview} - > - {isReview && reviewData && ( - - )} - + />
{documents.length > 0 && stitchedBlobLoaded && (
{failedDocuments.length > 0 ? ( - <> +
+

Some of your files failed to upload

+ + {showFiles && ( +
+ {failedDocuments.map((doc) => ( +
+ {doc.file.name} +
+
+ ))} +
+ )} +
+

What you need to do

- We uploaded {documents.length - failedDocuments.length} out of{' '} - {documents.length} files. -
- {failedDocuments.length} files could not be uploaded. + You must note which files uploaded successfully, then return to the + patient's record to upload any files that failed.

-

There may be a problem with your files.

- -
-

Files that could not be uploaded

- - - {showFiles && ( -
- {failedDocuments.map((doc) => ( -
-

{doc.file.name}

-
- ))} -
- )} - -

What you need to do

-

You must note which files did not upload.

- -

- Remove any passwords from files and check that all files open correctly. - Then return to the patient's record to upload them again. -

- -

Get help

-

- Contact your local IT support desk to resolve the problems with these - files. -

- - -
- + +
) : ( <>

What happens next

- {patientDetails.canManageRecord && ( + {journey === 'update' && patientDetails.canManageRecord && (

You can now view the updated {documentConfig.displayName} for this patient in this service by{' '} diff --git a/app/src/components/blocks/_documentUpload/documentUploadIndex/DocumentUploadIndex.tsx b/app/src/components/blocks/_documentUpload/documentUploadIndex/DocumentUploadIndex.tsx index 37996c8790..10c8691376 100644 --- a/app/src/components/blocks/_documentUpload/documentUploadIndex/DocumentUploadIndex.tsx +++ b/app/src/components/blocks/_documentUpload/documentUploadIndex/DocumentUploadIndex.tsx @@ -146,29 +146,23 @@ const DocumentUploadIndex = ({ {documentTypesConfig .filter((doc) => doc.canUploadIndependently) .map((documentConfig) => ( - + => documentTypeSelected( - documentConfig.snomedCode as DOCUMENT_TYPE, + documentConfig.snomed_code as DOCUMENT_TYPE, ) } > - {documentConfig.content.uploadTitle} + {documentConfig.content.upload_title} - {( - documentConfig.content.uploadDescription as string[] - ).map((paragraph, index) => ( -

- {paragraph} -

- ))} + {documentConfig.content.upload_description} diff --git a/app/src/components/blocks/_documentUpload/documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview.tsx b/app/src/components/blocks/_documentUpload/documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview.tsx index 5e4fd0ac0d..8ae8e86f34 100644 --- a/app/src/components/blocks/_documentUpload/documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview.tsx +++ b/app/src/components/blocks/_documentUpload/documentUploadLloydGeorgePreview/DocumentUploadLloydGeorgePreview.tsx @@ -12,7 +12,6 @@ type Props = { documentConfig: DOCUMENT_TYPE_CONFIG; isReview?: boolean; showCurrentlyViewingText?: boolean; - children?: React.ReactNode; }; const DocumentUploadLloydGeorgePreview = ({ @@ -22,7 +21,6 @@ const DocumentUploadLloydGeorgePreview = ({ documentConfig, isReview = false, showCurrentlyViewingText, - children, }: Props): JSX.Element => { const [mergedPdfUrl, setMergedPdfUrl] = useState(''); const journey = getJourney(); @@ -74,8 +72,8 @@ const DocumentUploadLloydGeorgePreview = ({ {documentConfig.stitched ? ( <>

- {documentConfig.content.stitchedPreviewFirstParagraph} - + This shows how the final record will look when combined into a + single document.{' '} {journey === 'update' && `Any files added will appear after the existing ${documentConfig.displayName}.`}

@@ -108,7 +106,6 @@ const DocumentUploadLloydGeorgePreview = ({ )} )} - {isReview && <>{children}} {documents && mergedPdfUrl && ( )} diff --git a/app/src/components/blocks/_documentUpload/documentUploadingStage/DocumentUploadingStage.tsx b/app/src/components/blocks/_documentUpload/documentUploadingStage/DocumentUploadingStage.tsx index d8e6dde329..55331f8d86 100644 --- a/app/src/components/blocks/_documentUpload/documentUploadingStage/DocumentUploadingStage.tsx +++ b/app/src/components/blocks/_documentUpload/documentUploadingStage/DocumentUploadingStage.tsx @@ -59,7 +59,7 @@ const DocumentUploadingStage = ({ {documentConfig.stitched && ( {journey === 'update' - ? `Your files will be added to the existing ${documentConfig.displayName} when the upload is complete.` + ? 'Your files will be added to the existing record when the upload is complete.' : 'Your files will be combined into one document when the upload is complete.'} )} diff --git a/app/src/components/blocks/_lloydGeorge/lloydGeorgeViewRecordStage/LloydGeorgeViewRecordStage.tsx b/app/src/components/blocks/_lloydGeorge/lloydGeorgeViewRecordStage/LloydGeorgeViewRecordStage.tsx index d3f6fccea3..acdcab34f0 100644 --- a/app/src/components/blocks/_lloydGeorge/lloydGeorgeViewRecordStage/LloydGeorgeViewRecordStage.tsx +++ b/app/src/components/blocks/_lloydGeorge/lloydGeorgeViewRecordStage/LloydGeorgeViewRecordStage.tsx @@ -169,7 +169,6 @@ const LloydGeorgeViewRecordStage = ({ patientDetails, lloydGeorgeConfig as DOCUMENT_TYPE_CONFIG, ), - author: 'Y1234', version: 'mock-version-id', created: new Date().toISOString(), fileSize: 12345, diff --git a/app/src/components/blocks/_patientDocuments/documentView/DocumentView.test.tsx b/app/src/components/blocks/_patientDocuments/documentView/DocumentView.test.tsx index 3e7eecfb9b..4ab60d1b95 100644 --- a/app/src/components/blocks/_patientDocuments/documentView/DocumentView.test.tsx +++ b/app/src/components/blocks/_patientDocuments/documentView/DocumentView.test.tsx @@ -51,7 
+51,6 @@ const EMBEDDED_PDF_VIEWER_TITLE = 'Embedded PDF Viewer'; const mockDocumentReference: DocumentReference = { id: 'test-id', fileName: 'test-document.pdf', - author: 'Y12345', created: '2023-01-01T10:00:00Z', url: 'https://example.com/document.pdf', contentType: 'application/pdf', @@ -62,20 +61,6 @@ const mockDocumentReference: DocumentReference = { isPdf: true, }; -const mockDocumentRefNotPDF: DocumentReference = { - id: 'test-id', - fileName: 'test-document.pdf', - author: 'Y12345', - created: '2023-01-01T10:00:00Z', - url: 'https://example.com/document.pdf', - contentType: '', - documentSnomedCodeType: DOCUMENT_TYPE.LLOYD_GEORGE, - version: '1', - virusScannerResult: 'clean', - fileSize: 1024, - isPdf: false, -}; - const mockPatientDetails = buildPatientDetails(); const simulateFullscreenChange = (isFullscreen: boolean): void => { @@ -237,8 +222,7 @@ describe('DocumentView', () => { it('displays formatted creation date', () => { renderComponent(); - expect(screen.queryByTestId('document-file-name')).not.toBeInTheDocument(); - expect(screen.getByText(/Created by practice:/)).toBeInTheDocument(); + expect(screen.getByText(/Last updated:/)).toBeInTheDocument(); }); it('displays document type label in record card', () => { @@ -248,12 +232,6 @@ describe('DocumentView', () => { buildDocumentConfig().content.viewDocumentTitle as string, ); }); - - it('displays file name, document is not a PDF', () => { - renderComponent(mockDocumentRefNotPDF); - - expect(screen.queryByTestId('document-file-name')).toBeInTheDocument(); - }); }); describe('Add Files functionality', () => { diff --git a/app/src/components/blocks/_patientDocuments/documentView/DocumentView.tsx b/app/src/components/blocks/_patientDocuments/documentView/DocumentView.tsx index 42988035c9..edac3e6364 100644 --- a/app/src/components/blocks/_patientDocuments/documentView/DocumentView.tsx +++ b/app/src/components/blocks/_patientDocuments/documentView/DocumentView.tsx @@ -72,17 +72,11 @@ const DocumentView 
= ({ return (
- {!documentReference.isPdf && ( -
- File name: {documentReference.fileName} -
- )}
- Created by practice: {documentReference.author} on{' '} - {getFormattedDate(new Date(documentReference.created))} + Filename: {documentReference.fileName} +
+
+ Last updated: {getFormattedDate(new Date(documentReference.created))}
diff --git a/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.test.tsx b/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.test.tsx index 1efc4e3f56..2f0ad4a7fc 100644 --- a/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.test.tsx +++ b/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.test.tsx @@ -71,7 +71,22 @@ describe('PatientVerifyPage', () => { const backLink = screen.getByRole('link', { name: /back/i }); await userEvent.click(backLink); - expect(mockNavigate).toHaveBeenCalledWith(-1); + expect(mockNavigate).toHaveBeenCalledWith('/patient/search'); + }); + + it('renders back button with custom backLinkOverride when clicked', async () => { + mockNavigate.mockClear(); + render( + , + ); + + const backLink = screen.getByRole('link', { name: /back/i }); + await userEvent.click(backLink); + + expect(mockNavigate).toHaveBeenCalledWith('/admin/reviews/test-123/search-patient'); }); it('renders patient summary', () => { @@ -316,12 +331,10 @@ describe('PatientVerifyPage', () => { }); await userEvent.click(confirmButton); - await waitFor(() => { - expect(screen.getByText('There is a problem')).toBeInTheDocument(); + await waitFor(async () => { + const results = await runAxeTest(document.body); + expect(results).toHaveNoViolations(); }); - - const results = await runAxeTest(document.body); - expect(results).toHaveNoViolations(); }); }); }); diff --git a/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.tsx b/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.tsx index 3cd9dd8d36..bffbebcc7c 100644 --- a/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.tsx +++ b/app/src/components/blocks/generic/patientVerifyPage/PatientVerifyPage.tsx @@ -3,6 +3,7 @@ import { Button, WarningCallout } from 'nhsuk-react-components'; import useTitle from '../../../../helpers/hooks/useTitle'; import BackButton from 
'../../../generic/backButton/BackButton'; import PatientSummary from '../../../generic/patientSummary/PatientSummary'; +import { routes } from '../../../../types/generic/routes'; import { useForm } from 'react-hook-form'; import usePatient from '../../../../helpers/hooks/usePatient'; import useRole from '../../../../helpers/hooks/useRole'; @@ -13,11 +14,13 @@ import { PatientDetails } from '../../../../types/generic/patientDetails'; type PatientVerifyPageProps = { onSubmit: (setInputError: Dispatch>) => void; reviewPatientDetails?: PatientDetails; + backLinkOverride?: string; }; const PatientVerifyPage = ({ onSubmit, reviewPatientDetails, + backLinkOverride, }: PatientVerifyPageProps): JSX.Element => { const role = useRole(); let patientDetails = usePatient(); @@ -36,7 +39,7 @@ const PatientVerifyPage = ({ return (
- + {inputError && ( { - describe('CreatedByCard', () => { - const defaultProps = { - odsCode: 'Y12345', - dateUploaded: '2024-01-15', - }; - - it('renders the card with odsCode and dateUploaded', () => { - render(); - - expect( - screen.getByText( - `Created by practice ${defaultProps.odsCode} on ${defaultProps.dateUploaded}`, - ), - ).toBeInTheDocument(); - }); - - it('applies custom cssClass when provided', () => { - const customClass = 'custom-test-class'; - const { container } = render( - , - ); - - const cardContent = container.querySelector(`.${customClass}`); - expect(cardContent).toBeInTheDocument(); - }); - - it('renders without cssClass when not provided', () => { - const { container } = render(); - - const cardContent = container.firstChild; - expect(cardContent).not.toHaveClass('custom-test-class'); - }); - }); - - describe('CreatedByText', () => { - const defaultProps = { - odsCode: 'A98765', - dateUploaded: '2024-06-20', - }; - - it('renders the text with odsCode and dateUploaded', () => { - render(); - - expect( - screen.getByText( - `Created by practice ${defaultProps.odsCode} on ${defaultProps.dateUploaded}`, - ), - ).toBeInTheDocument(); - }); - - it('renders as a paragraph element', () => { - render(); - - const paragraph = screen.getByText(/Created by practice/); - expect(paragraph.tagName).toBe('P'); - }); - - it('applies custom cssClass when provided', () => { - const customClass = 'text-style-class'; - const { container } = render( - , - ); - - const paragraph = container.querySelector(`.${customClass}`); - expect(paragraph).toBeInTheDocument(); - }); - - it('renders without cssClass when not provided', () => { - const { container } = render(); - - const paragraph = container.firstChild; - expect(paragraph).not.toHaveClass('text-style-class'); - }); - }); -}); diff --git a/app/src/components/generic/createdBy/createdBy.tsx b/app/src/components/generic/createdBy/createdBy.tsx deleted file mode 100644 index 3102ebd63c..0000000000 --- 
a/app/src/components/generic/createdBy/createdBy.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import { Card } from 'nhsuk-react-components'; -import { JSX } from 'react'; - -export type CreatedByProps = { - odsCode: string; - dateUploaded: string; - cssClass?: string; -}; - -export const CreatedByCard = ({ odsCode, dateUploaded, cssClass }: CreatedByProps): JSX.Element => ( - - Created by practice {odsCode} on {dateUploaded} - -); - -export const CreatedByText = ({ odsCode, dateUploaded, cssClass }: CreatedByProps): JSX.Element => ( -

- Created by practice {odsCode} on {dateUploaded} -

-); diff --git a/app/src/components/generic/recordLoader/RecordLoader.test.tsx b/app/src/components/generic/recordLoader/RecordLoader.test.tsx index 8cc2375b5d..586516627c 100644 --- a/app/src/components/generic/recordLoader/RecordLoader.test.tsx +++ b/app/src/components/generic/recordLoader/RecordLoader.test.tsx @@ -343,6 +343,12 @@ describe('RecordDetails', () => { }); describe('Edge Cases', () => { + it('handles empty string for lastUpdated', () => { + render(); + + expect(screen.getByText('Last updated:')).toBeInTheDocument(); + }); + it('handles long date strings', () => { const longDate = 'Wednesday, 25th December 2024 at 12:30:45pm GMT'; render(); diff --git a/app/src/components/generic/recordLoader/RecordLoader.tsx b/app/src/components/generic/recordLoader/RecordLoader.tsx index 3d8f5cfbcb..917c4dac49 100644 --- a/app/src/components/generic/recordLoader/RecordLoader.tsx +++ b/app/src/components/generic/recordLoader/RecordLoader.tsx @@ -4,7 +4,7 @@ import ProgressBar from '../progressBar/ProgressBar'; export type RecordLoaderProps = { downloadStage: DOWNLOAD_STAGE; - lastUpdated?: string; + lastUpdated: string; childrenIfFailiure: React.JSX.Element; fileName: string; downloadAction?: (e: React.MouseEvent) => void; @@ -23,10 +23,6 @@ export const RecordLoader = ({ fileName, }; - if (!lastUpdated && !fileName) { - return <>; - } - switch (downloadStage) { case DOWNLOAD_STAGE.INITIAL: case DOWNLOAD_STAGE.PENDING: @@ -52,7 +48,7 @@ export const RecordLoader = ({ }; export type RecordDetailsProps = { - lastUpdated?: string; + lastUpdated: string; fileName: string; downloadAction?: (e: React.MouseEvent) => void; }; @@ -65,11 +61,9 @@ export const RecordDetails = ({ return (
- {lastUpdated && ( -
-

Last updated: {lastUpdated}

-
- )} +
+

Last updated: {lastUpdated}

+
{fileName && (

diff --git a/app/src/config/documentTypesConfig.json b/app/src/config/documentTypesConfig.json index 83d5ca0131..4c1b639cae 100644 --- a/app/src/config/documentTypesConfig.json +++ b/app/src/config/documentTypesConfig.json @@ -1,42 +1,42 @@ [ { "name": "Scanned Paper Notes", - "snomedCode": "16521000000101", - "configName": "scannedPaperNotesConfig", + "snomed_code": "16521000000101", + "config_name": "scannedPaperNotesConfig", "canUploadIndependently": true, "content": { - "uploadTitle": "Lloyd George scanned paper notes", - "uploadDescription": ["Upload and add files to this patient's scanned paper notes."] + "upload_title": "Scanned Lloyd George notes", + "upload_description": "Upload and add files to a scanned paper Lloyd George record." } }, { "name": "Electronic Health Record", - "snomedCode": "717301000000104", - "configName": "electronicHealthRecordConfig", + "snomed_code": "717301000000104", + "config_name": "electronicHealthRecordConfig", "canUploadIndependently": true, "content": { - "uploadTitle": "Electronic health record (EHR)", - "uploadDescription": ["Upload electronic health record (EHR) notes and attachments.", "Do this if, for example, GP2GP fails."] + "upload_title": "Electronic health record (EHR)", + "upload_description": "Upload the full summary file and attachments of an electronic health record. You might also call these a 'journal' or 'practice notes'. Upload this summary if, for example, GP2GP fails." 
} }, { "name": "Electronic Health Record Attachments", - "snomedCode": "24511000000107", - "configName": "electronicHealthRecordAttachmentsConfig", + "snomed_code": "24511000000107", + "config_name": "electronicHealthRecordAttachmentsConfig", "canUploadIndependently": false, "content": { - "uploadTitle": "", - "uploadDescription": "" + "upload_title": "", + "upload_description": "" } }, { "name": "Letters and Documents", - "snomedCode": "162931000000103", - "configName": "lettersAndDocumentsConfig", + "snomed_code": "162931000000103", + "config_name": "lettersAndDocumentsConfig", "canUploadIndependently": true, "content": { - "uploadTitle": "Other documents", - "uploadDescription": ["Upload other letters and documents that have arrived for this patient after they have left your practice. For example, letters, test results and referrals."] + "upload_title": "Other documents", + "upload_description": "Upload other letters and documents that have arrived for this patient after they have left your practice. For example, letters, test results and referrals." } } ] \ No newline at end of file diff --git a/app/src/config/electronicHealthRecordAttachmentsConfig.json b/app/src/config/electronicHealthRecordAttachmentsConfig.json index 143194f974..368beeaa26 100644 --- a/app/src/config/electronicHealthRecordAttachmentsConfig.json +++ b/app/src/config/electronicHealthRecordAttachmentsConfig.json @@ -18,14 +18,13 @@ "uploadFilesBulletPoints": [ "You can upload files in any format except .zip and .exe files", "If you add more than one attachment, we'll automatically zip them when you upload", - "Remove any passwords from the files", "If there's a problem with your files during upload, you'll need to resolve these before continuing" ], "chooseFilesMessage": "Choose files to upload", "chooseFilesButtonLabel": "Choose files", "chooseFilesWarningText": [ - "Electronic health record attachments are all the documents stored in the patient's EHR with the EHR notes. 
For example, letters, laboratory results, scans and X-rays.", - "EHR attachments must be uploaded as individual files. See [help and guidance](https://digital.nhs.uk/services/access-and-store-digital-patient-documents/help-and-guidance) for instructions on how to do this." + "Electronic health record attachments are all the documents stored in the patient's EHR with the EHR notes. For example, letters, laboratory results, scans and x rays.", + "EHR attachments must be uploaded as individual files. See [help and guidance](https://digital.nhs.uk/services/access-and-store-digital-patient-documents/help-and-guidance) for instructions on how best to do this." ], "confirmFilesTitle": "Check files are for the correct patient", "confirmFilesTableTitle": "Attachments to this EHR to upload", @@ -34,10 +33,6 @@ "previewUploadTitle": "Preview electronic health record attachment", "uploadFilesExtraParagraph": "", "reviewDocumentTitle": "EHR Attachments", - "skipDocumentLinkText": "Continue without uploading any EHR attachments", - "addMoreFilesRadioNoText": "", - "addMoreFilesRadioYesText": "", - "reviewAssessmentPageTitle": "", - "stitchedPreviewFirstParagraph": "" + "skipDocumentLinkText": "Continue without uploading any EHR attachments" } } \ No newline at end of file diff --git a/app/src/config/electronicHealthRecordConfig.json b/app/src/config/electronicHealthRecordConfig.json index 3d2725e639..a4e20b79e1 100644 --- a/app/src/config/electronicHealthRecordConfig.json +++ b/app/src/config/electronicHealthRecordConfig.json @@ -21,14 +21,13 @@ "There is no file size limit", "You can only upload a PDF file", "If your file is not a PDF, you'll need to convert it to one before uploading", - "Remove any passwords from the file", "If there's a problem with your files during upload, you'll need to resolve these before continuing" ], "chooseFilesMessage": "Choose a file to upload", "chooseFilesButtonLabel": "Choose PDF file", "chooseFilesWarningText": [ "The electronic health record 
(EHR) notes contain the patient's personal details and all notes from their consultations and interactions with the practice or other healthcare providers. You may also call them the 'journal', 'practice notes' or a 'full EHR summary'.", - "They are downloaded as a single file from the clinical system. See [help and guidance](https://digital.nhs.uk/services/access-and-store-digital-patient-documents/help-and-guidance) for instructions on how to do this.", + "They are downloaded as a single file from the clinical system. See [help and guidance](https://digital.nhs.uk/services/access-and-store-digital-patient-documents/help-and-guidance) for instructions on how best to do this.", "The file does not include attachments such as letters or other documents. You'll be asked to upload those separately in the next step." ], "confirmFilesTitle": "Check file is for the correct patient", diff --git a/app/src/config/lettersAndDocumentsConfig.json b/app/src/config/lettersAndDocumentsConfig.json index 7cfca13cb1..0f00538476 100644 --- a/app/src/config/lettersAndDocumentsConfig.json +++ b/app/src/config/lettersAndDocumentsConfig.json @@ -19,7 +19,6 @@ "uploadFilesBulletPoints": [ "There is no maximum number of size of files you can upload", "You can upload files in any format except .zip and .exe files", - "Remove any passwords from files", "If there is a problem with your files during upload, you'll need to resolve these before continuing" ], "chooseFilesMessage": "Choose files to upload", @@ -32,10 +31,6 @@ "beforeYouUploadTitle": "Before you upload", "previewUploadTitle": "Preview your PDF files", "uploadFilesExtraParagraph": "", - "reviewDocumentTitle": "Letters and documents", - "addMoreFilesRadioNoText": "", - "addMoreFilesRadioYesText": "", - "reviewAssessmentPageTitle": "", - "stitchedPreviewFirstParagraph": "" + "reviewDocumentTitle": "Letters and documents" } } \ No newline at end of file diff --git a/app/src/config/lloydGeorgeConfig.json 
b/app/src/config/lloydGeorgeConfig.json index fcf835621a..124e7f184f 100644 --- a/app/src/config/lloydGeorgeConfig.json +++ b/app/src/config/lloydGeorgeConfig.json @@ -16,8 +16,8 @@ ], "content": { "viewDocumentTitle": "Scanned paper notes", - "addFilesSelectTitle": "Add files to these scanned paper notes", - "uploadFilesSelectTitle": "Choose scanned paper notes to upload", + "addFilesSelectTitle": "Add scanned paper notes files to this record", + "uploadFilesSelectTitle": "Choose scanned paper notes files to upload", "uploadFilesBulletPoints": [ "You can only upload PDF files", "Check your files open correctly", @@ -31,12 +31,8 @@ "confirmFilesTableTitle": "Scanned paper notes to upload", "confirmFilesTableParagraph": "", "beforeYouUploadTitle": "Before you upload", - "previewUploadTitle": "Preview existing scanned paper notes record", + "previewUploadTitle": "Preview these scanned paper notes", "uploadFilesExtraParagraph": "You can add a note to the patient's electronic health record to say their Lloyd George record is stored in this service. Use SNOMED code 16521000000101.", - "reviewDocumentTitle": "Scanned paper notes", - "addMoreFilesRadioNoText": "No, I don't have anymore scanned paper notes to add for this patient", - "addMoreFilesRadioYesText": "Yes, I have more scanned paper notes to add for this patient", - "reviewAssessmentPageTitle": "Review the new scanned paper notes", - "stitchedPreviewFirstParagraph": "This shows how the final notes will look when combined into a single document. 
" + "reviewDocumentTitle": "Scanned paper notes" } } \ No newline at end of file diff --git a/app/src/helpers/requests/getDocumentSearchResults.test.ts b/app/src/helpers/requests/getDocumentSearchResults.test.ts index b6a51f87f1..16f97eaf4c 100644 --- a/app/src/helpers/requests/getDocumentSearchResults.test.ts +++ b/app/src/helpers/requests/getDocumentSearchResults.test.ts @@ -21,7 +21,7 @@ describe('[GET] getDocumentSearchResults', () => { test('Document search results handles a 2XX response', async () => { const searchResult = buildSearchResult(); - const mockResults = { references: [searchResult] }; + const mockResults = [searchResult]; mockedAxios.get.mockImplementation(() => Promise.resolve({ status: 200, data: mockResults }), ); diff --git a/app/src/helpers/requests/getDocumentSearchResults.ts b/app/src/helpers/requests/getDocumentSearchResults.ts index 71840cb4ef..85a53934ed 100644 --- a/app/src/helpers/requests/getDocumentSearchResults.ts +++ b/app/src/helpers/requests/getDocumentSearchResults.ts @@ -14,7 +14,7 @@ export type DocumentSearchResultsArgs = { }; export type GetDocumentSearchResultsResponse = { - references: Array; + data: Array; }; const getDocumentSearchResults = async ({ @@ -26,17 +26,16 @@ const getDocumentSearchResults = async ({ const gatewayUrl = baseUrl + endpoints.DOCUMENT_SEARCH; try { - const { data } = await axios.get(gatewayUrl, { + const response: GetDocumentSearchResultsResponse = await axios.get(gatewayUrl, { headers: { ...baseHeaders, }, params: { patientId: nhsNumber?.replaceAll(/\s/g, ''), // replace whitespace docType: docType === DOCUMENT_TYPE.ALL ? 
undefined : docType, - limit: 9999, }, }); - return data.references; + return response?.data; } catch (e) { if (isLocal) { return [ @@ -44,7 +43,6 @@ const getDocumentSearchResults = async ({ fileName: 'document_1.pdf', created: '2023-01-01T12:00:00Z', virusScannerResult: 'CLEAN', - author: 'Y12345', id: 'mock-document-id-1', fileSize: 1024, version: '1.0', diff --git a/app/src/helpers/test/testBuilders.ts b/app/src/helpers/test/testBuilders.ts index e985aaf9ca..77d53315fc 100644 --- a/app/src/helpers/test/testBuilders.ts +++ b/app/src/helpers/test/testBuilders.ts @@ -110,7 +110,6 @@ const buildUploadSession = (documents: Array): UploadSession => const buildSearchResult = (searchResultOverride?: Partial): SearchResult => { const result: SearchResult = { - author: 'Y12345', fileName: 'fileName.pdf', created: moment().format(), virusScannerResult: 'Clean', @@ -194,8 +193,8 @@ const buildDocumentConfig = ( acceptedFileTypes: ['PDF'], content: { viewDocumentTitle: 'Scanned paper notes', - addFilesSelectTitle: 'Add scanned paper notes to this record', - uploadFilesSelectTitle: 'Choose scanned paper notes to upload', + addFilesSelectTitle: 'Add scanned paper notes files to this record', + uploadFilesSelectTitle: 'Choose scanned paper notes files to upload', uploadFilesBulletPoints: [ 'You can only upload PDF files', 'Check your files open correctly', diff --git a/app/src/helpers/utils/documentType.ts b/app/src/helpers/utils/documentType.ts index eb9c382f52..f34d075b8e 100644 --- a/app/src/helpers/utils/documentType.ts +++ b/app/src/helpers/utils/documentType.ts @@ -26,11 +26,7 @@ export type ContentKey = | 'uploadFilesBulletPoints' | 'skipDocumentLinkText' | 'confirmFilesTableTitle' - | 'confirmFilesTableParagraph' - | 'addMoreFilesRadioNoText' - | 'addMoreFilesRadioYesText' - | 'reviewAssessmentPageTitle' - | 'stitchedPreviewFirstParagraph'; + | 'confirmFilesTableParagraph'; export interface IndividualDocumentTypeContent extends Record {} // The individual config for 
each document type @@ -52,14 +48,14 @@ export type DOCUMENT_TYPE_CONFIG = { content: IndividualDocumentTypeContent; }; -export type DocumentTypeContentKey = 'uploadTitle' | 'uploadDescription'; +export type DocumentTypeContentKey = 'upload_title' | 'upload_description'; export interface DocumentTypeContent extends Record {} // The document type as defined in the documentTypesConfig.json export interface DocumentType { name: string; - snomedCode: string; - configName: string; + snomed_code: string; + config_name: string; content: DocumentTypeContent; } diff --git a/app/src/helpers/utils/errorCodes.ts b/app/src/helpers/utils/errorCodes.ts index 0f05528d40..93664201c6 100644 --- a/app/src/helpers/utils/errorCodes.ts +++ b/app/src/helpers/utils/errorCodes.ts @@ -42,8 +42,6 @@ const errorCodes: { [key: string]: string } = { "You cannot access this patient's record because they are not registered at your practice. The patient's current practice can access this record if it's stored in this service.", UC_4002: 'There was an issue when attempting to virus scan your uploaded files', UC_4004: technicalIssueMsg, - UC_4006: - "1 or more files failed to upload. Remove any passwords from files and check that all files open correctly. Then return to the patient's record to upload them again.", }; export default errorCodes; diff --git a/app/src/helpers/utils/fileUploadErrorMessages.ts b/app/src/helpers/utils/fileUploadErrorMessages.ts index 9742df18fe..f002c6d80e 100644 --- a/app/src/helpers/utils/fileUploadErrorMessages.ts +++ b/app/src/helpers/utils/fileUploadErrorMessages.ts @@ -74,7 +74,7 @@ export const fileUploadErrorMessages: ErrorMessageType = { invalidFileType: { inline: 'This file is not in the correct file format', errorBox: 'This file is not in the correct file format', - selectFileError: 'You cannot upload this file type.', + selectFileError: 'This file is not a PDF. 
Convert to PDF and try again.', }, removeFilesToSkip: { inline: 'Remove files before you skip to the next step', diff --git a/app/src/helpers/utils/formatDate.test.ts b/app/src/helpers/utils/formatDate.test.ts index 942ef00589..2ff9ad24a6 100644 --- a/app/src/helpers/utils/formatDate.test.ts +++ b/app/src/helpers/utils/formatDate.test.ts @@ -1,71 +1,329 @@ import { describe, expect, it } from 'vitest'; -import { - getFormattedDate, - getFormattedDateTime, - formatDateWithDashes, - getFormattedDateFromString, - getFormattedDateTimeFromString, -} from './formatDate'; +import { getFormattedDate, formatDateWithDashes, getFormattedDateFromString } from './formatDate'; -describe('formatDate.ts', () => { - describe('getFormattedDate', () => { - it('formats date in en-GB locale', () => { - expect(getFormattedDate(new Date('2024-01-15T00:00:00Z'))).toBe('15 January 2024'); - expect(getFormattedDate(new Date('2024-02-29T00:00:00Z'))).toBe('29 February 2024'); - expect(getFormattedDate(new Date('2025-12-31T23:59:59Z'))).toBe('31 December 2025'); +describe('getFormattedDate', () => { + it('formats date in en-GB locale with full month name', () => { + const date = new Date('2025-12-18T10:30:00Z'); + const result = getFormattedDate(date); + expect(result).toBe('18 December 2025'); + }); + + it('formats date on 1st of month', () => { + const date = new Date('2025-01-01T00:00:00Z'); + const result = getFormattedDate(date); + expect(result).toBe('1 January 2025'); + }); + + it('formats date at end of month', () => { + const date = new Date('2025-12-31T23:59:59Z'); + const result = getFormattedDate(date); + expect(result).toBe('31 December 2025'); + }); + + it('formats date in February', () => { + const date = new Date('2025-02-14T12:00:00Z'); + const result = getFormattedDate(date); + expect(result).toBe('14 February 2025'); + }); + + it('formats date with single digit day', () => { + const date = new Date('2025-03-05T08:00:00Z'); + const result = getFormattedDate(date); + 
expect(result).toBe('5 March 2025'); + }); + + it('formats leap year date', () => { + const date = new Date('2024-02-29T00:00:00Z'); + const result = getFormattedDate(date); + expect(result).toBe('29 February 2024'); + }); + + it('formats different months correctly', () => { + const months = [ + { date: new Date('2025-01-15'), expected: '15 January 2025' }, + { date: new Date('2025-02-15'), expected: '15 February 2025' }, + { date: new Date('2025-03-15'), expected: '15 March 2025' }, + { date: new Date('2025-04-15'), expected: '15 April 2025' }, + { date: new Date('2025-05-15'), expected: '15 May 2025' }, + { date: new Date('2025-06-15'), expected: '15 June 2025' }, + { date: new Date('2025-07-15'), expected: '15 July 2025' }, + { date: new Date('2025-08-15'), expected: '15 August 2025' }, + { date: new Date('2025-09-15'), expected: '15 September 2025' }, + { date: new Date('2025-10-15'), expected: '15 October 2025' }, + { date: new Date('2025-11-15'), expected: '15 November 2025' }, + { date: new Date('2025-12-15'), expected: '15 December 2025' }, + ]; + + months.forEach(({ date, expected }) => { + expect(getFormattedDate(date)).toBe(expected); + }); + }); + + it('formats date in different years', () => { + const date1900 = new Date('1900-01-01'); + const date2000 = new Date('2000-06-15'); + const date2099 = new Date('2099-12-31'); + + expect(getFormattedDate(date1900)).toBe('1 January 1900'); + expect(getFormattedDate(date2000)).toBe('15 June 2000'); + expect(getFormattedDate(date2099)).toBe('31 December 2099'); + }); +}); + +describe('formatDateWithDashes', () => { + it('formats date with DD-MM-YYYY format', () => { + const date = new Date('2025-12-18T10:30:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('18-12-2025'); + }); + + it('pads single digit day with leading zero', () => { + const date = new Date('2025-01-05T00:00:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('05-01-2025'); + }); + + it('pads single 
digit month with leading zero', () => { + const date = new Date('2025-09-18T00:00:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('18-09-2025'); + }); + + it('formats date on 1st of month with leading zero', () => { + const date = new Date('2025-03-01T00:00:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('01-03-2025'); + }); + + it('formats date at end of month without leading zero', () => { + const date = new Date('2025-12-31T23:59:59Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('31-12-2025'); + }); + + it('formats February dates correctly', () => { + const date = new Date('2025-02-14T12:00:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('14-02-2025'); + }); + + it('formats leap year date', () => { + const date = new Date('2024-02-29T00:00:00Z'); + const result = formatDateWithDashes(date); + expect(result).toBe('29-02-2024'); + }); + + it('formats all months correctly', () => { + const months = [ + { date: new Date('2025-01-15'), expected: '15-01-2025' }, + { date: new Date('2025-02-15'), expected: '15-02-2025' }, + { date: new Date('2025-03-15'), expected: '15-03-2025' }, + { date: new Date('2025-04-15'), expected: '15-04-2025' }, + { date: new Date('2025-05-15'), expected: '15-05-2025' }, + { date: new Date('2025-06-15'), expected: '15-06-2025' }, + { date: new Date('2025-07-15'), expected: '15-07-2025' }, + { date: new Date('2025-08-15'), expected: '15-08-2025' }, + { date: new Date('2025-09-15'), expected: '15-09-2025' }, + { date: new Date('2025-10-15'), expected: '15-10-2025' }, + { date: new Date('2025-11-15'), expected: '15-11-2025' }, + { date: new Date('2025-12-15'), expected: '15-12-2025' }, + ]; + + months.forEach(({ date, expected }) => { + expect(formatDateWithDashes(date)).toBe(expected); + }); + }); + + it('formats dates with single digit day and month', () => { + const date = new Date('2025-01-01T00:00:00Z'); + const result = 
formatDateWithDashes(date); + expect(result).toBe('01-01-2025'); + }); + + it('formats dates in different years', () => { + const date1900 = new Date('1900-01-01'); + const date2000 = new Date('2000-06-05'); + const date2099 = new Date('2099-12-09'); + + expect(formatDateWithDashes(date1900)).toBe('01-01-1900'); + expect(formatDateWithDashes(date2000)).toBe('05-06-2000'); + expect(formatDateWithDashes(date2099)).toBe('09-12-2099'); + }); + + it('handles dates with different times consistently', () => { + // Use local date construction to avoid timezone issues + const midnight = new Date(2025, 5, 15, 0, 0, 0); + const noon = new Date(2025, 5, 15, 12, 0, 0); + const endOfDay = new Date(2025, 5, 15, 23, 59, 59); + + expect(formatDateWithDashes(midnight)).toBe('15-06-2025'); + expect(formatDateWithDashes(noon)).toBe('15-06-2025'); + expect(formatDateWithDashes(endOfDay)).toBe('15-06-2025'); + }); +}); + +describe('getFormattedDateFromString', () => { + describe('empty or undefined input', () => { + it('returns empty string for undefined', () => { + const result = getFormattedDateFromString(undefined); + expect(result).toBe(''); + }); + + it('returns empty string for empty string', () => { + const result = getFormattedDateFromString(''); + expect(result).toBe(''); }); }); - describe('getFormattedDateTime', () => { - it('formats date and time in en-GB locale', () => { - const result = getFormattedDateTime(new Date('2024-06-20T13:05:00Z')); - expect(result).toContain('20 June 2024'); - expect(/\d{1,2}:[0-5][0-9]/.test(result)).toBe(true); + describe('ISO date string format', () => { + it('formats ISO date string correctly', () => { + const result = getFormattedDateFromString('2025-12-18T10:30:00Z'); + expect(result).toBe('18 December 2025'); + }); + + it('formats ISO date without time', () => { + const result = getFormattedDateFromString('2025-01-15'); + expect(result).toBe('15 January 2025'); + }); + + it('formats ISO date with timezone offset', () => { + const result = 
getFormattedDateFromString('2025-06-15T14:30:00+01:00'); + expect(result).toBe('15 June 2025'); + }); + + it('formats ISO date string with milliseconds', () => { + const result = getFormattedDateFromString('2025-03-20T10:30:00.123Z'); + expect(result).toBe('20 March 2025'); + }); + }); + + describe('numeric timestamp format', () => { + it('formats numeric timestamp string (milliseconds)', () => { + const timestamp = '1734523800000'; // December 18, 2024 + const result = getFormattedDateFromString(timestamp); + expect(result).toContain('December'); + expect(result).toContain('2024'); + }); + + it('formats timestamp at epoch start', () => { + const result = getFormattedDateFromString('0'); + expect(result).toBe('1 January 1970'); + }); + + it('formats recent timestamp', () => { + // January 1, 2025 00:00:00 UTC + const timestamp = '1735689600000'; + const result = getFormattedDateFromString(timestamp); + expect(result).toBe('1 January 2025'); + }); + + it('formats future timestamp', () => { + // December 31, 2099 23:59:59 UTC + const timestamp = String(new Date('2099-12-31T23:59:59Z').getTime()); + const result = getFormattedDateFromString(timestamp); + expect(result).toBe('31 December 2099'); }); }); - describe('formatDateWithDashes', () => { - it('formats date as DD-MM-YYYY with zero padding', () => { - expect(formatDateWithDashes(new Date('2025-01-05T00:00:00Z'))).toBe('05-01-2025'); - expect(formatDateWithDashes(new Date('2025-12-18T10:30:00Z'))).toBe('18-12-2025'); - expect(formatDateWithDashes(new Date('2024-02-29T00:00:00Z'))).toBe('29-02-2024'); + describe('various date string formats', () => { + it('formats US date format (MM/DD/YYYY)', () => { + const result = getFormattedDateFromString('12/18/2025'); + expect(result).toContain('December'); + expect(result).toContain('2025'); + }); + + it('formats date with full month name', () => { + const result = getFormattedDateFromString('December 18, 2025'); + expect(result).toBe('18 December 2025'); + }); + + 
it('formats short date format', () => { + const result = getFormattedDateFromString('2025-12-18'); + expect(result).toBe('18 December 2025'); }); }); - describe('getFormattedDateFromString', () => { - it('returns empty string for undefined or empty input', () => { - expect(getFormattedDateFromString(undefined)).toBe(''); - expect(getFormattedDateFromString('')).toBe(''); + describe('edge cases', () => { + it('handles leap year date', () => { + const result = getFormattedDateFromString('2024-02-29'); + expect(result).toBe('29 February 2024'); }); - it('formats ISO date strings', () => { - expect(getFormattedDateFromString('2025-12-18T10:30:00Z')).toBe('18 December 2025'); - expect(getFormattedDateFromString('2025-01-15')).toBe('15 January 2025'); - expect(getFormattedDateFromString('2024-02-29')).toBe('29 February 2024'); + it('handles date at start of year', () => { + const result = getFormattedDateFromString('2025-01-01T00:00:00Z'); + expect(result).toBe('1 January 2025'); }); - it('formats numeric timestamp strings', () => { - expect(getFormattedDateFromString('0')).toBe('1 January 1970'); - expect(getFormattedDateFromString('1735689600000')).toBe('1 January 2025'); + it('handles date at end of year', () => { + const result = getFormattedDateFromString('2025-12-31T23:59:59Z'); + expect(result).toBe('31 December 2025'); + }); + + it('formats timestamp string with spaces (treated as NaN)', () => { + const result = getFormattedDateFromString(' 12345 '); + // This will be treated as numeric timestamp + expect(result).toBeTruthy(); + }); + + it('handles various ISO formats', () => { + const formats = [ + { input: '2025-06-15T12:00:00Z', expected: '15 June 2025' }, + { input: '2025-06-15T12:00:00.000Z', expected: '15 June 2025' }, + { input: '2025-06-15', expected: '15 June 2025' }, + ]; + + formats.forEach(({ input, expected }) => { + expect(getFormattedDateFromString(input)).toBe(expected); + }); }); }); - describe('getFormattedDateTimeFromString', () => { - 
it('returns empty string for undefined input', () => { - expect(getFormattedDateTimeFromString(undefined)).toBe(''); + describe('timestamp conversion logic', () => { + it('distinguishes between numeric string and ISO string', () => { + const numericTimestamp = '1735689600000'; + const isoString = '2025-01-01T00:00:00Z'; + + const numericResult = getFormattedDateFromString(numericTimestamp); + const isoResult = getFormattedDateFromString(isoString); + + expect(numericResult).toBe('1 January 2025'); + expect(isoResult).toBe('1 January 2025'); + }); + + it('handles very large timestamp', () => { + // Far future date + const timestamp = String(new Date('2099-12-31').getTime()); + const result = getFormattedDateFromString(timestamp); + expect(result).toContain('2099'); + }); + + it('handles small timestamp (early 1970s)', () => { + const timestamp = '86400000'; // 1 day after epoch + const result = getFormattedDateFromString(timestamp); + expect(result).toBe('2 January 1970'); }); + }); + + describe('consistency with getFormattedDate', () => { + it('produces same output as getFormattedDate for ISO string', () => { + const dateString = '2025-06-15T10:30:00Z'; + const date = new Date(dateString); + + const fromString = getFormattedDateFromString(dateString); + const fromDate = getFormattedDate(date); - it('formats ISO date strings with time', () => { - const result = getFormattedDateTimeFromString('2022-11-11T18:45:00'); - expect(result).toContain('11 November 2022'); - expect(/\d{1,2}:[0-5][0-9]/.test(result)).toBe(true); + expect(fromString).toBe(fromDate); }); - it('formats numeric timestamp strings with time', () => { - const ts = String(new Date('2024-07-21T09:30:00Z').getTime()); - const result = getFormattedDateTimeFromString(ts); - expect(result).toContain('21 July 2024'); - expect(/\d{1,2}:[0-5][0-9]/.test(result)).toBe(true); + it('produces same output as getFormattedDate for numeric timestamp', () => { + const timestamp = Date.now(); + const timestampString = 
String(timestamp); + const date = new Date(timestamp); + + const fromString = getFormattedDateFromString(timestampString); + const fromDate = getFormattedDate(date); + + expect(fromString).toBe(fromDate); }); }); }); diff --git a/app/src/helpers/utils/formatDate.ts b/app/src/helpers/utils/formatDate.ts index 31d796ac2b..849ec91742 100644 --- a/app/src/helpers/utils/formatDate.ts +++ b/app/src/helpers/utils/formatDate.ts @@ -2,17 +2,6 @@ export const getFormattedDate = (date: Date): string => { return date.toLocaleDateString('en-GB', { day: 'numeric', month: 'long', year: 'numeric' }); }; -export const getFormattedDateTime = (date: Date): string => { - return date.toLocaleDateString('en-GB', { - day: 'numeric', - month: 'long', - year: 'numeric', - hour: '2-digit', - minute: 'numeric', - hour12: true, - }); -}; - export const formatDateWithDashes = (date: Date): string => { const day = String(date.getDate()).padStart(2, '0'); const month = String(date.getMonth() + 1).padStart(2, '0'); @@ -30,13 +19,3 @@ export const getFormattedDateFromString = (dateString: string | undefined): stri } return getFormattedDate(new Date(Number(dateString))); }; - -export const getFormattedDateTimeFromString = (dateString: string | undefined): string => { - if (!dateString) { - return ''; - } - if (Number.isNaN(Number(dateString))) { - return getFormattedDateTime(new Date(dateString)); - } - return getFormattedDateTime(new Date(Number(dateString))); -}; diff --git a/app/src/pages/adminRoutesPage/AdminRoutesPage.tsx b/app/src/pages/adminRoutesPage/AdminRoutesPage.tsx index 69daec7c7d..49f04efce2 100644 --- a/app/src/pages/adminRoutesPage/AdminRoutesPage.tsx +++ b/app/src/pages/adminRoutesPage/AdminRoutesPage.tsx @@ -15,7 +15,6 @@ import ReviewsDetailsStage from '../../components/blocks/_admin/reviewsDetailsSt import ReviewDetailsPatientSearchStage from '../../components/blocks/_admin/reviewDetailsPatientSearchStage/ReviewDetailsPatientSearchStage'; import { ReviewsPage } from 
'../../components/blocks/_admin/reviewsPage/ReviewsPage'; import PatientVerifyPage from '../../components/blocks/generic/patientVerifyPage/PatientVerifyPage'; -import DocumentSelectFileErrorsPage from '../../components/blocks/_documentUpload/documentSelectFileErrorsPage/DocumentSelectFileErrorsPage'; import useConfig from '../../helpers/hooks/useConfig'; import { getLastURLPath } from '../../helpers/utils/urlManipulations'; import { routeChildren, routes } from '../../types/generic/routes'; @@ -178,10 +177,6 @@ const AdminRoutesPage = (): JSX.Element => { /> } /> - } - /> } @@ -215,6 +210,10 @@ const AdminRoutesPage = (): JSX.Element => { } /> diff --git a/app/src/pages/documentUploadPage/DocumentUploadPage.tsx b/app/src/pages/documentUploadPage/DocumentUploadPage.tsx index 1f618fa35c..df6849a245 100644 --- a/app/src/pages/documentUploadPage/DocumentUploadPage.tsx +++ b/app/src/pages/documentUploadPage/DocumentUploadPage.tsx @@ -14,7 +14,7 @@ import useBaseAPIUrl from '../../helpers/hooks/useBaseAPIUrl'; import useConfig from '../../helpers/hooks/useConfig'; import usePatient from '../../helpers/hooks/usePatient'; import { uploadDocumentToS3 } from '../../helpers/requests/uploadDocuments'; -import { errorToParams } from '../../helpers/utils/errorToParams'; +import { errorCodeToParams, errorToParams } from '../../helpers/utils/errorToParams'; import { isLocal, isMock } from '../../helpers/utils/isLocal'; import { markDocumentsAsUploading, @@ -44,12 +44,10 @@ import { getUploadSession, goToNextDocType, goToPreviousDocType, - handleDocumentStatusUpdates, reduceDocumentsForUpload, startIntervalTimer, } from '../../helpers/utils/documentUpload'; import DocumentUploadIndex from '../../components/blocks/_documentUpload/documentUploadIndex/DocumentUploadIndex'; -import { UPDATE_DOCUMENT_STATE_FREQUENCY_MILLISECONDS } from '../../helpers/constants/network'; const DocumentUploadPage = (): React.JSX.Element => { const patientDetails = usePatient(); @@ -76,6 +74,9 @@ const 
DocumentUploadPage = (): React.JSX.Element => { const [showSkipLink, setShowSkipLink] = useState(undefined); const [documentTypeList, setDocumentTypeList] = useState([]); + const UPDATE_DOCUMENT_STATE_FREQUENCY_MILLISECONDS = 5000; + const MAX_POLLING_TIME = 600000; + useEffect(() => { const journeyParam = getJourney(); if (journeyParam === 'update') { @@ -90,15 +91,44 @@ const DocumentUploadPage = (): React.JSX.Element => { }, []); useEffect(() => { - handleDocumentStatusUpdates( - journey, - navigate, - intervalTimer, - interval, - documents, - virusReference, - completeRef, - ); + const journeyParam = getJourney(); + + if (journeyParam === 'update' && journey !== journeyParam) { + globalThis.clearInterval(intervalTimer); + navigate(routes.SERVER_ERROR); + return; + } + + if (interval.current * UPDATE_DOCUMENT_STATE_FREQUENCY_MILLISECONDS > MAX_POLLING_TIME) { + window.clearInterval(intervalTimer); + navigate(routes.SERVER_ERROR); + return; + } + + const hasVirus = documents.some((d) => d.state === DOCUMENT_UPLOAD_STATE.INFECTED); + const docWithError = + documents.length === 1 && + documents.find((d) => d.state === DOCUMENT_UPLOAD_STATE.ERROR); + const allFinished = + documents.length > 0 && + documents.every( + (d) => + d.state === DOCUMENT_UPLOAD_STATE.SUCCEEDED || + d.state === DOCUMENT_UPLOAD_STATE.ERROR, + ); + + if (hasVirus && !virusReference.current) { + virusReference.current = true; + window.clearInterval(intervalTimer); + navigate(routeChildren.DOCUMENT_UPLOAD_INFECTED); + } else if (docWithError) { + const errorParams = docWithError.error ? 
errorCodeToParams(docWithError.error) : ''; + navigate(routes.SERVER_ERROR + errorParams); + } else if (allFinished && !completeRef.current) { + completeRef.current = true; + window.clearInterval(intervalTimer); + navigate.withParams(routeChildren.DOCUMENT_UPLOAD_COMPLETED); + } }, [ baseHeaders, baseUrl, diff --git a/app/src/pages/lloydGeorgeRecordPage/LloydGeorgeRecordPage.test.tsx b/app/src/pages/lloydGeorgeRecordPage/LloydGeorgeRecordPage.test.tsx index d3d37ed678..1113e93a5d 100644 --- a/app/src/pages/lloydGeorgeRecordPage/LloydGeorgeRecordPage.test.tsx +++ b/app/src/pages/lloydGeorgeRecordPage/LloydGeorgeRecordPage.test.tsx @@ -25,10 +25,6 @@ vi.mock('../../helpers/hooks/useBaseAPIHeaders'); vi.mock('../../helpers/hooks/useBaseAPIUrl'); vi.mock('../../helpers/hooks/useRole'); -vi.mock('../../providers/analyticsProvider/AnalyticsProvider', () => ({ - useAnalyticsContext: (): [null, () => void] => [null, (): void => {}], -})); - const mockAxios = axios as Mocked; const mockPatientDetails = buildPatientDetails(); const mockedUsePatient = usePatient as Mock; diff --git a/app/src/pages/mockLoginPage/MockLoginPage.tsx b/app/src/pages/mockLoginPage/MockLoginPage.tsx index f8e0e8f2fe..9de0a2685e 100644 --- a/app/src/pages/mockLoginPage/MockLoginPage.tsx +++ b/app/src/pages/mockLoginPage/MockLoginPage.tsx @@ -59,7 +59,7 @@ const MockLoginPage = (): React.JSX.Element => {

{ }); describe('Rendering', () => { - it('renders page headers', async () => { + it('renders page headers', () => { render(); const contentHeaders = [ @@ -38,11 +38,8 @@ describe('PrivacyPage', () => { 'Feedback form privacy notice', 'Contact us', ]; - - await waitFor(async () => { - contentHeaders.forEach((str) => { - expect(screen.getByRole('heading', { name: str })).toBeInTheDocument(); - }); + contentHeaders.forEach((str) => { + expect(screen.getByRole('heading', { name: str })).toBeInTheDocument(); }); }); diff --git a/app/src/pages/serverErrorPage/ServerErrorPage.test.tsx b/app/src/pages/serverErrorPage/ServerErrorPage.test.tsx index ddd7da93af..17445e22ee 100644 --- a/app/src/pages/serverErrorPage/ServerErrorPage.test.tsx +++ b/app/src/pages/serverErrorPage/ServerErrorPage.test.tsx @@ -1,10 +1,10 @@ import { render, screen, waitFor } from '@testing-library/react'; +import { act } from 'react'; import ServerErrorPage from './ServerErrorPage'; import userEvent from '@testing-library/user-event'; import { unixTimestamp } from '../../helpers/utils/createTimestamp'; import { runAxeTest } from '../../helpers/test/axeTestHelper'; import { afterEach, beforeEach, describe, expect, it, Mock, vi } from 'vitest'; -import { routes } from '../../types/generic/routes'; const mockedUseNavigate = vi.fn(); const mockSearchParamsGet = vi.fn(); @@ -38,12 +38,12 @@ describe('ServerErrorPage', () => { expect(screen.getByText('There was an unexplained error')).toBeInTheDocument(); expect( screen.getByText( - "Try again by returning to the home page. You'll need to enter any information you submitted again.", + "Try again by returning to the previous page. 
You'll need to enter any information you submitted again.", ), ).toBeInTheDocument(); expect( screen.getByRole('button', { - name: 'Go to home', + name: 'Return to previous page', }), ).toBeInTheDocument(); expect( @@ -128,14 +128,16 @@ describe('ServerErrorPage', () => { mockSearchParamsGet.mockReturnValue(mockEncoded); render(); - const homeButtonLink = screen.getByRole('button', { - name: 'Go to home', + const returnButtonLink = screen.getByRole('button', { + name: 'Return to previous page', + }); + expect(returnButtonLink).toBeInTheDocument(); + act(() => { + userEvent.click(returnButtonLink); }); - expect(homeButtonLink).toBeInTheDocument(); - await userEvent.click(homeButtonLink); await waitFor(() => { - expect(mockedUseNavigate).toHaveBeenCalledWith(routes.HOME); + expect(mockedUseNavigate).toHaveBeenCalledWith(-2); }); }); }); diff --git a/app/src/pages/serverErrorPage/ServerErrorPage.tsx b/app/src/pages/serverErrorPage/ServerErrorPage.tsx index 0436db7976..1d1d0c7caf 100644 --- a/app/src/pages/serverErrorPage/ServerErrorPage.tsx +++ b/app/src/pages/serverErrorPage/ServerErrorPage.tsx @@ -3,7 +3,6 @@ import { ButtonLink } from 'nhsuk-react-components'; import errorCodes from '../../helpers/utils/errorCodes'; import { unixTimestamp } from '../../helpers/utils/createTimestamp'; import useTitle from '../../helpers/hooks/useTitle'; -import { routes } from '../../types/generic/routes'; type ServerError = [errorCode: string | null, interactionId: string | null]; @@ -27,17 +26,27 @@ const ServerErrorPage = (): React.JSX.Element => {

Sorry, there is a problem with the service

{errorMessage}

- Try again by returning to the home page. You'll need to enter any information you - submitted again. + Try again by returning to the previous page. You'll need to enter any information + you submitted again.

{ e.preventDefault(); - navigate(routes.HOME); + const errorUrl = window.location.href; + // Navigate back two paces incase the previous page has an error in the prefetch + navigate(-2); + + // If this code is reached, we can assume that the component + // has not destroyed and navigate(-2) has no where to go + const urlAfterMinusTwoNavigate = window.location.href; + const urlHasNotChanged = errorUrl === urlAfterMinusTwoNavigate; + if (urlHasNotChanged) { + navigate(-1); + } }} > - Go to home + Return to previous page

If this error keeps appearing

diff --git a/app/src/pages/startPage/StartPage.test.tsx b/app/src/pages/startPage/StartPage.test.tsx index 8a50a73320..f763ec8acb 100644 --- a/app/src/pages/startPage/StartPage.test.tsx +++ b/app/src/pages/startPage/StartPage.test.tsx @@ -24,9 +24,11 @@ describe('StartPage', () => { it('renders start page content', () => { const contentStrings = [ - 'view, upload, download or remove a document', - "add files to a patient's scanned paper notes", - 'review and action pending patient documents', + 'This service gives you access to Lloyd George digital health records. ' + + 'You may have received a note within a patient record, stating that the paper record has been digitised.', + 'If you are part of a GP practice, you can use this service to:', + 'view a patient record', + 'remove a patient record', 'If you are managing records on behalf of NHS England, you can:', 'Not every patient will have a digital record available.', 'Before you start', @@ -37,18 +39,18 @@ describe('StartPage', () => { render(); - screen.getByText( - 'This service gives you access to digital Lloyd George records. 
Within a record we store the following documents:', - ); - - expect( - screen.getAllByText('download a report on the records stored in this service'), - ).toHaveLength(2); - contentStrings.forEach((s) => { expect(screen.getByText(s)).toBeInTheDocument(); }); + const downloadPatientRecord = screen.getAllByText('download a patient record'); + expect(downloadPatientRecord).toHaveLength(2); + + const downloadOdsReport = screen.getAllByText( + 'download a report on the records held within this service', + ); + expect(downloadOdsReport).toHaveLength(2); + expect(screen.getByText(/Contact the/i)).toBeInTheDocument(); expect( screen.getByRole('link', { diff --git a/app/src/pages/startPage/StartPage.tsx b/app/src/pages/startPage/StartPage.tsx index fb542a3d60..7e593861d8 100644 --- a/app/src/pages/startPage/StartPage.tsx +++ b/app/src/pages/startPage/StartPage.tsx @@ -33,30 +33,21 @@ const StartPage = (): React.JSX.Element => { <>

{pageHeader}

- This service gives you access to digital Lloyd George records. Within a record we - store the following documents: -

-
    -
  • Lloyd George scanned paper notes
  • -
  • Electronic health record notes
  • -
  • Electronic health record attachments
  • -
  • Patient letters and documents
  • -
-

- You may have received a note within a patient's Lloyd George notes, stating that - they have been digitised. + This service gives you access to Lloyd George digital health records. You may have + received a note within a patient record, stating that the paper record has been + digitised.

If you are part of a GP practice, you can use this service to:

    -
  • view, upload, download or remove a document
  • -
  • add files to a patient's scanned paper notes
  • -
  • review and action pending patient documents
  • -
  • download a report on the records stored in this service
  • +
  • view a patient record
  • +
  • download a patient record
  • +
  • remove a patient record
  • +
  • download a report on the records held within this service

If you are managing records on behalf of NHS England, you can:

    -
  • download documents
  • -
  • download a report on the records stored in this service
  • +
  • download a patient record
  • +
  • download a report on the records held within this service

Not every patient will have a digital record available.

Before you start

diff --git a/app/src/types/generic/routes.ts b/app/src/types/generic/routes.ts index cc118920d7..aed587d44b 100644 --- a/app/src/types/generic/routes.ts +++ b/app/src/types/generic/routes.ts @@ -83,7 +83,6 @@ export enum routeChildren { ADMIN_REVIEW_REMOVE_ALL = '/admin/reviews/:reviewId/remove-all', ADMIN_REVIEW_UPLOAD_FILE_ORDER = '/admin/reviews/:reviewId/upload-file-order', ADMIN_REVIEW_UPLOAD = '/admin/reviews/:reviewId/upload', - ADMIN_REVIEW_FILE_ERRORS = '/admin/reviews/:reviewId/file-errors', REVIEWS = 'reviews/*', COOKIES_POLICY_UPDATED = '/cookies-policy/confirmation', diff --git a/app/src/types/generic/searchResult.ts b/app/src/types/generic/searchResult.ts index 85a5516437..20c6a7dc6f 100644 --- a/app/src/types/generic/searchResult.ts +++ b/app/src/types/generic/searchResult.ts @@ -3,7 +3,6 @@ import { DOCUMENT_TYPE } from '../../helpers/utils/documentType'; export type SearchResult = { fileName: string; created: string; - author: string; virusScannerResult: string; id: string; fileSize: number; diff --git a/app/src/types/pages/UploadDocumentsPage/types.ts b/app/src/types/pages/UploadDocumentsPage/types.ts index db18794322..121c6a8b00 100644 --- a/app/src/types/pages/UploadDocumentsPage/types.ts +++ b/app/src/types/pages/UploadDocumentsPage/types.ts @@ -28,7 +28,6 @@ export enum DOCUMENT_STATUS { CANCELLED = 'cancelled', INFECTED = 'infected', NOT_FOUND = 'not-found', - INVALID = 'invalid', } export enum UploadDocumentType { diff --git a/lambdas/enums/document_status.py b/lambdas/enums/document_status.py index 65ccac376b..2a16cd5bec 100644 --- a/lambdas/enums/document_status.py +++ b/lambdas/enums/document_status.py @@ -6,7 +6,6 @@ class DocumentStatus(Enum): FORBIDDEN = ("forbidden", "UC_4003") NOT_FOUND = ("not-found", "UC_4004") INFECTED = ("infected", "UC_4005") - INVALID = ("invalid", "UC_4006") @property def code(self): diff --git a/lambdas/enums/dynamo_filter.py b/lambdas/enums/dynamo_filter.py index aa0f6e77dd..5a3749f0ee 100644 --- 
a/lambdas/enums/dynamo_filter.py +++ b/lambdas/enums/dynamo_filter.py @@ -15,5 +15,3 @@ class AttributeOperator(Enum): class ConditionOperator(Enum): OR = "|" AND = "&" - EQUAL = "=" - NOT_EQUAL = "<>" diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py index d5c2a62e5f..57563fd3db 100644 --- a/lambdas/enums/lambda_error.py +++ b/lambdas/enums/lambda_error.py @@ -43,9 +43,7 @@ def create_error_response( return error_response def to_str( - self, - params: Optional[dict] = None, - details: Optional[str] = None, + self, params: Optional[dict] = None, details: Optional[str] = None ) -> str: message = self.value["message"] if "%" in message and params: @@ -61,9 +59,7 @@ def create_error_body( **kwargs, ) -> str: return self.create_error_response( - params=params, - details=details, - **kwargs, + params=params, details=details, **kwargs ).create() """ @@ -444,10 +440,6 @@ def create_error_body( "err_code": "UC_4005", "message": "Some of the given document references are not referring to clean files", } - UploadConfirmResultFilesInvalid = { - "err_code": "UC_4006", - "message": "Some of the given document references are password protected or corrupted", - } UploadConfirmResultAWSFailure = { "err_code": "UC_5004", "message": "Error occurred with an AWS service", diff --git a/lambdas/enums/virus_scan_result.py b/lambdas/enums/virus_scan_result.py index ade769ab5c..484d9e9027 100644 --- a/lambdas/enums/virus_scan_result.py +++ b/lambdas/enums/virus_scan_result.py @@ -7,7 +7,6 @@ class VirusScanResult(StrEnum): INFECTED_ALLOWED = "InfectedAllowed" UNSCANNABLE = "Unscannable" ERROR = "Error" - INVALID = "Invalid" SCAN_RESULT_TAG_KEY = "scan-result" diff --git a/lambdas/handlers/bulk_upload_metadata_processor_handler.py b/lambdas/handlers/bulk_upload_metadata_processor_handler.py index 85a0c88825..048214cd38 100644 --- a/lambdas/handlers/bulk_upload_metadata_processor_handler.py +++ b/lambdas/handlers/bulk_upload_metadata_processor_handler.py @@ -18,13 
+18,13 @@ @set_request_context_for_logging @override_error_check @ensure_environment_variables( - names=["STAGING_STORE_BUCKET_NAME", "METADATA_SQS_QUEUE_URL"], + names=["STAGING_STORE_BUCKET_NAME", "METADATA_SQS_QUEUE_URL"] ) @handle_lambda_exceptions def lambda_handler(event, _context): feature_flag_service = FeatureFlagService() send_to_review_flag_object = feature_flag_service.get_feature_flags_by_flag( - FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value, + FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value ) send_to_review_enabled = send_to_review_flag_object[ FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value @@ -32,12 +32,11 @@ def lambda_handler(event, _context): if send_to_review_enabled: logger.info( - "Bulk upload send to review queue is enabled for metadata processor", + "Bulk upload send to review queue is enabled for metadata processor" ) raw_pre_format_type = event.get( - "preFormatType", - LloydGeorgePreProcessFormat.GENERAL, + "preFormatType", LloydGeorgePreProcessFormat.GENERAL ) formatter_service_class = get_formatter_service(raw_pre_format_type) input_file_location = event.get("inputFileLocation", "") @@ -59,12 +58,12 @@ def lambda_handler(event, _context): if not input_file_location: logger.error( - "Failed to start metadata processing due to missing field: inputFileLocation", + "Failed to start metadata processing due to missing field: inputFileLocation" ) return logger.info( - f"Starting metadata processing for file location: {input_file_location}", + f"Starting metadata processing for file location: {input_file_location}" ) fixed_values = event.get("fixedValues", {}) diff --git a/lambdas/handlers/document_reference_search_handler.py b/lambdas/handlers/document_reference_search_handler.py index b6f4ad126e..74c5d37bff 100755 --- a/lambdas/handlers/document_reference_search_handler.py +++ b/lambdas/handlers/document_reference_search_handler.py @@ -10,32 +10,32 @@ from utils.decorators.handle_lambda_exceptions import 
handle_lambda_exceptions from utils.decorators.override_error_check import override_error_check from utils.decorators.set_audit_arg import set_request_context_for_logging -from utils.decorators.validate_patient_id import validate_patient_id +from utils.decorators.validate_patient_id import ( + extract_nhs_number_from_event, + validate_patient_id, +) from utils.document_type_utils import extract_document_type_to_enum from utils.lambda_exceptions import DocumentRefSearchException from utils.lambda_response import ApiGatewayResponse from utils.request_context import request_context -from utils.utilities import camelize_dict logger = LoggingService(__name__) @set_request_context_for_logging @validate_patient_id -@ensure_environment_variables(names=["LLOYD_GEORGE_DYNAMODB_NAME"]) +@ensure_environment_variables(names=["DYNAMODB_TABLE_LIST"]) @override_error_check @handle_lambda_exceptions def lambda_handler(event, context): request_context.app_interaction = LoggingAppInteraction.VIEW_PATIENT.value logger.info("Starting document reference search process") - nhs_number, next_page_token, limit = extract_querystring_params(event) - + nhs_number = extract_nhs_number_from_event(event) + doc_type = event.get("queryStringParameters", {}).get("docType", None) try: - document_snomed_code = ( - extract_document_type_to_enum(doc_type) if doc_type else None - ) + document_snomed_code = extract_document_type_to_enum(doc_type) if doc_type else None except ValueError: raise DocumentRefSearchException(400, LambdaError.DocTypeInvalid) @@ -43,40 +43,30 @@ def lambda_handler(event, context): document_reference_search_service = DocumentReferenceSearchService() upload_lambda_enabled_flag_object = FeatureFlagService().get_feature_flags_by_flag( - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED, + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED ) doc_upload_iteration2_enabled = upload_lambda_enabled_flag_object[ FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED ] + additional_filters = {} if 
doc_upload_iteration2_enabled: additional_filters["doc_status"] = "final" if document_snomed_code: additional_filters["document_snomed_code"] = document_snomed_code[0].value - logger.info("Searching for patient references with pagination.") - - response_dict = ( - document_reference_search_service.get_paginated_references_by_nhs_number( - nhs_number=nhs_number, - limit=limit, - next_page_token=next_page_token, - filter=additional_filters, - ) + response = document_reference_search_service.get_document_references( + nhs_number, + check_upload_completed=True, + additional_filters=additional_filters ) - response = camelize_dict(response_dict) logger.info("User is able to view docs", {"Result": "Successful viewing docs"}) - return ApiGatewayResponse( - 200, - json.dumps(response), - "GET", - ).create_api_gateway_response() - - -def extract_querystring_params(event): - nhs_number = event["queryStringParameters"]["patientId"] - next_page_token = event["queryStringParameters"].get("nextPageToken") - limit = event["queryStringParameters"].get("limit") - - return nhs_number, next_page_token, limit + if response: + return ApiGatewayResponse( + 200, json.dumps(response), "GET" + ).create_api_gateway_response() + else: + return ApiGatewayResponse( + 204, json.dumps([]), "GET" + ).create_api_gateway_response() diff --git a/lambdas/handlers/fhir_document_reference_search_handler.py b/lambdas/handlers/fhir_document_reference_search_handler.py index 8f342c82ce..a6c1093de6 100644 --- a/lambdas/handlers/fhir_document_reference_search_handler.py +++ b/lambdas/handlers/fhir_document_reference_search_handler.py @@ -31,7 +31,7 @@ @ensure_environment_variables( - names=["LLOYD_GEORGE_DYNAMODB_NAME", "DOCUMENT_RETRIEVE_ENDPOINT_APIM"], + names=["DYNAMODB_TABLE_LIST", "DOCUMENT_RETRIEVE_ENDPOINT_APIM"] ) @set_request_context_for_logging @validate_patient_id_fhir @@ -51,7 +51,7 @@ def lambda_handler(event: Dict[str, Any], context: Any) -> Dict[str, Any]: selected_role_id = 
event.get("headers", {}).get(HEADER_CIS2_USER_ID, "") nhs_number, search_filters = parse_query_parameters( - event.get("queryStringParameters", {}), + event.get("queryStringParameters", {}) ) request_context.patient_nhs_no = nhs_number @@ -70,14 +70,12 @@ def lambda_handler(event: Dict[str, Any], context: Any) -> Dict[str, Any]: if document_references["total"] < 1: logger.info(f"No document references found for NHS number: {nhs_number}") return ApiGatewayResponse( - 200, - json.dumps(document_references), - "GET", + 200, json.dumps(document_references), "GET" ).create_api_gateway_response() def parse_query_parameters( - query_string: Dict[str, str], + query_string: Dict[str, str] ) -> Tuple[Optional[str], Dict[str, str]]: """ Parse and extract NHS number and search filters from query parameters. @@ -107,9 +105,7 @@ def parse_query_parameters( def validate_user_access( - bearer_token: str, - selected_role_id: str, - nhs_number: str, + bearer_token: str, selected_role_id: str, nhs_number: str ) -> None: """ Validate that the user has permission to access the requested patient data. 
@@ -135,9 +131,7 @@ def validate_user_access( userinfo = oidc_service.fetch_userinfo(bearer_token) org_ods_code = oidc_service.fetch_user_org_code(userinfo, selected_role_id) smartcard_role_code, _ = oidc_service.fetch_user_role_code( - userinfo, - selected_role_id, - "R", + userinfo, selected_role_id, "R" ) except (OidcApiException, AuthorisationException) as e: logger.error(f"Authorization failed: {e}") @@ -146,8 +140,7 @@ def validate_user_access( try: # Validate patient access search_patient_service = SearchPatientDetailsService( - smartcard_role_code, - org_ods_code, + smartcard_role_code, org_ods_code ) search_patient_service.handle_search_patient_request(nhs_number, False) except SearchPatientException as e: diff --git a/lambdas/handlers/report_s3_content_handler.py b/lambdas/handlers/report_s3_content_handler.py new file mode 100644 index 0000000000..0ffb4cb277 --- /dev/null +++ b/lambdas/handlers/report_s3_content_handler.py @@ -0,0 +1,21 @@ +from services.reporting.report_s3_content_service import ReportS3ContentService +from utils.audit_logging_setup import LoggingService +from utils.decorators.ensure_env_var import ensure_environment_variables +from utils.decorators.handle_lambda_exceptions import handle_lambda_exceptions +from utils.decorators.override_error_check import override_error_check +from utils.decorators.set_audit_arg import set_request_context_for_logging + +logger = LoggingService(__name__) + + +@ensure_environment_variables(names=["BULK_STAGING_BUCKET_NAME"]) +@ensure_environment_variables(names=["STATISTICAL_REPORTS_BUCKET"]) +@override_error_check +@handle_lambda_exceptions +@set_request_context_for_logging +def lambda_handler(event, context): + logger.info("Report S3 content lambda invoked") + + service = ReportS3ContentService() + + service.process_s3_content() diff --git a/lambdas/models/document_reference.py b/lambdas/models/document_reference.py index 613d88343c..ecd2cd224e 100644 --- a/lambdas/models/document_reference.py +++ 
b/lambdas/models/document_reference.py @@ -102,7 +102,7 @@ class DocumentReference(BaseModel): author: str | None = None content_type: str = Field(default=DEFAULT_CONTENT_TYPE) created: str = Field( - default_factory=lambda: datetime.now(timezone.utc).strftime(DATE_FORMAT), + default_factory=lambda: datetime.now(timezone.utc).strftime(DATE_FORMAT) ) document_scan_creation: Optional[str] = Field( default_factory=lambda: datetime.date(datetime.now()).isoformat(), @@ -125,10 +125,10 @@ class DocumentReference(BaseModel): ] = Field(default="preliminary") doc_type: str = Field(default=None, exclude=True) document_snomed_code_type: Optional[str] = Field( - default=SnomedCodes.LLOYD_GEORGE.value.code, + default=SnomedCodes.LLOYD_GEORGE.value.code ) file_location: str = "" - file_name: str | None + file_name: str file_size: int | None = Field(default=None) last_updated: int = Field( default_factory=lambda: int(datetime.now(timezone.utc).timestamp()), @@ -140,12 +140,11 @@ class DocumentReference(BaseModel): s3_version_id: Optional[str] = Field(default=None, alias="S3VersionID") s3_upload_key: str = Field(default=None, exclude=True) status: Literal["current", "superseded", "entered-in-error"] = Field( - default="current", + default="current" ) sub_folder: str = Field(default=None, exclude=True) ttl: Optional[int] = Field( - alias=str(DocumentReferenceMetadataFields.TTL.value), - default=None, + alias=str(DocumentReferenceMetadataFields.TTL.value), default=None ) uploaded: bool = Field(default=None) uploading: bool = Field(default=None) @@ -175,8 +174,7 @@ def set_location_properties(cls, data, *args, **kwargs): if "s3_file_key" not in data: data["s3_file_key"] = cls._build_final_s3_key(data) data["file_location"] = cls._build_s3_location( - data["s3_bucket_name"], - current_s3_file_key, + data["s3_bucket_name"], current_s3_file_key ) return data diff --git a/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py 
b/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py index 2837f6e150..9bdba35e41 100644 --- a/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py +++ b/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py @@ -38,7 +38,7 @@ def send_message_to_review_queue( request_context.patient_nhs_no = staging_metadata.nhs_number review_files = [ ReviewMessageFile( - file_name=file.file_path.split("/")[-1], + file_name=file.stored_file_name.split("/")[-1], file_path=file.file_path.lstrip("/"), ) for file in staging_metadata.files @@ -56,7 +56,7 @@ def send_message_to_review_queue( logger.info( f"Sending message to review queue for NHS number {staging_metadata.nhs_number} " - f"with failure reason: {failure_reason}", + f"with failure reason: {failure_reason}" ) self.sqs_repository.send_message_standard( @@ -80,9 +80,7 @@ def put_sqs_message_back_to_queue(self, sqs_message: dict): ) def send_message_to_pdf_stitching_queue( - self, - queue_url: str, - message: PdfStitchingSqsMessage, + self, queue_url: str, message: PdfStitchingSqsMessage ): self.sqs_repository.send_message_standard( queue_url=queue_url, diff --git a/lambdas/requirements/layers/requirements_files_lambda_layer.txt b/lambdas/requirements/layers/requirements_files_lambda_layer.txt deleted file mode 100644 index 0d4bff021d..0000000000 --- a/lambdas/requirements/layers/requirements_files_lambda_layer.txt +++ /dev/null @@ -1 +0,0 @@ -msoffcrypto-tool==6.0.0 \ No newline at end of file diff --git a/lambdas/ruff.toml b/lambdas/ruff.toml index 9b92d3839a..b461c7568d 100644 --- a/lambdas/ruff.toml +++ b/lambdas/ruff.toml @@ -32,8 +32,7 @@ line-length = 130 # Enable the pycodestyle (`E`) and Pyflakes (`F`) rules by default. # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or # McCabe complexity (`C901`) by default. -# COM812: Enforce trailing commas on multi-line constructs. 
-select = ["E", "F", "COM812", "RET505"] +select = ["E", "F"] ignore = [] # Allow autofix for all enabled rules (when `--fix`) is provided. diff --git a/lambdas/services/base/s3_service.py b/lambdas/services/base/s3_service.py index ef04724e61..86a2234dd7 100644 --- a/lambdas/services/base/s3_service.py +++ b/lambdas/services/base/s3_service.py @@ -40,9 +40,7 @@ def __init__(self, custom_aws_role=None): if custom_aws_role: self.iam_service = IAMService() self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, - "s3", - config=self.config, + self.custom_aws_role, "s3", config=self.config ) # S3 Location should be a minimum of a s3_object_key but can also be a directory location in the form of @@ -50,13 +48,11 @@ def __init__(self, custom_aws_role=None): def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: str): if self.custom_client: if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10, + minutes=10 ): logger.info(S3Service.EXPIRED_SESSION_WARNING) self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, - "s3", - config=self.config, + self.custom_aws_role, "s3", config=self.config ) return self.custom_client.generate_presigned_post( s3_bucket_name, @@ -69,13 +65,11 @@ def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: s def create_put_presigned_url(self, s3_bucket_name: str, file_key: str): if self.custom_client: if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10, + minutes=10 ): logger.info(S3Service.EXPIRED_SESSION_WARNING) self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, - "s3", - config=self.config, + self.custom_aws_role, "s3", config=self.config ) logger.info("Generating presigned URL") return self.custom_client.generate_presigned_url( @@ -88,13 +82,11 @@ def create_put_presigned_url(self, s3_bucket_name: str, file_key: str): 
def create_download_presigned_url(self, s3_bucket_name: str, file_key: str): if self.custom_client: if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10, + minutes=10 ): logger.info(S3Service.EXPIRED_SESSION_WARNING) self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, - "s3", - config=self.config, + self.custom_aws_role, "s3", config=self.config ) logger.info("Generating presigned URL") return self.custom_client.generate_presigned_url( @@ -151,32 +143,24 @@ def copy_across_bucket( if_none_match, False, ) - raise e + else: + raise e else: logger.error(f"Copy failed: {e}") raise e def delete_object( - self, - s3_bucket_name: str, - file_key: str, - version_id: str | None = None, + self, s3_bucket_name: str, file_key: str, version_id: str | None = None ): if version_id is None: return self.client.delete_object(Bucket=s3_bucket_name, Key=file_key) return self.client.delete_object( - Bucket=s3_bucket_name, - Key=file_key, - VersionId=version_id, + Bucket=s3_bucket_name, Key=file_key, VersionId=version_id ) def create_object_tag( - self, - s3_bucket_name: str, - file_key: str, - tag_key: str, - tag_value: str, + self, s3_bucket_name: str, file_key: str, tag_key: str, tag_value: str ): return self.client.put_object_tagging( Bucket=s3_bucket_name, @@ -184,7 +168,7 @@ def create_object_tag( Tagging={ "TagSet": [ {"Key": tag_key, "Value": tag_value}, - ], + ] }, ) @@ -198,7 +182,7 @@ def get_tag_value(self, s3_bucket_name: str, file_key: str, tag_key: str) -> str return key_value_pair["Value"] raise TagNotFoundException( - f"Object {file_key} doesn't have a tag of key {tag_key}", + f"Object {file_key} doesn't have a tag of key {tag_key}" ) def file_exist_on_s3(self, s3_bucket_name: str, file_key: str) -> bool: @@ -234,11 +218,8 @@ def get_file_size(self, s3_bucket_name: str, object_key: str) -> int: def get_head_object(self, bucket: str, key: str): return self.client.head_object(Bucket=bucket, Key=key) - 
def get_object_stream(self, bucket: str, key: str, byte_range: str | None = None): - params = {"Bucket": bucket, "Key": key} - if byte_range: - params["Range"] = byte_range - response = self.client.get_object(**params) + def get_object_stream(self, bucket: str, key: str): + response = self.client.get_object(Bucket=bucket, Key=key) return response.get("Body") def stream_s3_object_to_memory(self, bucket: str, key: str) -> BytesIO: @@ -266,13 +247,21 @@ def upload_file_obj( logger.info(f"Uploaded file object to s3://{s3_bucket_name}/{file_key}") except ClientError as e: logger.error( - f"Failed to upload file object to s3://{s3_bucket_name}/{file_key} - {e}", + f"Failed to upload file object to s3://{s3_bucket_name}/{file_key} - {e}" ) raise e def save_or_create_file(self, source_bucket: str, file_key: str, body: bytes): return self.client.put_object( - Bucket=source_bucket, - Key=file_key, - Body=BytesIO(body), + Bucket=source_bucket, Key=file_key, Body=BytesIO(body) ) + + def get_object_tags_versioned(self, bucket: str, key: str, version_id: str | None): + try: + params = {"Bucket": bucket, "Key": key} + if version_id: + params["VersionId"] = version_id + response = self.client.get_object_tagging(**params) + return response.get("TagSet", []) + except ClientError: + return [] diff --git a/lambdas/services/bulk_upload_service.py b/lambdas/services/bulk_upload_service.py index 9b46673744..21bf7eddfb 100644 --- a/lambdas/services/bulk_upload_service.py +++ b/lambdas/services/bulk_upload_service.py @@ -78,20 +78,20 @@ def process_message_queue(self, records: list): logger.error(error) logger.info( - "Cannot validate patient due to PDS responded with Too Many Requests", + "Cannot validate patient due to PDS responded with Too Many Requests" ) logger.info("Cannot process for now due to PDS rate limit reached.") logger.info( - "All remaining messages in this batch will be returned to sqs queue to retry later.", + "All remaining messages in this batch will be returned to sqs 
queue to retry later." ) all_unprocessed_message = records[index - 1 :] for unprocessed_message in all_unprocessed_message: self.sqs_repository.put_sqs_message_back_to_queue( - unprocessed_message, + unprocessed_message ) raise BulkUploadException( - "Bulk upload process paused due to PDS rate limit reached", + "Bulk upload process paused due to PDS rate limit reached" ) except ( ClientError, @@ -104,15 +104,14 @@ def process_message_queue(self, records: list): logger.info("Continue on next message") logger.info( - f"Finish Processing successfully {len(records) - len(self.unhandled_messages)} of {len(records)} messages", + f"Finish Processing successfully {len(records) - len(self.unhandled_messages)} of {len(records)} messages" ) if self.unhandled_messages: logger.info("Unable to process the following messages:") for message in self.unhandled_messages: message_body = json.loads(message.get("body", "{}")) request_context.patient_nhs_no = message_body.get( - "NHS-NO", - "no number found", + "NHS-NO", "no number found" ) logger.info(message_body) @@ -123,12 +122,7 @@ def handle_sqs_message(self, message: dict): try: staging_metadata_json = message["body"] staging_metadata = StagingSqsMetadata.model_validate_json( - staging_metadata_json, - ) - uploader_ods = ( - staging_metadata.files[0].gp_practice_code - if staging_metadata.files - else "" + staging_metadata_json ) except (pydantic.ValidationError, KeyError) as e: logger.error(f"Got incomprehensible message: {message}") @@ -143,18 +137,16 @@ def handle_sqs_message(self, message: dict): file_names.append(os.path.basename(file_metadata.stored_file_name)) file_metadata.scan_date = validate_scan_date(file_metadata.scan_date) file_metadata.file_path = self.strip_leading_slash( - file_metadata.file_path, + file_metadata.file_path ) request_context.patient_nhs_no = staging_metadata.nhs_number validate_nhs_number(staging_metadata.nhs_number) pds_patient_details = getting_patient_info_from_pds( - staging_metadata.nhs_number, 
+ staging_metadata.nhs_number ) - patient_ods_code = ( pds_patient_details.get_ods_code_or_inactive_status_for_gp() ) - validate_lg_file_names(file_names, staging_metadata.nhs_number) if not self.bypass_pds: @@ -163,29 +155,25 @@ def handle_sqs_message(self, message: dict): name_validation_accepted_reason, is_name_validation_based_on_historic_name, ) = validate_filename_with_patient_details_lenient( - file_names, - pds_patient_details, + file_names, pds_patient_details ) accepted_reason = self.concatenate_acceptance_reason( - accepted_reason, - name_validation_accepted_reason, + accepted_reason, name_validation_accepted_reason ) else: is_name_validation_based_on_historic_name = ( validate_filename_with_patient_details_strict( - file_names, - pds_patient_details, + file_names, pds_patient_details ) ) if is_name_validation_based_on_historic_name: accepted_reason = self.concatenate_acceptance_reason( - accepted_reason, - "Patient matched on historical name", + accepted_reason, "Patient matched on historical name" ) if not allowed_to_ingest_ods_code(patient_ods_code): raise LGInvalidFilesException( - "Patient not registered at your practice", + "Patient not registered at your practice" ) patient_death_notification_status = ( pds_patient_details.get_death_notification_status() @@ -193,13 +181,11 @@ def handle_sqs_message(self, message: dict): if patient_death_notification_status: deceased_accepted_reason = f"Patient is deceased - {patient_death_notification_status.name}" accepted_reason = self.concatenate_acceptance_reason( - accepted_reason, - deceased_accepted_reason, + accepted_reason, deceased_accepted_reason ) if patient_ods_code is PatientOdsInactiveStatus.RESTRICTED: accepted_reason = self.concatenate_acceptance_reason( - accepted_reason, - "PDS record is restricted", + accepted_reason, "PDS record is restricted" ) except ( @@ -209,18 +195,20 @@ def handle_sqs_message(self, message: dict): PatientRecordAlreadyExistException, ) as error: logger.info( - f"Detected 
issue related to patient number: {staging_metadata.nhs_number}", + f"Detected issue related to patient number: {staging_metadata.nhs_number}" ) logger.error(error) logger.info("Will stop processing Lloyd George record for this patient.") reason = str(error) + uploader_ods = ( + staging_metadata.files[0].gp_practice_code + if staging_metadata.files + else "" + ) self.dynamo_repository.write_report_upload_to_dynamo( - staging_metadata, - UploadStatus.FAILED, - reason, - patient_ods_code, + staging_metadata, UploadStatus.FAILED, reason, patient_ods_code ) if isinstance(error, (InvalidNhsNumberException, PatientNotFoundException)): logger.info("Invalid NHS number detected. Will set as placeholder") @@ -229,34 +217,29 @@ def handle_sqs_message(self, message: dict): return logger.info( - "NHS Number and filename validation complete. Checking virus scan has marked files as Clean", + "NHS Number and filename validation complete. Checking virus scan has marked files as Clean" ) try: self.resolve_source_file_path(staging_metadata) self.bulk_upload_s3_repository.check_virus_result( - staging_metadata, - self.file_path_cache, + staging_metadata, self.file_path_cache ) logger.info("Virus scan validation complete. 
Checking PDF file integrity") self.bulk_upload_s3_repository.check_pdf_integrity( - staging_metadata, - self.file_path_cache, + staging_metadata, self.file_path_cache ) except VirusScanNoResultException as e: logger.info(e) logger.info( - f"Waiting on virus scan results for: {staging_metadata.nhs_number}, adding message back to queue", + f"Waiting on virus scan results for: {staging_metadata.nhs_number}, adding message back to queue" ) if staging_metadata.retries > 14: err = ( "File was not scanned for viruses before maximum retries attempted" ) self.dynamo_repository.write_report_upload_to_dynamo( - staging_metadata, - UploadStatus.FAILED, - err, - patient_ods_code, + staging_metadata, UploadStatus.FAILED, err, patient_ods_code ) else: self.sqs_repository.put_staging_metadata_back_to_queue(staging_metadata) @@ -264,7 +247,7 @@ def handle_sqs_message(self, message: dict): except (VirusScanFailedException, DocumentInfectedException) as e: logger.info(e) logger.info( - f"Virus scan results check failed for: {staging_metadata.nhs_number}, removing from queue", + f"Virus scan results check failed for: {staging_metadata.nhs_number}, removing from queue" ) logger.info("Will stop processing Lloyd George record for this patient") @@ -278,7 +261,7 @@ def handle_sqs_message(self, message: dict): except CorruptedFileException as e: logger.info(e) logger.info( - f"PDF integrity check failed for: {staging_metadata.nhs_number}, removing from queue", + f"PDF integrity check failed for: {staging_metadata.nhs_number}, removing from queue" ) logger.info("Will stop processing Lloyd George record for this patient") @@ -292,7 +275,7 @@ def handle_sqs_message(self, message: dict): except S3FileNotFoundException as e: logger.info(e) logger.info( - f"One or more of the files is not accessible from S3 bucket for patient {staging_metadata.nhs_number}", + f"One or more of the files is not accessible from S3 bucket for patient {staging_metadata.nhs_number}" ) logger.info("Will stop processing 
Lloyd George record for this patient") @@ -310,7 +293,7 @@ def handle_sqs_message(self, message: dict): self.dynamo_repository.init_transaction() logger.info( - "Transaction initialised. Transferring files to main S3 bucket and creating metadata", + "Transaction initialised. Transferring files to main S3 bucket and creating metadata" ) try: @@ -336,13 +319,10 @@ def handle_sqs_message(self, message: dict): return logger.info( - "File transfer complete. Removing uploaded files from staging bucket", + "File transfer complete. Removing uploaded files from staging bucket" ) self.bulk_upload_s3_repository.remove_ingested_file_from_source_bucket() - if uploader_ods != patient_ods_code: - logger.info("Ingested files for a different practice.") - logger.info( f"Completed file ingestion for patient {staging_metadata.nhs_number}", {"Result": "Successful upload"}, @@ -364,7 +344,7 @@ def handle_sqs_message(self, message: dict): message=pdf_stitching_sqs_message, ) logger.info( - f"Message sent to stitching queue for patient {staging_metadata.nhs_number}", + f"Message sent to stitching queue for patient {staging_metadata.nhs_number}" ) def resolve_source_file_path(self, staging_metadata: StagingSqsMetadata): @@ -387,43 +367,38 @@ def resolve_source_file_path(self, staging_metadata: StagingSqsMetadata): file_path_in_nfd_form = convert_to_nfd_form(file_path_in_metadata) if self.bulk_upload_s3_repository.file_exists_on_staging_bucket( - file_path_in_nfc_form, + file_path_in_nfc_form ): resolved_file_paths[file_path_in_metadata] = file_path_in_nfc_form elif self.bulk_upload_s3_repository.file_exists_on_staging_bucket( - file_path_in_nfd_form, + file_path_in_nfd_form ): resolved_file_paths[file_path_in_metadata] = file_path_in_nfd_form else: logger.info( - "No file matching the provided file path was found on S3 bucket", + "No file matching the provided file path was found on S3 bucket" ) logger.info("Please check whether files are named correctly") raise S3FileNotFoundException( - 
f"Failed to access file {sample_file_path}", + f"Failed to access file {sample_file_path}" ) self.file_path_cache = resolved_file_paths def create_lg_records_and_copy_files( - self, - staging_metadata: StagingSqsMetadata, - current_gp_ods: str, + self, staging_metadata: StagingSqsMetadata, current_gp_ods: str ): nhs_number = staging_metadata.nhs_number for file_metadata in staging_metadata.files: document_reference = self.convert_to_document_reference( - file_metadata, - nhs_number, - current_gp_ods, + file_metadata, nhs_number, current_gp_ods ) source_file_key = self.file_path_cache[file_metadata.file_path] dest_file_key = document_reference.s3_file_key copy_result = self.bulk_upload_s3_repository.copy_to_lg_bucket( - source_file_key=source_file_key, - dest_file_key=dest_file_key, + source_file_key=source_file_key, dest_file_key=dest_file_key ) s3_bucket_name = self.bulk_upload_s3_repository.lg_bucket_name @@ -431,8 +406,7 @@ def create_lg_records_and_copy_files( document_reference.file_size = ( self.bulk_upload_s3_repository.s3_repository.get_file_size( - s3_bucket_name=s3_bucket_name, - object_key=dest_file_key, + s3_bucket_name=s3_bucket_name, object_key=dest_file_key ) ) document_reference.set_uploaded_to_true() @@ -446,7 +420,7 @@ def rollback_transaction(self): logger.info("Rolled back an incomplete transaction") except ClientError as e: logger.error( - f"Failed to rollback the incomplete transaction due to error: {e}", + f"Failed to rollback the incomplete transaction due to error: {e}" ) def convert_to_document_reference( @@ -497,7 +471,7 @@ def send_to_review_queue_if_enabled( uploader_ods=uploader_ods, ) logger.info( - f"Sent failed record to review queue with reason: {review_reason}", + f"Sent failed record to review queue with reason: {review_reason}" ) except Exception as e: logger.error( diff --git a/lambdas/services/create_document_reference_service.py b/lambdas/services/create_document_reference_service.py index 2fedcd837e..28d45813f0 100644 --- 
a/lambdas/services/create_document_reference_service.py +++ b/lambdas/services/create_document_reference_service.py @@ -10,13 +10,13 @@ from models.fhir.R4.fhir_document_reference import Attachment, DocumentReferenceInfo from pydantic import ValidationError from services.base.ssm_service import SSMService -from services.feature_flags_service import FeatureFlagService from services.post_fhir_document_reference_service import ( PostFhirDocumentReferenceService, ) from utils import upload_file_configs from utils.audit_logging_setup import LoggingService from utils.common_query_filters import get_document_type_filter +from utils.constants.ssm import UPLOAD_PILOT_ODS_ALLOWED_LIST from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder from utils.exceptions import ( ConfigNotFoundException, @@ -49,16 +49,13 @@ class CreateDocumentReferenceService: def __init__(self): self.post_fhir_doc_ref_service = PostFhirDocumentReferenceService() self.ssm_service = SSMService() - self.feature_flag_service = FeatureFlagService() self.lg_dynamo_table = os.getenv("LLOYD_GEORGE_DYNAMODB_NAME") self.staging_bucket_name = os.getenv("STAGING_STORE_BUCKET_NAME") self.upload_sub_folder = "user_upload" def create_document_reference_request( - self, - nhs_number: str, - documents_list: list[dict], + self, nhs_number: str, documents_list: list[dict] ): upload_document_names = [] url_responses = {} @@ -76,19 +73,18 @@ def create_document_reference_request( for validated_doc in upload_request_documents: snomed_code = validated_doc.doc_type - config = upload_file_configs.get_config_by_snomed_code(snomed_code) + config = upload_file_configs.get_config_by_snomed_code( + snomed_code + ) if config.single_file_only: self.check_existing_records_and_remove_failed_upload( nhs_number, - snomed_code, + snomed_code ) document_reference = self.create_document_reference( - nhs_number, - user_ods_code, - validated_doc, - snomed_code, + nhs_number, user_ods_code, validated_doc, snomed_code ) 
self.validate_document_file_type(validated_doc, config) @@ -130,21 +126,13 @@ def create_document_reference_request( raise DocumentRefException(400, LambdaError.DocRefInvalidFiles) def validate_document_file_type(self, validated_doc, document_config): - if not is_file_type_allowed( - validated_doc.file_name, - document_config.accepted_file_types, - ): + if not is_file_type_allowed(validated_doc.file_name, document_config.accepted_file_types): raise LGInvalidFilesException( - f"Unsupported file type for file: {validated_doc.file_name}", + f"Unsupported file type for file: {validated_doc.file_name}" ) def build_and_process_fhir_doc_ref( - self, - nhs_number, - user_ods_code, - validated_doc, - snomed_code, - document_reference, + self, nhs_number, user_ods_code, validated_doc, snomed_code, document_reference ): doc_ref_info = self.build_doc_ref_info( validated_doc, @@ -154,11 +142,11 @@ def build_and_process_fhir_doc_ref( ) fhir_doc_ref = doc_ref_info.create_fhir_document_reference_object( - document_reference, + document_reference ) fhir_response = self.post_fhir_doc_ref_service.process_fhir_document_reference( - fhir_doc_ref.model_dump_json(), + fhir_doc_ref.model_dump_json() ) return fhir_response @@ -171,11 +159,7 @@ def validate_patient_user_ods_codes_match(self, user_ods_code, patient_ods_code) raise DocumentRefException(401, LambdaError.DocRefUnauthorizedOdsCode) def build_doc_ref_info( - self, - validated_doc, - nhs_number, - snomed_code, - user_ods_code, + self, validated_doc, nhs_number, snomed_code, user_ods_code ) -> DocumentReferenceInfo: attachment_details = Attachment( title=validated_doc.file_name, @@ -192,16 +176,14 @@ def build_doc_ref_info( return doc_ref_info def check_if_user_ods_code_is_in_pilot(self, ods_code) -> bool: - pilot_ods_codes = ( - self.feature_flag_service.get_allowed_list_of_ods_codes_for_upload_pilot() - ) - if ods_code in pilot_ods_codes or pilot_ods_codes == []: + pilot_ods_codes = 
self.get_allowed_list_of_ods_codes_for_upload_pilot() + if ods_code in pilot_ods_codes: return True - raise OdsErrorException() + else: + raise OdsErrorException() def parse_documents_list( - self, - document_list: list[dict], + self, document_list: list[dict] ) -> list[UploadRequestDocument]: upload_request_document_list = [] for document in document_list: @@ -268,7 +250,7 @@ def check_existing_records_and_remove_failed_upload( ) if not previous_records: logger.info( - "No record was found for this patient. Will continue to create doc ref.", + "No record was found for this patient. Will continue to create doc ref." ) return @@ -279,7 +261,7 @@ def check_existing_records_and_remove_failed_upload( def stop_if_upload_is_in_process(self, previous_records: list[DocumentReference]): if any( self.post_fhir_doc_ref_service.document_service.is_upload_in_process( - document, + document ) for document in previous_records ): @@ -294,7 +276,7 @@ def stop_if_all_records_uploaded(self, previous_records: list[DocumentReference] if all_records_uploaded: logger.info( "The patient already has a full set of record. " - "We should not be processing the new Lloyd George record upload.", + "We should not be processing the new Lloyd George record upload." ) logger.error( f"{LambdaError.DocRefRecordAlreadyInPlace.to_str()}", @@ -309,23 +291,30 @@ def remove_records_of_failed_upload( ): logger.info( "Found previous records of failed upload. " - "Will delete those records before creating new document references.", + "Will delete those records before creating new document references." 
) logger.info("Deleting files from s3...") for record in failed_upload_records: s3_bucket_name, s3_file_key = record._parse_s3_location( - record.file_location, + record.file_location ) self.post_fhir_doc_ref_service.s3_service.delete_object( - s3_bucket_name, - s3_file_key, + s3_bucket_name, s3_file_key ) logger.info("Deleting dynamodb record...") self.post_fhir_doc_ref_service.document_service.hard_delete_metadata_records( - table_name=table_name, - document_references=failed_upload_records, + table_name=table_name, document_references=failed_upload_records ) logger.info("Previous failed records are deleted.") + + def get_allowed_list_of_ods_codes_for_upload_pilot(self) -> list[str]: + logger.info( + "Starting ssm request to retrieve allowed list of ODS codes for Upload Pilot" + ) + response = self.ssm_service.get_ssm_parameter(UPLOAD_PILOT_ODS_ALLOWED_LIST) + if not response: + logger.warning("No ODS codes found in allowed list for Upload Pilot") + return response diff --git a/lambdas/services/document_reference_search_service.py b/lambdas/services/document_reference_search_service.py index f53ddc07a9..a4046bb45e 100644 --- a/lambdas/services/document_reference_search_service.py +++ b/lambdas/services/document_reference_search_service.py @@ -1,8 +1,9 @@ +import json import os from json import JSONDecodeError from botocore.exceptions import ClientError -from enums.dynamo_filter import AttributeOperator, ConditionOperator +from enums.dynamo_filter import AttributeOperator from enums.infrastructure import MAP_MTLS_TO_DYNAMO from enums.lambda_error import LambdaError from enums.metadata_field_names import DocumentReferenceMetadataFields @@ -16,7 +17,6 @@ from utils.audit_logging_setup import LoggingService from utils.common_query_filters import NotDeleted, UploadCompleted from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder -from utils.dynamo_utils import build_mixed_condition_expression from utils.exceptions import DynamoServiceException from 
utils.lambda_exceptions import DocumentRefSearchException from utils.lambda_header_utils import validate_common_name_in_mtls @@ -36,23 +36,20 @@ def get_document_references( """ Fetch document references for a given NHS number. - Args: - nhs_number (str): NHS number - return_fhir (bool, optional): Return FHIR document references. Defaults to False. - additional_filters (dict, optional): Additional filters to apply to DynamoDB query. (Defaults to None.) - check_upload_completed (bool): Check upload of document is complete. (Defaults to True.) - api_request_context (dict, optional): API request context, used to obtain MTLS common name. (Defaults to {}.) - Returns: - List of document references or FHIR DocumentReferences. + :param nhs_number: The NHS number to search for. + :param return_fhir: If True, return FHIR DocumentReference objects. + :param additional_filters: Additional filters to apply to the search. + :param check_upload_completed: If True, check if the upload is completed before returning the results. + :return: List of document references or FHIR DocumentReferences. 
""" common_name = validate_common_name_in_mtls( - api_request_context=api_request_context, + api_request_context=api_request_context ) try: - table_name = self._get_table_name(common_name) + list_of_table_names = self._get_table_names(common_name) results = self._search_tables_for_documents( nhs_number, - table_name, + list_of_table_names, return_fhir, additional_filters, check_upload_completed, @@ -70,51 +67,54 @@ def get_document_references( ) raise DocumentRefSearchException(500, LambdaError.DocRefClient) - def _get_table_name(self, common_name: MtlsCommonNames | None) -> str: - logger.info("Getting table name for document search") + def _get_table_names(self, common_name: MtlsCommonNames | None) -> list[str]: + table_list = [] + try: + table_list = json.loads(os.environ["DYNAMODB_TABLE_LIST"]) + except JSONDecodeError as e: + logger.error(f"Failed to decode table list: {str(e)}") + raise + if not common_name or common_name not in MtlsCommonNames: - return os.environ["LLOYD_GEORGE_DYNAMODB_NAME"] + return table_list - return str(MAP_MTLS_TO_DYNAMO[common_name]) + return [str(MAP_MTLS_TO_DYNAMO[common_name])] def _search_tables_for_documents( self, nhs_number: str, - table_name: str, + table_names: list[str], return_fhir: bool, filters=None, check_upload_completed=False, ): document_resources = [] - logger.info(f"Searching for results in {table_name}") - filter_expression = self._build_filter_expression( - filters, - check_upload_completed, - ) - - if "coredocumentmetadata" not in table_name.lower(): - documents = self.fetch_documents_from_table_with_nhs_number( - nhs_number, - table_name, - query_filter=filter_expression, - ) - else: - documents = self.fetch_documents_from_table( - search_condition=nhs_number, - search_key="NhsNumber", - table_name=table_name, - query_filter=filter_expression, + for table_name in table_names: + logger.info(f"Searching for results in {table_name}") + filter_expression = self._get_filter_expression( + filters, 
upload_completed=check_upload_completed ) - if check_upload_completed: - self._validate_upload_status(documents) + if "coredocumentmetadata" not in table_name.lower(): + documents = self.fetch_documents_from_table_with_nhs_number( + nhs_number, table_name, query_filter=filter_expression + ) + else: + documents = self.fetch_documents_from_table( + search_condition=nhs_number, + search_key="NhsNumber", + table_name=table_name, + query_filter=filter_expression, + ) - processed_documents = self._process_documents( - documents, - return_fhir=return_fhir, - ) - document_resources.extend(processed_documents) + if check_upload_completed: + self._validate_upload_status(documents) + + processed_documents = self._process_documents( + documents, return_fhir=return_fhir + ) + document_resources.extend(processed_documents) logger.info(f"Found {len(document_resources)} document references") @@ -123,6 +123,16 @@ def _search_tables_for_documents( return document_resources or None + def _get_filter_expression( + self, filters: dict[str, str | None] = None, upload_completed=False + ): + if filters: + return self._build_filter_expression(filters) + elif upload_completed: + return UploadCompleted + else: + return None + def _create_fhir_bundle(self, document_resources: list[dict]) -> dict: entries = [ BundleEntry(resource=doc_resource) for doc_resource in document_resources @@ -145,9 +155,7 @@ def _validate_upload_status(self, documents: list[DocumentReference]): raise DocumentRefSearchException(423, LambdaError.UploadInProgressError) def _process_documents( - self, - documents: list[DocumentReference], - return_fhir: bool, + self, documents: list[DocumentReference], return_fhir: bool ) -> list[dict]: results = [] for document in documents: @@ -177,21 +185,11 @@ def _build_document_model(self, document: DocumentReference) -> dict: "version", "content_type", "document_snomed_code_type", - "author", }, ) return document_formatted - def _build_filter_expression( - self, - filter_values: 
dict[str, str] | None, - upload_completed=False, - ): - if not filter_values: - if not upload_completed: - return NotDeleted - return UploadCompleted - + def _build_filter_expression(self, filter_values: dict[str, str]): filter_builder = DynamoQueryFilterBuilder() for filter_key, filter_value in filter_values.items(): if filter_key == "custodian": @@ -212,13 +210,16 @@ def _build_filter_expression( elif filter_key == "document_snomed_code": filter_builder.add_condition( attribute=str( - DocumentReferenceMetadataFields.DOCUMENT_SNOMED_CODE_TYPE.value, + DocumentReferenceMetadataFields.DOCUMENT_SNOMED_CODE_TYPE.value ), attr_operator=AttributeOperator.EQUAL, filter_value=filter_value, ) - - return filter_builder.build() & NotDeleted + if filter_values: + filter_expression = filter_builder.build() & NotDeleted + else: + filter_expression = NotDeleted + return filter_expression def create_document_reference_fhir_response( self, @@ -241,119 +242,10 @@ def create_document_reference_fhir_response( attachment=document_details, custodian=document_reference.current_gp_ods, snomed_code_doc_type=SnomedCodes.find_by_code( - document_reference.document_snomed_code_type, + document_reference.document_snomed_code_type ), ) .create_fhir_document_reference_object(document_reference) .model_dump(exclude_none=True) ) return fhir_document_reference - - def get_paginated_references_by_nhs_number( - self, - nhs_number: str, - limit: int | None = None, - next_page_token: str | None = None, - filter: dict | None = None, - api_request_context: dict = {}, - return_fhir: bool = False, - ): - - filter_expression, condition_attribute_names, condition_attribute_values = ( - self._build_pagination_filter(filter) - ) - - common_name = validate_common_name_in_mtls( - api_request_context=api_request_context, - ) - - references, next_page_token = self.query_table_with_paginator( - table_name=self._get_table_name(common_name), - index_name="NhsNumberIndex", - search_key="NhsNumber", - 
search_condition=nhs_number, - limit=limit, - start_key=next_page_token, - filter_expression=filter_expression, - expression_attribute_names=condition_attribute_names, - expression_attribute_values=condition_attribute_values, - ) - - logger.info("Validating upload status") - self._validate_upload_status(references) - - document_references = self._process_documents( - references, - return_fhir=return_fhir, - ) - - return { - "references": document_references, - "next_page_token": next_page_token, - } - - def _build_pagination_filter( - self, - filter_values: dict[str, str] | None, - ) -> tuple[str, dict, dict]: - logger.info("Creating filter for pagination") - conditions = [ - { - "field": DocumentReferenceMetadataFields.DELETED.value, - "operator": ConditionOperator.EQUAL.value, - "value": "", - }, - { - "field": DocumentReferenceMetadataFields.DELETED.value, - "operator": "attribute_not_exists", - }, - ] - - query_filter, condition_attribute_names, condition_attribute_values = ( - build_mixed_condition_expression(conditions=conditions, join_operator="OR") - ) - - if filter_values: - logger.info("Adding additional filters for pagination") - additional_conditions = [] - for filter_key, filter_value in filter_values.items(): - if filter_key == "custodian": - additional_conditions.append( - { - "field": DocumentReferenceMetadataFields.CUSTODIAN.value, - "operator": ConditionOperator.EQUAL.value, - "value": filter_value, - }, - ) - elif filter_key == "document_snomed_code": - additional_conditions.append( - { - "field": DocumentReferenceMetadataFields.DOCUMENT_SNOMED_CODE_TYPE.value, - "operator": ConditionOperator.EQUAL.value, - "value": filter_value, - }, - ) - elif filter_key == "doc_status": - additional_conditions.append( - { - "field": DocumentReferenceMetadataFields.DOC_STATUS.value, - "operator": ConditionOperator.EQUAL.value, - "value": filter_value, - }, - ) - - ( - additional_filter, - additional_condition_attribute_names, - 
additional_condition_attribute_values, - ) = build_mixed_condition_expression(conditions=additional_conditions) - condition_attribute_names.update(additional_condition_attribute_names) - condition_attribute_values.update(additional_condition_attribute_values) - - return ( - f"({query_filter}) AND " + additional_filter, - condition_attribute_names, - condition_attribute_values, - ) - - return query_filter, condition_attribute_names, condition_attribute_values diff --git a/lambdas/services/document_review_processor_service.py b/lambdas/services/document_review_processor_service.py index f39e555fc5..a16cdf413c 100644 --- a/lambdas/services/document_review_processor_service.py +++ b/lambdas/services/document_review_processor_service.py @@ -43,10 +43,7 @@ def process_review_message(self, review_message: ReviewMessageBody) -> None: review_files = self._move_files_to_review_bucket(review_message, review_id) custodian = self._get_patient_custodian(review_message) document_upload_review = self._build_review_record( - review_message, - review_id, - review_files, - custodian, + review_message, review_id, review_files, custodian ) try: self.document_review_service.create_dynamo_entry(document_upload_review) @@ -67,13 +64,13 @@ def _get_patient_custodian(self, review_message: ReviewMessageBody) -> str: or review_message.nhs_number == NHS_NUMBER_PLACEHOLDER ): logger.info( - "No valid NHS number found in message. Using uploader ODS as custodian", + "No valid NHS number found in message. Using uploader ODS as custodian" ) return review_message.uploader_ods validate_nhs_number(review_message.nhs_number) pds_service = get_pds_service() patient_details = pds_service.fetch_patient_details( - review_message.nhs_number, + review_message.nhs_number ) return patient_details.general_practice_ods except PdsErrorException: @@ -85,7 +82,7 @@ def _get_patient_custodian(self, review_message: ReviewMessageBody) -> str: InvalidNhsNumberException, ): logger.info( - "Patient not found in PDS. 
Using uploader ODS as custodian, and nhs number placeholder", + "Patient not found in PDS. Using uploader ODS as custodian, and nhs number placeholder" ) review_message.nhs_number = NHS_NUMBER_PLACEHOLDER return review_message.uploader_ods @@ -109,9 +106,7 @@ def _build_review_record( ) def _move_files_to_review_bucket( - self, - message_data: ReviewMessageBody, - review_record_id: str, + self, message_data: ReviewMessageBody, review_record_id: str ) -> list[DocumentReviewFileDetails]: new_file_keys: list[DocumentReviewFileDetails] = [] @@ -120,7 +115,7 @@ def _move_files_to_review_bucket( new_file_key = f"{review_record_id}/{object_key}" logger.info( - f"Copying file from ({file.file_path}) in staging to review bucket: {new_file_key}", + f"Copying file from ({file.file_path}) in staging to review bucket: {new_file_key}" ) try: @@ -143,8 +138,8 @@ def _move_files_to_review_bucket( new_file_keys.append( DocumentReviewFileDetails( file_name=file.file_name, - file_location=f"{self.s3_service.S3_PREFIX}{self.review_bucket_name}/{new_file_key}", - ), + file_location=new_file_key, + ) ) return new_file_keys @@ -153,8 +148,7 @@ def _delete_files_from_staging(self, message_data: ReviewMessageBody) -> None: try: logger.info(f"Deleting file from staging bucket: {file.file_path}") self.s3_service.delete_object( - s3_bucket_name=self.staging_bucket_name, - file_key=file.file_path, + s3_bucket_name=self.staging_bucket_name, file_key=file.file_path ) except Exception as e: logger.error(f"Error deleting files from staging: {str(e)}") diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 38c0d00090..cbbd5478ec 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -4,7 +4,7 @@ from boto3.dynamodb.conditions import Attr, ConditionBase from botocore.exceptions import ClientError from enums.document_review_status import DocumentReviewStatus -from 
enums.dynamo_filter import AttributeOperator, ConditionOperator +from enums.dynamo_filter import AttributeOperator from enums.lambda_error import ErrorMessage from enums.metadata_field_names import DocumentReferenceMetadataFields from models.document_review import DocumentUploadReviewReference @@ -55,8 +55,7 @@ def query_docs_pending_review_with_paginator( filter_expression, condition_attribute_names, condition_attribute_values = ( self.build_paginator_query_filter( - nhs_number=nhs_number, - uploader=uploader, + nhs_number=nhs_number, uploader=uploader ) ) references, last_evaluated_key = self.query_table_with_paginator( @@ -77,8 +76,7 @@ def query_docs_pending_review_with_paginator( raise DocumentReviewException(ErrorMessage.FAILED_TO_QUERY_DYNAMO) def _validate_review_references( - self, - items: list[dict], + self, items: list[dict] ) -> list[DocumentUploadReviewReference]: try: logger.info("Validating document review search response") @@ -91,48 +89,42 @@ def _validate_review_references( raise DocumentReviewException(ErrorMessage.FAILED_TO_VALIDATE.value) def build_paginator_query_filter( - self, - nhs_number: str | None = None, - uploader: str | None = None, + self, nhs_number: str | None = None, uploader: str | None = None ): conditions = [ { "field": "ReviewStatus", - "operator": ConditionOperator.EQUAL.value, + "operator": "=", "value": DocumentReviewStatus.PENDING_REVIEW.value, - }, + } ] if nhs_number: conditions.append( { "field": "NhsNumber", - "operator": ConditionOperator.EQUAL.value, + "operator": "=", "value": nhs_number, - }, + } ) if uploader: conditions.append( { "field": "Author", - "operator": ConditionOperator.EQUAL.value, + "operator": "=", "value": uploader, - }, + } ) return build_mixed_condition_expression(conditions) def get_document( - self, - document_id: str, - version: int | None, + self, document_id: str, version: int | None ) -> DocumentUploadReviewReference | None: try: sort_key = {"Version": version} response = self.get_item( - 
table_name=self.table_name, - document_id=document_id, - sort_key=sort_key, + table_name=self.table_name, document_id=document_id, sort_key=sort_key ) return response @@ -153,7 +145,7 @@ def update_document_review_custodian( for review in patient_documents: if review.custodian == updated_ods_code: logger.info( - f"Custodian {updated_ods_code} already assigned to review ID: {review.id}", + f"Custodian {updated_ods_code} already assigned to review ID: {review.id}" ) continue @@ -168,15 +160,11 @@ def update_document_review_custodian( if review.review_status == DocumentReviewStatus.PENDING_REVIEW: self._handle_pending_review_custodian_update( - review, - updated_ods_code, - review_update_field, + review, updated_ods_code, review_update_field ) else: self._handle_standard_custodian_update( - review, - updated_ods_code, - review_update_field, + review, updated_ods_code, review_update_field ) except (ClientError, DocumentReviewException) as e: @@ -245,9 +233,7 @@ def get_document_review_by_id(self, document_id: str, document_version: int): return self.get_item(document_id, {"Version": document_version}) def update_pending_review_status( - self, - review_update: DocumentUploadReviewReference, - field_names: set[str], + self, review_update: DocumentUploadReviewReference, field_names: set[str] ) -> None: self.update_review_document_with_status_filter( review_update, @@ -267,9 +253,7 @@ def update_review_document_with_status_filter( & Attr("ReviewStatus").eq(status) ) self.update_document_review_for_patient( - review_update, - field_names, - condition_expression, + review_update, field_names, condition_expression ) def update_document_review_for_patient( @@ -302,10 +286,7 @@ def update_document_review_for_patient( raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) def update_document_review_with_transaction( - self, - new_review_item, - existing_review_item, - additional_update_fields=None, + self, new_review_item, existing_review_item, 
additional_update_fields=None ): transact_items = [] try: @@ -314,9 +295,7 @@ def update_document_review_with_transaction( action="Update", key={"ID": new_review_item.id, "Version": new_review_item.version}, update_fields=new_review_item.model_dump( - exclude_none=True, - by_alias=True, - exclude={"version", "id"}, + exclude_none=True, by_alias=True, exclude={"version", "id"} ), conditions=[{"field": "ID", "operator": "attribute_not_exists"}], ) @@ -337,9 +316,7 @@ def update_document_review_with_transaction( "Version": existing_review_item.version, }, update_fields=existing_review_item.model_dump( - exclude_none=True, - by_alias=True, - include=existing_update_fields, + exclude_none=True, by_alias=True, include=existing_update_fields ), conditions=[ { @@ -379,20 +356,18 @@ def update_document_review_with_transaction( return response def delete_document_review_files( - self, - document_review: DocumentUploadReviewReference, + self, document_review: DocumentUploadReviewReference ): for file in document_review.files: location_without_prefix = file.file_location.replace( - self.s3_service.S3_PREFIX, - "", + self.s3_service.S3_PREFIX, "" ) bucket, file_key = location_without_prefix.split("/", 1) try: self.s3_service.delete_object(bucket, file_key) except ClientError as e: logger.warning( - f"Unable to delete file {file.file_name} from S3 due to error: {e}", + f"Unable to delete file {file.file_name} from S3 due to error: {e}" ) logger.warning(f"Skipping file deletion for {file.file_name}") continue @@ -406,16 +381,12 @@ def build_review_dynamo_filter( filter_builder = DynamoQueryFilterBuilder() if status: filter_builder.add_condition( - "ReviewStatus", - AttributeOperator.EQUAL, - status, + "ReviewStatus", AttributeOperator.EQUAL, status ) if nhs_number: filter_builder.add_condition( - "NhsNumber", - AttributeOperator.EQUAL, - nhs_number, + "NhsNumber", AttributeOperator.EQUAL, nhs_number ) if uploader: diff --git a/lambdas/services/feature_flags_service.py 
b/lambdas/services/feature_flags_service.py index f592108d02..dba5ef6715 100644 --- a/lambdas/services/feature_flags_service.py +++ b/lambdas/services/feature_flags_service.py @@ -130,36 +130,33 @@ def get_feature_flags_by_flag(self, flag: str): def get_allowed_list_of_ods_codes_for_upload_pilot(self) -> list[str]: logger.info( - "Starting ssm request to retrieve allowed list of ODS codes for Upload Pilot", + "Starting ssm request to retrieve allowed list of ODS codes for Upload Pilot" ) - response = self.ssm_service.get_ssm_parameter( - UPLOAD_PILOT_ODS_ALLOWED_LIST, - ).split(",") - if not response or response == ["*"]: + response = self.ssm_service.get_ssm_parameter(UPLOAD_PILOT_ODS_ALLOWED_LIST) + if not response: logger.warning("No ODS codes found in allowed list for Upload Pilot") return [] - return response + return response.split(",") def check_if_ods_code_is_in_pilot(self) -> bool: ods_code = "" if isinstance(request_context.authorization, dict): ods_code = request_context.authorization.get( - "selected_organisation", - {}, + "selected_organisation", {} ).get("org_ods_code", "") if not ods_code: return False pilot_ods_codes = self.get_allowed_list_of_ods_codes_for_upload_pilot() - return ods_code in pilot_ods_codes or pilot_ods_codes == [] + return ods_code in pilot_ods_codes def validate_feature_flag(self, flag_name: str): flag_object = self.get_feature_flags_by_flag(flag_name) if not flag_object.get(flag_name, False): logger.info( - f"Feature flag '{flag_name}' not enabled, event will not be processed", + f"Feature flag '{flag_name}' not enabled, event will not be processed" ) raise FeatureFlagsException(404, LambdaError.FeatureFlagDisabled) diff --git a/lambdas/services/get_document_upload_status.py b/lambdas/services/get_document_upload_status.py index 734671a3ec..113b08908b 100644 --- a/lambdas/services/get_document_upload_status.py +++ b/lambdas/services/get_document_upload_status.py @@ -24,16 +24,12 @@ def _determine_document_status(self, doc_ref, 
nhs_number): if doc_ref.doc_status == "cancelled": if doc_ref.virus_scanner_result == VirusScanResult.INFECTED: return DocumentStatus.INFECTED.display, DocumentStatus.INFECTED.code - if doc_ref.virus_scanner_result == VirusScanResult.INVALID: - return DocumentStatus.INVALID.display, DocumentStatus.INVALID.code return DocumentStatus.CANCELLED.display, DocumentStatus.CANCELLED.code return doc_ref.doc_status, None def get_document_references_by_id( - self, - nhs_number: str, - document_ids: list[str], + self, nhs_number: str, document_ids: list[str] ) -> dict: """ Checks the status of a list of documents for a given patient. @@ -46,8 +42,7 @@ def get_document_references_by_id( A dictionary with a list of document IDs and their corresponding statuses. """ found_docs = self.document_service.get_batch_document_references_by_id( - document_ids, - SupportedDocumentTypes.LG, + document_ids, SupportedDocumentTypes.LG ) found_docs_by_id = {doc.id: doc for doc in found_docs} results = {} diff --git a/lambdas/services/pdf_stitch_service.py b/lambdas/services/pdf_stitch_service.py new file mode 100755 index 0000000000..d08a20d1ab --- /dev/null +++ b/lambdas/services/pdf_stitch_service.py @@ -0,0 +1,34 @@ +import os +from uuid import uuid4 + +from pypdf import PdfReader, PdfWriter +from utils.audit_logging_setup import LoggingService + +logger = LoggingService(__name__) + + +def stitch_pdf(filenames: list[str], temp_folder: str = "/tmp/") -> str: + """ + Given a list of local PDF files, stitch them into one file and return the local file path of a resulting file. 
+ + Example usage: + filenames = ["file1.pdf", "file2.pdf", "file3.pdf"] + tmp_folder = "/tmp/" + stitch_pdf(filename, tmp_folder) + + Result: + "/tmp/(filename_of_stitched_file).pdf" + """ + merger = PdfWriter() + for filename in filenames: + merger.append(filename) + output_filename = os.path.join(temp_folder, f"{str(uuid4())}.pdf") + merger.write(output_filename) + return output_filename + + +def count_page_number(filename: str) -> int: + """ + Return the total number of pages in a PDF file + """ + return len(PdfReader(filename).pages) diff --git a/lambdas/services/post_fhir_document_reference_service.py b/lambdas/services/post_fhir_document_reference_service.py index bec8b561de..255c1c6bde 100644 --- a/lambdas/services/post_fhir_document_reference_service.py +++ b/lambdas/services/post_fhir_document_reference_service.py @@ -3,9 +3,7 @@ from enums.mtls import MtlsCommonNames from enums.snomed_codes import SnomedCode, SnomedCodes from models.document_reference import DocumentReference -from models.fhir.R4.fhir_document_reference import ( - SNOMED_URL, -) +from models.fhir.R4.fhir_document_reference import SNOMED_URL from models.fhir.R4.fhir_document_reference import ( DocumentReference as FhirDocumentReference, ) @@ -27,9 +25,7 @@ def __init__(self): super().__init__() def process_fhir_document_reference( - self, - fhir_document: str, - api_request_context: dict = {}, + self, fhir_document: str, api_request_context: dict = {} ) -> str: """ Process a FHIR Document Reference request @@ -44,7 +40,7 @@ def process_fhir_document_reference( common_name = validate_common_name_in_mtls(api_request_context) validated_fhir_doc = FhirDocumentReference.model_validate_json( - fhir_document, + fhir_document ) # Extract NHS number and author from the FHIR document @@ -76,9 +72,7 @@ def process_fhir_document_reference( ) presigned_url = self._handle_document_save( - document_reference, - validated_fhir_doc, - dynamo_table, + document_reference, validated_fhir_doc, dynamo_table ) 
return self._create_fhir_response(document_reference, presigned_url) @@ -113,9 +107,7 @@ def _extract_author_from_fhir(self, fhir_doc: FhirDocumentReference) -> str | No raise DocumentRefException(400, LambdaError.DocRefNoParse) def _determine_document_type( - self, - fhir_doc: FhirDocumentReference, - common_name: MtlsCommonNames | None, + self, fhir_doc: FhirDocumentReference, common_name: MtlsCommonNames | None ) -> SnomedCode: if not common_name: """Determine the document type based on SNOMED code in the FHIR document""" @@ -127,7 +119,7 @@ def _determine_document_type( return snomed_code else: logger.error( - f"SNOMED code {coding.code} - {coding.display} is not supported", + f"SNOMED code {coding.code} - {coding.display} is not supported" ) raise DocumentRefException(400, LambdaError.DocRefInvalidType) logger.error("SNOMED code not found in FHIR document") @@ -155,12 +147,6 @@ def _create_document_reference( if not custodian: custodian = current_gp_ods - title = fhir_doc.content[0].attachment.title or None - - if doc_type != SnomedCodes.PATIENT_DATA.value and title is None: - logger.error("FHIR document validation error: attachment.title missing") - raise DocumentRefException(400, LambdaError.DocRefNoParse) - sub_folder, raw_request = ( ("user_upload", None) if doc_type != SnomedCodes.PATIENT_DATA.value @@ -175,7 +161,7 @@ def _create_document_reference( s3_bucket_name=self.staging_bucket_name, author=author, content_type=fhir_doc.content[0].attachment.contentType, - file_name=title, + file_name=fhir_doc.content[0].attachment.title, document_snomed_code_type=doc_type.code, doc_status="preliminary", status="current", diff --git a/lambdas/services/reporting/csv_report_generator_service.py b/lambdas/services/reporting/csv_report_generator_service.py new file mode 100644 index 0000000000..52adc831fd --- /dev/null +++ b/lambdas/services/reporting/csv_report_generator_service.py @@ -0,0 +1,48 @@ +import csv +from io import StringIO + +from utils.audit_logging_setup 
import LoggingService + +logger = LoggingService(__name__) + + +class CsvReportGenerator: + def generate_s3_inventory_csv(self, bucket, objects) -> str: + """ + Generates a CSV report for current S3 objects only (list_objects_v2). + """ + logger.info(f"Generating S3 inventory CSV for bucket {bucket}") + + output = StringIO() + writer = csv.writer(output) + + writer.writerow( + [ + "bucket", + "key", + "last_modified", + "size", + "etag", + "storage_class", + "tags", + ] + ) + + for obj in objects: + tags = obj.get("Tags", []) + tag_str = ";".join(f"{t['Key']}={t['Value']}" for t in tags) + + writer.writerow( + [ + bucket, + obj["Key"], + obj["LastModified"].isoformat(), + obj.get("Size"), + obj.get("ETag"), + obj.get("StorageClass"), + tag_str, + ] + ) + + logger.info(f"Finished CSV generation for {bucket}") + return output.getvalue() diff --git a/lambdas/services/reporting/report_s3_content_service.py b/lambdas/services/reporting/report_s3_content_service.py new file mode 100644 index 0000000000..83e836a388 --- /dev/null +++ b/lambdas/services/reporting/report_s3_content_service.py @@ -0,0 +1,47 @@ +import os +from concurrent.futures import ThreadPoolExecutor, as_completed +from io import BytesIO + +from services.base.s3_service import S3Service +from services.reporting.csv_report_generator_service import CsvReportGenerator +from utils.audit_logging_setup import LoggingService + +logger = LoggingService(__name__) + + +class ReportS3ContentService: + def __init__(self): + self.bulk_staging_store = os.getenv("BULK_STAGING_BUCKET_NAME") + self.statistic_reports_bucket = os.getenv("STATISTICAL_REPORTS_BUCKET") + self.s3_service = S3Service() + self.csv_generator = CsvReportGenerator() + + def _fetch_tags(self, bucket: str, obj: dict) -> dict: + tags = self.s3_service.get_object_tags_versioned(bucket, obj["Key"], None) + obj["Tags"] = tags + return obj + + def process_s3_content(self): + for bucket in [self.bulk_staging_store]: + logger.info(f"Listing current objects 
for bucket {bucket}") + + objects = self.s3_service.list_all_objects(bucket) + + with ThreadPoolExecutor(max_workers=20) as executor: + futures = [ + executor.submit(self._fetch_tags, bucket, obj) for obj in objects + ] + for _ in as_completed(futures): + pass + + logger.info(f"Generating CSV for bucket {bucket}") + csv_content = self.csv_generator.generate_s3_inventory_csv(bucket, objects) + + logger.info(f"Uploading report for bucket {bucket}") + self.s3_service.upload_file_obj( + BytesIO(csv_content.encode("utf-8")), + self.statistic_reports_bucket, + f"s3-content-report/{bucket}-inventory.csv", + ) + + logger.info(f"Completed report for {bucket}") diff --git a/lambdas/services/update_document_reference_service.py b/lambdas/services/update_document_reference_service.py index 5073cebdd5..ed0be0c954 100644 --- a/lambdas/services/update_document_reference_service.py +++ b/lambdas/services/update_document_reference_service.py @@ -8,10 +8,10 @@ from pydantic import ValidationError from services.base.ssm_service import SSMService from services.document_service import DocumentService -from services.feature_flags_service import FeatureFlagService from services.put_fhir_document_reference_service import PutFhirDocumentReferenceService from utils.audit_logging_setup import LoggingService from utils.common_query_filters import CurrentStatusFile, NotDeleted +from utils.constants.ssm import UPLOAD_PILOT_ODS_ALLOWED_LIST from utils.dynamo_utils import DocTypeTableRouter from utils.exceptions import ( InvalidNhsNumberException, @@ -38,14 +38,10 @@ def __init__(self): self.fhir_doc_ref_service = PutFhirDocumentReferenceService() self.document_service = DocumentService() self.ssm_service = SSMService() - self.feature_flag_service = FeatureFlagService() self.doctype_table_router = DocTypeTableRouter() def update_document_reference_request( - self, - nhs_number: str, - document: dict, - doc_ref_id: str, + self, nhs_number: str, document: dict, doc_ref_id: str ): 
self.validate_doc_ref_exists(doc_ref_id) @@ -68,16 +64,12 @@ def update_document_reference_request( self.validate_user_patient_ods_match(patient_ods_code, user_ods_code) validate_files_for_access_and_store( - [update_request_document], - pds_patient_details, + [update_request_document], pds_patient_details ) self.stop_if_upload_is_in_progress(nhs_number) fhir_response = self.build_and_process_fhir_doc_ref( - nhs_number, - doc_ref_id, - update_request_document, - user_ods_code, + nhs_number, doc_ref_id, update_request_document, user_ods_code ) fhir_response_data = json.loads(fhir_response) @@ -105,19 +97,12 @@ def update_document_reference_request( raise DocumentRefException(400, LambdaError.DocRefInvalidFiles) def build_and_process_fhir_doc_ref( - self, - nhs_number, - doc_ref_id, - update_request_document, - user_ods_code, + self, nhs_number, doc_ref_id, update_request_document, user_ods_code ): snomed_code_type = self.get_snomed_code_from_doc(update_request_document) doc_ref_info = self.build_doc_ref_info( - nhs_number, - update_request_document, - snomed_code_type, - user_ods_code, + nhs_number, update_request_document, snomed_code_type, user_ods_code ) logger.info(f"Updating document reference for client id: {doc_ref_id}") @@ -125,12 +110,11 @@ def build_and_process_fhir_doc_ref( validate_doc_version = update_request_document.version_id fhir_doc_ref = doc_ref_info.create_fhir_document_reference_object_basic( - doc_ref_id, - validate_doc_version, + doc_ref_id, validate_doc_version ) fhir_response = self.fhir_doc_ref_service.process_fhir_document_reference( - fhir_doc_ref.model_dump_json(), + fhir_doc_ref.model_dump_json() ) return fhir_response @@ -169,11 +153,7 @@ def get_snomed_code_from_doc(self, update_request_document): return snomed_code_type def build_doc_ref_info( - self, - nhs_number, - update_request_document, - snomed_code_type, - user_ods_code, + self, nhs_number, update_request_document, snomed_code_type, user_ods_code ): attachment_details = 
Attachment( title=update_request_document.file_name, @@ -189,17 +169,16 @@ def build_doc_ref_info( return doc_ref_info def check_if_ods_code_is_in_pilot(self, ods_code) -> bool: - pilot_ods_codes = ( - self.feature_flag_service.get_allowed_list_of_ods_codes_for_upload_pilot() - ) - if ods_code in pilot_ods_codes or pilot_ods_codes == []: + pilot_ods_codes = self.get_allowed_list_of_ods_codes_for_upload_pilot() + if ods_code in pilot_ods_codes: return True - raise OdsErrorException() + else: + raise OdsErrorException() def parse_document(self, document: dict) -> UploadRequestDocument: try: validated_doc: UploadRequestDocument = UploadRequestDocument.model_validate( - document, + document ) except ValidationError as e: logger.error( @@ -228,3 +207,12 @@ def stop_if_upload_is_in_progress(self, nhs_number: str): {"Result": UPDATE_REFERENCE_FAILED_MESSAGE}, ) raise DocumentRefException(423, LambdaError.UploadInProgressError) + + def get_allowed_list_of_ods_codes_for_upload_pilot(self) -> list[str]: + logger.info( + "Starting ssm request to retrieve allowed list of ODS codes for Upload Pilot" + ) + response = self.ssm_service.get_ssm_parameter(UPLOAD_PILOT_ODS_ALLOWED_LIST) + if not response: + logger.warning("No ODS codes found in allowed list for Upload Pilot") + return response diff --git a/lambdas/services/upload_document_reference_service.py b/lambdas/services/upload_document_reference_service.py index 9a15b41377..1161c479bd 100644 --- a/lambdas/services/upload_document_reference_service.py +++ b/lambdas/services/upload_document_reference_service.py @@ -1,4 +1,3 @@ -import io import os from typing import Optional @@ -44,9 +43,7 @@ def __init__(self): self.bucket_router = DocTypeS3BucketRouter() def handle_upload_document_reference_request( - self, - object_key: str, - object_size: int = 0, + self, object_key: str, object_size: int = 0 ): """Handle the upload document reference request with comprehensive error handling""" if not object_key: @@ -62,16 +59,13 @@ def 
handle_upload_document_reference_request( self._get_infrastructure_for_document_key(object_parts) preliminary_document_reference = self._fetch_preliminary_document_reference( - document_key, - nhs_number, + document_key, nhs_number ) if not preliminary_document_reference: return self._process_preliminary_document_reference( - preliminary_document_reference, - object_key, - object_size, + preliminary_document_reference, object_key, object_size ) except Exception as e: @@ -93,14 +87,12 @@ def _get_infrastructure_for_document_key(self, object_parts: list[str]) -> None: self.destination_bucket_name = self.bucket_router.resolve(doc_type) except KeyError: logger.error( - f"SNOMED code {doc_type.code} - {doc_type.display_name} is not supported", + f"SNOMED code {doc_type.code} - {doc_type.display_name} is not supported" ) raise InvalidDocTypeException(400, LambdaError.DocTypeDB) def _fetch_preliminary_document_reference( - self, - document_key: str, - nhs_number: str | None = None, + self, document_key: str, nhs_number: str | None = None ) -> Optional[DocumentReference]: """Fetch document reference from the database""" try: @@ -110,7 +102,7 @@ def _fetch_preliminary_document_reference( else: if not nhs_number: logger.error( - f"Failed to process object key with ID: {document_key}", + f"Failed to process object key with ID: {document_key}" ) raise FileProcessingException(400, LambdaError.DocRefInvalidFiles) @@ -126,24 +118,24 @@ def _fetch_preliminary_document_reference( if not documents: logger.error( - f"No document with the following key found in {self.table_name} table: {document_key}", + f"No document with the following key found in {self.table_name} table: {document_key}" ) logger.info("Skipping this object") return None if len(documents) > 1: logger.warning( - f"Multiple documents found for key {document_key}, using first one", + f"Multiple documents found for key {document_key}, using first one" ) return documents[0] except ClientError as e: logger.error( - f"Error 
fetching document reference for key {document_key}: {str(e)}", + f"Error fetching document reference for key {document_key}: {str(e)}" ) raise DocumentServiceException( - f"Failed to fetch document reference: {str(e)}", + f"Failed to fetch document reference: {str(e)}" ) def _process_preliminary_document_reference( @@ -155,37 +147,20 @@ def _process_preliminary_document_reference( """Process the preliminary (uploading) document reference with virus scanning and file operations""" try: virus_scan_result = self._perform_virus_scan( - preliminary_document_reference, - object_key, + preliminary_document_reference, object_key ) + preliminary_document_reference.virus_scanner_result = virus_scan_result if virus_scan_result == VirusScanResult.CLEAN: - is_file_protected = False - if getattr(preliminary_document_reference, "file_name", None): - file_type_extension = ( - preliminary_document_reference.file_name.split(".")[-1].lower() - ) - is_file_protected = self.is_file_invalid( - object_key, - file_type_extension, - ) - if is_file_protected: - logger.warning( - f"Document {preliminary_document_reference.id} is password protected or corrupt, " - f"marking as such in database", - ) - virus_scan_result = VirusScanResult.INVALID - else: - self._process_clean_document( - preliminary_document_reference, - object_key, - ) + self._process_clean_document( + preliminary_document_reference, + object_key, + ) else: logger.warning( - f"Document {preliminary_document_reference.id} failed virus scan", + f"Document {preliminary_document_reference.id} failed virus scan" ) - preliminary_document_reference.virus_scanner_result = virus_scan_result preliminary_document_reference.file_size = object_size preliminary_document_reference.uploaded = True preliminary_document_reference.uploading = False @@ -199,7 +174,7 @@ def _process_preliminary_document_reference( and self.doc_type.code != SnomedCodes.PATIENT_DATA.value.code ): self._finalize_and_supersede_with_transaction( - 
preliminary_document_reference, + preliminary_document_reference ) # Update NRL Pointer @@ -210,7 +185,7 @@ def _process_preliminary_document_reference( except Exception as e: logger.error( - f"Error processing document reference {preliminary_document_reference.id}: {str(e)}", + f"Error processing document reference {preliminary_document_reference.id}: {str(e)}" ) raise @@ -225,7 +200,7 @@ def _finalize_and_supersede_with_transaction(self, new_document: DocumentReferen """ try: logger.info( - f"Checking for existing final documents to supersede for NHS number {new_document.nhs_number}", + f"Checking for existing final documents to supersede for NHS number {new_document.nhs_number}" ) existing_docs: list[DocumentReference] = ( @@ -269,7 +244,7 @@ def _finalize_and_supersede_with_transaction(self, new_document: DocumentReferen # Supersede existing final documents if existing_docs: logger.info( - f"Superseding {len(existing_docs)} existing final document(s) for NHS number {new_document.nhs_number}", + f"Superseding {len(existing_docs)} existing final document(s) for NHS number {new_document.nhs_number}" ) for doc in existing_docs: @@ -323,28 +298,28 @@ def _finalize_and_supersede_with_transaction(self, new_document: DocumentReferen f" and superseded {len(existing_docs)} document(s)" if existing_docs else "" - ), + ) ) except ClientError as e: error_code = e.response.get("Error", {}).get("Code", "") if error_code == "TransactionCanceledException": logger.error( - f"Transaction cancelled - concurrent update detected for NHS number {new_document.nhs_number}", + f"Transaction cancelled - concurrent update detected for NHS number {new_document.nhs_number}" ) raise TransactionConflictException( f"Concurrent update detected while finalizing document for NHS number {new_document.nhs_number}. " - f"Another process may have already finalized a document for this patient.", + f"Another process may have already finalized a document for this patient." 
) raise except Exception as e: if isinstance(e, TransactionConflictException): logger.error( - f"Cancelling preliminary document {new_document.id} due to transaction conflict", + f"Cancelling preliminary document {new_document.id} due to transaction conflict" ) else: logger.error( - f"Unexpected error while finalizing document for {new_document.nhs_number}: {e}", + f"Unexpected error while finalizing document for {new_document.nhs_number}: {e}" ) new_document.doc_status = "cancelled" @@ -353,55 +328,47 @@ def _finalize_and_supersede_with_transaction(self, new_document: DocumentReferen new_document.file_size = None self._update_dynamo_table(new_document) self.delete_file_from_bucket( - new_document.file_location, - new_document.s3_version_id, + new_document.file_location, new_document.s3_version_id ) def document_reference_key(self, document_id): return {DocumentReferenceMetadataFields.ID.value: document_id} def _perform_virus_scan( - self, - document_reference: DocumentReference, - object_key: str, + self, document_reference: DocumentReference, object_key: str ) -> VirusScanResult: """Perform a virus scan on the document""" try: return self.virus_scan_service.scan_file( - object_key, - nhs_number=document_reference.nhs_number, + object_key, nhs_number=document_reference.nhs_number ) except Exception as e: logger.error( - f"Virus scan failed for document {document_reference.id}: {str(e)}", + f"Virus scan failed for document {document_reference.id}: {str(e)}" ) return VirusScanResult.ERROR def _process_clean_document( - self, - document_reference: DocumentReference, - object_key: str, + self, document_reference: DocumentReference, object_key: str ): """Process a document that passed virus scanning""" try: self.copy_files_from_staging_bucket(document_reference, object_key) logger.info( - f"Successfully processed clean document: {document_reference.id}", + f"Successfully processed clean document: {document_reference.id}" ) except Exception as e: logger.error( - f"Error 
processing clean document {document_reference.id}: {str(e)}", + f"Error processing clean document {document_reference.id}: {str(e)}" ) document_reference.doc_status = "cancelled" raise FileProcessingException(f"Failed to process clean document: {str(e)}") def copy_files_from_staging_bucket( - self, - document_reference: DocumentReference, - source_file_key: str, + self, document_reference: DocumentReference, source_file_key: str ): """Copy files from staging bucket to destination bucket""" try: @@ -423,8 +390,7 @@ def copy_files_from_staging_bucket( ) document_reference.s3_bucket_name = self.destination_bucket_name document_reference.file_location = document_reference._build_s3_location( - self.destination_bucket_name, - dest_file_key, + self.destination_bucket_name, dest_file_key ) document_reference.s3_version_id = copy_result.get("VersionId") return copy_result @@ -432,7 +398,7 @@ def copy_files_from_staging_bucket( except ClientError as e: logger.error(f"Error copying files from staging bucket: {str(e)}") raise FileProcessingException( - f"Failed to copy file from staging bucket: {str(e)}", + f"Failed to copy file from staging bucket: {str(e)}" ) def delete_file_from_staging_bucket(self, source_file_key: str): @@ -448,10 +414,10 @@ def delete_file_from_bucket(self, file_location: str, version_id: str): """Delete file from bucket""" try: s3_bucket_name, source_file_key = DocumentReference._parse_s3_location( - file_location, + file_location ) logger.info( - f"Deleting file from bucket: {s3_bucket_name}/{source_file_key}", + f"Deleting file from bucket: {s3_bucket_name}/{source_file_key}" ) self.s3_service.delete_object(s3_bucket_name, source_file_key, version_id) @@ -492,13 +458,5 @@ def _update_dynamo_table( except ClientError as e: logger.error(f"Error updating DynamoDB table: {str(e)}") raise DocumentServiceException( - f"Failed to update document in database: {str(e)}", + f"Failed to update document in database: {str(e)}" ) - - def is_file_invalid(self, 
object_key: str, file_type_extension: str) -> bool: - entire_object = self.s3_service.get_object_stream( - self.staging_s3_bucket_name, - object_key, - ) - file_stream = io.BytesIO(entire_object.read()) - return check_file_locked_or_corrupt(file_stream, file_type_extension) diff --git a/lambdas/tests/e2e/api/fhir/test_retrieve_document_fhir_api_failure.py b/lambdas/tests/e2e/api/fhir/test_retrieve_document_fhir_api_failure.py index e1cfebe33a..0640a412bc 100644 --- a/lambdas/tests/e2e/api/fhir/test_retrieve_document_fhir_api_failure.py +++ b/lambdas/tests/e2e/api/fhir/test_retrieve_document_fhir_api_failure.py @@ -1,5 +1,5 @@ -import uuid from datetime import datetime, timezone +import uuid import pytest from enums.document_retention import DocumentRetentionDays @@ -20,13 +20,11 @@ def _assert_operation_outcome(body, code): @pytest.mark.parametrize( "doc_status, response_status", [ - ("deprecated", 404), + ("deprecated", 200), # TODO Fix in NDR-363, this should return a 404 ], ) def test_retrieval_of_deleted_document_reference( - test_data, - doc_status, - response_status, + test_data, doc_status, response_status ): deletion_date = datetime.now(timezone.utc) document_ttl_days = DocumentRetentionDays.SOFT_DELETE @@ -35,7 +33,7 @@ def test_retrieval_of_deleted_document_reference( pdm_record = create_and_store_pdm_record( test_data, doc_status=doc_status, - Deleted=deletion_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + deleted=deletion_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), ttl=document_reference_ttl, ) @@ -43,7 +41,7 @@ def test_retrieval_of_deleted_document_reference( assert response.status_code == response_status response_json = response.json() - _assert_operation_outcome(body=response_json, code="RESOURCE_NOT_FOUND") + assert response_json.get("docStatus") == doc_status @pytest.mark.parametrize( @@ -53,10 +51,7 @@ def test_retrieval_of_deleted_document_reference( ], ) def test_retrieve_non_existant_document_reference( - record_id, - expected_status, - expected_code, 
- expected_diagnostics, + record_id, expected_status, expected_code, expected_diagnostics ): response = get_pdm_document_reference(record_id) assert response.status_code == expected_status @@ -78,9 +73,7 @@ def test_forbidden_with_invalid_cert(test_data, temp_cert_and_key): cert_path, key_path = temp_cert_and_key response = get_pdm_document_reference( - pdm_record["id"], - client_cert_path=cert_path, - client_key_path=key_path, + pdm_record["id"], client_cert_path=cert_path, client_key_path=key_path ) body = response.json() @@ -110,10 +103,7 @@ def test_retrieve_invalid_resource_type(test_data): ], ) def test_incorrectly_formatted_path_param_id( - test_data, - param, - expected_status, - expected_code, + test_data, param, expected_status, expected_code ): response = get_pdm_document_reference( endpoint_override=param, diff --git a/lambdas/tests/e2e/api/fhir/test_search_patient_fhir_api.py b/lambdas/tests/e2e/api/fhir/test_search_patient_fhir_api.py index 92783b115c..b2022432e9 100644 --- a/lambdas/tests/e2e/api/fhir/test_search_patient_fhir_api.py +++ b/lambdas/tests/e2e/api/fhir/test_search_patient_fhir_api.py @@ -1,7 +1,4 @@ -from datetime import datetime, timezone - import pytest -from enums.document_retention import DocumentRetentionDays from tests.e2e.api.fhir.conftest import ( MTLS_ENDPOINT, PDM_SNOMED, @@ -111,10 +108,7 @@ def test_multiple_cancelled_search_patient_details(test_data): ], ) def test_search_edge_cases( - nhs_number, - expected_status, - expected_code, - expected_diagnostics, + nhs_number, expected_status, expected_code, expected_diagnostics ): response = search_document_reference(nhs_number) assert response.status_code == expected_status @@ -135,9 +129,7 @@ def test_search_patient_unauthorized_mtls(test_data, temp_cert_and_key): cert_path, key_path = temp_cert_and_key response = search_document_reference( - "9912003071", - client_cert_path=cert_path, - client_key_path=key_path, + "9912003071", client_cert_path=cert_path, 
client_key_path=key_path ) body = response.json() @@ -150,49 +142,3 @@ def test_search_invalid_resource_type(test_data): response = search_document_reference("9912003071", resource_type="FooBar") assert response.status_code == 400 - - -def test_search_patient_details_deleted_are_not_returned(test_data): - created_record_1 = create_and_store_pdm_record(test_data) - expected_record_id_1 = created_record_1["id"] - - deletion_date = datetime.now(timezone.utc) - document_ttl_days = DocumentRetentionDays.SOFT_DELETE - ttl_seconds = document_ttl_days * 24 * 60 * 60 - document_reference_ttl = int(deletion_date.timestamp() + ttl_seconds) - created_record_2 = create_and_store_pdm_record( - test_data, - doc_status="deprecated", - Deleted=deletion_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), - ttl=document_reference_ttl, - ) - expected_record_id_2 = created_record_2["id"] - - response = search_document_reference("9912003071") - assert response.status_code == 200 - - bundle = response.json() - assert bundle["total"] < 2 - entries = bundle.get("entry", []) - assert entries - - # Find the entry with the matching record_id - matching_entry = next( - ( - e - for e in entries - if e["resource"].get("id") == f"{PDM_SNOMED}~{expected_record_id_1}" - ), - None, - ) - assert matching_entry - # Assert deleted item doesn't exist - non_matching_entry = next( - ( - e - for e in entries - if e["resource"].get("id") == f"{PDM_SNOMED}~{expected_record_id_2}" - ), - None, - ) - assert non_matching_entry is None diff --git a/lambdas/tests/e2e/api/fhir/test_upload_document_fhir_api_success.py b/lambdas/tests/e2e/api/fhir/test_upload_document_fhir_api_success.py index dd1a7d4204..41812f0642 100644 --- a/lambdas/tests/e2e/api/fhir/test_upload_document_fhir_api_success.py +++ b/lambdas/tests/e2e/api/fhir/test_upload_document_fhir_api_success.py @@ -1,5 +1,4 @@ import base64 -import json import logging import os @@ -43,8 +42,7 @@ def condition(response_json): return 
response_json["content"][0]["attachment"].get("data", False) raw_retrieve_response = retrieve_document_with_retry( - upload_response["id"], - condition, + upload_response["id"], condition ) retrieve_response = raw_retrieve_response.json() @@ -87,8 +85,7 @@ def test_create_document_without_author_or_type(test_data): with open(sample_pdf_path, "rb") as f: record["data"] = base64.b64encode(f.read()).decode("utf-8") payload = pdm_data_helper.create_upload_payload( - record=record, - exclude=["type", "author"], + record=record, exclude=["type", "author"] ) for field in ["type", "author"]: @@ -106,32 +103,3 @@ def test_create_document_without_author_or_type(test_data): assert doc_ref["Item"]["RawRequest"] == payload for field in ["type", "author"]: assert field not in doc_ref["Item"]["RawRequest"] - - -def test_create_document_without_title(test_data): - record = { - "ods": "H81109", - "nhs_number": "9912003071", - } - - sample_pdf_path = os.path.join(os.path.dirname(__file__), "files", "dummy.pdf") - with open(sample_pdf_path, "rb") as f: - record["data"] = base64.b64encode(f.read()).decode("utf-8") - payload = pdm_data_helper.create_upload_payload(record=record, exclude=["title"]) - assert "title" not in payload - - raw_upload_response = upload_document(payload) - assert raw_upload_response.status_code == 201 - record["id"] = raw_upload_response.json()["id"].split("~")[1] - test_data.append(record) - - doc_ref = pdm_data_helper.retrieve_document_reference(record=record) - assert "Item" in doc_ref - assert "RawRequest" in doc_ref["Item"] - assert doc_ref["Item"]["RawRequest"] == payload - raw_request = json.loads(doc_ref["Item"]["RawRequest"]) - assert "content" in raw_request - content = raw_request["content"] - assert "attachment" in content[0] - attachment = raw_request["content"][0]["attachment"] - assert "title" not in attachment diff --git a/lambdas/tests/e2e/api/test_search_patient_api.py b/lambdas/tests/e2e/api/test_search_patient_api.py index 
2a28349c66..ad7c7c79f6 100644 --- a/lambdas/tests/e2e/api/test_search_patient_api.py +++ b/lambdas/tests/e2e/api/test_search_patient_api.py @@ -43,7 +43,7 @@ def test_search_patient_details(test_data, snapshot_json): "entry.0.resource.date", "entry.0.resource.content.0.attachment.url", "timestamp", - ), + ) ) @@ -81,17 +81,13 @@ def test_multiple_cancelled_search_patient_details(test_data, snapshot_json): assert bundle["entry"][0] == snapshot_json( exclude=paths( - "resource.id", - "resource.date", - "resource.content.0.attachment.url", - ), + "resource.id", "resource.date", "resource.content.0.attachment.url" + ) ) assert bundle["entry"][1] == snapshot_json( exclude=paths( - "resource.id", - "resource.date", - "resource.content.0.attachment.url", - ), + "resource.id", "resource.date", "resource.content.0.attachment.url" + ) ) @@ -114,7 +110,7 @@ def test_no_records(snapshot_json): "entry.0.resource.date", "entry.0.resource.content.0.attachment.url", "timestamp", - ), + ) ) @@ -132,70 +128,3 @@ def test_invalid_patient(snapshot_json): bundle = response.json() assert bundle == snapshot_json - - -def test_search_patient_details_deleted_are_not_returned(test_data): - lloyd_george_record = {} - test_data.append(lloyd_george_record) - - lloyd_george_record["id"] = str(uuid.uuid4()) - lloyd_george_record["nhs_number"] = "9449305943" - lloyd_george_record["data"] = io.BytesIO(b"Sample PDF Content") - - data_helper.create_metadata(lloyd_george_record) - data_helper.create_resource(lloyd_george_record) - - second_lloyd_george_record = {} - test_data.append(second_lloyd_george_record) - - second_lloyd_george_record["id"] = str(uuid.uuid4()) - second_lloyd_george_record["nhs_number"] = "9449305943" - second_lloyd_george_record["data"] = io.BytesIO(b"Sample PDF Content") - - data_helper.create_metadata(second_lloyd_george_record) - data_helper.create_resource(second_lloyd_george_record) - - url = 
f"https://{API_ENDPOINT}/FhirDocumentReference?subject:identifier=https://fhir.nhs.uk/Id/nhs-number|{lloyd_george_record['nhs_number']}&_id={second_lloyd_george_record['id']}" - headers = { - "Authorization": "Bearer 123", - "X-Api-Key": API_KEY, - "X-Correlation-Id": "1234", - } - - delete_response = requests.request("DELETE", url, headers=headers) - assert delete_response.status_code == 204 - - url = f"https://{API_ENDPOINT}/FhirDocumentReference?subject:identifier=https://fhir.nhs.uk/Id/nhs-number|{lloyd_george_record['nhs_number']}" - headers = { - "Authorization": "Bearer 123", - "X-Api-Key": API_KEY, - "X-Correlation-Id": "1234", - } - response = requests.request("GET", url, headers=headers) - bundle = response.json() - assert bundle["total"] < 2 - entries = bundle.get("entry", []) - assert entries - - # Find the entry with the matching record_id - matching_entry = next( - ( - e - for e in entries - if e["resource"].get("id") - == f"{LLOYD_GEORGE_SNOMED}~{lloyd_george_record['id']}" - ), - None, - ) - assert matching_entry - # Assert deleted item doesn't exist - non_matching_entry = next( - ( - e - for e in entries - if e["resource"].get("id") - == f"{LLOYD_GEORGE_SNOMED}~{second_lloyd_george_record['id']}" - ), - None, - ) - assert non_matching_entry is None diff --git a/lambdas/tests/e2e/api/test_upload_document_api.py b/lambdas/tests/e2e/api/test_upload_document_api.py index 204eea9368..cc9319bc23 100644 --- a/lambdas/tests/e2e/api/test_upload_document_api.py +++ b/lambdas/tests/e2e/api/test_upload_document_api.py @@ -18,7 +18,7 @@ data_helper = LloydGeorgeDataHelper() -def create_upload_payload(lloyd_george_record, exclude: list[str] | None = None): +def create_upload_payload(lloyd_george_record): sample_payload = { "resourceType": "DocumentReference", "type": { @@ -27,28 +27,28 @@ def create_upload_payload(lloyd_george_record, exclude: list[str] | None = None) "system": "http://snomed.info/sct", "code": f"{LLOYD_GEORGE_SNOMED}", "display": "Lloyd George 
record folder", - }, - ], + } + ] }, "subject": { "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": lloyd_george_record["nhs_number"], - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": lloyd_george_record["ods"], - }, - }, + } + } ], "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": lloyd_george_record["ods"], - }, + } }, "content": [ { @@ -57,18 +57,11 @@ def create_upload_payload(lloyd_george_record, exclude: list[str] | None = None) "contentType": "application/pdf", "language": "en-GB", "title": "1of1_Lloyd_George_Record_[Paula Esme VESEY]_[9730153973]_[22-01-1960].pdf", - }, - }, + } + } ], } - if exclude: - for field in exclude: - if field == "title": - sample_payload["content"][0]["attachment"].pop(field, None) - else: - sample_payload.pop(field, None) - if "data" in lloyd_george_record: sample_payload["content"][0]["attachment"]["data"] = lloyd_george_record["data"] return json.dumps(sample_payload) @@ -112,10 +105,10 @@ def condition(response_json): assert base64.b64decode(base64_data, validate=True) assert upload_response == snapshot_json( - exclude=paths("id", "date", "content.0.attachment.url"), + exclude=paths("id", "date", "content.0.attachment.url") ) assert retrieve_response == snapshot_json( - exclude=paths("id", "date", "content.0.attachment.data"), + exclude=paths("id", "date", "content.0.attachment.data") ) @@ -160,11 +153,8 @@ def condition(response_json): assert upload_response == snapshot_json(exclude=paths("id", "date")) assert retrieve_response == snapshot_json( exclude=paths( - "id", - "date", - "content.0.attachment.url", - "content.0.attachment.size", - ), + "id", "date", "content.0.attachment.url", "content.0.attachment.size" + ) ) @@ -205,7 +195,7 @@ def condition(response_json): retrieve_response = raw_retrieve_response.json() assert upload_response == snapshot_json( - exclude=paths("id", "date", 
"content.0.attachment.url"), + exclude=paths("id", "date", "content.0.attachment.url") ) assert retrieve_response == snapshot_json(exclude=paths("id", "date")) @@ -231,29 +221,3 @@ def test_create_document_does_not_save_raw(test_data): doc_ref = data_helper.retrieve_document_reference(record=lloyd_george_record) assert "Item" in doc_ref assert "RawRequest" not in doc_ref["Item"] - - -def test_create_document_without_title_raises_error(test_data): - lloyd_george_record = {} - lloyd_george_record["ods"] = "H81109" - lloyd_george_record["nhs_number"] = "9449303304" - - sample_pdf_path = os.path.join(os.path.dirname(__file__), "files", "dummy.pdf") - with open(sample_pdf_path, "rb") as f: - lloyd_george_record["data"] = base64.b64encode(f.read()).decode("utf-8") - payload = create_upload_payload(lloyd_george_record, exclude=["title"]) - - url = f"https://{API_ENDPOINT}/FhirDocumentReference" - headers = {"Authorization": "Bearer 123", "X-Api-Key": API_KEY} - - retrieve_response = requests.post(url, headers=headers, data=payload) - assert retrieve_response.status_code == 400 - - json_response = retrieve_response.json() - assert ( - json_response["issue"][0]["details"]["coding"][0]["code"] == "VALIDATION_ERROR" - ) - assert ( - json_response["issue"][0]["diagnostics"] - == "Failed to parse document upload request data" - ) diff --git a/lambdas/tests/e2e/helpers/data_helper.py b/lambdas/tests/e2e/helpers/data_helper.py index 3d6fc87d18..c810b16823 100644 --- a/lambdas/tests/e2e/helpers/data_helper.py +++ b/lambdas/tests/e2e/helpers/data_helper.py @@ -43,8 +43,7 @@ def build_env(self, table_name, bucket_name): "ndr-dev": "internal-dev.api.service.nhs.uk", } self.apim_url = apim_map.get( - str(self.workspace), - "internal-dev.api.service.nhs.uk", + str(self.workspace), "internal-dev.api.service.nhs.uk" ) domain = ( @@ -62,11 +61,7 @@ def build_env(self, table_name, bucket_name): self.mtls_endpoint = f"mtls.{self.workspace}.{domain}" def build_record( - self, - 
nhs_number="9912003071", - data=None, - doc_status=None, - size=None, + self, nhs_number="9912003071", data=None, doc_status=None, size=None ): record = { "id": str(uuid.uuid4()), @@ -114,8 +109,7 @@ def create_resource(self, record): def retrieve_document_reference(self, record): return self.dynamo_service.get_item( - table_name=self.dynamo_table, - key={"ID": record["id"]}, + table_name=self.dynamo_table, key={"ID": record["id"]} ) def create_upload_payload(self, record, exclude=[], return_json=False): @@ -128,28 +122,28 @@ def create_upload_payload(self, record, exclude=[], return_json=False): "system": "https://snomed.info/sct", "code": f"{self.snomed_code}", "display": "Confidential patient data", - }, - ], + } + ] }, "subject": { "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": record["nhs_number"], - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": record["ods"], - }, - }, + } + } ], "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": record["ods"], - }, + } }, "content": [ { @@ -158,16 +152,13 @@ def create_upload_payload(self, record, exclude=[], return_json=False): "contentType": "application/pdf", "language": "en-GB", "title": "1of1_pdm_record_[Paula Esme VESEY]_[9730153973]_[22-01-1960].pdf", - }, - }, + } + } ], } for field in exclude: - if field == "title": - payload["content"][0]["attachment"].pop(field, None) - else: - payload.pop(field, None) + payload.pop(field, None) if "data" in record: payload["content"][0]["attachment"]["data"] = record["data"] @@ -269,7 +260,7 @@ def add_virus_scan_tag(self, key, result, date): "TagSet": [ {"Key": "scan-result", "Value": result}, {"Key": "scan-date", "Value": date}, - ], + ] }, ) @@ -284,9 +275,7 @@ def check_record_exists_in_s3_with_version(self, key, version_id): try: if version_id: _ = s3_client.head_object( - Bucket=self.s3_bucket, - Key=key, - VersionId=version_id, + 
Bucket=self.s3_bucket, Key=key, VersionId=version_id ) else: _ = s3_client.head_object(Bucket=self.s3_bucket, Key=key) @@ -294,4 +283,5 @@ def check_record_exists_in_s3_with_version(self, key, version_id): except s3_client.exceptions.ClientError as e: if e.response["Error"]["Code"] == "404": return False - raise + else: + raise diff --git a/lambdas/tests/unit/conftest.py b/lambdas/tests/unit/conftest.py index dbd9e699d8..b3afba0f82 100644 --- a/lambdas/tests/unit/conftest.py +++ b/lambdas/tests/unit/conftest.py @@ -18,7 +18,6 @@ REGION_NAME = "eu-west-2" -S3_PREFIX = "s3://" MOCK_TABLE_NAME = "test-table" MOCK_BUCKET = "test-s3-bucket" MOCK_CLOUDFRONT_URL = "test-cloudfront-url.com" @@ -85,7 +84,6 @@ TEST_UUID = "1234-4567-8912-HSDF-TEST" TEST_FILE_KEY = "test_file_key" TEST_FILE_NAME = "test.pdf" -TEST_FILE_SIZE = 24000 TEST_VIRUS_SCANNER_RESULT = "not_scanned" TEST_DOCUMENT_LOCATION = f"s3://{MOCK_BUCKET}/{TEST_FILE_KEY}" TEST_CURRENT_GP_ODS = "Y12345" @@ -156,8 +154,7 @@ def set_env(monkeypatch): monkeypatch.setenv(MOCK_PDM_TABLE_NAME_ENV_NAME, MOCK_PDM_TABLE_NAME) monkeypatch.setenv(MOCK_PDM_BUCKET_ENV_NAME, MOCK_PDM_BUCKET) monkeypatch.setenv( - "DYNAMODB_TABLE_LIST", - json.dumps([MOCK_PDM_TABLE_NAME, MOCK_LG_TABLE_NAME]), + "DYNAMODB_TABLE_LIST", json.dumps([MOCK_PDM_TABLE_NAME, MOCK_LG_TABLE_NAME]) ) monkeypatch.setenv(MOCK_ZIP_OUTPUT_BUCKET_ENV_NAME, MOCK_ZIP_OUTPUT_BUCKET) monkeypatch.setenv(MOCK_ZIP_TRACE_TABLE_ENV_NAME, MOCK_ZIP_TRACE_TABLE) @@ -179,43 +176,34 @@ def set_env(monkeypatch): monkeypatch.setenv(MOCK_OIDC_CLIENT_SECRET_ENV_NAME, OIDC_CLIENT_SECRET) monkeypatch.setenv(MOCK_JWT_PUBLIC_KEY_NAME, JWT_PUBLIC_KEY) monkeypatch.setenv( - SSM_PARAM_JWT_TOKEN_PUBLIC_KEY_ENV_NAME, - SSM_PARAM_JWT_TOKEN_PUBLIC_KEY, + SSM_PARAM_JWT_TOKEN_PUBLIC_KEY_ENV_NAME, SSM_PARAM_JWT_TOKEN_PUBLIC_KEY ) monkeypatch.setenv(MOCK_AUTH_DYNAMODB_NAME, "test_dynamo") monkeypatch.setenv(MOCK_FEEDBACK_SENDER_EMAIL_ENV_NAME, MOCK_FEEDBACK_SENDER_EMAIL) monkeypatch.setenv( - 
MOCK_FEEDBACK_EMAIL_SUBJECT_ENV_NAME, - MOCK_FEEDBACK_EMAIL_SUBJECT, + MOCK_FEEDBACK_EMAIL_SUBJECT_ENV_NAME, MOCK_FEEDBACK_EMAIL_SUBJECT ) monkeypatch.setenv( - MOCK_EMAIL_RECIPIENT_SSM_PARAM_KEY_ENV_NAME, - MOCK_EMAIL_RECIPIENT_SSM_PARAM_KEY, + MOCK_EMAIL_RECIPIENT_SSM_PARAM_KEY_ENV_NAME, MOCK_EMAIL_RECIPIENT_SSM_PARAM_KEY ) monkeypatch.setenv( - MOCK_APPCONFIG_APPLICATION_ENV_NAME, - MOCK_APPCONFIG_APPLICATION_ID, + MOCK_APPCONFIG_APPLICATION_ENV_NAME, MOCK_APPCONFIG_APPLICATION_ID ) monkeypatch.setenv( - MOCK_APPCONFIG_ENVIRONMENT_ENV_NAME, - MOCK_APPCONFIG_ENVIRONMENT_ID, + MOCK_APPCONFIG_ENVIRONMENT_ENV_NAME, MOCK_APPCONFIG_ENVIRONMENT_ID ) monkeypatch.setenv( - MOCK_APPCONFIG_CONFIGURATION_ENV_NAME, - MOCK_APPCONFIG_CONFIGURATION_ID, + MOCK_APPCONFIG_CONFIGURATION_ENV_NAME, MOCK_APPCONFIG_CONFIGURATION_ID ) monkeypatch.setenv( - MOCK_PRESIGNED_URL_ROLE_ARN_KEY, - MOCK_PRESIGNED_URL_ROLE_ARN_VALUE, + MOCK_PRESIGNED_URL_ROLE_ARN_KEY, MOCK_PRESIGNED_URL_ROLE_ARN_VALUE ) monkeypatch.setenv(MOCK_STATISTICS_TABLE_NAME, MOCK_STATISTICS_TABLE) monkeypatch.setenv( - MOCK_STATISTICAL_REPORTS_BUCKET_ENV_NAME, - MOCK_STATISTICS_REPORT_BUCKET_NAME, + MOCK_STATISTICAL_REPORTS_BUCKET_ENV_NAME, MOCK_STATISTICS_REPORT_BUCKET_NAME ) monkeypatch.setenv( - "STITCH_METADATA_DYNAMODB_NAME", - STITCH_METADATA_DYNAMODB_NAME_VALUE, + "STITCH_METADATA_DYNAMODB_NAME", STITCH_METADATA_DYNAMODB_NAME_VALUE ) monkeypatch.setenv("NRL_API_ENDPOINT", FAKE_URL) monkeypatch.setenv("ACCESS_AUDIT_TABLE_NAME", AUTH_STATE_TABLE_NAME) @@ -226,12 +214,10 @@ def set_env(monkeypatch): monkeypatch.setenv("APIM_API_URL", APIM_API_URL) monkeypatch.setenv("CLOUDFRONT_URL", "mock-cloudfront-url.com") monkeypatch.setenv( - "UNSTITCHED_LLOYD_GEORGE_DYNAMODB_NAME", - MOCK_UNSTITCHED_LG_TABLE_NAME, + "UNSTITCHED_LLOYD_GEORGE_DYNAMODB_NAME", MOCK_UNSTITCHED_LG_TABLE_NAME ) monkeypatch.setenv( - "DOCUMENT_RETRIEVE_ENDPOINT_APIM", - f"{APIM_API_URL}/DocumentReference", + "DOCUMENT_RETRIEVE_ENDPOINT_APIM", 
f"{APIM_API_URL}/DocumentReference" ) monkeypatch.setenv("VIRUS_SCAN_STUB", "True") monkeypatch.setenv("ITOC_TESTING_SLACK_BOT_TOKEN", MOCK_SLACK_BOT_TOKEN) @@ -363,8 +349,7 @@ class MockError(Enum): MOCK_CLIENT_ERROR = ClientError( - {"Error": {"Code": 500, "Message": "Test error message"}}, - "TEST", + {"Error": {"Code": 500, "Message": "Test error message"}}, "TEST" ) diff --git a/lambdas/tests/unit/handlers/test_document_reference_search_handler.py b/lambdas/tests/unit/handlers/test_document_reference_search_handler.py index a4d4768e09..4b6523dad2 100755 --- a/lambdas/tests/unit/handlers/test_document_reference_search_handler.py +++ b/lambdas/tests/unit/handlers/test_document_reference_search_handler.py @@ -1,15 +1,9 @@ import json -from copy import deepcopy from enum import Enum import pytest from enums.feature_flags import FeatureFlags -from enums.snomed_codes import SnomedCodes -from handlers.document_reference_search_handler import ( - extract_querystring_params, - lambda_handler, -) -from tests.unit.conftest import TEST_NHS_NUMBER +from handlers.document_reference_search_handler import lambda_handler from tests.unit.helpers.data.dynamo.dynamo_responses import EXPECTED_RESPONSE from utils.lambda_exceptions import DocumentRefSearchException from utils.lambda_response import ApiGatewayResponse @@ -26,35 +20,36 @@ class MockError(Enum): @pytest.fixture def mocked_service(set_env, mocker): mocked_class = mocker.patch( - "handlers.document_reference_search_handler.DocumentReferenceSearchService", + "handlers.document_reference_search_handler.DocumentReferenceSearchService" + ) + mocker.patch( + "handlers.document_reference_search_handler.FeatureFlagService.get_feature_flags_by_flag" ) mocked_service = mocked_class.return_value yield mocked_service -@pytest.fixture -def mocked_feature_flags(mocker): - feature_flag_service = mocker.patch( - "handlers.document_reference_search_handler.FeatureFlagService", - ) - yield feature_flag_service.return_value +def 
test_lambda_handler_returns_200( + mocked_service, valid_id_event_without_auth_header, context +): + mocked_service.get_document_references.return_value = EXPECTED_RESPONSE * 2 + expected = ApiGatewayResponse( + 200, json.dumps(EXPECTED_RESPONSE * 2), "GET" + ).create_api_gateway_response() -def test_lambda_handler_returns_200( - mocked_service, - valid_id_event_without_auth_header, - context, - mocked_feature_flags, + actual = lambda_handler(valid_id_event_without_auth_header, context) + + assert expected == actual + + +def test_lambda_handler_returns_204( + mocked_service, valid_id_event_without_auth_header, context ): - mocked_service.get_paginated_references_by_nhs_number.return_value = { - "references": EXPECTED_RESPONSE * 2, - "next_page_token": None, - } + mocked_service.get_document_references.return_value = [] expected = ApiGatewayResponse( - 200, - json.dumps({"references": EXPECTED_RESPONSE * 2, "nextPageToken": None}), - "GET", + 204, json.dumps([]), "GET" ).create_api_gateway_response() actual = lambda_handler(valid_id_event_without_auth_header, context) @@ -63,13 +58,10 @@ def test_lambda_handler_returns_200( def test_lambda_handler_raises_exception_returns_500( - mocked_service, - valid_id_event_without_auth_header, - context, - mocked_feature_flags, + mocked_service, valid_id_event_without_auth_header, context ): - mocked_service.get_paginated_references_by_nhs_number.side_effect = ( - DocumentRefSearchException(500, MockError.Error) + mocked_service.get_document_references.side_effect = DocumentRefSearchException( + 500, MockError.Error ) expected = ApiGatewayResponse( 500, @@ -81,60 +73,48 @@ def test_lambda_handler_raises_exception_returns_500( def test_lambda_handler_when_id_not_valid_returns_400( - set_env, - invalid_id_event, - context, - mocked_feature_flags, + set_env, invalid_id_event, context ): expected_body = json.dumps( { "message": "Invalid patient number 900000000900", "err_code": "PN_4001", "interaction_id": 
"88888888-4444-4444-4444-121212121212", - }, + } ) expected = ApiGatewayResponse( - 400, - expected_body, - "GET", + 400, expected_body, "GET" ).create_api_gateway_response() actual = lambda_handler(invalid_id_event, context) assert expected == actual def test_lambda_handler_when_id_not_supplied_returns_400( - set_env, - missing_id_event, - context, - mocked_feature_flags, + set_env, missing_id_event, context ): expected_body = json.dumps( { "message": "An error occurred due to missing key", "err_code": "PN_4002", "interaction_id": "88888888-4444-4444-4444-121212121212", - }, + } ) expected = ApiGatewayResponse( - 400, - expected_body, - "GET", + 400, expected_body, "GET" ).create_api_gateway_response() actual = lambda_handler(missing_id_event, context) assert expected == actual -def test_lambda_handler_when_dynamo_tables_env_variable_not_supplied_then_return_500_response( - valid_id_event_without_auth_header, - context, - mocked_feature_flags, +def test_lambda_handler_when_dynamo_tables_env_variable_not_supplied_then_return_400_response( + valid_id_event_without_auth_header, context ): expected_body = json.dumps( { - "message": "An error occurred due to missing environment variable: 'LLOYD_GEORGE_DYNAMODB_NAME'", + "message": "An error occurred due to missing environment variable: 'DYNAMODB_TABLE_LIST'", "err_code": "ENV_5001", "interaction_id": "88888888-4444-4444-4444-121212121212", - }, + } ) expected = ApiGatewayResponse( 500, @@ -146,205 +126,133 @@ def test_lambda_handler_when_dynamo_tables_env_variable_not_supplied_then_return def test_lambda_handler_with_feature_flag_enabled_applies_doc_status_filter( - set_env, - valid_id_event_without_auth_header, - context, - mocked_service, - mocked_feature_flags, + set_env, mocker, valid_id_event_without_auth_header, context ): - mocked_service.get_paginated_references_by_nhs_number.return_value = { - "references": EXPECTED_RESPONSE, - "next_page_token": None, - } + mocked_service_class = mocker.patch( + 
"handlers.document_reference_search_handler.DocumentReferenceSearchService" + ) + mocked_service = mocked_service_class.return_value + mocked_service.get_document_references.return_value = EXPECTED_RESPONSE - mocked_feature_flags.get_feature_flags_by_flag.return_value = { - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: True, + mocked_feature_flag_service = mocker.patch( + "handlers.document_reference_search_handler.FeatureFlagService" + ) + mocked_feature_flag_instance = mocked_feature_flag_service.return_value + mocked_feature_flag_instance.get_feature_flags_by_flag.return_value = { + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: True } expected = ApiGatewayResponse( - 200, - json.dumps( - { - "references": EXPECTED_RESPONSE, - "nextPageToken": None, - }, - ), - "GET", + 200, json.dumps(EXPECTED_RESPONSE), "GET" ).create_api_gateway_response() actual = lambda_handler(valid_id_event_without_auth_header, context) assert expected == actual - mocked_feature_flags.get_feature_flags_by_flag.assert_called_once_with( - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED, + mocked_feature_flag_instance.get_feature_flags_by_flag.assert_called_once_with( + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED ) - mocked_service.get_paginated_references_by_nhs_number.assert_called_once_with( - nhs_number=TEST_NHS_NUMBER, - limit=None, - next_page_token=None, - filter={"doc_status": "final"}, + mocked_service.get_document_references.assert_called_once_with( + "9000000009", + check_upload_completed=True, + additional_filters={"doc_status": "final"}, ) def test_lambda_handler_with_feature_flag_disabled_no_doc_status_filter( - set_env, - valid_id_event_without_auth_header, - context, - mocked_service, - mocked_feature_flags, + set_env, mocker, valid_id_event_without_auth_header, context ): + mocked_service_class = mocker.patch( + "handlers.document_reference_search_handler.DocumentReferenceSearchService" + ) + mocked_service = mocked_service_class.return_value + 
mocked_service.get_document_references.return_value = EXPECTED_RESPONSE - mocked_service.get_paginated_references_by_nhs_number.return_value = { - "references": EXPECTED_RESPONSE, - "next_page_token": None, - } - - mocked_feature_flags.get_feature_flags_by_flag.return_value = { - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: False, + mocked_feature_flag_service = mocker.patch( + "handlers.document_reference_search_handler.FeatureFlagService" + ) + mocked_feature_flag_instance = mocked_feature_flag_service.return_value + mocked_feature_flag_instance.get_feature_flags_by_flag.return_value = { + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: False } expected = ApiGatewayResponse( - 200, - json.dumps( - { - "references": EXPECTED_RESPONSE, - "nextPageToken": None, - }, - ), - "GET", + 200, json.dumps(EXPECTED_RESPONSE), "GET" ).create_api_gateway_response() actual = lambda_handler(valid_id_event_without_auth_header, context) assert expected == actual - mocked_feature_flags.get_feature_flags_by_flag.assert_called_once_with( - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED, + mocked_feature_flag_instance.get_feature_flags_by_flag.assert_called_once_with( + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED ) - mocked_service.get_paginated_references_by_nhs_number.assert_called_once_with( - nhs_number=TEST_NHS_NUMBER, - limit=None, - next_page_token=None, - filter={}, + mocked_service.get_document_references.assert_called_once_with( + "9000000009", + check_upload_completed=True, + additional_filters={}, ) - def test_lambda_handler_with_doc_type_applies_doc_type_filter( - set_env, - valid_id_event_without_auth_header, - context, - mocked_service, - mocked_feature_flags, + set_env, mocker, valid_id_event_without_auth_header, context ): - mocked_service.get_paginated_references_by_nhs_number.return_value = { - "references": EXPECTED_RESPONSE, - "next_page_token": None, - } + mocked_service_class = mocker.patch( + 
"handlers.document_reference_search_handler.DocumentReferenceSearchService" + ) + mocked_service = mocked_service_class.return_value + mocked_service.get_document_references.return_value = EXPECTED_RESPONSE - mocked_feature_flags.get_feature_flags_by_flag.return_value = { - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: False, + mocked_feature_flag_service = mocker.patch( + "handlers.document_reference_search_handler.FeatureFlagService" + ) + mocked_feature_flag_instance = mocked_feature_flag_service.return_value + mocked_feature_flag_instance.get_feature_flags_by_flag.return_value = { + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED: False } expected = ApiGatewayResponse( - 200, - json.dumps({"references": EXPECTED_RESPONSE, "nextPageToken": None}), - "GET", + 200, json.dumps(EXPECTED_RESPONSE), "GET" ).create_api_gateway_response() - doc_type = SnomedCodes.LLOYD_GEORGE.value.code + doc_type = "16521000000101" valid_id_event_without_auth_header["queryStringParameters"]["docType"] = doc_type actual = lambda_handler(valid_id_event_without_auth_header, context) assert expected == actual - mocked_feature_flags.get_feature_flags_by_flag.assert_called_once_with( - FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED, + mocked_feature_flag_instance.get_feature_flags_by_flag.assert_called_once_with( + FeatureFlags.UPLOAD_DOCUMENT_ITERATION_2_ENABLED ) - mocked_service.get_paginated_references_by_nhs_number.assert_called_once_with( - nhs_number=TEST_NHS_NUMBER, - limit=None, - next_page_token=None, - # check_upload_completed=True, - filter={"document_snomed_code": doc_type}, + mocked_service.get_document_references.assert_called_once_with( + "9000000009", + check_upload_completed=True, + additional_filters={"document_snomed_code": doc_type}, ) -def test_extract_querystring_params_next_page_token_present( - valid_id_event_without_auth_header, -): - event = deepcopy(valid_id_event_without_auth_header) - event["queryStringParameters"].update({"nextPageToken": "abc"}) - - 
expected = (TEST_NHS_NUMBER, "abc", None) - - actual = extract_querystring_params(event) - - assert expected == actual - - -def test_extract_querystring_params_no_next_page_token( - valid_id_event_without_auth_header, -): - expected = (TEST_NHS_NUMBER, None, None) - actual = extract_querystring_params(valid_id_event_without_auth_header) - assert expected == actual - - -def test_extract_querystring_params_limit_passed(valid_id_event_without_auth_header): - event = deepcopy(valid_id_event_without_auth_header) - event["queryStringParameters"].update({"limit": "10"}) - - expected = (TEST_NHS_NUMBER, None, "10") - actual = extract_querystring_params(event) - - assert expected == actual - - -def test_handler_uses_pagination_expected_params_passed( - valid_id_event_without_auth_header, - mocked_service, - context, - mocked_feature_flags, -): - - limit_event = deepcopy(valid_id_event_without_auth_header) - limit_event["queryStringParameters"].update({"limit": "10"}) - - token_event = deepcopy(valid_id_event_without_auth_header) - token_event["queryStringParameters"].update({"nextPageToken": "abc"}) - - events = [limit_event, token_event] - - for event in events: - lambda_handler(event, context) - mocked_service.get_paginated_references_by_nhs_number.assert_called() - - def test_lambda_handler_with_invalid_doc_type_returns_400( - set_env, - valid_id_event_without_auth_header, - context, - mocked_service, - mocked_feature_flags, + set_env, mocker, valid_id_event_without_auth_header, context ): + mocker.patch( + "handlers.document_reference_search_handler.DocumentReferenceSearchService" + ) + mocker.patch( + "handlers.document_reference_search_handler.FeatureFlagService" + ) invalid_doc_type = "invalid_doc_type" - valid_id_event_without_auth_header["queryStringParameters"][ - "docType" - ] = invalid_doc_type + valid_id_event_without_auth_header["queryStringParameters"]["docType"] = invalid_doc_type expected_body = json.dumps( { "message": "Invalid document type requested", 
"err_code": "VDT_4002", "interaction_id": "88888888-4444-4444-4444-121212121212", - }, + } ) expected = ApiGatewayResponse( - 400, - expected_body, - "GET", + 400, expected_body, "GET" ).create_api_gateway_response() actual = lambda_handler(valid_id_event_without_auth_header, context) - assert expected == actual + assert expected == actual \ No newline at end of file diff --git a/lambdas/tests/unit/handlers/test_report_s3_content_handler.py b/lambdas/tests/unit/handlers/test_report_s3_content_handler.py new file mode 100644 index 0000000000..da90d525db --- /dev/null +++ b/lambdas/tests/unit/handlers/test_report_s3_content_handler.py @@ -0,0 +1,45 @@ +from types import SimpleNamespace + +import pytest + +from lambdas.handlers.report_s3_content_handler import lambda_handler + + +@pytest.fixture(autouse=True) +def patch_env_vars(monkeypatch): + env_vars = { + "LLOYD_GEORGE_BUCKET_NAME": "bucket-a", + "STATISTICAL_REPORTS_BUCKET": "bucket-b", + "BULK_STAGING_BUCKET_NAME": "bucket-c", + } + for key, value in env_vars.items(): + monkeypatch.setenv(key, value) + + +@pytest.fixture +def lambda_context(): + return SimpleNamespace(aws_request_id="test-request-id") + + +def test_lambda_handler_invokes_service(mocker, lambda_context): + mock_service_cls = mocker.patch( + "lambdas.handlers.report_s3_content_handler.ReportS3ContentService" + ) + mock_service = mock_service_cls.return_value + + lambda_handler({}, lambda_context) + + mock_service_cls.assert_called_once() + mock_service.process_s3_content.assert_called_once() + + +def test_lambda_handler_runs_without_event_data(mocker, lambda_context): + mock_service_cls = mocker.patch( + "lambdas.handlers.report_s3_content_handler.ReportS3ContentService" + ) + mock_service = mock_service_cls.return_value + + lambda_handler({}, lambda_context) + + mock_service_cls.assert_called_once() + mock_service.process_s3_content.assert_called_once() diff --git a/lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py 
b/lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py index f420ca608e..b957c819f4 100755 --- a/lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py +++ b/lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py @@ -1,19 +1,15 @@ -from tests.unit.conftest import TEST_CURRENT_GP_ODS, TEST_FILE_SIZE, TEST_NHS_NUMBER - MOCK_SEARCH_RESPONSE = { "Items": [ { "ID": "3d8683b9-1665-40d2-8499-6e8302d507ff", - "Author": TEST_CURRENT_GP_ODS, "ContentType": "application/pdf", "Created": "2024-01-01T12:00:00.000Z", "DocStatus": "final", "FileLocation": "s3://test-s3-bucket/9000000009/test-key-123", "FileName": "document.csv", - "FileSize": TEST_FILE_SIZE, - "NhsNumber": TEST_NHS_NUMBER, + "NhsNumber": "9000000009", "VirusScannerResult": "Clean", - "CurrentGpOds": TEST_CURRENT_GP_ODS, + "CurrentGpOds": "Y12345", "Uploaded": "True", "Uploading": "False", "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 @@ -21,16 +17,14 @@ }, { "ID": "4d8683b9-1665-40d2-8499-6e8302d507ff", - "Author": TEST_CURRENT_GP_ODS, "ContentType": "application/pdf", "Created": "2024-01-01T12:00:00.000Z", "DocStatus": "final", "FileLocation": "s3://test-s3-bucket/9000000009/test-key-223", "FileName": "results.pdf", - "FileSize": TEST_FILE_SIZE, - "NhsNumber": TEST_NHS_NUMBER, + "NhsNumber": "9000000009", "VirusScannerResult": "Clean", - "CurrentGpOds": TEST_CURRENT_GP_ODS, + "CurrentGpOds": "Y12345", "Uploaded": "True", "Uploading": "False", "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 @@ -38,16 +32,14 @@ }, { "ID": "5d8683b9-1665-40d2-8499-6e8302d507ff", - "Author": TEST_CURRENT_GP_ODS, "ContentType": "application/pdf", "Created": "2024-01-01T12:00:00.000Z", "DocStatus": "final", "FileLocation": "s3://test-s3-bucket/9000000009/test-key-323", "FileName": "output.csv", - "FileSize": TEST_FILE_SIZE, - "NhsNumber": TEST_NHS_NUMBER, + "NhsNumber": "9000000009", "VirusScannerResult": "Clean", - "CurrentGpOds": TEST_CURRENT_GP_ODS, + "CurrentGpOds": "Y12345", "Uploaded": 
"True", "Uploading": "False", "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 diff --git a/lambdas/tests/unit/helpers/data/test_documents.py b/lambdas/tests/unit/helpers/data/test_documents.py index dae5903ce0..6035c1e474 100644 --- a/lambdas/tests/unit/helpers/data/test_documents.py +++ b/lambdas/tests/unit/helpers/data/test_documents.py @@ -9,7 +9,6 @@ MOCK_ARF_BUCKET, MOCK_LG_BUCKET, MOCK_LG_STAGING_STORE_BUCKET_ENV_NAME, - TEST_FILE_SIZE, TEST_NHS_NUMBER, TEST_UUID, ) @@ -56,22 +55,19 @@ def create_test_lloyd_george_doc_store_refs( refs[0].file_name = filename_1 refs[0].s3_file_key = f"{TEST_NHS_NUMBER}/test-key-1" refs[0].file_location = f"s3://{MOCK_LG_BUCKET}/{TEST_NHS_NUMBER}/test-key-1" - refs[0].file_size = TEST_FILE_SIZE refs[0].s3_bucket_name = MOCK_LG_BUCKET refs[0].document_snomed_code_type = SnomedCodes.LLOYD_GEORGE.value.code refs[1].file_name = filename_2 refs[1].s3_file_key = f"{TEST_NHS_NUMBER}/test-key-2" refs[1].file_location = f"s3://{MOCK_LG_BUCKET}/{TEST_NHS_NUMBER}/test-key-2" - refs[1].file_size = TEST_FILE_SIZE refs[1].s3_bucket_name = MOCK_LG_BUCKET refs[1].document_snomed_code_type = SnomedCodes.LLOYD_GEORGE.value.code refs[2].file_name = filename_3 refs[2].s3_file_key = f"{TEST_NHS_NUMBER}/test-key-3" refs[2].file_location = f"s3://{MOCK_LG_BUCKET}/{TEST_NHS_NUMBER}/test-key-3" - refs[2].file_size = TEST_FILE_SIZE refs[2].s3_bucket_name = MOCK_LG_BUCKET refs[2].document_snomed_code_type = SnomedCodes.LLOYD_GEORGE.value.code - + if override: refs = [doc_ref.model_copy(update=override) for doc_ref in refs] return refs @@ -94,8 +90,7 @@ def create_test_arf_doc_store_refs( def create_test_doc_refs( - override: Optional[Dict] = None, - file_names: Optional[List[str]] = None, + override: Optional[Dict] = None, file_names: Optional[List[str]] = None ) -> List[DocumentReference]: if not file_names: file_names = [ @@ -123,8 +118,7 @@ def create_test_doc_refs( def create_test_doc_refs_as_dict( - override: Optional[Dict] = None, - 
file_names: Optional[List[str]] = None, + override: Optional[Dict] = None, file_names: Optional[List[str]] = None ) -> List[Dict]: test_doc_refs = create_test_doc_refs(override, file_names) return [ @@ -143,7 +137,7 @@ def create_valid_fhir_doc_json(nhs_number: str = "9000000009"): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": nhs_number, - }, + } }, "type": { "coding": [ @@ -151,22 +145,22 @@ def create_valid_fhir_doc_json(nhs_number: str = "9000000009"): "system": "http://snomed.info/sct", "code": SnomedCodes.LLOYD_GEORGE.value.code, "display": SnomedCodes.LLOYD_GEORGE.value.display_name, - }, - ], + } + ] }, "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, - }, + } + } ], "content": [ { @@ -175,9 +169,9 @@ def create_valid_fhir_doc_json(nhs_number: str = "9000000009"): "language": "en-GB", "title": "test-file.pdf", "creation": "2023-01-01T12:00:00Z", - }, - }, + } + } ], "meta": {"versionId": "1"}, - }, + } ) diff --git a/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py b/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py index 5d0a3dbf33..fa76981406 100644 --- a/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py +++ b/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py @@ -32,13 +32,13 @@ def sample_staging_metadata(): files=[ BulkUploadQueueMetadata( file_path="staging/9000000009/test1.pdf", - stored_file_name="lg_test1.pdf", + stored_file_name="test1.pdf", gp_practice_code="Y12345", scan_date="2024-01-01", ), BulkUploadQueueMetadata( file_path="staging/9000000009/test2.pdf", - stored_file_name="lg_test2.pdf", + stored_file_name="test2.pdf", gp_practice_code="Y12345", scan_date="2024-01-01", ), @@ -48,9 +48,7 @@ def sample_staging_metadata(): 
def test_put_staging_metadata_back_to_queue_and_increases_retries( - set_env, - mock_uuid, - repo_under_test, + set_env, mock_uuid, repo_under_test ): TEST_STAGING_METADATA.retries = 2 metadata_copy = copy.deepcopy(TEST_STAGING_METADATA) @@ -90,9 +88,7 @@ def test_send_message_to_pdf_stitching_queue(set_env, repo_under_test): def test_sends_message_to_review_queue_with_correct_structure_and_fields( - set_env, - repo_under_test, - mock_uuid, + set_env, repo_under_test, mock_uuid ): repo_under_test.send_message_to_review_queue( staging_metadata=TEST_STAGING_METADATA, diff --git a/lambdas/tests/unit/services/reporting/test_csv_report_generator_service.py b/lambdas/tests/unit/services/reporting/test_csv_report_generator_service.py new file mode 100644 index 0000000000..d06f37b4c3 --- /dev/null +++ b/lambdas/tests/unit/services/reporting/test_csv_report_generator_service.py @@ -0,0 +1,24 @@ +from datetime import datetime, timezone + +from services.reporting.csv_report_generator_service import CsvReportGenerator + + +def test_generate_s3_inventory_csv(): + generator = CsvReportGenerator() + + objects = [ + { + "Key": "file1.txt", + "LastModified": datetime(2024, 1, 1, tzinfo=timezone.utc), + "Size": 123, + "ETag": "etag1", + "StorageClass": "STANDARD", + "Tags": [{"Key": "autodelete", "Value": "true"}], + } + ] + + csv_output = generator.generate_s3_inventory_csv("bucket-a", objects) + + assert "bucket-a" in csv_output + assert "file1.txt" in csv_output + assert "autodelete=true" in csv_output diff --git a/lambdas/tests/unit/services/reporting/test_report_s3_content_service.py b/lambdas/tests/unit/services/reporting/test_report_s3_content_service.py new file mode 100644 index 0000000000..7ce32ff5e1 --- /dev/null +++ b/lambdas/tests/unit/services/reporting/test_report_s3_content_service.py @@ -0,0 +1,43 @@ +from datetime import datetime, timezone + +from services.reporting.report_s3_content_service import ReportS3ContentService + + +def test_process_s3_content(mocker): + 
service = ReportS3ContentService() + + service.bulk_staging_store = "bucket-a" + service.statistic_reports_bucket = "reports-bucket" + + service.s3_service = mocker.Mock() + service.csv_generator = mocker.Mock() + + fake_objects = [ + { + "Key": "file1.txt", + "LastModified": datetime.now(tz=timezone.utc), + "Size": 123, + "ETag": "etag1", + "StorageClass": "STANDARD", + } + ] + + service.s3_service.list_all_objects.return_value = fake_objects + service.s3_service.get_object_tags_versioned.return_value = [ + {"Key": "autodelete", "Value": "true"} + ] + service.csv_generator.generate_s3_inventory_csv.return_value = "csv-data" + + service.process_s3_content() + + service.s3_service.list_all_objects.assert_called_once_with("bucket-a") + + service.s3_service.get_object_tags_versioned.assert_called_once_with( + "bucket-a", "file1.txt", None + ) + + service.csv_generator.generate_s3_inventory_csv.assert_called_once_with( + "bucket-a", fake_objects + ) + + service.s3_service.upload_file_obj.assert_called_once() diff --git a/lambdas/tests/unit/services/test_create_document_reference_service.py b/lambdas/tests/unit/services/test_create_document_reference_service.py index 3f8cd77ea8..8a1332e037 100644 --- a/lambdas/tests/unit/services/test_create_document_reference_service.py +++ b/lambdas/tests/unit/services/test_create_document_reference_service.py @@ -65,13 +65,13 @@ def mock_create_doc_ref_service(set_env, mocker): @pytest.fixture def mock_fhir_doc_ref_base_service(mocker, setup_request_context): mock_document_service = mocker.patch( - "services.fhir_document_reference_service_base.DocumentService", + "services.fhir_document_reference_service_base.DocumentService" ) mock_s3_service = mocker.patch( - "services.fhir_document_reference_service_base.S3Service", + "services.fhir_document_reference_service_base.S3Service" ) mock_dynamo_service = mocker.patch( - "services.fhir_document_reference_service_base.DynamoDBService", + 
"services.fhir_document_reference_service_base.DynamoDBService" ) service = FhirDocumentReferenceServiceBase() service.document_service = mock_document_service.return_value @@ -98,9 +98,9 @@ def mock_process_fhir_document_reference(mocker): return_value=json.dumps( { "content": [ - {"attachment": {"url": "https://test-bucket.s3.amazonaws.com/"}}, - ], - }, + {"attachment": {"url": "https://test-bucket.s3.amazonaws.com/"}} + ] + } ), ) @@ -119,15 +119,13 @@ def mock_create_document_reference(mock_create_doc_ref_service, mocker): @pytest.fixture() def mock_remove_records(mock_create_doc_ref_service, mocker): yield mocker.patch.object( - mock_create_doc_ref_service, - "remove_records_of_failed_upload", + mock_create_doc_ref_service, "remove_records_of_failed_upload" ) @pytest.fixture() def mock_check_existing_records_and_remove_failed_upload( - mock_create_doc_ref_service, - mocker, + mock_create_doc_ref_service, mocker ): yield mocker.patch.object( mock_create_doc_ref_service, @@ -138,7 +136,7 @@ def mock_check_existing_records_and_remove_failed_upload( @pytest.fixture() def mock_check_for_duplicate_files(mocker): yield mocker.patch( - "services.create_document_reference_service.check_for_duplicate_files", + "services.create_document_reference_service.check_for_duplicate_files" ) @@ -152,8 +150,7 @@ def mock_getting_patient_info_from_pds(mocker, mock_pds_patient): @pytest.fixture def mock_fetch_available_document_references_by_type( - mocker, - mock_fhir_doc_ref_base_service, + mocker, mock_fhir_doc_ref_base_service ): mock = mocker.patch.object( mock_fhir_doc_ref_base_service.document_service, @@ -172,12 +169,10 @@ def undo_mocking_for_is_upload_in_process(mock_fhir_doc_ref_base_service): @pytest.fixture def mock_get_allowed_list_of_ods_codes_for_upload_pilot( - mock_create_doc_ref_service, - mocker, + mock_create_doc_ref_service, mocker ): return mocker.patch.object( - mock_create_doc_ref_service.feature_flag_service, - 
"get_allowed_list_of_ods_codes_for_upload_pilot", + mock_create_doc_ref_service, "get_allowed_list_of_ods_codes_for_upload_pilot" ) @@ -191,8 +186,7 @@ def test_create_document_reference_request_empty_list( ): with pytest.raises(DocumentRefException) as e: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - [], + TEST_NHS_NUMBER, [] ) assert e.value == DocumentRefException(400, LambdaError.DocRefInvalidFiles) @@ -212,13 +206,12 @@ def test_create_document_reference_request_with_lg_list_happy_path( mock_check_for_duplicate_files, ): mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] mock_presigned_url_response = "https://test-bucket.s3.amazonaws.com/" url_references = mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) expected_response = { "uuid1": mock_presigned_url_response, @@ -228,8 +221,7 @@ def test_create_document_reference_request_with_lg_list_happy_path( assert url_references == expected_response mock_check_existing_records_and_remove_failed_upload.assert_called_with( - TEST_NHS_NUMBER, - LG_FILE_LIST[0]["docType"], + TEST_NHS_NUMBER, LG_FILE_LIST[0]["docType"] ) mock_check_for_duplicate_files.assert_called_once() @@ -261,20 +253,19 @@ def test_create_document_reference_request_raise_error_when_invalid_lg( file_name=file["fileName"], doc_type=SupportedDocumentTypes.LG, document_snomed_code_type=SnomedCodes.LLOYD_GEORGE.value.code, - ), + ) ) side_effects.append(document_references[index]) mock_create_document_reference.side_effect = side_effects mock_check_for_duplicate_files.side_effect = LGInvalidFilesException("test") mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] with pytest.raises(DocumentRefException): mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + 
TEST_NHS_NUMBER, LG_FILE_LIST ) mock_create_document_reference.assert_has_calls( @@ -302,8 +293,7 @@ def test_create_document_reference_failed_to_parse_pds_response( with pytest.raises(Exception) as exc_info: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) exception = exc_info.value @@ -325,8 +315,7 @@ def test_cdr_nhs_number_not_found_raises_search_patient_exception( with pytest.raises(Exception) as exc_info: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) exception = exc_info.value @@ -348,13 +337,12 @@ def test_cdr_non_pdf_file_raises_exception( ): mock_check_for_duplicate_files.side_effect = LGInvalidFilesException mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] with pytest.raises(Exception) as exc_info: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) exception = exc_info.value @@ -375,23 +363,18 @@ def test_create_document_reference_request_lg_upload_throw_lambda_error_if_uploa ): two_minutes_ago = 1698661380 # 2023-10-30T10:23:00 mock_records_upload_in_process = create_test_lloyd_george_doc_store_refs( - override={ - "uploaded": False, - "uploading": True, - "last_updated": two_minutes_ago, - }, + override={"uploaded": False, "uploading": True, "last_updated": two_minutes_ago} ) mock_fetch_available_document_references_by_type.return_value = ( mock_records_upload_in_process ) mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] with pytest.raises(DocumentRefException) as e: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) assert e.value == DocumentRefException(423, LambdaError.UploadInProgressError) @@ 
-409,13 +392,12 @@ def test_create_document_reference_request_lg_upload_throw_lambda_error_if_got_a create_test_lloyd_george_doc_store_refs() ) mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] with pytest.raises(DocumentRefException) as e: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) assert e.value == DocumentRefException(422, LambdaError.DocRefRecordAlreadyInPlace) @@ -429,7 +411,7 @@ def test_check_existing_records_remove_previous_failed_upload_and_continue( mocker, ): mock_doc_refs_of_failed_upload = create_test_lloyd_george_doc_store_refs( - override={"uploaded": False}, + override={"uploaded": False} ) mock_fetch_available_document_references_by_type.return_value = ( mock_doc_refs_of_failed_upload @@ -438,18 +420,15 @@ def test_check_existing_records_remove_previous_failed_upload_and_continue( mock_create_doc_ref_service.stop_if_upload_is_in_process = mocker.MagicMock() mock_create_doc_ref_service.check_existing_records_and_remove_failed_upload( - TEST_NHS_NUMBER, - mock_doc_refs_of_failed_upload[0].document_snomed_code_type, + TEST_NHS_NUMBER, mock_doc_refs_of_failed_upload[0].document_snomed_code_type ) mock_remove_records.assert_called_with( - MOCK_LG_TABLE_NAME, - mock_doc_refs_of_failed_upload, + MOCK_LG_TABLE_NAME, mock_doc_refs_of_failed_upload ) def test_parse_documents_list_for_valid_input( - mock_fhir_doc_ref_base_service, - mock_create_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_create_doc_ref_service ): mock_input = LG_FILE_LIST expected = PARSED_LG_FILE_LIST @@ -467,7 +446,7 @@ def test_parse_documents_list_raise_lambda_error_when_no_type( { "fileName": "test1.txt", "contentType": "text/plain", - }, + } ] with pytest.raises(DocumentRefException): @@ -483,7 +462,7 @@ def test_parse_documents_list_raise_lambda_error_when_doc_type_is_invalid( "fileName": "test1.txt", "contentType": 
"text/plain", "docType": "banana", - }, + } ] with pytest.raises(DocumentRefException): @@ -491,9 +470,7 @@ def test_parse_documents_list_raise_lambda_error_when_doc_type_is_invalid( def test_prepare_doc_object_lg_happy_path( - mocker, - mock_fhir_doc_ref_base_service, - mock_create_doc_ref_service, + mocker, mock_fhir_doc_ref_base_service, mock_create_doc_ref_service ): validated_document = UploadRequestDocument.model_validate(LG_FILE_LIST[0]) nhs_number = "1234567890" @@ -511,10 +488,7 @@ def test_prepare_doc_object_lg_happy_path( ) actual_document_reference = mock_create_doc_ref_service.create_document_reference( - nhs_number, - current_gp_ods, - validated_document, - snomed_code_type="SNOMED", + nhs_number, current_gp_ods, validated_document, snomed_code_type="SNOMED" ) assert actual_document_reference == mocked_doc @@ -547,7 +521,7 @@ def test_check_existing_records_does_nothing_if_no_record_exist( assert ( mock_create_doc_ref_service.check_existing_records_and_remove_failed_upload( TEST_NHS_NUMBER, - SupportedDocumentTypes.LG, + SupportedDocumentTypes.LG ) is None ) @@ -568,14 +542,14 @@ def test_check_existing_records_throw_error_if_upload_in_progress( "uploaded": False, "uploading": True, "last_updated": two_minutes_ago, - }, + } ) ) with pytest.raises(Exception) as e: mock_create_doc_ref_service.check_existing_records_and_remove_failed_upload( TEST_NHS_NUMBER, - SupportedDocumentTypes.LG, + SupportedDocumentTypes.LG ) ex = e.value assert isinstance(ex, DocumentRefException) @@ -598,7 +572,7 @@ def test_check_existing_records_throw_error_if_got_a_full_set_of_uploaded_record with pytest.raises(Exception) as e: mock_create_doc_ref_service.check_existing_records_and_remove_failed_upload( TEST_NHS_NUMBER, - SupportedDocumentTypes.LG, + SupportedDocumentTypes.LG ) ex = e.value @@ -610,12 +584,10 @@ def test_check_existing_records_throw_error_if_got_a_full_set_of_uploaded_record def test_remove_records_of_failed_upload( - mock_fhir_doc_ref_base_service, - 
mock_create_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_create_doc_ref_service, mocker ): mock_doc_refs_of_failed_upload = create_test_lloyd_george_doc_store_refs( - override={"uploaded": False}, + override={"uploaded": False} ) mock_create_doc_ref_service.post_fhir_doc_ref_service.s3_service = ( @@ -651,8 +623,7 @@ def test_ods_code_not_in_pilot_raises_exception( with pytest.raises(DocumentRefException) as exc_info: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) mock_create_document_reference.assert_not_called() @@ -663,6 +634,23 @@ def test_ods_code_not_in_pilot_raises_exception( assert exception.message == "ODS code does not match any of the allowed." +def test_get_allowed_list_of_ods_codes_for_upload_pilot( + mock_fhir_doc_ref_base_service, mock_create_doc_ref_service, mock_ssm +): + mock_ssm.get_ssm_parameter.return_value = MOCK_ALLOWED_ODS_CODES_LIST_PILOT[ + "Parameter" + ]["Value"] + expected = "PI001,PI002,PI003" + + actual = ( + mock_create_doc_ref_service.get_allowed_list_of_ods_codes_for_upload_pilot() + ) + + mock_ssm.get_ssm_parameter.assert_called_once() + + assert actual == expected + + def test_patient_ods_does_not_match_user_ods_and_raises_exception( mock_fhir_doc_ref_base_service, mock_create_doc_ref_service, @@ -672,8 +660,7 @@ def test_patient_ods_does_not_match_user_ods_and_raises_exception( with pytest.raises(DocumentRefException) as exc_info: mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE_LIST, + TEST_NHS_NUMBER, LG_FILE_LIST ) mock_create_document_reference.assert_not_called() @@ -696,53 +683,49 @@ def test_unable_to_find_config_raises_exception( mock_process_fhir_document_reference, ): mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] with pytest.raises(DocumentRefException) as exc_info: 
mock_create_doc_ref_service.create_document_reference_request( - TEST_NHS_NUMBER, - ARF_FILE_LIST, + TEST_NHS_NUMBER, ARF_FILE_LIST ) exception = exc_info.value assert isinstance(exception, DocumentRefException) assert exception.status_code == 400 - assert exception.message == "Invalid files or id" + assert ( + exception.message + == "Invalid files or id" + ) mock_process_fhir_document_reference.assert_not_called() - def test_check_existing_records_fetches_previous_records_for_doc_type( mock_fhir_doc_ref_base_service, mock_create_doc_ref_service, mock_fetch_available_document_references_by_type, mock_remove_records, - mocker, + mocker ): doc_type = SupportedDocumentTypes.LG - expected_query_filter = ( - NotDeleted - & DynamoQueryFilterBuilder() - .add_condition( - DocumentReferenceMetadataFields.DOCUMENT_SNOMED_CODE_TYPE, - AttributeOperator.EQUAL, - doc_type, - ) - .build() - ) + expected_query_filter = NotDeleted & DynamoQueryFilterBuilder().add_condition( + DocumentReferenceMetadataFields.DOCUMENT_SNOMED_CODE_TYPE, + AttributeOperator.EQUAL, + doc_type + ).build() mocker.patch( - "services.create_document_reference_service.get_document_type_filter", + "services.create_document_reference_service.get_document_type_filter" ).return_value = expected_query_filter mock_create_doc_ref_service.check_existing_records_and_remove_failed_upload( TEST_NHS_NUMBER, - doc_type, + doc_type ) mock_fetch_available_document_references_by_type.assert_called_with( nhs_number=TEST_NHS_NUMBER, doc_type=doc_type, - query_filter=expected_query_filter, - ) + query_filter=expected_query_filter + ) \ No newline at end of file diff --git a/lambdas/tests/unit/services/test_document_reference_search_service.py b/lambdas/tests/unit/services/test_document_reference_search_service.py index 712eb9ac8a..65396a4490 100644 --- a/lambdas/tests/unit/services/test_document_reference_search_service.py +++ b/lambdas/tests/unit/services/test_document_reference_search_service.py @@ -1,4 +1,5 @@ import json 
+from json import JSONDecodeError from unittest.mock import MagicMock, call import pytest @@ -12,46 +13,38 @@ from models.document_reference import DocumentReference from pydantic import ValidationError from services.document_reference_search_service import DocumentReferenceSearchService -from tests.unit.conftest import ( - APIM_API_URL, - MOCK_LG_TABLE_NAME, - TEST_CURRENT_GP_ODS, - TEST_FILE_SIZE, - TEST_NHS_NUMBER, -) +from tests.unit.conftest import APIM_API_URL from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE from utils.common_query_filters import NotDeleted, UploadCompleted from utils.exceptions import DynamoServiceException from utils.lambda_exceptions import DocumentRefSearchException MOCK_DOCUMENT_REFERENCE = [ - DocumentReference.model_validate(MOCK_SEARCH_RESPONSE["Items"][0]), + DocumentReference.model_validate(MOCK_SEARCH_RESPONSE["Items"][0]) ] +MOCK_FILE_SIZE = 24000 + EXPECTED_RESPONSE = { "created": "2024-01-01T12:00:00.000Z", - "author": TEST_CURRENT_GP_ODS, "fileName": "document.csv", "virusScannerResult": "Clean", "id": "3d8683b9-1665-40d2-8499-6e8302d507ff", - "fileSize": TEST_FILE_SIZE, + "fileSize": MOCK_FILE_SIZE, "version": "1", "contentType": "application/pdf", "documentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, } -MOCK_NEXT_PAGE_TOKEN = "thisisaencodedtoken" - @pytest.fixture def mock_document_service(mocker, set_env): service = DocumentReferenceSearchService() mock_s3_service = mocker.patch.object(service, "s3_service") - mocker.patch.object(mock_s3_service, "get_file_size", return_value=TEST_FILE_SIZE) + mocker.patch.object(mock_s3_service, "get_file_size", return_value=MOCK_FILE_SIZE) mocker.patch.object(service, "dynamo_service") mocker.patch.object(service, "fetch_documents_from_table_with_nhs_number") mocker.patch.object(service, "is_upload_in_process", return_value=False) - mocker.patch.object(service, "query_table_with_paginator") return service @@ -65,18 +58,23 @@ def 
mock_filter_builder(mocker): return mock_filter +def test_get_document_references_raise_json_error_when_no_table_list( + mock_document_service, monkeypatch +): + monkeypatch.setenv("DYNAMODB_TABLE_LIST", "") + with pytest.raises(JSONDecodeError): + mock_document_service._get_table_names(None) + + def test_search_tables_for_documents_raise_validation_error( - mock_document_service, - validation_error, + mock_document_service, validation_error ): mock_document_service.fetch_documents_from_table_with_nhs_number.side_effect = ( validation_error ) with pytest.raises(ValidationError): mock_document_service._search_tables_for_documents( - "1234567890", - "table1", - return_fhir=True, + "1234567890", ["table1", "table2"], return_fhir=True ) @@ -87,16 +85,14 @@ def test_get_document_references_raise_client_error(mock_document_service): "Error": { "Code": "test", "Message": "test", - }, + } }, "test", ) ) with pytest.raises(ClientError): mock_document_service._search_tables_for_documents( - "1234567890", - "table1", - return_fhir=True, + "1234567890", ["table1", "table2"], return_fhir=True ) @@ -107,7 +103,7 @@ def test_get_document_references_raise_dynamodb_error(mock_document_service): with pytest.raises(DynamoServiceException): mock_document_service._search_tables_for_documents( "1234567890", - "table1", + ["table1", "table2"], return_fhir=True, check_upload_completed=False, ) @@ -119,9 +115,7 @@ def test_get_document_references_dynamo_return_empty_response_with_fhir( mock_document_service.fetch_documents_from_table_with_nhs_number.return_value = [] actual = mock_document_service._search_tables_for_documents( - "1234567890", - "table1", - return_fhir=True, + "1234567890", ["table1", "table2"], return_fhir=True ) assert actual["resourceType"] == "Bundle" assert actual["entry"] == [] @@ -132,16 +126,13 @@ def test_get_document_references_dynamo_return_empty_response(mock_document_serv mock_document_service.fetch_documents_from_table_with_nhs_number.return_value = [] actual 
= mock_document_service._search_tables_for_documents( - "1234567890", - "table1", - return_fhir=False, + "1234567890", ["table1", "table2"], return_fhir=False ) assert actual is None def test_get_document_references_dynamo_return_successful_response_single_table( - mock_document_service, - monkeypatch, + mock_document_service, monkeypatch ): monkeypatch.setenv("DYNAMODB_TABLE_LIST", json.dumps(["test_table"])) @@ -150,9 +141,7 @@ def test_get_document_references_dynamo_return_successful_response_single_table( ) expected_results = MOCK_DOCUMENT_REFERENCE actual = mock_document_service.fetch_documents_from_table_with_nhs_number( - "111111111", - "test_table", - NotDeleted, + "111111111", "test_table", NotDeleted ) assert actual == expected_results @@ -165,6 +154,26 @@ def test_build_document_model_response(mock_document_service, monkeypatch): assert actual == expected_results +def test_get_document_references_dynamo_return_successful_response_multiple_tables( + mock_document_service, mocker +): + mock_fetch_documents = mocker.MagicMock(return_value=MOCK_DOCUMENT_REFERENCE) + mock_document_service.fetch_documents_from_table_with_nhs_number = ( + mock_fetch_documents + ) + mock_document_service._validate_upload_status = mocker.MagicMock() + mock_document_service._process_documents = mocker.MagicMock( + return_value=[EXPECTED_RESPONSE] + ) + expected_results = [EXPECTED_RESPONSE, EXPECTED_RESPONSE] + + actual = mock_document_service._search_tables_for_documents( + "1111111111", ["table1", "table2"], False + ) + + assert actual == expected_results + + def test_get_document_references_raise_error_when_upload_is_in_process( mock_document_service, ): @@ -175,30 +184,25 @@ def test_get_document_references_raise_error_when_upload_is_in_process( def test_get_document_references_success(mock_document_service, mocker): - mock_get_table_names = mocker.MagicMock(return_value="table1") - mock_document_service._get_table_name = mock_get_table_names + mock_get_table_names = 
mocker.MagicMock(return_value=["table1", "table2"]) + mock_document_service._get_table_names = mock_get_table_names mock_search_document = mocker.MagicMock(return_value=[{"id": "123"}]) mock_document_service._search_tables_for_documents = mock_search_document result = mock_document_service.get_document_references( - "1234567890", - return_fhir=False, + "1234567890", return_fhir=False ) assert result == [{"id": "123"}] mock_get_table_names.assert_called_once() mock_search_document.assert_called_once_with( - "1234567890", - "table1", - False, - None, - True, + "1234567890", ["table1", "table2"], False, None, True ) def test_get_document_references_exception(mock_document_service, mocker): - mock_document_service._get_table_name = mocker.MagicMock( - side_effect=DynamoServiceException, + mock_document_service._get_table_names = mocker.MagicMock( + side_effect=DynamoServiceException ) with pytest.raises(DocumentRefSearchException) as exc_info: @@ -220,24 +224,26 @@ def test_search_tables_for_documents_non_fhir(mock_document_service, mocker): mock_document_service._process_documents = mock_process_document_non_fhir result_non_fhir = mock_document_service._search_tables_for_documents( "1234567890", - "table1", + ["table1", "table2"], return_fhir=False, check_upload_completed=True, ) - assert result_non_fhir == [mock_document_id] + assert result_non_fhir == [mock_document_id, mock_document_id] mock_process_document_non_fhir.assert_has_calls( [ call(MOCK_DOCUMENT_REFERENCE, return_fhir=False), - ], + call(MOCK_DOCUMENT_REFERENCE, return_fhir=False), + ] ) - assert mock_fetch_document_method.call_count == 1 + assert mock_fetch_document_method.call_count == 2 mock_fetch_document_method.assert_has_calls( [ call("1234567890", "table1", query_filter=UploadCompleted), - ], + call("1234567890", "table2", query_filter=UploadCompleted), + ] ) @@ -253,26 +259,29 @@ def test_search_tables_for_documents_fhir(mock_document_service, mocker): mock_document_service._process_documents = 
mock_process_document_fhir result_fhir = mock_document_service._search_tables_for_documents( "1234567890", - "table1", + ["table1", "table2"], return_fhir=True, check_upload_completed=True, ) assert result_fhir["resourceType"] == "Bundle" assert result_fhir["type"] == "searchset" - assert result_fhir["total"] == 1 - assert len(result_fhir["entry"]) == 1 + assert result_fhir["total"] == 2 + assert len(result_fhir["entry"]) == 2 assert result_fhir["entry"][0]["resource"] == mock_fhir_doc + assert result_fhir["entry"][1]["resource"] == mock_fhir_doc mock_fetch_document_method.assert_has_calls( [ call("1234567890", "table1", query_filter=UploadCompleted), - ], + call("1234567890", "table2", query_filter=UploadCompleted), + ] ) mock_process_document_fhir.assert_has_calls( [ call(MOCK_DOCUMENT_REFERENCE, return_fhir=True), - ], + call(MOCK_DOCUMENT_REFERENCE, return_fhir=True), + ] ) @@ -288,12 +297,11 @@ def test_validate_upload_status_raises_exception(mock_document_service): def test_process_documents_return_fhir(mock_document_service): mock_document_service.create_document_reference_fhir_response = MagicMock( - return_value={"fhir": "response"}, + return_value={"fhir": "response"} ) result = mock_document_service._process_documents( - MOCK_DOCUMENT_REFERENCE, - return_fhir=True, + MOCK_DOCUMENT_REFERENCE, return_fhir=True ) assert result == [{"fhir": "response"}] @@ -311,13 +319,13 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): mock_document_reference.document_snomed_code_type = "16521000000101" mock_attachment = mocker.patch( - "services.document_reference_search_service.Attachment", + "services.document_reference_search_service.Attachment" ) mock_attachment_instance = mocker.MagicMock() mock_attachment.return_value = mock_attachment_instance mock_doc_ref_info = mocker.patch( - "services.document_reference_search_service.DocumentReferenceInfo", + "services.document_reference_search_service.DocumentReferenceInfo" ) 
mock_doc_ref_info_instance = mocker.MagicMock() mock_doc_ref_info.return_value = mock_doc_ref_info_instance @@ -336,7 +344,7 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "content": [ { @@ -346,28 +354,28 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): "title": "test_document.pdf", "creation": "2023-05-01", "url": f"{APIM_API_URL}/DocumentReference/123", - }, - }, + } + } ], "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y05868", - }, - }, + } + } ], "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y05868", - }, + } }, } mock_fhir_doc_ref.model_dump.return_value = expected_fhir_response result = mock_document_service.create_document_reference_fhir_response( - mock_document_reference, + mock_document_reference ) mock_attachment.assert_called_once_with( @@ -391,8 +399,7 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): @freeze_time("2023-05-01T12:00:00Z") def test_create_document_reference_fhir_response_integration( - mock_document_service, - mocker, + mock_document_service, mocker ): mock_document_reference = mocker.MagicMock() mock_document_reference.nhs_number = "9000000009" @@ -416,7 +423,7 @@ def test_create_document_reference_fhir_response_integration( "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "date": "2023-05-01T12:00:00", "content": [ @@ -427,22 +434,22 @@ def test_create_document_reference_fhir_response_integration( "title": "test_document.pdf", "creation": "2023-05-01", "url": f"{APIM_API_URL}/DocumentReference/16521000000101~Y05868-1634567890", - }, - }, + } + } ], "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y12345", - }, - }, + } + } ], 
"custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y12345", - }, + } }, "type": { "coding": [ @@ -450,14 +457,14 @@ def test_create_document_reference_fhir_response_integration( "system": "http://snomed.info/sct", "code": "16521000000101", "display": "Lloyd George record folder", - }, - ], + } + ] }, "meta": {"versionId": "1"}, } result = mock_document_service.create_document_reference_fhir_response( - mock_document_reference, + mock_document_reference ) assert isinstance(result, dict) @@ -476,8 +483,7 @@ def test_build_filter_expression_custodian(mock_document_service): def test_build_filter_expression_custodian_mocked( - mock_document_service, - mock_filter_builder, + mock_document_service, mock_filter_builder ): filter_values = {"custodian": "12345"} @@ -498,116 +504,6 @@ def test_build_filter_expression_defaults(mock_document_service): assert actual_filter == expected_filter - -def test_get_paginated_references_by_nhs_number_returns_references_and_token( - mock_document_service, -): - mock_document_service.query_table_with_paginator.return_value = ( - MOCK_DOCUMENT_REFERENCE, - MOCK_NEXT_PAGE_TOKEN, - ) - expected = { - "references": [EXPECTED_RESPONSE], - "next_page_token": MOCK_NEXT_PAGE_TOKEN, - } - - actual = mock_document_service.get_paginated_references_by_nhs_number( - nhs_number=TEST_NHS_NUMBER, - ) - - mock_document_service.query_table_with_paginator.assert_called_with( - table_name=MOCK_LG_TABLE_NAME, - index_name="NhsNumberIndex", - search_key="NhsNumber", - search_condition=TEST_NHS_NUMBER, - limit=None, - start_key=None, - filter_expression="#Deleted_attr = :Deleted_condition_val OR attribute_not_exists(#Deleted_attr)", - expression_attribute_names={"#Deleted_attr": "Deleted"}, - expression_attribute_values={":Deleted_condition_val": ""}, - ) - - assert actual == expected - - -def test_get_paginated_references_by_nhs_number_handles_filters(mock_document_service): - 
mock_document_service.query_table_with_paginator.return_value = ( - MOCK_DOCUMENT_REFERENCE, - MOCK_NEXT_PAGE_TOKEN, - ) - - mock_document_service.get_paginated_references_by_nhs_number( - nhs_number=TEST_NHS_NUMBER, - filter={"doc_status": "final"}, - ) - - mock_document_service.query_table_with_paginator.assert_called_with( - table_name=MOCK_LG_TABLE_NAME, - index_name="NhsNumberIndex", - search_key="NhsNumber", - search_condition=TEST_NHS_NUMBER, - limit=None, - start_key=None, - filter_expression=( - "(#Deleted_attr = :Deleted_condition_val OR attribute_not_exists(#Deleted_attr)) " - "AND #DocStatus_attr = :DocStatus_condition_val" - ), - expression_attribute_names={ - "#Deleted_attr": "Deleted", - "#DocStatus_attr": "DocStatus", - }, - expression_attribute_values={ - ":Deleted_condition_val": "", - ":DocStatus_condition_val": "final", - }, - ) - - -def test_build_pagination_filter_no_addition_filter_passed_returns_not_deleted_filter( - mock_document_service, -): - expected_filter_expression = ( - "#Deleted_attr = :Deleted_condition_val OR attribute_not_exists(#Deleted_attr)" - ) - expected_condition_attribute_names = {"#Deleted_attr": "Deleted"} - expected_condition_attribute_values = {":Deleted_condition_val": ""} - - ( - actual_filter_expression, - actual_condition_attribute_names, - actual_condition_attribute_values, - ) = mock_document_service._build_pagination_filter(None) - - assert expected_filter_expression == actual_filter_expression - assert expected_condition_attribute_names == actual_condition_attribute_names - assert expected_condition_attribute_values == actual_condition_attribute_values - - -def test_build_pagination_filter_handles_additional_filters(mock_document_service): - expected_filter_expression = ( - "(#Deleted_attr = :Deleted_condition_val OR attribute_not_exists(#Deleted_attr)) " - "AND #DocStatus_attr = :DocStatus_condition_val" - ) - expected_condition_attribute_names = { - "#Deleted_attr": "Deleted", - "#DocStatus_attr": 
"DocStatus", - } - expected_condition_attribute_values = { - ":Deleted_condition_val": "", - ":DocStatus_condition_val": "final", - } - - ( - actual_filter_expression, - actual_condition_attribute_names, - actual_condition_attribute_values, - ) = mock_document_service._build_pagination_filter({"doc_status": "final"}) - - assert actual_filter_expression == expected_filter_expression - assert actual_condition_attribute_names == expected_condition_attribute_names - assert actual_condition_attribute_values == expected_condition_attribute_values - - def test_build_filter_expression_document_snomed_code(mock_document_service): filter_values = {"document_snomed_code": "16521000000101"} expected_filter = Attr("DocumentSnomedCodeType").eq("16521000000101") & ( diff --git a/lambdas/tests/unit/services/test_document_review_processor_service.py b/lambdas/tests/unit/services/test_document_review_processor_service.py index c0e3a46262..70d746ab14 100644 --- a/lambdas/tests/unit/services/test_document_review_processor_service.py +++ b/lambdas/tests/unit/services/test_document_review_processor_service.py @@ -9,7 +9,6 @@ from models.pds_models import PatientDetails from models.sqs.review_message_body import ReviewMessageBody, ReviewMessageFile from services.document_review_processor_service import ReviewProcessorService -from tests.unit.conftest import MOCK_DOCUMENT_REVIEW_BUCKET, S3_PREFIX, TEST_NHS_NUMBER from utils.exceptions import ( InvalidResourceIdException, PatientNotFoundException, @@ -20,17 +19,13 @@ @pytest.fixture def mock_document_upload_review_service(mocker): return mocker.patch( - "services.document_review_processor_service.DocumentUploadReviewService", + "services.document_review_processor_service.DocumentUploadReviewService" ) @pytest.fixture def mock_s3_service(mocker): - mock_s3 = mocker.patch( - "services.document_review_processor_service.S3Service", - ).return_value - mock_s3.S3_PREFIX = S3_PREFIX - yield mock_s3 + return 
mocker.patch("services.document_review_processor_service.S3Service") @pytest.fixture @@ -47,7 +42,7 @@ def sample_review_message(): ReviewMessageFile( file_name="test_document.pdf", file_path="staging/9000000009/test_document.pdf", - ), + ) ], nhs_number="9000000009", failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, @@ -72,10 +67,20 @@ def mock_pds_service(mocker): return mock_pds +def test_service_initializes_with_correct_environment_variables( + set_env, mock_document_upload_review_service, mock_s3_service +): + service = ReviewProcessorService() + + assert service.review_table_name == "test_document_review" + assert service.staging_bucket_name == "test_staging_bulk_store" + assert service.review_bucket_name == "test_document_review_bucket" + mock_document_upload_review_service.assert_called_once() + mock_s3_service.assert_called_once() + + def test_process_review_message_success( - service_under_test, - sample_review_message, - mocker, + service_under_test, sample_review_message, mocker ): mock_move = mocker.patch.object(service_under_test, "_move_files_to_review_bucket") mock_delete = mocker.patch.object(service_under_test, "_delete_files_from_staging") @@ -84,7 +89,7 @@ def test_process_review_message_success( DocumentReviewFileDetails( file_name="test_document.pdf", file_location="9000000009/test-upload-id-123/test_document.pdf", - ), + ) ] service_under_test.process_review_message(sample_review_message) @@ -134,9 +139,7 @@ def test_process_review_message_multiple_files(service_under_test, mocker): def test_process_review_message_s3_copy_error( - service_under_test, - sample_review_message, - mocker, + service_under_test, sample_review_message, mocker ): mocker.patch.object( service_under_test, @@ -152,9 +155,7 @@ def test_process_review_message_s3_copy_error( def test_process_review_message_dynamo_error_not_precondition( - service_under_test, - sample_review_message, - mocker, + service_under_test, sample_review_message, mocker ): 
mocker.patch.object( service_under_test, @@ -163,7 +164,7 @@ def test_process_review_message_dynamo_error_not_precondition( DocumentReviewFileDetails( file_name="document_1.pdf", file_location="9000000009/test-upload-id-456/document_1.pdf", - ), + ) ], ) service_under_test.document_review_service.create_dynamo_entry.side_effect = ( @@ -178,9 +179,7 @@ def test_process_review_message_dynamo_error_not_precondition( def test_process_review_message_continues_dynamo_conditional_check_failure( - service_under_test, - sample_review_message, - mocker, + service_under_test, sample_review_message, mocker ): mocker.patch.object( service_under_test, @@ -189,7 +188,7 @@ def test_process_review_message_continues_dynamo_conditional_check_failure( DocumentReviewFileDetails( file_name="document_1.pdf", file_location="9000000009/test-upload-id-456/document_1.pdf", - ), + ) ], ) mocker.patch.object(service_under_test, "_delete_files_from_staging") @@ -199,7 +198,7 @@ def test_process_review_message_continues_dynamo_conditional_check_failure( "Error": { "Code": "ConditionalCheckFailedException", "Message": "DynamoDB error", - }, + } }, "PutItem", ) @@ -215,14 +214,11 @@ def test_build_review_record_success(service_under_test, sample_review_message): DocumentReviewFileDetails( file_name="test_document.pdf", file_location="9000000009/test-review-id/test_document.pdf", - ), + ) ] result = service_under_test._build_review_record( - sample_review_message, - "test-review-id", - files, - "Y12345", + sample_review_message, "test-review-id", files, "Y12345" ) assert isinstance(result, DocumentUploadReviewReference) @@ -269,10 +265,7 @@ def test_build_review_record_with_multiple_files(service_under_test): ] result = service_under_test._build_review_record( - message, - "test-review-id", - files, - "Y12345", + message, "test-review-id", files, "Y12345" ) assert len(result.files) == 2 @@ -284,22 +277,18 @@ def test_move_files_success(service_under_test, sample_review_message, mocker): 
mocker.patch("uuid.uuid4", return_value="123412342") files = service_under_test._move_files_to_review_bucket( - sample_review_message, - "test-review-id-123", + sample_review_message, "test-review-id-123" ) expected_key = "test-review-id-123/123412342" assert len(files) == 1 assert files[0].file_name == "test_document.pdf" - assert ( - files[0].file_location - == f"{S3_PREFIX}{MOCK_DOCUMENT_REVIEW_BUCKET}/{expected_key}" - ) + assert files[0].file_location == expected_key service_under_test.s3_service.copy_across_bucket.assert_called_once_with( source_bucket="test_staging_bulk_store", - source_file_key=f"staging/{TEST_NHS_NUMBER}/test_document.pdf", + source_file_key="staging/9000000009/test_document.pdf", dest_bucket="test_document_review_bucket", dest_file_key=expected_key, if_none_match=True, @@ -312,14 +301,14 @@ def test_move_multiple_files_success(service_under_test, mocker): files=[ ReviewMessageFile( file_name="document_1.pdf", - file_path=f"staging/{TEST_NHS_NUMBER}/document_1.pdf", + file_path="staging/9000000009/document_1.pdf", ), ReviewMessageFile( file_name="document_2.pdf", - file_path=f"staging/{TEST_NHS_NUMBER}/document_2.pdf", + file_path="staging/9000000009/document_2.pdf", ), ], - nhs_number=TEST_NHS_NUMBER, + nhs_number="9000000009", failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", ) @@ -329,15 +318,9 @@ def test_move_multiple_files_success(service_under_test, mocker): assert len(files) == 2 assert files[0].file_name == "document_1.pdf" - assert ( - files[0].file_location - == f"{S3_PREFIX}{MOCK_DOCUMENT_REVIEW_BUCKET}/test-review-id/123412342" - ) + assert files[0].file_location == "test-review-id/123412342" assert files[1].file_name == "document_2.pdf" - assert ( - files[1].file_location - == f"{S3_PREFIX}{MOCK_DOCUMENT_REVIEW_BUCKET}/test-review-id/56785678" - ) + assert files[1].file_location == "test-review-id/56785678" assert service_under_test.s3_service.copy_across_bucket.call_count == 2 @@ -350,21 +333,19 
@@ def test_move_files_copy_error(service_under_test, sample_review_message): with pytest.raises(ClientError): service_under_test._move_files_to_review_bucket( - sample_review_message, - "test-review-id", + sample_review_message, "test-review-id" ) def test_move_files_to_review_bucket_continues_file_already_exists_in_review_bucket( - service_under_test, - sample_review_message, + service_under_test, sample_review_message ): service_under_test.s3_service.copy_across_bucket.side_effect = ClientError( { "Error": { "Code": "PreconditionFailed", "Message": "At least one of the pre-conditions you specified did not hold", - }, + } }, "CopyObject", ) @@ -418,7 +399,7 @@ def test_workflow_handles_multiple_different_patients(service_under_test): ReviewMessageFile( file_name=f"doc_{i}.pdf", file_path=f"staging/900000000{i}/doc_{i}.pdf", - ), + ) ], nhs_number=f"900000000{i}", failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, @@ -437,9 +418,7 @@ def test_workflow_handles_multiple_different_patients(service_under_test): def test_get_patient_custodian_returns_gp_ods_from_pds( - service_under_test, - sample_review_message, - mock_pds_service, + service_under_test, sample_review_message, mock_pds_service ): result = service_under_test._get_patient_custodian(sample_review_message) @@ -448,13 +427,12 @@ def test_get_patient_custodian_returns_gp_ods_from_pds( def test_get_patient_custodian_returns_uploader_ods_when_nhs_number_is_none( - service_under_test, - mock_pds_service, + service_under_test, mock_pds_service ): message = ReviewMessageBody( upload_id="test-upload-id", files=[ - ReviewMessageFile(file_name="test.pdf", file_path="staging/test/test.pdf"), + ReviewMessageFile(file_name="test.pdf", file_path="staging/test/test.pdf") ], nhs_number="", failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, @@ -468,13 +446,12 @@ def test_get_patient_custodian_returns_uploader_ods_when_nhs_number_is_none( def 
test_get_patient_custodian_returns_uploader_ods_when_nhs_number_is_placeholder( - service_under_test, - mock_pds_service, + service_under_test, mock_pds_service ): message = ReviewMessageBody( upload_id="test-upload-id", files=[ - ReviewMessageFile(file_name="test.pdf", file_path="staging/test/test.pdf"), + ReviewMessageFile(file_name="test.pdf", file_path="staging/test/test.pdf") ], nhs_number="0000000000", failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, @@ -488,9 +465,7 @@ def test_get_patient_custodian_returns_uploader_ods_when_nhs_number_is_placehold def test_get_patient_custodian_returns_uploader_ods_on_pds_error( - service_under_test, - sample_review_message, - mock_pds_service, + service_under_test, sample_review_message, mock_pds_service ): mock_pds_service.fetch_patient_details.side_effect = PdsErrorException("PDS error") @@ -502,13 +477,11 @@ def test_get_patient_custodian_returns_uploader_ods_on_pds_error( def test_get_patient_custodian_returns_uploader_ods_on_invalid_resource_id( - service_under_test, - sample_review_message, - mock_pds_service, + service_under_test, sample_review_message, mock_pds_service ): mock_pds_service.fetch_patient_details.side_effect = InvalidResourceIdException( - "Invalid NHS number", + "Invalid NHS number" ) result = service_under_test._get_patient_custodian(sample_review_message) @@ -518,12 +491,10 @@ def test_get_patient_custodian_returns_uploader_ods_on_invalid_resource_id( def test_get_patient_custodian_handles_patient_not_found_sets_placeholder( - service_under_test, - sample_review_message, - mock_pds_service, + service_under_test, sample_review_message, mock_pds_service ): mock_pds_service.fetch_patient_details.side_effect = PatientNotFoundException( - "Patient not found", + "Patient not found" ) result = service_under_test._get_patient_custodian(sample_review_message) diff --git a/lambdas/tests/unit/services/test_feature_flags_service.py b/lambdas/tests/unit/services/test_feature_flags_service.py index 
b2e98b47f1..c04c9218cc 100644 --- a/lambdas/tests/unit/services/test_feature_flags_service.py +++ b/lambdas/tests/unit/services/test_feature_flags_service.py @@ -53,8 +53,7 @@ def mock_feature_flag_service(set_env, mocker, setup_request_context): def test_request_app_config_data_valid_response_returns_data( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=success_200_all_response, status_code=200) @@ -65,8 +64,7 @@ def test_request_app_config_data_valid_response_returns_data( def test_request_app_config_data_invalid_json_raises_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): invalid_json = "invalid:" mock_requests.get(test_url, text=invalid_json, status_code=500) @@ -80,8 +78,7 @@ def test_request_app_config_data_invalid_json_raises_exception( def test_request_app_config_data_400_raises_not_found_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=err_response, status_code=400) @@ -94,8 +91,7 @@ def test_request_app_config_data_400_raises_not_found_exception( def test_request_app_config_data_catch_all_raises_failure_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=err_response, status_code=500) @@ -121,8 +117,7 @@ def test_get_feature_flags_returns_all_flags(mock_requests, mock_feature_flag_se def test_get_feature_flags_no_flags_returns_empty( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=empty_response, status_code=200) mock_feature_flag_service.request_app_config_data.return_value = empty_response @@ -135,8 +130,7 @@ def test_get_feature_flags_no_flags_returns_empty( def test_get_feature_flags_invalid_raises_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, 
mock_feature_flag_service ): mock_requests.get(test_url, json=err_response, status_code=200) mock_feature_flag_service.request_app_config_data.return_value = err_response @@ -149,8 +143,7 @@ def test_get_feature_flags_invalid_raises_exception( def test_get_feature_flags_by_flag_returns_single_flag( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=success_200_with_filter_reponse, status_code=200) mock_feature_flag_service.request_app_config_data.return_value = ( @@ -165,8 +158,7 @@ def test_get_feature_flags_by_flag_returns_single_flag( def test_get_feature_flags_by_flag_no_flag_raises_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=empty_response, status_code=200) mock_feature_flag_service.request_app_config_data.return_value = empty_response @@ -180,8 +172,7 @@ def test_get_feature_flags_by_flag_no_flag_raises_exception( def test_get_feature_flags_by_flag_invalid_raises_exception( - mock_requests, - mock_feature_flag_service, + mock_requests, mock_feature_flag_service ): mock_requests.get(test_url, json=err_response, status_code=200) mock_feature_flag_service.request_app_config_data.return_value = err_response @@ -204,15 +195,14 @@ def test_get_allowed_list_of_ods_codes_for_upload_pilot(mock_feature_flag_servic assert actual_codes == expected_codes mock_feature_flag_service.ssm_service.get_ssm_parameter.assert_called_with( - UPLOAD_PILOT_ODS_ALLOWED_LIST, + UPLOAD_PILOT_ODS_ALLOWED_LIST ) def test_get_allowed_list_of_ods_codes_for_upload_pilot_no_codes_found( - mock_feature_flag_service, - caplog, + mock_feature_flag_service, caplog ): - mock_feature_flag_service.ssm_service.get_ssm_parameter.return_value = "*" + mock_feature_flag_service.ssm_service.get_ssm_parameter.return_value = [] result = mock_feature_flag_service.get_allowed_list_of_ods_codes_for_upload_pilot() @@ -231,10 +221,7 @@ def 
test_get_allowed_list_of_ods_codes_for_upload_pilot_no_codes_found( ], ) def test_check_if_ods_code_is_in_pilot( - mocker, - mock_feature_flag_service, - auth_context, - expected_result, + mocker, mock_feature_flag_service, auth_context, expected_result ): mock_context = mocker.MagicMock() mock_context.authorization = auth_context @@ -322,8 +309,7 @@ def test_get_feature_flags_by_flag_overwrites_upload_flag( def test_get_feature_flags_by_flag_for_non_upload_flag( - mocker, - mock_feature_flag_service, + mocker, mock_feature_flag_service ): flag_name = "some_other_flag" mocker.patch.object(mock_feature_flag_service, "check_if_ods_code_is_in_pilot") diff --git a/lambdas/tests/unit/services/test_get_document_upload_status.py b/lambdas/tests/unit/services/test_get_document_upload_status.py index 3000e11a6c..b646a327f0 100644 --- a/lambdas/tests/unit/services/test_get_document_upload_status.py +++ b/lambdas/tests/unit/services/test_get_document_upload_status.py @@ -15,12 +15,9 @@ def mock_document_service(): @pytest.fixture -def get_document_upload_status_service(mock_document_service, mocker): - mocker.patch( - "services.get_document_upload_status.DocumentService", - return_value=mock_document_service, - ) +def get_document_upload_status_service(mock_document_service): service = GetDocumentUploadStatusService() + service.document_service = mock_document_service return service @@ -83,13 +80,11 @@ def test_get_document_references_by_id_found_documents( ) result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) mock_document_service.get_batch_document_references_by_id.assert_called_once_with( - document_ids, - SupportedDocumentTypes.LG, + document_ids, SupportedDocumentTypes.LG ) assert len(result) == 2 assert result["doc-id-1"]["status"] == "final" @@ -106,12 +101,11 @@ def test_get_document_references_by_id_not_found_documents( nhs_number = "1234567890" document_ids = ["doc-id-1", 
"non-existent-id"] mock_document_service.get_batch_document_references_by_id.return_value = [ - sample_document_references[0], + sample_document_references[0] ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 2 @@ -129,12 +123,11 @@ def test_get_document_references_by_id_access_denied( nhs_number = "1234567890" document_ids = ["doc-id-3"] mock_document_service.get_batch_document_references_by_id.return_value = [ - sample_document_references[2], + sample_document_references[2] ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 1 @@ -150,12 +143,11 @@ def test_get_document_references_by_id_infected_document( nhs_number = "1234567890" document_ids = ["doc-id-4"] mock_document_service.get_batch_document_references_by_id.return_value = [ - sample_document_references[3], + sample_document_references[3] ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 1 @@ -163,38 +155,8 @@ def test_get_document_references_by_id_infected_document( assert result["doc-id-4"]["error_code"] == DocumentStatus.INFECTED.code -def test_get_document_references_by_id_invalid_document( - get_document_upload_status_service, - mock_document_service, -): - nhs_number = "1234567890" - document_ids = ["doc-id-invalid"] - - cancelled_doc = DocumentReference( - id="doc-id-invalid", - nhs_number="1234567890", - file_name="invalid_file.pdf", - doc_status=DocumentStatus.CANCELLED.display, - virus_scanner_result=VirusScanResult.INVALID, - ) - - mock_document_service.get_batch_document_references_by_id.return_value = [ - cancelled_doc, - ] - - result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, - ) - - assert len(result) == 1 - assert 
result["doc-id-invalid"]["status"] == DocumentStatus.INVALID.display - assert result["doc-id-invalid"]["error_code"] == DocumentStatus.INVALID.code - - def test_get_document_references_by_id_cancelled_document( - get_document_upload_status_service, - mock_document_service, + get_document_upload_status_service, mock_document_service ): nhs_number = "1234567890" document_ids = ["doc-id-cancelled"] @@ -209,12 +171,11 @@ def test_get_document_references_by_id_cancelled_document( ) mock_document_service.get_batch_document_references_by_id.return_value = [ - cancelled_doc, + cancelled_doc ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 1 @@ -230,12 +191,11 @@ def test_get_document_references_by_id_deleted_document( nhs_number = "1234567890" document_ids = ["doc-id-5"] mock_document_service.get_batch_document_references_by_id.return_value = [ - sample_document_references[4], + sample_document_references[4] ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 0 @@ -256,8 +216,7 @@ def test_get_document_references_by_id_multiple_mixed_statuses( ] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) assert len(result) == 4 @@ -277,21 +236,18 @@ def test_get_document_references_by_id_multiple_mixed_statuses( def test_get_document_references_by_id_no_results( - get_document_upload_status_service, - mock_document_service, + get_document_upload_status_service, mock_document_service ): nhs_number = "1234567890" document_ids = ["doc-id-6"] mock_document_service.get_batch_document_references_by_id.return_value = [] result = get_document_upload_status_service.get_document_references_by_id( - nhs_number, - document_ids, + nhs_number, document_ids ) 
mock_document_service.get_batch_document_references_by_id.assert_called_once_with( - document_ids, - SupportedDocumentTypes.LG, + document_ids, SupportedDocumentTypes.LG ) assert result["doc-id-6"]["status"] == DocumentStatus.NOT_FOUND.display assert result["doc-id-6"]["error_code"] == DocumentStatus.NOT_FOUND.code diff --git a/lambdas/tests/unit/services/test_get_fhir_document_reference_service.py b/lambdas/tests/unit/services/test_get_fhir_document_reference_service.py index a018e56dea..47ed6dcd96 100644 --- a/lambdas/tests/unit/services/test_get_fhir_document_reference_service.py +++ b/lambdas/tests/unit/services/test_get_fhir_document_reference_service.py @@ -33,8 +33,7 @@ def test_get_document_reference_service(patched_service): patched_service.document_service.fetch_documents_from_table.return_value = documents actual = patched_service.get_document_references( - "3d8683b9-1665-40d2-8499-6e8302d507ff", - MOCK_LG_TABLE_NAME, + "3d8683b9-1665-40d2-8499-6e8302d507ff", MOCK_LG_TABLE_NAME ) assert actual == documents[0] @@ -45,14 +44,11 @@ def test_handle_get_document_reference_request(patched_service, mocker, set_env) expected = documents[0] mock_document_ref = documents[0] mocker.patch.object( - patched_service, - "get_document_references", - return_value=mock_document_ref, + patched_service, "get_document_references", return_value=mock_document_ref ) actual = patched_service.handle_get_document_reference_request( - SnomedCodes.LLOYD_GEORGE.value.code, - "test-id", + SnomedCodes.LLOYD_GEORGE.value.code, "test-id" ) assert expected == actual @@ -86,8 +82,7 @@ def test_get_presigned_url(patched_service, mocker): ) result = patched_service.get_presigned_url( - "test-s3-bucket", - "9000000009/test-key-123", + "test-s3-bucket", "9000000009/test-key-123" ) assert result == "https://example.com/path/to/resource" @@ -111,7 +106,7 @@ def test_get_document_references_empty_result(patched_service): def test_get_presigned_url_failure(patched_service, mocker): # Test when S3 
service raises an exception patched_service.s3_service.create_download_presigned_url.side_effect = Exception( - "S3 error", + "S3 error" ) with pytest.raises(Exception) as exc_info: @@ -122,8 +117,7 @@ def test_get_presigned_url_failure(patched_service, mocker): def test_create_document_reference_fhir_response_with_presign_url_document_data( - patched_service, - mocker, + patched_service, mocker ): # Test creating FHIR response with different document data test_doc = create_test_doc_store_refs()[0] @@ -131,12 +125,12 @@ def test_create_document_reference_fhir_response_with_presign_url_document_data( # Modify the document reference to test different values modified_doc = copy.deepcopy(test_doc) modified_doc.file_name = "different_file.pdf" - modified_doc.file_size = 8000000 modified_doc.created = "2023-05-15T10:30:00.000Z" modified_doc.document_scan_creation = "2023-05-15" + patched_service.s3_service.get_file_size.return_value = 8000000 patched_service.get_presigned_url = mocker.MagicMock( - return_value="https://new-test-url.com", + return_value="https://new-test-url.com" ) result = patched_service.create_document_reference_fhir_response(modified_doc) @@ -171,8 +165,7 @@ def test_create_document_reference_fhir_response_with_binary_document_data( def test_create_document_reference_fhir_response_non_final_status( - patched_service, - mocker, + patched_service, mocker ): """Test FHIR response creation for documents with non-final status.""" test_doc = create_test_doc_store_refs()[0] @@ -194,8 +187,7 @@ def test_create_document_reference_fhir_response_non_final_status( def test_create_document_reference_fhir_response_when_patient_is_deceased( - patched_service, - mocker, + patched_service, mocker ): """Test FHIR response creation for documents with non-final status.""" test_doc = create_test_doc_store_refs()[0] diff --git a/lambdas/tests/unit/services/test_pdf_stitch_service.py b/lambdas/tests/unit/services/test_pdf_stitch_service.py new file mode 100644 index 
0000000000..2634e1a1fb --- /dev/null +++ b/lambdas/tests/unit/services/test_pdf_stitch_service.py @@ -0,0 +1,68 @@ +import os +import tempfile +from io import BytesIO + +import pytest +from pypdf import PdfWriter +from pypdf.errors import PyPdfError +from services.pdf_stitch_service import count_page_number, stitch_pdf + + +def test_stitch_pdf(): + test_pdf_folder = "tests/unit/helpers/data/pdf/" + input_test_files = [ + f"{test_pdf_folder}/{filename}" + for filename in ["file1.pdf", "file2.pdf", "file3.pdf"] + ] + + stitched_file = stitch_pdf(input_test_files) + assert count_page_number(stitched_file) == sum( + count_page_number(filepath) for filepath in input_test_files + ) + + os.remove(stitched_file) + + +def test_stitch_pdf_with_given_desc_folder(): + test_pdf_folder = "tests/unit/helpers/data/pdf/" + test_desc_folder = tempfile.mkdtemp() + + input_test_files = [ + f"{test_pdf_folder}/{filename}" + for filename in ["file1.pdf", "file2.pdf", "file3.pdf"] + ] + + stitched_file = stitch_pdf(input_test_files, test_desc_folder) + + assert stitched_file.startswith(test_desc_folder) + + os.remove(stitched_file) + + +def test_stitch_pdf_raise_error_if_fail_to_perform_stitching(): + test_pdf_folder = "tests/unit/helpers/data/pdf/" + input_test_files = [ + f"{test_pdf_folder}/{filename}" for filename in ["invalid_pdf.pdf", "file1.pdf"] + ] + + with pytest.raises(PyPdfError): + stitch_pdf(input_test_files) + + +def test_stitch_pdf_raise_error_when_input_file_not_found(): + test_file = "non-exist-file.pdf" + + with pytest.raises(FileNotFoundError): + stitch_pdf([test_file]) + + +def create_in_memory_pdf(page_count: int = 1) -> BytesIO: + # Creates a PDF in memory with the received number of pages + writer = PdfWriter() + for _ in range(page_count): + writer.add_blank_page(width=72, height=72) + + stream = BytesIO() + writer.write(stream) + stream.seek(0) + return stream diff --git a/lambdas/tests/unit/services/test_pdf_stitching_service.py
b/lambdas/tests/unit/services/test_pdf_stitching_service.py index fbdcaf9d2d..4be6959917 100644 --- a/lambdas/tests/unit/services/test_pdf_stitching_service.py +++ b/lambdas/tests/unit/services/test_pdf_stitching_service.py @@ -23,7 +23,6 @@ MOCK_LG_TABLE_NAME, MOCK_UNSTITCHED_LG_TABLE_NAME, TEST_BASE_DIRECTORY, - TEST_CURRENT_GP_ODS, TEST_NHS_NUMBER, TEST_UUID, ) @@ -94,8 +93,7 @@ def mock_upload_stitched_file(mocker, mock_service): @pytest.fixture def mock_update_stitched_reference_with_version_id(mocker, mock_service): return mocker.patch.object( - mock_service, - "update_stitched_reference_with_version_id", + mock_service, "update_stitched_reference_with_version_id" ) @@ -137,10 +135,7 @@ def mock_rollback_reference_migration(mocker, mock_service): @pytest.fixture def mock_download_fileobj(): def _mock_download_fileobj( - s3_object_data: dict[str, BytesIO], - Bucket: str, - Key: str, - Fileobj: BytesIO, + s3_object_data: dict[str, BytesIO], Bucket: str, Key: str, Fileobj: BytesIO ): if Key in s3_object_data: Fileobj.write(s3_object_data[Key].read()) @@ -157,16 +152,14 @@ def _mock_download_fileobj( ], ) def test_retrieve_multipart_references_returns_multipart_references( - mock_service, - doc_type, + mock_service, doc_type ): mock_service.document_service.fetch_available_document_references_by_type.return_value = ( TEST_DOCUMENT_REFERENCES ) actual = mock_service.retrieve_multipart_references( - nhs_number=TEST_NHS_NUMBER, - doc_type=doc_type, + nhs_number=TEST_NHS_NUMBER, doc_type=doc_type ) assert actual == TEST_DOCUMENT_REFERENCES @@ -175,12 +168,11 @@ def test_retrieve_multipart_references_returns_multipart_references( def test_retrieve_multipart_references_returns_empty_list_if_LG_stitched(mock_service): mock_service.document_service.fetch_available_document_references_by_type.return_value = [ - TEST_1_OF_1_DOCUMENT_REFERENCE, + TEST_1_OF_1_DOCUMENT_REFERENCE ] actual = mock_service.retrieve_multipart_references( - nhs_number=TEST_NHS_NUMBER, - 
doc_type=SupportedDocumentTypes.LG, + nhs_number=TEST_NHS_NUMBER, doc_type=SupportedDocumentTypes.LG ) assert actual == [] @@ -228,7 +220,7 @@ def set_stitched_reference(document_reference, stitch_file_size, *args, **kwargs mock_migrate_multipart_references.assert_called_once() mock_write_stitching_reference.assert_called_once() mock_publish_nrl_message.assert_called_once_with( - snomed_code_doc_type=test_message.snomed_code_doc_type, + snomed_code_doc_type=test_message.snomed_code_doc_type ) @@ -350,10 +342,7 @@ def test_process_stitching(mock_service, mock_download_fileobj): mock_service.s3_service.client.download_fileobj.side_effect = ( lambda Bucket, Key, Fileobj: mock_download_fileobj( - s3_object_data, - Bucket, - Key, - Fileobj, + s3_object_data, Bucket, Key, Fileobj ) ) @@ -371,14 +360,12 @@ def test_migrate_multipart_references(mock_service): table_name=MOCK_UNSTITCHED_LG_TABLE_NAME, item={ "ContentType": "application/pdf", - "Author": TEST_CURRENT_GP_ODS, "Created": "2024-01-01T12:00:00.000Z", "DocumentScanCreation": "2024-01-01", "DocStatus": "final", "DocumentSnomedCodeType": "16521000000101", "FileLocation": f"{TEST_DOCUMENT_REFERENCES[0].file_location}", "FileName": f"{TEST_DOCUMENT_REFERENCES[0].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[0].file_size, "ID": f"{TEST_DOCUMENT_REFERENCES[0].id}", "LastUpdated": 1704110400, "NhsNumber": f"{TEST_DOCUMENT_REFERENCES[0].nhs_number}", @@ -395,13 +382,11 @@ def test_migrate_multipart_references(mock_service): item={ "ContentType": "application/pdf", "Created": "2024-01-01T12:00:00.000Z", - "Author": TEST_CURRENT_GP_ODS, "DocStatus": "final", "DocumentScanCreation": "2024-01-01", "DocumentSnomedCodeType": "16521000000101", "FileLocation": f"{TEST_DOCUMENT_REFERENCES[1].file_location}", "FileName": f"{TEST_DOCUMENT_REFERENCES[1].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[1].file_size, "ID": f"{TEST_DOCUMENT_REFERENCES[1].id}", "LastUpdated": 1704110400, "NhsNumber": 
f"{TEST_DOCUMENT_REFERENCES[1].nhs_number}", @@ -418,13 +403,11 @@ def test_migrate_multipart_references(mock_service): item={ "ContentType": "application/pdf", "Created": "2024-01-01T12:00:00.000Z", - "Author": TEST_CURRENT_GP_ODS, "DocStatus": "final", "DocumentScanCreation": "2024-01-01", "DocumentSnomedCodeType": "16521000000101", "FileLocation": f"{TEST_DOCUMENT_REFERENCES[2].file_location}", "FileName": f"{TEST_DOCUMENT_REFERENCES[2].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[2].file_size, "ID": f"{TEST_DOCUMENT_REFERENCES[2].id}", "LastUpdated": 1704110400, "NhsNumber": f"{TEST_DOCUMENT_REFERENCES[2].nhs_number}", @@ -450,8 +433,7 @@ def test_migrate_multipart_references(mock_service): def test_migrate_multipart_references_handles_client_error_on_create( - mock_service, - caplog, + mock_service, caplog ): mock_service.multipart_references = TEST_DOCUMENT_REFERENCES mock_service.dynamo_service.create_item.side_effect = MOCK_CLIENT_ERROR @@ -464,8 +446,7 @@ def test_migrate_multipart_references_handles_client_error_on_create( def test_migrate_multipart_references_handles_client_error_on_delete( - mock_service, - caplog, + mock_service, caplog ): mock_service.multipart_references = TEST_DOCUMENT_REFERENCES mock_service.dynamo_service.delete_item.side_effect = MOCK_CLIENT_ERROR @@ -523,7 +504,7 @@ def test_publish_nrl_message(mock_service, mock_uuid): ) mock_service.publish_nrl_message( - snomed_code_doc_type=SnomedCodes.LLOYD_GEORGE.value, + snomed_code_doc_type=SnomedCodes.LLOYD_GEORGE.value ) mock_service.sqs_service.send_message_fifo.assert_called_once_with( @@ -539,7 +520,7 @@ def test_publish_nrl_message_handles_client_error(mock_service): with pytest.raises(PdfStitchingException): mock_service.publish_nrl_message( - snomed_code_doc_type=SnomedCodes.LLOYD_GEORGE.value, + snomed_code_doc_type=SnomedCodes.LLOYD_GEORGE.value ) @@ -567,9 +548,7 @@ def test_sort_multipart_object_keys_raises_exception(mock_service): def 
test_rollback_stitching_process_successfully_rolls_back( - mock_service, - mock_rollback_stitched_reference, - mock_rollback_reference_migration, + mock_service, mock_rollback_stitched_reference, mock_rollback_reference_migration ): mock_service.stitched_reference = TEST_1_OF_1_DOCUMENT_REFERENCE mock_service.multipart_references = TEST_DOCUMENT_REFERENCES @@ -586,12 +565,10 @@ def test_rollback_stitched_reference(mock_service): mock_service.rollback_stitched_reference() mock_service.dynamo_service.delete_item.assert_called_once_with( - table_name=MOCK_LG_TABLE_NAME, - key={"ID": TEST_1_OF_1_DOCUMENT_REFERENCE.id}, + table_name=MOCK_LG_TABLE_NAME, key={"ID": TEST_1_OF_1_DOCUMENT_REFERENCE.id} ) mock_service.s3_service.delete_object.assert_called_once_with( - s3_bucket_name=MOCK_LG_BUCKET, - file_key=f"{TEST_NHS_NUMBER}/test-key-123", + s3_bucket_name=MOCK_LG_BUCKET, file_key=f"{TEST_NHS_NUMBER}/test-key-123" ) @@ -616,99 +593,8 @@ def test_rollback_reference_migration(mock_service): mock_service.rollback_reference_migration() - mock_service.dynamo_service.create_item.assert_has_calls( - [ - call( - table_name=MOCK_LG_TABLE_NAME, - item={ - "ContentType": "application/pdf", - "Created": TEST_DOCUMENT_REFERENCES[0].created, - "Author": TEST_CURRENT_GP_ODS, - "CurrentGpOds": TEST_DOCUMENT_REFERENCES[0].current_gp_ods, - "DocStatus": "final", - "DocumentScanCreation": "2024-01-01", - "DocumentSnomedCodeType": "16521000000101", - "FileLocation": f"{TEST_DOCUMENT_REFERENCES[0].file_location}", - "FileName": f"{TEST_DOCUMENT_REFERENCES[0].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[0].file_size, - "ID": f"{TEST_DOCUMENT_REFERENCES[0].id}", - "LastUpdated": 1704110400, - "NhsNumber": f"{TEST_DOCUMENT_REFERENCES[0].nhs_number}", - "S3FileKey": f"{TEST_DOCUMENT_REFERENCES[0].s3_file_key}", - "Status": "current", - "Version": "1", - "Uploaded": True, - "Uploading": False, - "VirusScannerResult": "Clean", - }, - ), - call( - table_name=MOCK_LG_TABLE_NAME, - item={ - 
"ContentType": "application/pdf", - "Created": TEST_DOCUMENT_REFERENCES[1].created, - "CurrentGpOds": TEST_DOCUMENT_REFERENCES[1].current_gp_ods, - "Author": TEST_CURRENT_GP_ODS, - "DocStatus": "final", - "DocumentScanCreation": "2024-01-01", - "DocumentSnomedCodeType": "16521000000101", - "FileLocation": f"{TEST_DOCUMENT_REFERENCES[1].file_location}", - "FileName": f"{TEST_DOCUMENT_REFERENCES[1].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[1].file_size, - "ID": f"{TEST_DOCUMENT_REFERENCES[1].id}", - "LastUpdated": 1704110400, - "NhsNumber": f"{TEST_DOCUMENT_REFERENCES[1].nhs_number}", - "S3FileKey": f"{TEST_DOCUMENT_REFERENCES[1].s3_file_key}", - "Status": "current", - "Version": "1", - "Uploaded": True, - "Uploading": False, - "VirusScannerResult": "Clean", - }, - ), - call( - table_name=MOCK_LG_TABLE_NAME, - item={ - "ContentType": "application/pdf", - "Created": TEST_DOCUMENT_REFERENCES[2].created, - "CurrentGpOds": TEST_DOCUMENT_REFERENCES[2].current_gp_ods, - "Author": TEST_CURRENT_GP_ODS, - "DocStatus": "final", - "DocumentScanCreation": "2024-01-01", - "DocumentSnomedCodeType": "16521000000101", - "FileLocation": f"{TEST_DOCUMENT_REFERENCES[2].file_location}", - "FileName": f"{TEST_DOCUMENT_REFERENCES[2].file_name}", - "FileSize": TEST_DOCUMENT_REFERENCES[2].file_size, - "ID": f"{TEST_DOCUMENT_REFERENCES[2].id}", - "LastUpdated": 1704110400, - "NhsNumber": f"{TEST_DOCUMENT_REFERENCES[2].nhs_number}", - "S3FileKey": f"{TEST_DOCUMENT_REFERENCES[2].s3_file_key}", - "Status": "current", - "Version": "1", - "Uploaded": True, - "Uploading": False, - "VirusScannerResult": "Clean", - }, - ), - ], - ) - - mock_service.dynamo_service.delete_item.assert_has_calls( - [ - call( - table_name=MOCK_UNSTITCHED_LG_TABLE_NAME, - key={"ID": TEST_DOCUMENT_REFERENCES[0].id}, - ), - call( - table_name=MOCK_UNSTITCHED_LG_TABLE_NAME, - key={"ID": TEST_DOCUMENT_REFERENCES[1].id}, - ), - call( - table_name=MOCK_UNSTITCHED_LG_TABLE_NAME, - key={"ID": 
TEST_DOCUMENT_REFERENCES[2].id}, - ), - ], - ) + assert mock_service.dynamo_service.create_item.call_count == 3 + assert mock_service.dynamo_service.delete_item.call_count == 3 def test_rollback_reference_migration_handles_exception(mock_service): @@ -732,15 +618,13 @@ def test_update_stitched_reference_with_version_id(mock_service): mock_service.update_stitched_reference_with_version_id() mock_service.s3_service.get_head_object.assert_called_once_with( - MOCK_LG_BUCKET, - TEST_1_OF_1_DOCUMENT_REFERENCE.s3_file_key, + MOCK_LG_BUCKET, TEST_1_OF_1_DOCUMENT_REFERENCE.s3_file_key ) assert mock_service.stitched_reference.s3_version_id == test_version_id def test_process_manual_trigger_calls_process_message_for_each_nhs_number( - mocker, - mock_service, + mocker, mock_service ): test_ods_code = "A12345" test_nhs_numbers = ["1234567890", "9876543210"] @@ -751,7 +635,7 @@ def test_process_manual_trigger_calls_process_message_for_each_nhs_number( return_value=test_nhs_numbers, ) mock_send_message = mocker.patch( - "lambdas.services.pdf_stitching_service.SQSService.send_message_batch_standard", + "lambdas.services.pdf_stitching_service.SQSService.send_message_batch_standard" ) mock_service.process_manual_trigger(ods_code=test_ods_code, queue_url="url") @@ -761,8 +645,7 @@ def test_process_manual_trigger_calls_process_message_for_each_nhs_number( def test_calculate_created_date_returns_now_when_no_created_values( - mock_service, - patch_pdf_stitching_service_datetime, + mock_service, patch_pdf_stitching_service_datetime ): mock_service.multipart_references = copy.deepcopy(TEST_DOCUMENT_REFERENCES) for r in mock_service.multipart_references: @@ -770,13 +653,12 @@ def test_calculate_created_date_returns_now_when_no_created_values( actual = mock_service.calculate_created_date() assert actual == patch_pdf_stitching_service_datetime.strftime( - "%Y-%m-%dT%H:%M:%S.%fZ", + "%Y-%m-%dT%H:%M:%S.%fZ" ) def test_calculate_created_date_returns_now_when_all_created_invalid( - mock_service, 
- patch_pdf_stitching_service_datetime, + mock_service, patch_pdf_stitching_service_datetime ): mock_service.multipart_references = copy.deepcopy(TEST_DOCUMENT_REFERENCES) mock_service.multipart_references[0].created = "not-a-date" @@ -785,7 +667,7 @@ def test_calculate_created_date_returns_now_when_all_created_invalid( actual = mock_service.calculate_created_date() assert actual == patch_pdf_stitching_service_datetime.strftime( - "%Y-%m-%dT%H:%M:%S.%fZ", + "%Y-%m-%dT%H:%M:%S.%fZ" ) @@ -813,8 +695,7 @@ def test_retrieve_multipart_references_returns_empty_if_any_1of1_present_for_lg( ) actual = mock_service.retrieve_multipart_references( - nhs_number=TEST_NHS_NUMBER, - doc_type=SupportedDocumentTypes.LG, + nhs_number=TEST_NHS_NUMBER, doc_type=SupportedDocumentTypes.LG ) assert actual == [] @@ -875,8 +756,7 @@ def set_stitched_reference(document_reference, stitch_file_size, *args, **kwargs mock_create_stitched_reference.side_effect = set_stitched_reference mock_upload_stitched_file.side_effect = PdfStitchingException( - 400, - LambdaError.StitchError, + 400, LambdaError.StitchError ) with pytest.raises(PdfStitchingException): @@ -892,7 +772,7 @@ def test_process_manual_trigger_returns_early_if_no_nhs_numbers(mock_service, mo return_value=[], ) mock_send_batch = mocker.patch( - "lambdas.services.pdf_stitching_service.SQSService.send_message_batch_standard", + "lambdas.services.pdf_stitching_service.SQSService.send_message_batch_standard" ) mock_service.process_manual_trigger(ods_code="A12345", queue_url="url") @@ -901,9 +781,7 @@ def test_process_manual_trigger_returns_early_if_no_nhs_numbers(mock_service, mo def test_process_manual_trigger_logs_error_when_batch_has_failures( - mock_service, - mocker, - caplog, + mock_service, mocker, caplog ): test_nhs_numbers = ["1234567890"] mocker.patch.object( diff --git a/lambdas/tests/unit/services/test_pdm_get_fhir_document_reference_search_service.py 
b/lambdas/tests/unit/services/test_pdm_get_fhir_document_reference_search_service.py index b75c50f199..3ec59484fa 100644 --- a/lambdas/tests/unit/services/test_pdm_get_fhir_document_reference_search_service.py +++ b/lambdas/tests/unit/services/test_pdm_get_fhir_document_reference_search_service.py @@ -4,12 +4,12 @@ from freezegun import freeze_time from models.document_reference import DocumentReference from services.document_reference_search_service import DocumentReferenceSearchService -from tests.unit.conftest import APIM_API_URL, MOCK_LG_TABLE_NAME +from tests.unit.conftest import APIM_API_URL from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE from utils.lambda_header_utils import validate_common_name_in_mtls MOCK_DOCUMENT_REFERENCE = [ - DocumentReference.model_validate(MOCK_SEARCH_RESPONSE["Items"][0]), + DocumentReference.model_validate(MOCK_SEARCH_RESPONSE["Items"][0]) ] MOCK_FILE_SIZE = 24000 @@ -77,20 +77,16 @@ def mock_mtls_common_names(monkeypatch): }, }, }, - "dev_COREDocumentMetadata", + ["dev_COREDocumentMetadata"], ), - ({}, MOCK_LG_TABLE_NAME), + ({}, ["test_pdm_dynamoDB_table", "test_lg_dynamoDB_table"]), ], ) def test_get_pdm_table( - set_env, - mock_document_service, - common_name, - expected, - mock_mtls_common_names, + set_env, mock_document_service, common_name, expected, mock_mtls_common_names ): cn = validate_common_name_in_mtls(common_name) - tables = mock_document_service._get_table_name(cn) + tables = mock_document_service._get_table_names(cn) assert tables == expected @@ -105,13 +101,13 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): mock_document_reference.document_snomed_code_type = "717391000000106" mock_attachment = mocker.patch( - "services.document_reference_search_service.Attachment", + "services.document_reference_search_service.Attachment" ) mock_attachment_instance = mocker.MagicMock() mock_attachment.return_value = mock_attachment_instance mock_doc_ref_info = 
mocker.patch( - "services.document_reference_search_service.DocumentReferenceInfo", + "services.document_reference_search_service.DocumentReferenceInfo" ) mock_doc_ref_info_instance = mocker.MagicMock() mock_doc_ref_info.return_value = mock_doc_ref_info_instance @@ -130,7 +126,7 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "content": [ { @@ -140,28 +136,28 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): "title": "test_document.pdf", "creation": "2023-05-01", "url": f"{APIM_API_URL}/DocumentReference/123", - }, - }, + } + } ], "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y05868", - }, - }, + } + } ], "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y05868", - }, + } }, } mock_fhir_doc_ref.model_dump.return_value = expected_fhir_response result = mock_document_service.create_document_reference_fhir_response( - mock_document_reference, + mock_document_reference ) mock_attachment.assert_called_once_with( @@ -185,8 +181,7 @@ def test_create_document_reference_fhir_response(mock_document_service, mocker): @freeze_time("2023-05-01T12:00:00Z") def test_create_document_reference_fhir_response_integration( - mock_document_service, - mocker, + mock_document_service, mocker ): mock_document_reference = mocker.MagicMock() mock_document_reference.nhs_number = "9000000009" @@ -210,7 +205,7 @@ def test_create_document_reference_fhir_response_integration( "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "date": "2023-05-01T12:00:00", "content": [ @@ -221,22 +216,22 @@ def test_create_document_reference_fhir_response_integration( "title": "test_document.pdf", "creation": "2023-05-01", "url": f"{APIM_API_URL}/DocumentReference/717391000000106~Y05868-1634567890", - 
}, - }, + } + } ], "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y12345", - }, - }, + } + } ], "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "Y12345", - }, + } }, "type": { "coding": [ @@ -244,88 +239,14 @@ def test_create_document_reference_fhir_response_integration( "system": "http://snomed.info/sct", "code": "717391000000106", "display": "Confidential patient data", - }, - ], - }, - "meta": {"versionId": "1"}, - } - - result = mock_document_service.create_document_reference_fhir_response( - mock_document_reference, - ) - - assert isinstance(result, dict) - assert result == expected_fhir_response - - -@freeze_time("2023-05-01T12:00:00Z") -def test_create_document_reference_fhir_response_no_title( - mock_document_service, - mocker, -): - mock_document_reference = mocker.MagicMock() - mock_document_reference.nhs_number = "9000000009" - mock_document_reference.file_name = None - mock_document_reference.created = "2023-05-01T12:00:00" - mock_document_reference.document_scan_creation = "2023-05-01" - mock_document_reference.id = "Y05868-1634567890" - mock_document_reference.current_gp_ods = "Y12345" - mock_document_reference.author = "Y12345" - mock_document_reference.doc_status = "final" - mock_document_reference.custodian = "Y12345" - mock_document_reference.document_snomed_code_type = "717391000000106" - mock_document_reference.version = "1" - - expected_fhir_response = { - "id": "717391000000106~Y05868-1634567890", - "resourceType": "DocumentReference", - "status": "current", - "docStatus": "final", - "subject": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "9000000009", - }, - }, - "date": "2023-05-01T12:00:00", - "content": [ - { - "attachment": { - "contentType": "application/pdf", - "language": "en-GB", - "creation": "2023-05-01", - "url": f"{APIM_API_URL}/DocumentReference/717391000000106~Y05868-1634567890", - }, - }, - ], 
- "author": [ - { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "Y12345", - }, - }, - ], - "custodian": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "Y12345", - }, - }, - "type": { - "coding": [ - { - "system": "http://snomed.info/sct", - "code": "717391000000106", - "display": "Confidential patient data", - }, - ], + } + ] }, "meta": {"versionId": "1"}, } result = mock_document_service.create_document_reference_fhir_response( - mock_document_reference, + mock_document_reference ) assert isinstance(result, dict) diff --git a/lambdas/tests/unit/services/test_pdm_post_fhir_document_reference_service.py b/lambdas/tests/unit/services/test_pdm_post_fhir_document_reference_service.py index 427cecf029..ca143721c9 100644 --- a/lambdas/tests/unit/services/test_pdm_post_fhir_document_reference_service.py +++ b/lambdas/tests/unit/services/test_pdm_post_fhir_document_reference_service.py @@ -6,30 +6,22 @@ from enums.mtls import MtlsCommonNames from enums.snomed_codes import SnomedCode, SnomedCodes from models.fhir.R4.base_models import Identifier, Reference -from models.fhir.R4.fhir_document_reference import ( - Attachment, -) +from models.fhir.R4.fhir_document_reference import Attachment from models.fhir.R4.fhir_document_reference import ( DocumentReference as FhirDocumentReference, ) -from models.fhir.R4.fhir_document_reference import ( - DocumentReferenceContent, -) +from models.fhir.R4.fhir_document_reference import DocumentReferenceContent from services.fhir_document_reference_service_base import ( FhirDocumentReferenceServiceBase, ) from services.post_fhir_document_reference_service import ( PostFhirDocumentReferenceService, ) -from tests.unit.conftest import ( - APIM_API_URL, -) +from tests.unit.conftest import APIM_API_URL from tests.unit.conftest import ( EXPECTED_PARSED_PATIENT_BASE_CASE as mock_pds_patient_details, ) -from tests.unit.conftest import ( - TEST_UUID, -) +from 
tests.unit.conftest import TEST_UUID from utils.lambda_exceptions import DocumentRefException from utils.request_context import request_context @@ -53,13 +45,13 @@ def mock_post_fhir_doc_ref_service(set_env): @pytest.fixture def mock_fhir_doc_ref_base_service(mocker, setup_request_context): mock_document_service = mocker.patch( - "services.fhir_document_reference_service_base.DocumentService", + "services.fhir_document_reference_service_base.DocumentService" ) mock_s3_service = mocker.patch( - "services.fhir_document_reference_service_base.S3Service", + "services.fhir_document_reference_service_base.S3Service" ) mock_dynamo_service = mocker.patch( - "services.fhir_document_reference_service_base.DynamoDBService", + "services.fhir_document_reference_service_base.DynamoDBService" ) service = FhirDocumentReferenceServiceBase() service.document_service = mock_document_service.return_value @@ -78,8 +70,8 @@ def mock_mtls_common_names(monkeypatch): "PDM": [ "ndrclient.main.int.pdm.national.nhs.uk", "client.dev.ndr.national.nhs.uk", - ], - }, + ] + } ), ) @@ -119,7 +111,7 @@ def valid_fhir_doc_json(): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "type": { "coding": [ @@ -127,22 +119,22 @@ def valid_fhir_doc_json(): "system": "http://snomed.info/sct", "code": SnomedCodes.LLOYD_GEORGE.value.code, "display": SnomedCodes.LLOYD_GEORGE.value.display_name, - }, - ], + } + ] }, "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, - }, + } + } ], "content": [ { @@ -151,10 +143,10 @@ def valid_fhir_doc_json(): "language": "en-GB", "title": "test-file.pdf", "creation": "2023-01-01T12:00:00Z", - }, - }, + } + } ], - }, + } ) @@ -169,13 +161,13 @@ def valid_fhir_doc_json_only_required(): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", 
"value": "9000000009", - }, + } }, "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, + } }, "content": [ { @@ -184,10 +176,10 @@ def valid_fhir_doc_json_only_required(): "language": "en-GB", "title": "test-file.pdf", "creation": "2023-01-01T12:00:00Z", - }, - }, + } + } ], - }, + } ) @@ -233,7 +225,7 @@ def valid_mtls_fhir_doc_json(): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "type": { "coding": [ @@ -241,22 +233,22 @@ def valid_mtls_fhir_doc_json(): "system": "http://snomed.info/sct", "code": SnomedCodes.PATIENT_DATA.value.code, "display": SnomedCodes.PATIENT_DATA.value.display_name, - }, - ], + } + ] }, "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, - }, + } + } ], "content": [ { @@ -265,91 +257,10 @@ def valid_mtls_fhir_doc_json(): "language": "en-GB", "title": "test-file.pdf", "creation": "2023-01-01T12:00:00Z", - }, - }, + } + } ], - }, - ) - - -@pytest.fixture -def invalid_fhir_doc_json_missing_content(): - return json.dumps( - { - "resourceType": "DocumentReference", - "docStatus": "final", - "status": "current", - "subject": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "9000000009", - }, - }, - "type": { - "coding": [ - { - "system": "http://snomed.info/sct", - "code": SnomedCodes.PATIENT_DATA.value.code, - "display": SnomedCodes.PATIENT_DATA.value.display_name, - }, - ], - }, - "custodian": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "A12345", - }, - }, - "author": [ - { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "A12345", - }, - }, - ], - }, - ) - - -@pytest.fixture -def invalid_fhir_doc_json_missing_content_attachment(): - return 
json.dumps( - { - "resourceType": "DocumentReference", - "docStatus": "final", - "status": "current", - "subject": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "9000000009", - }, - }, - "type": { - "coding": [ - { - "system": "http://snomed.info/sct", - "code": SnomedCodes.PATIENT_DATA.value.code, - "display": SnomedCodes.PATIENT_DATA.value.display_name, - }, - ], - }, - "custodian": { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "A12345", - }, - }, - "author": [ - { - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "A12345", - }, - }, - ], - "content": [{}], - }, + } ) @@ -387,22 +298,20 @@ def valid_fhir_doc_object_without_optional(valid_fhir_doc_json_only_required): def test_get_dynamo_table_for_patient_data_doc_type( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test _get_dynamo_table_for_doc_type method with a non-Lloyd George document type.""" patient_data_code = SnomedCodes.PATIENT_DATA.value result = mock_post_fhir_doc_ref_service._get_dynamo_table_for_doc_type( - patient_data_code, + patient_data_code ) assert result == str(DynamoTables.CORE) def test_get_dynamo_table_for_unsupported_doc_type( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test _get_dynamo_table_for_doc_type method with a non-Lloyd George document type.""" @@ -416,8 +325,7 @@ def test_get_dynamo_table_for_unsupported_doc_type( def test_get_dynamo_table_for_lloyd_george_doc_type( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test _get_dynamo_table_for_doc_type method with Lloyd George document type.""" lg_code = SnomedCodes.LLOYD_GEORGE.value @@ -438,8 +346,7 @@ def 
test_process_mtls_fhir_document_reference_with_binary( custom_endpoint = f"{APIM_API_URL}/DocumentReference" result = mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_mtls_fhir_doc_with_binary, - valid_mtls_request_context, + valid_mtls_fhir_doc_with_binary, valid_mtls_request_context ) assert isinstance(result, str) @@ -450,25 +357,20 @@ def test_process_mtls_fhir_document_reference_with_binary( def test_determine_document_type_with_correct_common_name( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _determine_document_type method when type is missing entirely.""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) fhir_doc.type = None result = mock_post_fhir_doc_ref_service._determine_document_type( - fhir_doc, - MtlsCommonNames.PDM, + fhir_doc, MtlsCommonNames.PDM ) assert result == SnomedCodes.PATIENT_DATA.value def test_s3_file_key_for_pdm( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method without custodian information.""" @@ -479,16 +381,15 @@ def test_s3_file_key_for_pdm( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] fhir_doc.custodian = None @@ -513,9 +414,7 @@ def test_s3_file_key_for_pdm( def test_create_pdm_document_reference_with_raw_request( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method with raw_request included (pdm).""" @@ -526,22 +425,20 @@ 
def test_create_pdm_document_reference_with_raw_request( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.custodian = Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="A12345", - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="A12345" + ) ) fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] doc_type = SnomedCodes.PATIENT_DATA.value @@ -564,9 +461,7 @@ def test_create_pdm_document_reference_with_raw_request( def test_create_lg_document_reference_with_raw_request( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method with raw_request included (LG, should be empty).""" @@ -577,22 +472,20 @@ def test_create_lg_document_reference_with_raw_request( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.custodian = Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="A12345", - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="A12345" + ) ) fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] doc_type = SnomedCodes.LLOYD_GEORGE.value @@ -615,11 +508,9 @@ def test_create_lg_document_reference_with_raw_request( def test_create_pdm_document_reference_without_author_or_type( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_post_fhir_doc_ref_service, mocker ): - """Test _create_document_reference method without 
author or type.""" + """Test _create_document_reference method with raw_request included (LG, should be empty).""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) fhir_doc.content = [ @@ -628,14 +519,13 @@ def test_create_pdm_document_reference_without_author_or_type( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.custodian = Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="A12345", - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="A12345" + ) ) fhir_doc.author = [] fhir_doc.type = [] @@ -656,88 +546,3 @@ def test_create_pdm_document_reference_without_author_or_type( assert result.custodian == "A12345" assert result.current_gp_ods == "C13579" assert result.author is None - - -def test_create_pdm_document_reference_without_title( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, -): - """Test _create_document_reference method without title""" - - fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) - fhir_doc.content = [ - DocumentReferenceContent( - attachment=Attachment( - contentType="application/pdf", - creation="2023-01-01T12:00:00Z", - ), - ), - ] - fhir_doc.custodian = Reference( - identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="A12345", - ), - ) - fhir_doc.author = [ - Reference( - identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), - ] - - doc_type = SnomedCodes.PATIENT_DATA.value - result = mock_post_fhir_doc_ref_service._create_document_reference( - nhs_number="9000000009", - author="B67890", - doc_type=doc_type, - fhir_doc=fhir_doc, - current_gp_ods="C13579", - raw_fhir_doc=json.dumps({"foo": "bar"}), - ) - - assert result.raw_request == json.dumps({"foo": "bar"}) - assert result.nhs_number == "9000000009" - assert result.document_snomed_code_type == 
SnomedCodes.PATIENT_DATA.value.code - assert result.custodian == "A12345" - assert result.current_gp_ods == "C13579" - assert result.file_name is None - - -def test_process_fhir_document_reference_without_content_raises_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mock_mtls_common_names, - invalid_fhir_doc_json_missing_content, - valid_mtls_request_context, -): - with pytest.raises(DocumentRefException) as excinfo: - mock_post_fhir_doc_ref_service.process_fhir_document_reference( - invalid_fhir_doc_json_missing_content, - valid_mtls_request_context, - ) - - assert excinfo.value.status_code == 400 - assert excinfo.value.error == LambdaError.DocRefNoParse - assert excinfo.value.message == "Failed to parse document upload request data" - - -def test_process_fhir_document_reference_without_content_attachment_raises_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mock_mtls_common_names, - invalid_fhir_doc_json_missing_content_attachment, - valid_mtls_request_context, -): - with pytest.raises(DocumentRefException) as excinfo: - mock_post_fhir_doc_ref_service.process_fhir_document_reference( - invalid_fhir_doc_json_missing_content_attachment, - valid_mtls_request_context, - ) - - assert excinfo.value.status_code == 400 - assert excinfo.value.error == LambdaError.DocRefNoParse - assert excinfo.value.message == "Failed to parse document upload request data" diff --git a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py index 908cb7218e..c0b3d14383 100644 --- a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py @@ -34,7 +34,7 @@ def mock_document_reference(): doc_ref.doc_status = "uploading" doc_ref.version = "1" doc_ref._build_s3_location = Mock( - return_value="s3://test-lg-bucket/9000000001/test-doc-id", + 
return_value="s3://test-lg-bucket/9000000001/test-doc-id" ) return doc_ref @@ -44,7 +44,7 @@ def mock_virus_scan_service( mocker, ): mock = mocker.patch( - "services.upload_document_reference_service.get_virus_scan_service", + "services.upload_document_reference_service.get_virus_scan_service" ) yield mock @@ -79,7 +79,7 @@ def mock_pdm_document_reference(): doc_ref.file_size = 1234567890 doc_ref.doc_status = "uploading" doc_ref._build_s3_location = Mock( - return_value=f"s3://test-staging-bucket/fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id", + return_value=f"s3://test-staging-bucket/fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id" ) return doc_ref @@ -118,9 +118,7 @@ def test_handle_upload_document_reference_request_with_none_object_key(pdm_servi def test_handle_upload_document_reference_request_success( - service, - mock_pdm_document_reference, - mocker, + service, mock_pdm_document_reference, mocker ): """Test successful handling of the upload document reference request""" object_key = ( @@ -132,7 +130,7 @@ def test_handle_upload_document_reference_request_success( [mock_pdm_document_reference], ] service.virus_scan_service.scan_file = mocker.MagicMock( - return_value=VirusScanResult.CLEAN, + return_value=VirusScanResult.CLEAN ) service.handle_upload_document_reference_request(object_key, object_size) @@ -148,26 +146,24 @@ def test_handle_upload_document_reference_request_with_exception(pdm_service): object_key = "staging/test-doc-id" pdm_service.document_service.fetch_documents_from_table.side_effect = Exception( - "Test error", + "Test error" ) pdm_service.handle_upload_document_reference_request(object_key) def test_fetch_preliminary_document_reference_success( - pdm_service, - mock_pdm_document_reference, + pdm_service, mock_pdm_document_reference ): """Test successful document reference fetching""" document_key = "test-doc-id" nhs_number = "12345" 
pdm_service.document_service.fetch_documents_from_table.return_value = [ - mock_pdm_document_reference, + mock_pdm_document_reference ] result = pdm_service._fetch_preliminary_document_reference( - document_key=document_key, - nhs_number=nhs_number, + document_key=document_key, nhs_number=nhs_number ) assert result == mock_pdm_document_reference @@ -186,8 +182,7 @@ def test_fetch_preliminary_document_reference_no_documents_found(pdm_service): pdm_service.document_service.fetch_documents_from_table.return_value = [] result = pdm_service._fetch_preliminary_document_reference( - document_key=document_key, - nhs_number=nhs_number, + document_key=document_key, nhs_number=nhs_number ) assert result is None @@ -199,14 +194,12 @@ def test_fetch_preliminary_document_reference_no_nhs_number(pdm_service): nhs_number = None with pytest.raises(FileProcessingException): pdm_service._fetch_preliminary_document_reference( - document_key=document_key, - nhs_number=nhs_number, + document_key=document_key, nhs_number=nhs_number ) def test_fetch_preliminary_document_reference_multiple_documents_warning( - pdm_service, - mock_document_reference, + pdm_service, mock_document_reference ): """Test handling when multiple documents are found""" document_key = "test-doc-id" @@ -218,8 +211,7 @@ def test_fetch_preliminary_document_reference_multiple_documents_warning( ] result = pdm_service._fetch_preliminary_document_reference( - document_key=document_key, - nhs_number=nhs_number, + document_key=document_key, nhs_number=nhs_number ) assert result == mock_document_reference @@ -235,35 +227,27 @@ def test_fetch_preliminary_document_reference_exception(pdm_service): with pytest.raises(DocumentServiceException): pdm_service._fetch_preliminary_document_reference( - document_key=document_key, - nhs_number=nhs_number, + document_key=document_key, nhs_number=nhs_number ) def test__process_preliminary_document_reference_clean_virus_scan( - pdm_service, - mock_pdm_document_reference, - mocker, + 
pdm_service, mock_pdm_document_reference, mocker ): """Test processing document reference with a clean virus scan""" object_key = "12345/test-doc-id" mocker.patch.object( - pdm_service, - "_perform_virus_scan", - return_value=VirusScanResult.CLEAN, + pdm_service, "_perform_virus_scan", return_value=VirusScanResult.CLEAN ) mock_process_clean = mocker.patch.object(pdm_service, "_process_clean_document") mock_finalize_transaction = mocker.patch.object( - pdm_service, - "_finalize_and_supersede_with_transaction", + pdm_service, "_finalize_and_supersede_with_transaction" ) mock_delete = mocker.patch.object(pdm_service, "delete_file_from_staging_bucket") pdm_service._process_preliminary_document_reference( - mock_pdm_document_reference, - object_key, - 1222, + mock_pdm_document_reference, object_key, 1222 ) mock_process_clean.assert_called_once() @@ -273,28 +257,21 @@ def test__process_preliminary_document_reference_clean_virus_scan( assert mock_pdm_document_reference.uploading is False mock_delete.assert_called_once() - def test__process_preliminary_document_reference_infected_virus_scan( - pdm_service, - mock_document_reference, - mocker, + pdm_service, mock_document_reference, mocker ): """Test processing document reference with an infected virus scan""" object_key = "staging/test-doc-id" mocker.patch.object( - pdm_service, - "_perform_virus_scan", - return_value=VirusScanResult.INFECTED, + pdm_service, "_perform_virus_scan", return_value=VirusScanResult.INFECTED ) mock_delete = mocker.patch.object(pdm_service, "delete_file_from_staging_bucket") mock_process_clean = mocker.patch.object(pdm_service, "_process_clean_document") mock_update_dynamo = mocker.patch.object(pdm_service, "_update_dynamo_table") pdm_service._process_preliminary_document_reference( - mock_document_reference, - object_key, - 1222, + mock_document_reference, object_key, 1222 ) mock_process_clean.assert_not_called() @@ -303,8 +280,7 @@ def 
test__process_preliminary_document_reference_infected_virus_scan( def test_perform_virus_scan_returns_clean_hardcoded( - pdm_service, - mock_document_reference, + pdm_service, mock_document_reference ): """Test virus scan returns hardcoded CLEAN result""" object_key = "staging/test-doc-id" @@ -313,9 +289,7 @@ def test_perform_virus_scan_returns_clean_hardcoded( def test_perform_virus_scan_exception_returns_infected( - pdm_service, - mock_document_reference, - mocker, + pdm_service, mock_document_reference, mocker ): """Test virus scan exception handling returns INFECTED for safety""" mock_virus_service = mocker.patch.object(pdm_service, "virus_scan_service") @@ -342,9 +316,7 @@ def test_process_clean_document_success(pdm_service, mock_document_reference, mo def test_process_clean_document_exception_restores_original_values( - pdm_service, - mock_document_reference, - mocker, + pdm_service, mock_document_reference, mocker ): """Test that original values are restored when processing fails""" object_key = "staging/test-doc-id" @@ -370,8 +342,7 @@ def test_process_clean_document_exception_restores_original_values( def test_copy_files_from_staging_bucket_success( - pdm_service, - mock_pdm_document_reference, + pdm_service, mock_pdm_document_reference ): """Test successful file copying from staging bucket""" source_file_key = ( @@ -380,8 +351,7 @@ def test_copy_files_from_staging_bucket_success( expected_dest_key = "9000000001/test-doc-id" pdm_service.copy_files_from_staging_bucket( - mock_pdm_document_reference, - source_file_key, + mock_pdm_document_reference, source_file_key ) pdm_service.s3_service.copy_across_bucket.assert_called_once_with( @@ -396,14 +366,13 @@ def test_copy_files_from_staging_bucket_success( def test_copy_files_from_staging_bucket_client_error( - pdm_service, - mock_document_reference, + pdm_service, mock_document_reference ): """Test handling of ClientError during file copying""" source_file_key = "staging/test-doc-id" client_error = ClientError( 
error_response={ - "Error": {"Code": "NoSuchBucket", "Message": "Bucket does not exist"}, + "Error": {"Code": "NoSuchBucket", "Message": "Bucket does not exist"} }, operation_name="CopyObject", ) @@ -411,8 +380,7 @@ def test_copy_files_from_staging_bucket_client_error( with pytest.raises(FileProcessingException): pdm_service.copy_files_from_staging_bucket( - mock_document_reference, - source_file_key, + mock_document_reference, source_file_key ) @@ -423,8 +391,7 @@ def test_delete_file_from_staging_bucket_success(pdm_service): pdm_service.delete_file_from_staging_bucket(source_file_key) pdm_service.s3_service.delete_object.assert_called_once_with( - MOCK_STAGING_STORE_BUCKET, - source_file_key, + MOCK_STAGING_STORE_BUCKET, source_file_key ) @@ -437,8 +404,7 @@ def test_delete_pdm_file_from_staging_bucket_success(pdm_service): pdm_service.delete_file_from_staging_bucket(source_file_key) pdm_service.s3_service.delete_object.assert_called_once_with( - MOCK_STAGING_STORE_BUCKET, - source_file_key, + MOCK_STAGING_STORE_BUCKET, source_file_key ) @@ -447,7 +413,7 @@ def test_delete_file_from_staging_bucket_client_error(pdm_service): source_file_key = "staging/test-doc-id" client_error = ClientError( error_response={ - "Error": {"Code": "NoSuchKey", "Message": "Key does not exist"}, + "Error": {"Code": "NoSuchKey", "Message": "Key does not exist"} }, operation_name="DeleteObject", ) @@ -461,8 +427,7 @@ def test_delete_file_from_staging_bucket_client_error(pdm_service): def test_update_dynamo_table_clean_scan_result( - pdm_service, - mock_pdm_document_reference, + pdm_service, mock_pdm_document_reference ): """Test updating DynamoDB table with a clean scan result""" pdm_service._update_dynamo_table(mock_pdm_document_reference) @@ -494,10 +459,7 @@ def test_update_dynamo_table_client_error(pdm_service, mock_document_reference): """Test handling of ClientError during DynamoDB update""" client_error = ClientError( error_response={ - "Error": { - "Code": 
"ResourceNotFoundException", - "Message": "Table not found", - }, + "Error": {"Code": "ResourceNotFoundException", "Message": "Table not found"} }, operation_name="UpdateItem", ) @@ -523,17 +485,13 @@ def test_handle_upload_document_reference_request_no_document_found(pdm_service) def test_process_preliminary_document_reference_exception_during_processing( - pdm_service, - mock_document_reference, - mocker, + pdm_service, mock_document_reference, mocker ): """Test that exceptions during processing are properly raised""" object_key = "staging/test-doc-id" mocker.patch.object( - pdm_service, - "_perform_virus_scan", - return_value=VirusScanResult.CLEAN, + pdm_service, "_perform_virus_scan", return_value=VirusScanResult.CLEAN ) mocker.patch.object( pdm_service, @@ -543,9 +501,7 @@ def test_process_preliminary_document_reference_exception_during_processing( with pytest.raises(Exception) as exc_info: pdm_service._process_preliminary_document_reference( - mock_document_reference, - object_key, - 1222, + mock_document_reference, object_key, 1222 ) assert "Processing failed" in str(exc_info.value) @@ -555,7 +511,7 @@ def test_get_infrastructure_for_document_key_pdm(service): assert service.table_name == "" assert service.destination_bucket_name == MOCK_LG_BUCKET service._get_infrastructure_for_document_key( - object_parts=["fhir_upload", SnomedCodes.PATIENT_DATA.value.code, "1234"], + object_parts=["fhir_upload", SnomedCodes.PATIENT_DATA.value.code, "1234"] ) assert service.table_name == str(DynamoTables.CORE) assert service.destination_bucket_name == MOCK_PDM_BUCKET @@ -651,19 +607,17 @@ def test_document_type_extraction_from_object_key( def test_handle_upload_pdm_document_reference_request_success( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test successful handling of the upload document reference request""" pdm_snomed = SnomedCodes.PATIENT_DATA.value object_key = f"fhir_upload/{pdm_snomed.code}/staging/test-doc-id" 
object_size = 1111 service.document_service.fetch_documents_from_table.return_value = [ - mock_document_reference, + mock_document_reference ] service.virus_scan_service.scan_file = mocker.MagicMock( - return_value=VirusScanResult.CLEAN, + return_value=VirusScanResult.CLEAN ) service.handle_upload_document_reference_request(object_key, object_size) @@ -676,8 +630,7 @@ def test_handle_upload_pdm_document_reference_request_success( def test_copy_files_from_staging_bucket_to_pdm_success( - pdm_service, - mock_pdm_document_reference, + pdm_service, mock_pdm_document_reference ): """Test successful file copying from staging bucket""" source_file_key = ( @@ -687,8 +640,7 @@ def test_copy_files_from_staging_bucket_to_pdm_success( f"{mock_pdm_document_reference.nhs_number}/{mock_pdm_document_reference.id}" ) pdm_service.copy_files_from_staging_bucket( - mock_pdm_document_reference, - source_file_key, + mock_pdm_document_reference, source_file_key ) pdm_service.s3_service.copy_across_bucket.assert_called_once_with( source_bucket=MOCK_STAGING_STORE_BUCKET, diff --git a/lambdas/tests/unit/services/test_post_fhir_document_reference_service.py b/lambdas/tests/unit/services/test_post_fhir_document_reference_service.py index 397f8467a4..4db5aced91 100644 --- a/lambdas/tests/unit/services/test_post_fhir_document_reference_service.py +++ b/lambdas/tests/unit/services/test_post_fhir_document_reference_service.py @@ -6,32 +6,22 @@ from enums.snomed_codes import SnomedCode, SnomedCodes from models.document_reference import DocumentReference from models.fhir.R4.base_models import CodeableConcept, Identifier, Reference -from models.fhir.R4.fhir_document_reference import ( - SNOMED_URL, - Attachment, -) +from models.fhir.R4.fhir_document_reference import SNOMED_URL, Attachment from models.fhir.R4.fhir_document_reference import ( DocumentReference as FhirDocumentReference, ) -from models.fhir.R4.fhir_document_reference import ( - DocumentReferenceContent, -) +from 
models.fhir.R4.fhir_document_reference import DocumentReferenceContent from services.fhir_document_reference_service_base import ( FhirDocumentReferenceServiceBase, ) from services.post_fhir_document_reference_service import ( PostFhirDocumentReferenceService, ) -from tests.unit.conftest import ( - APIM_API_URL, -) +from tests.unit.conftest import APIM_API_URL from tests.unit.conftest import ( EXPECTED_PARSED_PATIENT_BASE_CASE as mock_pds_patient_details, ) -from tests.unit.conftest import ( - MOCK_LG_TABLE_NAME, - TEST_UUID, -) +from tests.unit.conftest import MOCK_LG_TABLE_NAME, TEST_UUID from utils.exceptions import FhirDocumentReferenceException from utils.lambda_exceptions import DocumentRefException from utils.request_context import request_context @@ -46,16 +36,16 @@ def mock_post_fhir_doc_ref_service(set_env): @pytest.fixture def mock_fhir_doc_ref_base_service(mocker, setup_request_context): mock_document_service = mocker.patch( - "services.fhir_document_reference_service_base.DocumentService", + "services.fhir_document_reference_service_base.DocumentService" ) mock_s3_service = mocker.patch( - "services.fhir_document_reference_service_base.S3Service", + "services.fhir_document_reference_service_base.S3Service" ) mock_dynamo_service = mocker.patch( - "services.fhir_document_reference_service_base.DynamoDBService", + "services.fhir_document_reference_service_base.DynamoDBService" ) mock_doc_type_table_router = mocker.patch( - "services.fhir_document_reference_service_base.DocTypeTableRouter", + "services.fhir_document_reference_service_base.DocTypeTableRouter" ) service = FhirDocumentReferenceServiceBase() service.document_service = mock_document_service.return_value @@ -110,7 +100,7 @@ def valid_fhir_doc_json(): "identifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", - }, + } }, "type": { "coding": [ @@ -118,22 +108,22 @@ def valid_fhir_doc_json(): "system": "http://snomed.info/sct", "code": SnomedCodes.LLOYD_GEORGE.value.code, 
"display": SnomedCodes.LLOYD_GEORGE.value.display_name, - }, - ], + } + ] }, "custodian": { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, + } }, "author": [ { "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "A12345", - }, - }, + } + } ], "content": [ { @@ -142,10 +132,10 @@ def valid_fhir_doc_json(): "language": "en-GB", "title": "test-file.pdf", "creation": "2023-01-01T12:00:00Z", - }, - }, + } + } ], - }, + } ) @@ -175,7 +165,7 @@ def test_process_fhir_document_reference_with_presigned_url( mock_handle_document_save.return_value = mock_presigned_url_response result = mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_json, + valid_fhir_doc_json ) expected_pre_sign_url = mock_presigned_url_response @@ -197,7 +187,7 @@ def test_process_fhir_document_reference_with_binary( custom_endpoint = f"{APIM_API_URL}/DocumentReference" mock_handle_document_save.return_value = None result = mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_with_binary, + valid_fhir_doc_with_binary ) assert isinstance(result, str) @@ -208,8 +198,7 @@ def test_process_fhir_document_reference_with_binary( def test_validation_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test handling of an invalid FHIR document.""" with pytest.raises(DocumentRefException) as excinfo: @@ -233,7 +222,6 @@ def test_validation_error( ], ) def test_doc_ref_no_parse_message_includes_details_format( - mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, valid_fhir_doc_json, nhs_number, @@ -246,7 +234,7 @@ def test_doc_ref_no_parse_message_includes_details_format( with pytest.raises(DocumentRefException) as error: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - invalid_nhs_doc_json, + invalid_nhs_doc_json ) assert error.value.status_code == 400 @@ 
-280,8 +268,8 @@ def test_doc_ref_no_parse_message_includes_details_format( "system": "http://snomed.info/sct", "code": "invalid-code", "display": "Invalid", - }, - ], + } + ] }, }, LambdaError.DocRefInvalidType, @@ -309,9 +297,7 @@ def test_document_validation_errors( def test_raise_dynamo_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - valid_fhir_doc_json, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, valid_fhir_doc_json ): """Test handling of DynamoDB error.""" mock_fhir_doc_ref_base_service.dynamo_service.create_item.side_effect = ClientError( @@ -321,7 +307,7 @@ def test_raise_dynamo_error( with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_json, + valid_fhir_doc_json ) assert excinfo.value.status_code == 500 @@ -355,9 +341,7 @@ def test_save_document_reference_to_dynamo_error( with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service._handle_document_save( - document_ref, - valid_fhir_doc_object, - "test_table", + document_ref, valid_fhir_doc_object, "test_table" ) assert excinfo.value.status_code == 500 @@ -375,12 +359,12 @@ def test_process_fhir_document_reference_with_pds_error( """Test process_fhir_document_reference with a real PDS error (PatientNotFoundException).""" mock_check_nhs_number_with_pds.side_effect = FhirDocumentReferenceException( - "Patient not found", + "Patient not found" ) with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_json, + valid_fhir_doc_json ) assert excinfo.value.status_code == 400 @@ -388,9 +372,7 @@ def test_process_fhir_document_reference_with_pds_error( def test_s3_presigned_url_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - valid_fhir_doc_json, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, valid_fhir_doc_json ): """Test handling of S3 
presigned URL error.""" mock_fhir_doc_ref_base_service.s3_service.create_put_presigned_url.side_effect = ( @@ -402,7 +384,7 @@ def test_s3_presigned_url_error( with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_json, + valid_fhir_doc_json ) assert excinfo.value.status_code == 500 @@ -416,13 +398,12 @@ def test_s3_upload_error( ): """Test handling of S3 upload error.""" mock_fhir_doc_ref_base_service.s3_service.upload_file_obj.side_effect = ClientError( - {"Error": {"Code": "InternalServerError", "Message": "Test error"}}, - "PutObject", + {"Error": {"Code": "InternalServerError", "Message": "Test error"}}, "PutObject" ) with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_with_binary, + valid_fhir_doc_with_binary ) assert excinfo.value.status_code == 500 @@ -438,12 +419,12 @@ def test_extract_nhs_number_from_fhir_with_invalid_system( """Test _extract_nhs_number_from_fhir method with an invalid NHS number system.""" valid_fhir_doc_object.subject = Reference( - identifier=Identifier(system="invalid-system", value="9000000009"), + identifier=Identifier(system="invalid-system", value="9000000009") ) with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_object.model_dump_json(), + valid_fhir_doc_object.model_dump_json() ) assert excinfo.value.status_code == 400 @@ -461,7 +442,7 @@ def test_extract_nhs_number_from_fhir_with_missing_identifier( with pytest.raises(DocumentRefException) as excinfo: mock_post_fhir_doc_ref_service.process_fhir_document_reference( - valid_fhir_doc_object.model_dump_json(), + valid_fhir_doc_object.model_dump_json() ) assert excinfo.value.status_code == 400 @@ -469,9 +450,7 @@ def test_extract_nhs_number_from_fhir_with_missing_identifier( def test_create_document_reference_with_author( - 
mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method with author information included.""" @@ -482,22 +461,20 @@ def test_create_document_reference_with_author( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.custodian = Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="A12345", - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="A12345" + ) ) fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] doc_type = SnomedCode(code="test-code", display_name="Test Type") @@ -519,9 +496,7 @@ def test_create_document_reference_with_author( def test_create_document_reference_without_custodian( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method without custodian information.""" @@ -532,16 +507,15 @@ def test_create_document_reference_without_custodian( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] fhir_doc.custodian = None @@ -569,8 +543,8 @@ def test_create_document_reference_without_custodian( identifier=Identifier( system="https://fhir.nhs.uk/Id/ods-organization-code", value="A12345", - ), - ), + ) + ) ], "A12345", ), @@ -605,19 +579,16 @@ def test_extract_author_from_fhir( [ Reference( identifier=Identifier( - 
system="https://fhir.nhs.uk/Id/ods-organization-code", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code" + ) + ) ] ), ([Reference(identifier=None)]), ], ) def test_extract_author_from_fhir_raises_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, - fhir_author, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker, fhir_author ): """Test _extract_author_from_fhir method with malformed json returns Validation errors.""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) @@ -630,9 +601,7 @@ def test_extract_author_from_fhir_raises_error( def test_determine_document_type_with_missing_type( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _determine_document_type method when type is missing entirely.""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) @@ -646,9 +615,7 @@ def test_determine_document_type_with_missing_type( def test_determine_document_type_with_unknown_config( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _determine_document_type method when type is missing entirely.""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) @@ -657,7 +624,7 @@ def test_determine_document_type_with_unknown_config( "system": SNOMED_URL, "code": "1234567890", "display": "unknown code", - }, + } ] fhir_doc.type = CodeableConcept(coding=mock_coding) @@ -669,9 +636,7 @@ def test_determine_document_type_with_unknown_config( def test_determine_document_type_with_missing_coding( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _determine_document_type method when coding is missing.""" fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) @@ -686,8 +651,7 @@ def 
test_determine_document_type_with_missing_coding( def test_process_fhir_document_reference_with_malformed_json( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test process_fhir_document_reference with malformed JSON.""" malformed_json = '{"resourceType": "DocumentReference", "invalid": }' @@ -700,8 +664,7 @@ def test_process_fhir_document_reference_with_malformed_json( def test_process_fhir_document_reference_with_empty_string( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test process_fhir_document_reference with an empty string.""" with pytest.raises(DocumentRefException) as excinfo: @@ -712,8 +675,7 @@ def test_process_fhir_document_reference_with_empty_string( def test_process_fhir_document_reference_with_none( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service ): """Test process_fhir_document_reference with None input.""" with pytest.raises(DocumentRefException) as excinfo: @@ -723,28 +685,8 @@ def test_process_fhir_document_reference_with_none( assert excinfo.value.error == LambdaError.DocRefNoParse -def test_create_lg_document_reference_without_content_raises_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, -): - """Test _create_document_reference method without content attachment raises error.""" - - fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) - fhir_doc.content = None - - with pytest.raises(DocumentRefException) as excinfo: - mock_post_fhir_doc_ref_service.process_fhir_document_reference(fhir_doc) - - assert excinfo.value.status_code == 400 - assert excinfo.value.error == LambdaError.DocRefNoParse - assert excinfo.value.message == "Failed to parse document upload request data" - - def test_s3_file_key_for_lg( - mock_fhir_doc_ref_base_service, - 
mock_post_fhir_doc_ref_service, - mocker, + mock_fhir_doc_ref_base_service, mock_post_fhir_doc_ref_service, mocker ): """Test _create_document_reference method without custodian information.""" @@ -755,16 +697,15 @@ def test_s3_file_key_for_lg( contentType="application/pdf", title="test-file.pdf", creation="2023-01-01T12:00:00Z", - ), - ), + ) + ) ] fhir_doc.author = [ Reference( identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), + system="https://fhir.nhs.uk/Id/ods-organization-code", value="B67890" + ) + ) ] fhir_doc.custodian = None @@ -783,47 +724,3 @@ def test_s3_file_key_for_lg( assert "user_upload/9000000009" in result.s3_upload_key assert f"9000000009/{result.id}" in result.s3_file_key assert result.sub_folder == "user_upload" - - -def test_create_lg_document_reference_without_title_raises_error( - mock_fhir_doc_ref_base_service, - mock_post_fhir_doc_ref_service, - mocker, -): - """Test _create_document_reference method without title information raises error.""" - - fhir_doc = mocker.MagicMock(spec=FhirDocumentReference) - fhir_doc.content = [ - DocumentReferenceContent( - attachment=Attachment( - contentType="application/pdf", - creation="2023-01-01T12:00:00Z", - ), - ), - ] - fhir_doc.author = [ - Reference( - identifier=Identifier( - system="https://fhir.nhs.uk/Id/ods-organization-code", - value="B67890", - ), - ), - ] - fhir_doc.custodian = None - - doc_type = SnomedCodes.LLOYD_GEORGE.value - current_gp_ods = "C13579" - - with pytest.raises(DocumentRefException) as excinfo: - mock_post_fhir_doc_ref_service._create_document_reference( - nhs_number="9000000009", - author="B67890", - doc_type=doc_type, - fhir_doc=fhir_doc, - current_gp_ods=current_gp_ods, - raw_fhir_doc=json.dumps({"foo": "bar"}), - ) - - assert excinfo.value.status_code == 400 - assert excinfo.value.error == LambdaError.DocRefNoParse - assert excinfo.value.message == "Failed to parse document upload request data" diff --git 
a/lambdas/tests/unit/services/test_update_document_reference_service.py b/lambdas/tests/unit/services/test_update_document_reference_service.py index 05eedf133e..1a09166a45 100644 --- a/lambdas/tests/unit/services/test_update_document_reference_service.py +++ b/lambdas/tests/unit/services/test_update_document_reference_service.py @@ -28,15 +28,15 @@ def mock_update_doc_ref_service(mocker): @pytest.fixture -def mock_fhir_doc_ref_base_service(mocker, setup_request_context, set_env): +def mock_fhir_doc_ref_base_service(mocker, setup_request_context): mock_document_service = mocker.patch( - "services.fhir_document_reference_service_base.DocumentService", + "services.fhir_document_reference_service_base.DocumentService" ) mock_s3_service = mocker.patch( - "services.fhir_document_reference_service_base.S3Service", + "services.fhir_document_reference_service_base.S3Service" ) mock_dynamo_service = mocker.patch( - "services.fhir_document_reference_service_base.DynamoDBService", + "services.fhir_document_reference_service_base.DynamoDBService" ) service = FhirDocumentReferenceServiceBase() service.document_service = mock_document_service.return_value @@ -66,11 +66,10 @@ def mock_stop_if_upload_is_in_progress(mock_update_doc_ref_service, mocker): @pytest.fixture() def mock_validate_files_for_access_and_store( - mocker, - mock_getting_patient_info_from_pds, + mocker, mock_getting_patient_info_from_pds ): yield mocker.patch( - "services.update_document_reference_service.validate_files_for_access_and_store", + "services.update_document_reference_service.validate_files_for_access_and_store" ) @@ -93,29 +92,26 @@ def mock_process_fhir_document_reference(mocker): return_value=json.dumps( { "content": [ - {"attachment": {"url": "https://test-bucket.s3.amazonaws.com/"}}, - ], - }, + {"attachment": {"url": "https://test-bucket.s3.amazonaws.com/"}} + ] + } ), ) @pytest.fixture def mock_get_allowed_list_of_ods_codes_for_upload_pilot( - mock_update_doc_ref_service, - mocker, + 
mock_update_doc_ref_service, mocker ): return mocker.patch.object( - mock_update_doc_ref_service.feature_flag_service, - "get_allowed_list_of_ods_codes_for_upload_pilot", + mock_update_doc_ref_service, "get_allowed_list_of_ods_codes_for_upload_pilot" ) @pytest.fixture def mock_check_if_ods_code_is_in_pilot(mock_update_doc_ref_service, mocker): return mocker.patch.object( - mock_update_doc_ref_service, - "check_if_ods_code_is_in_pilot", + mock_update_doc_ref_service, "check_if_ods_code_is_in_pilot" ) @@ -138,8 +134,8 @@ def mock_fetch_documents_from_table(mocker, mock_update_doc_ref_service): def test_update_document_reference_request_with_lg_list_happy_path( - mock_fhir_doc_ref_base_service, mock_update_doc_ref_service, + mock_fhir_doc_ref_base_service, mock_getting_patient_info_from_pds, mock_stop_if_upload_is_in_progress, mock_get_allowed_list_of_ods_codes_for_upload_pilot, @@ -149,7 +145,7 @@ def test_update_document_reference_request_with_lg_list_happy_path( mock_fetch_documents_from_table, ): mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] mock_getting_patient_info_from_pds.return_value = mock_pds_patient mock_fetch_documents_from_table.return_value = create_test_doc_store_refs() @@ -161,9 +157,7 @@ def test_update_document_reference_request_with_lg_list_happy_path( ) url_references = mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) expected_response = {"uuid1": mock_presigned_url_response} @@ -189,9 +183,7 @@ def test_ods_code_not_in_pilot_raises_exception( with pytest.raises(DocumentRefException) as exc_info: mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) mock_process_fhir_document_reference.assert_not_called() @@ -217,9 +209,7 @@ def test_nhs_number_not_found_raises_exception( with 
pytest.raises(DocumentRefException) as exc_info: mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) exception = exc_info.value @@ -233,28 +223,26 @@ def test_nhs_number_not_found_raises_exception( # covers for number of files expected, non-pdf files, incorrect file name format, duplicate files def test_invalid_files_raises_exception( - mock_fhir_doc_ref_base_service, mock_update_doc_ref_service, mock_validate_files_for_access_and_store, mock_getting_patient_info_from_pds, mock_pds_patient, mock_get_allowed_list_of_ods_codes_for_upload_pilot, + mock_fhir_doc_ref_base_service, mock_process_fhir_document_reference, mock_stop_if_upload_is_in_progress, mock_fetch_documents_from_table, ): mock_getting_patient_info_from_pds.return_value = mock_pds_patient mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] mock_validate_files_for_access_and_store.side_effect = LGInvalidFilesException mock_fetch_documents_from_table.return_value = create_test_doc_store_refs() with pytest.raises(DocumentRefException) as exc_info: mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) exception = exc_info.value @@ -267,35 +255,29 @@ def test_invalid_files_raises_exception( @freeze_time("2023-10-30T10:25:00") def test_upload_already_in_progress_raises_exception( - mock_fhir_doc_ref_base_service, mock_update_doc_ref_service, mock_fetch_document_by_type, mock_get_allowed_list_of_ods_codes_for_upload_pilot, mock_getting_patient_info_from_pds, mock_pds_patient, + mock_fhir_doc_ref_base_service, mock_process_fhir_document_reference, mock_validate_files_for_access_and_store, mock_fetch_documents_from_table, ): mock_getting_patient_info_from_pds.return_value = mock_pds_patient mock_get_allowed_list_of_ods_codes_for_upload_pilot.return_value = [ - 
TEST_CURRENT_GP_ODS, + TEST_CURRENT_GP_ODS ] mock_fetch_documents_from_table.return_value = create_test_doc_store_refs() two_minutes_ago = 1698661380 # 2023-10-30T10:23:00 mock_records_upload_in_process = create_test_lloyd_george_doc_store_refs( - override={ - "uploaded": False, - "uploading": True, - "last_updated": two_minutes_ago, - }, + override={"uploaded": False, "uploading": True, "last_updated": two_minutes_ago} ) mock_fetch_document_by_type.return_value = mock_records_upload_in_process with pytest.raises(DocumentRefException) as exc_info: mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) exception = exc_info.value @@ -305,7 +287,6 @@ def test_upload_already_in_progress_raises_exception( def test_fail_early_if_there_is_no_document_reference_to_update( - mock_fhir_doc_ref_base_service, mock_update_doc_ref_service, mock_fetch_documents_from_table, mock_process_fhir_document_reference, @@ -314,9 +295,7 @@ def test_fail_early_if_there_is_no_document_reference_to_update( mock_fetch_documents_from_table.return_value = [] with pytest.raises(DocumentRefException) as exc_info: mock_update_doc_ref_service.update_document_reference_request( - TEST_NHS_NUMBER, - LG_FILE, - TEST_UUID, + TEST_NHS_NUMBER, LG_FILE, TEST_UUID ) exception = exc_info.value diff --git a/lambdas/tests/unit/services/test_upload_document_reference_service.py b/lambdas/tests/unit/services/test_upload_document_reference_service.py index d8b6a5e865..e030d461fd 100644 --- a/lambdas/tests/unit/services/test_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_upload_document_reference_service.py @@ -11,7 +11,7 @@ FinalOrPreliminaryAndNotSuperseded, PreliminaryStatus, ) -from utils.exceptions import DocumentServiceException, FileProcessingException +from utils.exceptions import DocumentServiceException, FileProcessingException, TransactionConflictException from utils.lambda_exceptions 
import InvalidDocTypeException from lambdas.enums.snomed_codes import SnomedCodes @@ -23,7 +23,6 @@ def mock_document_reference(): doc_ref = DocumentReference.model_construct() doc_ref.id = "test-doc-id" doc_ref.nhs_number = "9000000001" - doc_ref.file_name = "test-file.txt" doc_ref.s3_file_key = "original/test-key" doc_ref.s3_bucket_name = "original-bucket" doc_ref.file_location = "original-location" @@ -32,7 +31,7 @@ def mock_document_reference(): doc_ref.doc_status = "preliminary" doc_ref.version = "1" doc_ref._build_s3_location = Mock( - return_value="s3://test-lg-bucket/9000000001/test-doc-id", + return_value="s3://test-lg-bucket/9000000001/test-doc-id" ) return doc_ref @@ -42,13 +41,13 @@ def mock_virus_scan_service( mocker, ): mock = mocker.patch( - "services.upload_document_reference_service.get_virus_scan_service", + "services.upload_document_reference_service.get_virus_scan_service" ) yield mock @pytest.fixture -def service(set_env, mock_virus_scan_service, mocker): +def service(set_env, mock_virus_scan_service): with patch.multiple( "services.upload_document_reference_service", DocumentService=Mock(), @@ -60,11 +59,6 @@ def service(set_env, mock_virus_scan_service, mocker): service.dynamo_service = Mock() service.virus_scan_service = MockVirusScanService() service.s3_service = Mock() - mocker.patch("io.BytesIO", return_value=None) - mocker.patch( - "services.upload_document_reference_service.check_file_locked_or_corrupt", - return_value=False, - ) return service @@ -77,39 +71,33 @@ def test_handle_upload_document_reference_request_with_empty_object_key(service) def test_handle_upload_document_reference_request_with_none_object_key(service): """Test handling of a None object key""" - service.handle_upload_document_reference_request("", 122) + service.handle_upload_document_reference_request(None, 122) service.document_service.fetch_documents_from_table.assert_not_called() def test_handle_upload_document_reference_request_success( - service, - 
mock_document_reference, - mocker, + service, mock_document_reference, mocker ): + """Test successful handling of the upload document reference request""" object_key = "staging/test-doc-id" object_size = 1111 mock_document_reference2 = Mock(spec=DocumentReference) - mock_document_reference2.file_name = "filename2.txt" mock_document_reference2.id = "another-doc-id" mock_document_reference2.doc_status = "final" mock_document_reference2.version = "1" service.s3_service.copy_across_bucket.return_value = { - "VersionId": "test-version-id", + "VersionId": "test-version-id" } - mocker.patch("io.BytesIO", return_value=None) - mocker.patch( - "services.upload_document_reference_service.check_file_locked_or_corrupt", - return_value=False, - ) + # First call fetches preliminary doc, second call fetches existing final docs to supersede service.document_service.fetch_documents_from_table.side_effect = [ [mock_document_reference], [mock_document_reference2], ] service.virus_scan_service.scan_file = mocker.MagicMock( - return_value=VirusScanResult.CLEAN, + return_value=VirusScanResult.CLEAN ) service.handle_upload_document_reference_request(object_key, object_size) @@ -126,7 +114,7 @@ def test_handle_upload_document_reference_request_with_exception(service): object_key = "staging/test-doc-id" service.document_service.fetch_documents_from_table.side_effect = Exception( - "Test error", + "Test error" ) service.handle_upload_document_reference_request(object_key) @@ -137,7 +125,7 @@ def test_fetch_preliminary_document_reference_success(service, mock_document_ref document_key = "test-doc-id" service.table_name = "dev_LloydGeorgeReferenceMetadata" service.document_service.fetch_documents_from_table.return_value = [ - mock_document_reference, + mock_document_reference ] result = service._fetch_preliminary_document_reference(document_key) @@ -162,8 +150,7 @@ def test_fetch_preliminary_document_reference_no_documents_found(service): def 
test_fetch_preliminary_document_reference_multiple_documents_warning( - service, - mock_document_reference, + service, mock_document_reference ): """Test handling when multiple documents are found""" document_key = "test-doc-id" @@ -190,30 +177,23 @@ def test_fetch_preliminary_document_reference_exception(service): def test__process_preliminary_document_reference_clean_virus_scan( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test processing document reference with a clean virus scan""" object_key = "staging/test-doc-id" mocker.patch.object( - service, - "_perform_virus_scan", - return_value=VirusScanResult.CLEAN, + service, "_perform_virus_scan", return_value=VirusScanResult.CLEAN ) mock_delete = mocker.patch.object(service, "delete_file_from_staging_bucket") mock_process_clean = mocker.patch.object(service, "_process_clean_document") mock_finalize_transaction = mocker.patch.object( - service, - "_finalize_and_supersede_with_transaction", + service, "_finalize_and_supersede_with_transaction" ) service._process_preliminary_document_reference( - mock_document_reference, - object_key, - 1222, + mock_document_reference, object_key, 1222 ) mock_process_clean.assert_called_once() @@ -225,33 +205,26 @@ def test__process_preliminary_document_reference_clean_virus_scan( def test__process_preliminary_document_reference_infected_virus_scan( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test processing document reference with an infected virus scan""" object_key = "staging/test-doc-id" mocker.patch.object( - service, - "_perform_virus_scan", - return_value=VirusScanResult.INFECTED, + service, "_perform_virus_scan", return_value=VirusScanResult.INFECTED ) mock_delete = mocker.patch.object(service, "delete_file_from_staging_bucket") mock_process_clean = mocker.patch.object(service, "_process_clean_document") mock_update_dynamo = mocker.patch.object(service, 
"_update_dynamo_table") service._process_preliminary_document_reference( - mock_document_reference, - object_key, - 1222, + mock_document_reference, object_key, 1222 ) mock_process_clean.assert_not_called() mock_update_dynamo.assert_called_once() mock_delete.assert_called_once_with(object_key) - def test_perform_virus_scan_returns_clean_hardcoded(service, mock_document_reference): """Test virus scan returns hardcoded CLEAN result""" object_key = "staging/test-doc-id" @@ -260,9 +233,7 @@ def test_perform_virus_scan_returns_clean_hardcoded(service, mock_document_refer def test_perform_virus_scan_exception_returns_infected( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test virus scan exception handling returns INFECTED for safety""" mock_virus_service = mocker.patch.object(service, "virus_scan_service") @@ -289,9 +260,7 @@ def test_process_clean_document_success(service, mock_document_reference, mocker def test_process_clean_document_exception_restores_original_values( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test that original values are restored when processing fails""" object_key = "staging/test-doc-id" @@ -300,9 +269,7 @@ def test_process_clean_document_exception_restores_original_values( original_location = "original-location" mocker.patch.object( - service, - "copy_files_from_staging_bucket", - side_effect=Exception("Copy failed"), + service, "copy_files_from_staging_bucket", side_effect=Exception("Copy failed") ) with pytest.raises(FileProcessingException): service._process_clean_document( @@ -339,7 +306,7 @@ def test_copy_files_from_staging_bucket_client_error(service, mock_document_refe source_file_key = "staging/test-doc-id" client_error = ClientError( error_response={ - "Error": {"Code": "NoSuchBucket", "Message": "Bucket does not exist"}, + "Error": {"Code": "NoSuchBucket", "Message": "Bucket does not exist"} }, operation_name="CopyObject", ) @@ 
-356,8 +323,7 @@ def test_delete_file_from_staging_bucket_success(service): service.delete_file_from_staging_bucket(source_file_key) service.s3_service.delete_object.assert_called_once_with( - MOCK_STAGING_STORE_BUCKET, - source_file_key, + MOCK_STAGING_STORE_BUCKET, source_file_key ) @@ -370,8 +336,7 @@ def test_delete_pdm_file_from_staging_bucket_success(service): service.delete_file_from_staging_bucket(source_file_key) service.s3_service.delete_object.assert_called_once_with( - MOCK_STAGING_STORE_BUCKET, - source_file_key, + MOCK_STAGING_STORE_BUCKET, source_file_key ) @@ -380,7 +345,7 @@ def test_delete_file_from_staging_bucket_client_error(service): source_file_key = "staging/test-doc-id" client_error = ClientError( error_response={ - "Error": {"Code": "NoSuchKey", "Message": "Key does not exist"}, + "Error": {"Code": "NoSuchKey", "Message": "Key does not exist"} }, operation_name="DeleteObject", ) @@ -424,10 +389,7 @@ def test_update_dynamo_table_client_error(service, mock_document_reference): """Test handling of ClientError during DynamoDB update""" client_error = ClientError( error_response={ - "Error": { - "Code": "ResourceNotFoundException", - "Message": "Table not found", - }, + "Error": {"Code": "ResourceNotFoundException", "Message": "Table not found"} }, operation_name="UpdateItem", ) @@ -456,7 +418,7 @@ def test_document_key_extraction_from_object_key_for_lg( # First call returns preliminary doc, second call returns empty list (no existing finals) service.s3_service.copy_across_bucket.return_value = { - "VersionId": "test-version-id", + "VersionId": "test-version-id" } service.document_service.fetch_documents_from_table.side_effect = [ @@ -484,8 +446,7 @@ def test_document_key_extraction_from_object_key_for_lg( def test_finalize_and_supersede_with_transaction_with_existing_finals( - service, - mock_document_reference, + service, mock_document_reference, mocker ): """Test transaction-based finalisation with existing final documents to supersede""" 
new_doc = mock_document_reference @@ -502,7 +463,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( service.table_name = "dev_LloydGeorgeReferenceMetadata" service.document_service.fetch_documents_from_table.return_value = [ - existing_final_doc, + existing_final_doc ] mock_build_update = Mock(return_value={"Update": "transaction1"}) @@ -550,8 +511,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( def test_finalize_and_supersede_with_transaction_no_existing_docs( - service, - mock_document_reference, + service, mock_document_reference, mocker ): """Test transaction-based finalization when no existing final documents found""" new_doc = mock_document_reference @@ -575,8 +535,7 @@ def test_finalize_and_supersede_with_transaction_no_existing_docs( def test_finalize_and_supersede_with_transaction_multiple_existing( - service, - mock_document_reference, + service, mock_document_reference, mocker ): """Test transaction-based finalization superseding multiple existing final documents""" new_doc = mock_document_reference @@ -612,8 +571,7 @@ def test_finalize_and_supersede_with_transaction_multiple_existing( def test_finalize_and_supersede_with_transaction_skips_same_id( - service, - mock_document_reference, + service, mock_document_reference, mocker ): """Test that transaction skips documents with the same ID""" new_doc = mock_document_reference @@ -641,8 +599,7 @@ def test_finalize_and_supersede_with_transaction_skips_same_id( def test_finalize_and_supersede_with_transaction_handles_transaction_cancelled( - service, - mock_document_reference, + service, mock_document_reference ): new_doc = mock_document_reference @@ -682,34 +639,25 @@ def test_handle_upload_document_reference_request_no_document_found(service): def test_process_preliminary_document_reference_exception_during_processing( - service, - mock_document_reference, - mocker, + service, mock_document_reference, mocker ): """Test that exceptions during processing 
are properly raised""" object_key = "staging/test-doc-id" mocker.patch.object( - service, - "_perform_virus_scan", - return_value=VirusScanResult.CLEAN, + service, "_perform_virus_scan", return_value=VirusScanResult.CLEAN ) mocker.patch.object( - service, - "_process_clean_document", - side_effect=Exception("Processing failed"), + service, "_process_clean_document", side_effect=Exception("Processing failed") ) with pytest.raises(Exception) as exc_info: service._process_preliminary_document_reference( - mock_document_reference, - object_key, - 1222, + mock_document_reference, object_key, 1222 ) assert "Processing failed" in str(exc_info.value) - def test_get_infrastructure_for_document_key_non_pdm(service): assert service.table_name == "" infra = service._get_infrastructure_for_document_key(object_parts=["1234", "123"]) @@ -742,32 +690,3 @@ def test_get_infra_invalid_doc_type(monkeypatch, service): # Call function and assert the exception is raised with pytest.raises(InvalidDocTypeException): service._get_infrastructure_for_document_key(["fhir_upload", "999999"]) - - -def test_is_file_invalid_calls_correct_functions(service, mocker): - """Test that is_file_invalid calls the right functions in the correct order""" - object_key = "test-folder/test-file.docx" - file_extension = "docx" - file_content = b"fake docx file content" - - mock_stream = Mock() - mock_stream.read.return_value = file_content - service.s3_service.get_object_stream.return_value = mock_stream - mock_bytesio = mocker.patch("services.upload_document_reference_service.io.BytesIO") - mock_file_stream = Mock() - mock_bytesio.return_value = mock_file_stream - mock_check = mocker.patch( - "services.upload_document_reference_service.check_file_locked_or_corrupt", - return_value=True, - ) - - result = service.is_file_invalid(object_key, file_extension) - - assert result is True - service.s3_service.get_object_stream.assert_called_once_with( - service.staging_s3_bucket_name, - object_key, - ) - 
mock_stream.read.assert_called_once_with() - mock_bytesio.assert_called_once_with(file_content) - mock_check.assert_called_once_with(mock_file_stream, file_extension) diff --git a/lambdas/utils/constants/file_extensions.py b/lambdas/utils/constants/file_extensions.py deleted file mode 100644 index 4e04dfd004..0000000000 --- a/lambdas/utils/constants/file_extensions.py +++ /dev/null @@ -1,3 +0,0 @@ -TEXT_FILE_EXTENSIONS = ["rtf", "csv", "json", "txt", "xml"] -MEDIA_FILE_EXTENSIONS = ["jpg", "jpeg", "png", "tiff", "tif"] -MICROSOFT_OFFICE_FILE_EXTENSIONS = ["docx", "xlsx", "pptx", "doc", "xls", "ppt"] diff --git a/lambdas/utils/file_utils.py b/lambdas/utils/file_utils.py index f52d369771..ec24c67bc3 100644 --- a/lambdas/utils/file_utils.py +++ b/lambdas/utils/file_utils.py @@ -3,9 +3,7 @@ def convert_csv_dictionary_to_bytes( - headers: list[str], - csv_dict_data: list[dict], - encoding: str = "utf-8", + headers: list[str], csv_dict_data: list[dict], encoding: str = "utf-8" ) -> bytes: csv_buffer = BytesIO() csv_text_wrapper = TextIOWrapper(csv_buffer, encoding=encoding, newline="") diff --git a/poetry.lock b/poetry.lock index 4bf25323d7..c329268964 100644 --- a/poetry.lock +++ b/poetry.lock @@ -205,7 +205,7 @@ version = "2.0.0" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.9" -groups = ["core-lambda", "files-lambda"] +groups = ["core-lambda"] markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, @@ -517,74 +517,56 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" -version = "46.0.5" +version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.8" -groups = ["core-lambda", "files-lambda"] -files = [ - {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"}, - {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"}, - {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"}, - {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"}, - 
{file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"}, - {file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"}, - {file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"}, - {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"}, - {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"}, - {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"}, - {file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"}, - {file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"}, - {file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"}, - {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", 
hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"}, - {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"}, - {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"}, - {file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"}, - {file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"}, - {file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"}, - {file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"}, +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["core-lambda"] +files = [ + {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, + {file = 
"cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, + {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, + {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, + {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, + {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, + {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, + {file = 
"cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, + {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, ] [package.dependencies] -cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox[uv] (>=2024.4.15)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] 
test-randomorder = ["pytest-randomly"] [[package]] @@ -1208,22 +1190,6 @@ files = [ {file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}, ] -[[package]] -name = "msoffcrypto-tool" -version = "6.0.0" -description = "Python tool and library for decrypting and encrypting MS Office files using a password or other keys" -optional = false -python-versions = "<4.0,>=3.10" -groups = ["files-lambda"] -files = [ - {file = "msoffcrypto_tool-6.0.0-py3-none-any.whl", hash = "sha256:46c394ed5d9641e802fc79bf3fb0666a53748b23fa8c4aa634ae9d30d46fe397"}, - {file = "msoffcrypto_tool-6.0.0.tar.gz", hash = "sha256:9a5ebc4c0096b42e5d7ebc2350afdc92dc511061e935ca188468094fdd032bbe"}, -] - -[package.dependencies] -cryptography = ">=39.0" -olefile = ">=0.46" - [[package]] name = "mypy-extensions" version = "1.1.0" @@ -1253,21 +1219,6 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] -[[package]] -name = "olefile" -version = "0.47" -description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["files-lambda"] -files = [ - {file = "olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f"}, - {file = "olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - [[package]] name = "openpyxl" version = "3.1.5" @@ -1713,7 +1664,7 @@ version = "2.23" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["core-lambda", "files-lambda"] +groups = ["core-lambda"] markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = 
"pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, @@ -2499,4 +2450,4 @@ requests = "*" [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "9d5fc7af2841c90590cd173e32abcf400d064cd303e49a5b2ac33964170a0e9b" +content-hash = "77d0249d2dd6c9fbb02b0e434ade72ac221f4df82e8d98ec49250fe0b7ff74df" diff --git a/pyproject.toml b/pyproject.toml index fe89cf8388..4abaa1617d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,9 +51,6 @@ pikepdf = "8.4.0" [tool.poetry.group.data_lambda.dependencies] polars = "1.31.0" -[tool.poetry.group.files_lambda.dependencies] -msoffcrypto-tool = "6.0.0" - [tool.poetry.group.reports_lambda.dependencies] openpyxl = "^3.1.5" reportlab = "^4.3.1"