Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
a544b36
[PRMP-1615] Updated reports
PedroSoaresNHS Mar 23, 2026
dcac000
[PRMP-1615] added tests
PedroSoaresNHS Mar 23, 2026
076ab53
[PRMP-1615] linting
PedroSoaresNHS Mar 23, 2026
db43c47
Merge remote-tracking branch 'origin/main' into PRMP-1615
PedroSoaresNHS Mar 23, 2026
c7dcb8e
Merge remote-tracking branch 'origin/main' into PRMP-1615
PedroSoaresNHS Mar 23, 2026
87297fb
[PRMP-1615] linting
PedroSoaresNHS Mar 23, 2026
95a01b9
Merge remote-tracking branch 'origin/main' into PRMP-1615
PedroSoaresNHS Mar 25, 2026
6f4b82a
Merge remote-tracking branch 'origin/PRMP-1615' into PRMP-1635
PedroSoaresNHS Mar 26, 2026
5995b60
[PRMP-1635] testing updated report
PedroSoaresNHS Mar 26, 2026
9b68d0f
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 26, 2026
654b429
[PRMP-1635] refactored and updated tests
PedroSoaresNHS Mar 27, 2026
779f729
[PRMP-1635] linting
PedroSoaresNHS Mar 27, 2026
4b4dced
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 27, 2026
6c46d5e
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 27, 2026
abc9cf5
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 30, 2026
61a8465
Merge branch 'main' into PRMP-1635
robg-test Mar 30, 2026
6222a27
[PRMP-1635] lint
PedroSoaresNHS Mar 30, 2026
4001e82
Merge branch 'main' into PRMP-1635
robg-test Mar 30, 2026
be8b56a
[PRMP-1635] fixed comments
PedroSoaresNHS Mar 30, 2026
37ba449
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 30, 2026
64957d2
Merge branch 'main' into PRMP-1635
robg-test Mar 30, 2026
29f28b6
Merge remote-tracking branch 'origin/main' into PRMP-1635
PedroSoaresNHS Mar 31, 2026
90cfef1
Merge branch 'main' into PRMP-1635
robg-test Apr 2, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
169 changes: 151 additions & 18 deletions lambdas/services/reporting/excel_report_generator_service.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
from datetime import datetime, timezone

from openpyxl.workbook import Workbook

from models.report.bulk_upload_report import BulkUploadReport
from models.report.bulk_upload_report_output import OdsReport
from utils.audit_logging_setup import LoggingService

logger = LoggingService(__name__)
Expand All @@ -10,47 +13,177 @@ class ExcelReportGenerator:
def create_report_orchestration_xlsx(
    self,
    ods_code: str,
    records: list[BulkUploadReport],
    output_path: str,
) -> str:
    """Build the per-ODS Excel report and save it to output_path.

    The workbook gets two sheets: a detail sheet (one row per record) and a
    summary sheet derived from the aggregated OdsReport model.

    Args:
        ods_code: Uploader ODS code the report is scoped to.
        records: Validated bulk-upload records for this ODS code.
        output_path: Filesystem path the .xlsx file is written to.

    Returns:
        The output_path the workbook was saved to.
    """
    logger.info(
        f"Creating Excel report for ODS code {ods_code} and records {len(records)}",
    )

    # One timestamp is taken up front so the detail-sheet banner and the
    # summary model agree on the generation time.
    generated_at = datetime.now(timezone.utc)
    ods_report = self._build_ods_report(ods_code, records, generated_at)

    wb = Workbook()
    # _create_detail_sheet reuses the workbook's default active sheet;
    # _create_summary_sheet adds a second sheet. (The residual
    # `ws = wb.active` assignment from the old inline version is removed —
    # it was dead code here.)
    self._create_detail_sheet(wb, ods_code, records, generated_at)
    self._create_summary_sheet(wb, ods_report)

    wb.save(output_path)

    logger.info(f"Excel report written successfully for ods code {ods_code}")
    return output_path

@staticmethod
def _build_ods_report(
    ods_code: str,
    records: list[BulkUploadReport],
    generated_at: datetime,
) -> OdsReport:
    """Assemble the OdsReport model consumed by the summary sheet."""
    # The model carries a compact YYYYMMDD date stamp, not a full timestamp.
    report_date = generated_at.strftime("%Y%m%d")
    return OdsReport(
        generated_at=report_date,
        uploader_ods_code=ods_code,
        report_items=records,
    )

def _create_detail_sheet(
    self,
    workbook: Workbook,
    ods_code: str,
    records: list[BulkUploadReport],
    generated_at: datetime,
) -> None:
    """Populate the workbook's default sheet with the per-record detail view."""
    sheet = workbook.active
    sheet.title = "Daily Upload Report"

    # Order matters: metadata banner, then column headers, then data rows.
    self._append_detail_sheet_metadata(sheet, ods_code, generated_at)
    self._append_detail_sheet_headers(sheet)
    self._append_detail_sheet_rows(sheet, records)

@staticmethod
def _append_detail_sheet_metadata(
worksheet,
ods_code: str,
generated_at: datetime,
) -> None:
worksheet.append([f"ODS Code: {ods_code}"])
worksheet.append([f"Generated at (UTC): {generated_at.isoformat()}"])
worksheet.append([])

ws.append(
@staticmethod
def _append_detail_sheet_headers(worksheet) -> None:
worksheet.append(
[
"NHS Number",
"Date",
"Uploader ODS",
"PDS ODS",
"Upload Status",
"Reason",
"Sent to Review",
"File Path",
],
)

@staticmethod
def _append_detail_sheet_rows(
worksheet,
records: list[BulkUploadReport],
) -> None:
for record in records:
ws.append(
worksheet.append(
[
record.get("NhsNumber"),
record.get("Date"),
record.get("UploaderOdsCode"),
record.get("PdsOdsCode"),
record.get("UploadStatus"),
record.get("Reason"),
record.get("FilePath"),
record.nhs_number,
record.date,
record.uploader_ods_code,
record.pds_ods_code,
record.upload_status,
record.reason,
record.sent_to_review,
record.file_path,
],
)

def _create_summary_sheet(
    self,
    workbook: Workbook,
    ods_report: OdsReport,
) -> None:
    """Add the 'Summary' sheet: headers, totals, optional totals, reason rows.

    (The orphaned `wb.save(...)` / logger / `return` lines left above this
    method by the old inline version are removed — saving and logging happen
    in create_report_orchestration_xlsx.)
    """
    summary_ws = workbook.create_sheet(title="Summary")

    self._append_summary_headers(summary_ws)
    self._append_summary_totals(summary_ws, ods_report)
    self._append_summary_optional_totals(summary_ws, ods_report)
    self._append_summary_reason_rows(summary_ws, ods_report)

@staticmethod
def _append_summary_headers(worksheet) -> None:
worksheet.append(["Type", "Description", "Count"])

@staticmethod
def _append_summary_totals(
    worksheet,
    ods_report: OdsReport,
) -> None:
    """Append the always-present 'Total' rows to the summary sheet.

    Row order is fixed and mirrors the report model's aggregate accessors.
    """
    total_rows = [
        ("Total Ingested", ods_report.get_total_ingested_count()),
        ("Total Successful", ods_report.get_total_successful()),
        ("Total In Review", ods_report.get_total_in_review_count()),
        ("Review Percentage", ods_report.get_total_in_review_percentage()),
        ("Successful Percentage", ods_report.get_total_successful_percentage()),
        (
            "Successful - Registered Elsewhere",
            ods_report.get_total_registered_elsewhere_count(),
        ),
        ("Successful - Suspended", ods_report.get_total_suspended_count()),
    ]
    for description, count in total_rows:
        worksheet.append(["Total", description, count])

@staticmethod
def _append_summary_optional_totals(
    worksheet,
    ods_report: OdsReport,
) -> None:
    """Append deceased/restricted totals only when the counts are truthy.

    Zero (or otherwise falsy) counts are deliberately omitted from the sheet.
    """
    deceased_count = ods_report.get_total_deceased_count()
    if deceased_count:
        worksheet.append(["Total", "Successful - Deceased", deceased_count])

    restricted_count = ods_report.get_total_restricted_count()
    if restricted_count:
        worksheet.append(["Total", "Successful - Restricted", restricted_count])

@staticmethod
def _append_summary_reason_rows(
    worksheet,
    ods_report: OdsReport,
) -> None:
    """Append one row per unsuccessful-upload reason, as supplied by the model."""
    reason_rows = ods_report.get_unsuccessful_reasons_data_rows()
    for reason_row in reason_rows:
        worksheet.append(reason_row)
42 changes: 34 additions & 8 deletions lambdas/services/reporting/report_orchestration_service.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
import tempfile
from collections import defaultdict
from typing import Dict
from typing import Dict, List

from pydantic import ValidationError

from models.report.bulk_upload_report import BulkUploadReport
from repositories.reporting.reporting_dynamo_repository import ReportingDynamoRepository
from services.reporting.excel_report_generator_service import ExcelReportGenerator
from utils.audit_logging_setup import LoggingService
Expand All @@ -19,21 +22,38 @@ def process_reporting_window(
window_start_ts: int,
window_end_ts: int,
) -> Dict[str, str]:
records = self.repository.get_records_for_time_window(
filtered_records = self.repository.get_records_for_time_window(
window_start_ts,
window_end_ts,
)

if not records:
if not filtered_records:
logger.info("No records found for reporting window")
return {}

records = [
filtered_records = [
record
for record in records
for record in filtered_records
if "expedite" not in (record.get("FilePath") or "").lower()
]

if not filtered_records:
logger.info(
"No records found for reporting window after excluding expedite files",
)
return {}

records: List[BulkUploadReport] = []
for record in filtered_records:
try:
records.append(BulkUploadReport.model_validate(record))
except ValidationError as e:
logger.error(f"Skipping invalid record in reporting orchestration: {e}")

if not records:
logger.info("No valid records after validation")
return {}

records_by_ods = self.group_records_by_ods(records)
generated_files: Dict[str, str] = {}

Expand All @@ -48,14 +68,20 @@ def process_reporting_window(
return generated_files

@staticmethod
def group_records_by_ods(records: list[dict]) -> dict[str, list[dict]]:
def group_records_by_ods(
records: List[BulkUploadReport],
) -> dict[str, List[BulkUploadReport]]:
grouped = defaultdict(list)
for record in records:
ods_code = record.get("UploaderOdsCode") or "UNKNOWN"
ods_code = record.uploader_ods_code or "UNKNOWN"
grouped[ods_code].append(record)
return grouped

def generate_ods_report(self, ods_code: str, records: list[dict]) -> str:
def generate_ods_report(
self,
ods_code: str,
records: List[BulkUploadReport],
) -> str:
with tempfile.NamedTemporaryFile(
suffix=f"_{ods_code}.xlsx",
delete=False,
Expand Down
Loading
Loading