@@ -8,4 +8,5 @@ error_alarm_notifications_enabled = true
 mesh_no_invocation_period_seconds = 86400
 create_mesh_processor = true
 has_sub_environment_scope = false
-dspp_kms_key_alias = "nhsd-dspp-core-prod-extended-attributes-gdp-key"
+dspp_submission_s3_bucket_name = "nhsd-dspp-core-prod-s3-submission-upload"
+dspp_submission_kms_key_alias = "nhsd-dspp-core-prod-s3-submission-upload-key"
@@ -8,4 +8,5 @@ error_alarm_notifications_enabled = true
 mesh_no_invocation_period_seconds = 86400
 create_mesh_processor = true
 has_sub_environment_scope = false
-dspp_kms_key_alias = "nhsd-dspp-core-prod-extended-attributes-gdp-key"
+dspp_submission_s3_bucket_name = "nhsd-dspp-core-prod-s3-submission-upload"
+dspp_submission_kms_key_alias = "nhsd-dspp-core-prod-s3-submission-upload-key"
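The two tfvars hunks above supply the prod-environment values for the new dspp_submission_s3_bucket_name and dspp_submission_kms_key_alias inputs, replacing the old dspp_kms_key_alias entry; the ref-environment values remain as the defaults declared in variables.tf further down.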
20 changes: 5 additions & 15 deletions infrastructure/instance/file_name_processor.tf
@@ -3,18 +3,8 @@ locals {
   filename_lambda_dir     = abspath("${path.root}/../../lambdas/filenameprocessor")
   filename_lambda_files   = fileset(local.filename_lambda_dir, "**")
   filename_lambda_dir_sha = sha1(join("", [for f in local.filename_lambda_files : filesha1("${local.filename_lambda_dir}/${f}")]))
-  dps_bucket_name_for_extended_attribute = (
-    var.environment == "prod"
-    ? "nhsd-dspp-core-prod-extended-attributes-gdp"
-    : "nhsd-dspp-core-ref-extended-attributes-gdp"
-  )
-  dps_bucket_arn_for_extended_attribute = [
-    "arn:aws:s3:::${local.dps_bucket_name_for_extended_attribute}/*"
-  ]
 }

 resource "aws_ecr_repository" "file_name_processor_lambda_repository" {
   image_scanning_configuration {
     scan_on_push = true
@@ -178,7 +168,7 @@ resource "aws_iam_policy" "filenameprocessor_lambda_exec_policy" {
         "Action" : [
           "s3:PutObject"
         ],
-        "Resource" : local.dps_bucket_arn_for_extended_attribute
+        "Resource" : ["arn:aws:s3:::${var.dspp_submission_s3_bucket_name}/*"]
       }
     ]
   })
@@ -264,14 +254,14 @@ resource "aws_iam_policy" "filenameprocessor_dps_extended_attribute_kms_policy"
       {
         Effect = "Allow",
         Action = [
-          "kms:Decrypt",
           "kms:Encrypt",
           "kms:GenerateDataKey",
-          "kms:DescribeKey"
+          "kms:DescribeKey",
         ],
         Resource = "arn:aws:kms:eu-west-2:${var.dspp_core_account_id}:key/*",
         "Condition" = {
           "ForAnyValue:StringEquals" = {
-            "kms:ResourceAliases" = "alias/${var.dspp_kms_key_alias}"
+            "kms:ResourceAliases" = "alias/${var.dspp_submission_kms_key_alias}"
           }
         }
       }
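Dropping kms:Decrypt fits a write-only flow: a single-part SSE-KMS upload calls kms:GenerateDataKey on the customer key (kms:Encrypt and kms:DescribeKey cover related client paths), while kms:Decrypt is only needed to read objects back. The ForAnyValue:StringEquals condition on kms:ResourceAliases narrows the wildcard key ARN to the submission key's alias. A minimal boto3 sketch of an upload this policy would permit, assuming the ref-environment defaults and a hypothetical object key:

import boto3

s3 = boto3.client("s3")

# Write-only SSE-KMS upload using the alias-scoped DSPP key; the bucket and
# alias are the ref defaults from variables.tf, the key and body are made up.
s3.put_object(
    Bucket="nhsd-dspp-core-ref-s3-submission-upload",
    Key="generic/EXTENDED_ATTRIBUTES_DAILY_1/example.csv",
    Body=b"...",
    ServerSideEncryption="aws:kms",
    SSEKMSKeyId="alias/nhsd-dspp-core-ref-s3-submission-upload-key",
)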
@@ -329,7 +319,7 @@ resource "aws_lambda_function" "file_processor_lambda" {
       DPS_ACCOUNT_ID     = var.dspp_core_account_id
       SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
       ACK_BUCKET_NAME    = aws_s3_bucket.batch_data_destination_bucket.bucket
-      DPS_BUCKET_NAME    = local.dps_bucket_name_for_extended_attribute
+      DPS_BUCKET_NAME    = var.dspp_submission_s3_bucket_name
       QUEUE_URL          = aws_sqs_queue.batch_file_created.url
       REDIS_HOST         = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
       REDIS_PORT         = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
12 changes: 9 additions & 3 deletions infrastructure/instance/variables.tf
@@ -10,10 +10,16 @@ variable "csoc_account_id" {
   default     = "693466633220"
 }

-variable "dspp_kms_key_alias" {
-  description = "Alias name of the DPS KMS key allowed for SSE-KMS encryption"
+variable "dspp_submission_s3_bucket_name" {
+  description = "Name of the DSPP (DPS) S3 bucket where extended attributes files should be submitted"
   type        = string
-  default     = "nhsd-dspp-core-ref-extended-attributes-gdp-key"
+  default     = "nhsd-dspp-core-ref-s3-submission-upload"
+}
+
+variable "dspp_submission_kms_key_alias" {
+  description = "Alias of the DSPP (DPS) KMS key required to encrypt extended attributes files"
+  type        = string
+  default     = "nhsd-dspp-core-ref-s3-submission-upload-key"
 }

 variable "create_mesh_processor" {
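The defaults here are the ref-environment names; prod deployments override both inputs via the tfvars hunks at the top of this diff.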
2 changes: 1 addition & 1 deletion lambdas/filenameprocessor/src/constants.py
@@ -21,7 +21,7 @@
 # Currently only COVID extended attributes files are supported, might be extended in future for other vaccine types
 EXTENDED_ATTRIBUTES_VACC_TYPE = "COVID"

-DPS_DESTINATION_PREFIX = "dps_destination"
+DPS_DESTINATION_PREFIX = "generic/EXTENDED_ATTRIBUTES_DAILY_1"
 EXTENDED_ATTRIBUTES_ARCHIVE_PREFIX = "extended-attributes-archive"
 VALID_EA_VERSIONS = ["V1_5"]
 ERROR_TYPE_TO_STATUS_CODE_MAP = {
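With the renamed prefix, copied files land under generic/EXTENDED_ATTRIBUTES_DAILY_1/ in the DPS submission bucket instead of dps_destination/. A short sketch of how this constant combines with the DPS_BUCKET_NAME environment variable set on the lambda above; the helper itself is hypothetical, only the constant and the env var name come from the diff:

import os
import boto3

DPS_DESTINATION_PREFIX = "generic/EXTENDED_ATTRIBUTES_DAILY_1"

def copy_to_dps(source_bucket: str, file_key: str) -> None:
    """Hypothetical helper: copy a validated file into the DPS submission bucket."""
    s3 = boto3.client("s3")
    s3.copy_object(
        Bucket=os.environ["DPS_BUCKET_NAME"],        # e.g. nhsd-dspp-core-ref-s3-submission-upload
        Key=f"{DPS_DESTINATION_PREFIX}/{file_key}",  # generic/EXTENDED_ATTRIBUTES_DAILY_1/<file_key>
        CopySource={"Bucket": source_bucket, "Key": file_key},
    )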
8 changes: 4 additions & 4 deletions lambdas/filenameprocessor/tests/test_lambda_handler.py
@@ -288,8 +288,8 @@ def test_lambda_handler_extended_attributes_success(self, mock_get_redis_client)
         archived_obj = s3_client.get_object(Bucket=BucketNames.SOURCE, Key=archived_key)
         self.assertIsNotNone(archived_obj)

-        # Also verify file copied to DPS destination bucket under dps_destination/<file_key>
-        dps_key = f"dps_destination/{test_cases[0].file_key}"
+        # Also verify file copied to DPS destination bucket under generic/EXTENDED_ATTRIBUTES_DAILY_1/<file_key>
+        dps_key = f"generic/EXTENDED_ATTRIBUTES_DAILY_1/{test_cases[0].file_key}"
         copied_obj = s3_client.get_object(Bucket=BucketNames.DPS_DESTINATION, Key=dps_key)
         self.assertIsNotNone(copied_obj)
@@ -463,7 +463,7 @@ def test_lambda_handler_extended_attributes_extension_checks(self, mock_get_redi
         # Ensure processed path hit by checking archive move in source bucket
         s3_client.get_object(Bucket=BucketNames.SOURCE, Key=f"extended-attributes-archive/{csv_key}")
         # And verify copy to DPS destination
-        s3_client.get_object(Bucket=BucketNames.DPS_DESTINATION, Key=f"dps_destination/{csv_key}")
+        s3_client.get_object(Bucket=BucketNames.DPS_DESTINATION, Key=f"generic/EXTENDED_ATTRIBUTES_DAILY_1/{csv_key}")

         # .DAT accepted
         dat_key = MockFileDetails.extended_attributes_file.file_key[:-3] + "dat"
@@ -474,7 +474,7 @@
         ):
             lambda_handler(self.make_event([self.make_record(dat_key)]), None)
         s3_client.get_object(Bucket=BucketNames.SOURCE, Key=f"extended-attributes-archive/{dat_key}")
-        s3_client.get_object(Bucket=BucketNames.DPS_DESTINATION, Key=f"dps_destination/{dat_key}")
+        s3_client.get_object(Bucket=BucketNames.DPS_DESTINATION, Key=f"generic/EXTENDED_ATTRIBUTES_DAILY_1/{dat_key}")

         # Invalid extension fails
         bad_ext_key = csv_key[:-3] + "txt"
2 changes: 1 addition & 1 deletion package.json
@@ -26,7 +26,7 @@
       "poetry -P quality_checks run ruff check --fix",
       "poetry -P quality_checks run ruff format"
     ],
-    "*.tf": "terraform fmt",
+    "*.{tf,tfvars}": "terraform fmt",
     "immunisation-fhir-api.{yaml,json}": "redocly lint --skip-rule=security-defined"
   }
 }
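Widening the lint-staged glob from *.tf to *.{tf,tfvars} means terraform fmt now also runs on staged tfvars files, so the environment files touched in this change are formatted on commit as well.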
}