Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions infrastructure/terraform/components/dl/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ No requirements.
| <a name="input_parent_acct_environment"></a> [parent\_acct\_environment](#input\_parent\_acct\_environment) | Name of the environment responsible for the acct resources used, affects things like DNS zone. Useful for named dev environments | `string` | `"main"` | no |
| <a name="input_pdm_mock_access_token"></a> [pdm\_mock\_access\_token](#input\_pdm\_mock\_access\_token) | Mock access token for PDM API authentication (used in local/dev environments) | `string` | `"mock-pdm-token"` | no |
| <a name="input_pdm_use_non_mock_token"></a> [pdm\_use\_non\_mock\_token](#input\_pdm\_use\_non\_mock\_token) | Whether to use the shared APIM access token from SSM (/component/environment/apim/access\_token) instead of the mock token | `bool` | `false` | no |
| <a name="input_pii_data_retention_policy_days"></a> [pii\_data\_retention\_policy\_days](#input\_pii\_data\_retention\_policy\_days) | The number of days for data retention policy for PII | `number` | `534` | no |
| <a name="input_project"></a> [project](#input\_project) | The name of the tfscaffold project | `string` | n/a | yes |
| <a name="input_queue_batch_size"></a> [queue\_batch\_size](#input\_queue\_batch\_size) | maximum number of queue items to process | `number` | `10` | no |
| <a name="input_queue_batch_window_seconds"></a> [queue\_batch\_window\_seconds](#input\_queue\_batch\_window\_seconds) | maximum time in seconds between processing events | `number` | `1` | no |
Expand All @@ -53,11 +54,13 @@ No requirements.
| <a name="module_pdm_uploader"></a> [pdm\_uploader](#module\_pdm\_uploader) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| <a name="module_print_analyser"></a> [print\_analyser](#module\_print\_analyser) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| <a name="module_print_status_handler"></a> [print\_status\_handler](#module\_print\_status\_handler) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| <a name="module_report_event_transformer"></a> [report\_event\_transformer](#module\_report\_event\_transformer) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| <a name="module_s3bucket_cf_logs"></a> [s3bucket\_cf\_logs](#module\_s3bucket\_cf\_logs) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_file_safe"></a> [s3bucket\_file\_safe](#module\_s3bucket\_file\_safe) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_letters"></a> [s3bucket\_letters](#module\_s3bucket\_letters) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_non_pii_data"></a> [s3bucket\_non\_pii\_data](#module\_s3bucket\_non\_pii\_data) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_pii_data"></a> [s3bucket\_pii\_data](#module\_s3bucket\_pii\_data) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_reporting"></a> [s3bucket\_reporting](#module\_s3bucket\_reporting) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_s3bucket_static_assets"></a> [s3bucket\_static\_assets](#module\_s3bucket\_static\_assets) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| <a name="module_sqs_core_notifier"></a> [sqs\_core\_notifier](#module\_sqs\_core\_notifier) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| <a name="module_sqs_event_publisher_errors"></a> [sqs\_event\_publisher\_errors](#module\_sqs\_event\_publisher\_errors) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Athena workgroup used for all reporting queries in this environment.
resource "aws_athena_workgroup" "reporting" {
  name          = local.csi
  description   = "Athena Workgroup for ${var.environment}"
  force_destroy = true # allow destroy even if the workgroup still contains named queries

  configuration {
    # Force every query run in this workgroup to use the result location and
    # encryption settings below; clients cannot override them per-query.
    enforce_workgroup_configuration = true

    result_configuration {
      # Refuse to write results if the bucket is owned by a different account
      # (protects against bucket-name squatting).
      expected_bucket_owner = var.aws_account_id
      output_location       = "s3://${module.s3bucket_reporting.bucket}/athena-output/"

      encryption_configuration {
        # Encrypt query results at rest with the component KMS key.
        encryption_option = "SSE_KMS"
        kms_key_arn       = module.kms.key_arn
      }
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Trust policy: allows the EventBridge service principal to assume the
# eventbridge_firehose role so rule targets can deliver into Firehose.
data "aws_iam_policy_document" "eventbridge_firehose_assume_role" {
  statement {
    effect = "Allow"

    principals {
      type        = "Service"
      identifiers = ["events.amazonaws.com"]
    }

    actions = ["sts:AssumeRole"]
  }
}

# Execution role that EventBridge assumes when forwarding matched events to
# the reporting Firehose delivery stream.
resource "aws_iam_role" "eventbridge_firehose" {
  name               = "${local.csi}-eventbridge-firehose"
  description        = "Role for EventBridge to write to Kinesis Firehose"
  assume_role_policy = data.aws_iam_policy_document.eventbridge_firehose_assume_role.json
}

# Permissions policy: scoped to the two Firehose write APIs, restricted to the
# single reporting delivery stream (least privilege).
data "aws_iam_policy_document" "eventbridge_firehose_policy" {
  statement {
    effect = "Allow"

    actions = [
      "firehose:PutRecord",
      "firehose:PutRecordBatch"
    ]

    resources = [
      aws_kinesis_firehose_delivery_stream.to_s3_reporting.arn
    ]
  }
}

# Attach the Firehose write permissions inline on the EventBridge role.
resource "aws_iam_role_policy" "eventbridge_firehose" {
  name   = "${local.csi}-eventbridge-firehose"
  role   = aws_iam_role.eventbridge_firehose.id
  policy = data.aws_iam_policy_document.eventbridge_firehose_policy.json
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Matches every Digital Letters event on the custom bus by the event-type
# prefix in `detail.type`; used to fan all events out to reporting.
resource "aws_cloudwatch_event_rule" "all_events" {
  name           = "${local.csi}-all-events"
  description    = "Event rule to match all Digital Letters events"
  event_bus_name = aws_cloudwatch_event_bus.main.name

  event_pattern = jsonencode({
    "detail" : {
      "type" : [{
        # Prefix match so new event types under this namespace are captured
        # without updating the rule.
        "prefix" : "uk.nhs.notify.digital.letters."
      }]
    }
  })
}

# Deliver matched events to the reporting Firehose stream, assuming the
# dedicated EventBridge role for the PutRecord calls.
resource "aws_cloudwatch_event_target" "reporting_firehose" {
  rule           = aws_cloudwatch_event_rule.all_events.name
  arn            = aws_kinesis_firehose_delivery_stream.to_s3_reporting.arn
  role_arn       = aws_iam_role.eventbridge_firehose.arn
  event_bus_name = aws_cloudwatch_event_bus.main.name
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Log group for the reporting Firehose delivery stream's error/delivery logs.
# NOTE(review): the name must match the CloudWatch logging options configured
# on aws_kinesis_firehose_delivery_stream.to_s3_reporting (not visible in this
# file) — confirm they reference this group rather than a hard-coded name.
resource "aws_cloudwatch_log_group" "kinesis_logs" {
  name              = "/aws/kinesisfirehose/${local.csi}-to-s3-reporting"
  retention_in_days = var.log_retention_in_days
}

# Log stream the Firehose delivery stream writes into.
# NOTE(review): "${local.csi}reportingKinesisLogs" has no separator after csi
# and is camelCase, unlike every other kebab-case name in this component
# (e.g. "${local.csi}-eventbridge-firehose"). Consider
# "${local.csi}-reporting-kinesis-logs" — but first confirm the Firehose
# logging configuration references this resource's attributes rather than a
# duplicated literal, or the rename will silently break log delivery.
resource "aws_cloudwatch_log_stream" "reporting_kinesis_logs" {
  name           = "${local.csi}reportingKinesisLogs"
  log_group_name = aws_cloudwatch_log_group.kinesis_logs.name
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Glue Data Catalog database holding the reporting tables queried via Athena.
# NOTE(review): if local.csi contains hyphens, the database name will need
# backtick quoting in Athena SQL — confirm this is acceptable to consumers.
resource "aws_glue_catalog_database" "reporting" {
  name        = "${local.csi}-reporting"
  description = "Reporting database for ${var.environment}"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# External Glue table over the Parquet event records Firehose lands in the
# reporting bucket; queried through Athena via the reporting workgroup.
resource "aws_glue_catalog_table" "event_record" {
  name          = "event_record"
  description   = "Event records for ${var.environment}"
  database_name = aws_glue_catalog_database.reporting.name

  # EXTERNAL: the data lives in S3 and is not managed/deleted by Glue.
  table_type = "EXTERNAL_TABLE"

  storage_descriptor {
    # Must match the S3 prefix the Firehose delivery stream writes Parquet to.
    location = "s3://${module.s3bucket_reporting.bucket}/${local.firehose_output_path_prefix}/reporting/parquet/event_record"

    input_format  = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"
    output_format = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"

    ser_de_info {
      serialization_library = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
    }

    # Columns are matched positionally by the SerDe for existing files:
    # additional columns must be added at the end of the list.
    columns {
      name = "messagereference"
      type = "string"
    }
    columns {
      name = "pagecount"
      type = "int"
    }
    columns {
      name = "supplierid"
      type = "string"
    }
    columns {
      # presumably the event timestamp, stored as a string — TODO confirm format
      name = "time"
      type = "string"
    }
    columns {
      name = "type"
      type = "string"
    }
  }

  # Partitioned by sender then date parts; must match the S3 key layout
  # produced by the Firehose dynamic-partitioning prefix.
  partition_keys {
    name = "senderid"
    type = "string"
  }

  partition_keys {
    name = "__year"
    type = "int"
  }
  partition_keys {
    name = "__month"
    type = "int"
  }
  partition_keys {
    name = "__day"
    type = "int"
  }

  parameters = {
    EXTERNAL = "TRUE"
    # Parquet's internal page compression is SNAPPY, while the files themselves
    # carry no outer compression — hence compressionType = "none".
    "parquet.compression" = "SNAPPY"
    compressionType       = "none"
    classification        = "parquet"
  }
}

# Partition index covering all four partition keys so Athena can prune
# partitions server-side instead of listing every partition.
resource "aws_glue_partition_index" "event_record" {
  database_name = aws_glue_catalog_database.reporting.name
  table_name    = aws_glue_catalog_table.event_record.name

  partition_index {
    index_name = "data"
    # Key order must be a prefix-compatible ordering of the table's
    # partition_keys declaration.
    keys = ["senderid", "__year", "__month", "__day"]
  }

  # Index creation/deletion on large tables is slow; extend the default
  # operation timeouts.
  timeouts {
    create = "60m"
    delete = "60m"
  }
}
Loading
Loading