From 5f7faf8d1ed76ea3fe81290a2658bc51e51c75a0 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 3 Mar 2026 17:25:30 +0000 Subject: [PATCH 01/46] NPT-937 Create VPC Endpoints for managing traffic between SAET platform and AWS services --- .../account_github_runner_compute.policy.json.tpl | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl index 6cd9d386..eb5f4a3f 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl @@ -82,22 +82,25 @@ "Action": [ "apigateway:CreateRestApi", "apigateway:DeleteRestApi", - "apigateway:GetRestApi", + "apigateway:Get*", + "apigateway:Patch*", + "apigateway:Put*", + "apigateway:Post*", "apigateway:UpdateRestApi", "apigateway:ListRestApis", "apigateway:CreateDeployment", - "apigateway:GetDeployment", "apigateway:ListDeployments", "apigateway:UpdateStage", "apigateway:CreateStage", "apigateway:DeleteStage", - "apigateway:GetStage", "apigateway:TagResource", "apigateway:UntagResource", "apigateway:ListTagsForResource", "ec2:Describe*", "ec2:DescribeVpcs", - "ec2:ModifyVpcBlockPublicAccessOptions" + "ec2:ModifyVpcBlockPublicAccessOptions", + "lambda:Get*", + "lambda:List*" ], "Resource": "*" }, From 382820e2e25e6750d72e7a9024a5424fcd4387a4 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 10:30:10 +0000 Subject: [PATCH 02/46] NPT-937 Add Policies to VPC Endpoints --- .../account_wide/vpce_security_group.tf | 2 +- ...github_runner_role_permissions_boundary.tf | 22 +- .../github_runner/vpc_endpoint_policies.tf | 484 ++++++++++++++++++ 3 files changed, 506 insertions(+), 2 deletions(-) create mode 100644 infrastructure/stacks/github_runner/vpc_endpoint_policies.tf diff --git 
a/infrastructure/stacks/account_wide/vpce_security_group.tf b/infrastructure/stacks/account_wide/vpce_security_group.tf index 2eb18003..eff2b17e 100644 --- a/infrastructure/stacks/account_wide/vpce_security_group.tf +++ b/infrastructure/stacks/account_wide/vpce_security_group.tf @@ -1,4 +1,5 @@ # Generate Security Group used by all VPC Endpoints: +# checkov:skip=CKV2_AWS_5: "Security Group is attached to the VPC interface endpoints" resource "aws_security_group" "vpc_endpoints" { name = "vpc-endpoints-sg" description = "Allows all internal to VPC resources access to the endpoints and the endpoints to access the AWS Services" @@ -50,4 +51,3 @@ resource "aws_security_group_rule" "main_dynamodb_prefix_out" { prefix_list_ids = [data.aws_prefix_list.dynamodb.id] security_group_id = aws_security_group.vpc_endpoints.id } - diff --git a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf index 64a83653..8a55152a 100644 --- a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf +++ b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf @@ -20,6 +20,8 @@ data "aws_iam_policy_document" "permissions_boundary" { "apigateway:CreateStage", "apigateway:TagResource", "apigateway:UntagResource", + "cloudwatch:PutMetricData", + "cloudwatch:ListMetrics", "cloudwatch:PutMetricAlarm", "cloudwatch:DeleteAlarms", "cloudwatch:Describe*", @@ -77,8 +79,9 @@ data "aws_iam_policy_document" "permissions_boundary" { "firehose:UntagDeliveryStream", "firehose:StartDeliveryStreamEncryption", "firehose:StopDeliveryStreamEncryption", + "inspector2:List*", + "inspector2:Get*", "inspector2:BatchGetAccountStatus", - "inspector2:GetConfiguration", "kms:CreateKey", "kms:Describe*", "kms:CreateAlias", @@ -132,6 +135,8 @@ data "aws_iam_policy_document" "permissions_boundary" { "s3:DeleteBucket", "s3:PutBucket*", "securityhub:Get*", + 
"securityhub:BatchImportFindings", + "securityhub:BatchUpdateFindings", "securityhub:Describe*", "secretsmanager:CreateSecret", "secretsmanager:DeleteSecret", @@ -182,6 +187,11 @@ data "aws_iam_policy_document" "permissions_boundary" { "dynamodb:Query" ] resources = ["*"] + condition { + test = "StringEquals" + variable = "aws:RequestedRegion" + values = [var.aws_region] + } } } # Allow access to Global services with no regionional condition @@ -229,6 +239,8 @@ data "aws_iam_policy_document" "permissions_boundary" { "route53domains:ListDomains", "shield:List*", "shield:Describe*", + "sts:AssumeRole", + "sts:AssumeRoleWithWebIdentity", "sts:GetCallerIdentity", "wafv2:CreateWebACL", "wafv2:DeleteWebACL", @@ -243,6 +255,14 @@ data "aws_iam_policy_document" "permissions_boundary" { "wafv2:DeleteLoggingConfiguration" ] resources = ["*"] + condition { + test = "StringEquals" + variable = "aws:RequestedRegion" + values = [ + var.aws_region, + "us-east-1" + ] + } } statement { diff --git a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf new file mode 100644 index 00000000..a1c93114 --- /dev/null +++ b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf @@ -0,0 +1,484 @@ + +# secretsmanager +data "aws_iam_policy_document" "secretsmanager_endpoint_policy" { + statement { + sid = "AllowSecretAccess" + effect = "Allow" + actions = [ + "secretsmanager:CreateSecret", + "secretsmanager:DeleteSecret", + "secretsmanager:Get*", + "secretsmanager:UpdateSecret", + "secretsmanager:DescribeSecret", + "secretsmanager:List*", + "secretsmanager:TagResource", + "secretsmanager:UntagResource", + "secretsmanager:PutSecretValue" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# KMS +data "aws_iam_policy_document" "kms_endpoint_policy" { + statement { + sid = 
"AllowKMSAccess" + effect = "Allow" + actions = [ + "kms:CreateKey", + "kms:Describe*", + "kms:CreateAlias", + "kms:List*", + "kms:Get*", + "kms:DeleteAlias", + "kms:UpdateKeyDescription", + "kms:CreateGrant", + "kms:TagResource", + "kms:UntagResource", + "kms:EnableKeyRotation", + "kms:ScheduleKeyDeletion", + "kms:PutKeyPolicy", + "kms:Encrypt", + "kms:Decrypt*", + "kms:ReEncrypt*", + "kms:GenerateDataKey" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# SSM +data "aws_iam_policy_document" "ssm_endpoint_policy" { + statement { + sid = "AllowSSMAccess" + effect = "Allow" + actions = [ + "ssm:Describe*", + "ssm:GetParameter*", + "ssm:List*", + "ssm:PutParameter", + "ssm:AddTagsToResource", + "ssm:DeleteParameter" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# STS +data "aws_iam_policy_document" "sts_endpoint_policy" { + statement { + sid = "AllowSTSAccess" + effect = "Allow" + actions = [ + "sts:AssumeRole", + "sts:AssumeRoleWithWebIdentity", + "sts:GetCallerIdentity" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Lambda +data "aws_iam_policy_document" "lambda_endpoint_policy" { + statement { + sid = "AllowLambdaAccess" + effect = "Allow" + actions = [ + "lambda:CreateFunction", + "lambda:UpdateFunction*", + "lambda:DeleteFunction", + "lambda:Get*", + "lambda:List*", + "lambda:TagResource", + "lambda:UntagResource", + "lambda:PublishVersion", + "lambda:CreateAlias", + "lambda:UpdateAlias", + "lambda:DeleteAlias", + "lambda:AddPermission", + "lambda:RemovePermission", + "lambda:PutProvisionedConcurrencyConfig", + 
"lambda:DeleteProvisionedConcurrencyConfig", + "lambda:PutFunctionConcurrency" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# API Gateway +data "aws_iam_policy_document" "apigateway_endpoint_policy" { + statement { + sid = "AllowAPIGatewayAccess" + effect = "Allow" + actions = [ + "apigateway:CreateRestApi", + "apigateway:Delete*", + "apigateway:Get*", + "apigateway:Put*", + "apigateway:UpdateRestApi", + "apigateway:List*", + "apigateway:Patch*", + "apigateway:Post*", + "apigateway:CreateDeployment", + "apigateway:UpdateStage", + "apigateway:CreateStage", + "apigateway:TagResource", + "apigateway:UntagResource" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# API Gateway (execute-api) +data "aws_iam_policy_document" "execute_api_endpoint_policy" { + statement { + sid = "AllowAPIGatewayInvoke" + effect = "Allow" + actions = [ + "execute-api:Invoke" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + + +# CloudWatch Logs +data "aws_iam_policy_document" "logs_endpoint_policy" { + statement { + sid = "AllowCloudWatchLogsAccess" + effect = "Allow" + actions = [ + "logs:CreateLogGroup", + "logs:DeleteLogGroup", + "logs:Describe*", + "logs:List*", + "logs:Tag*", + "logs:Untag*", + "logs:CreateLogStream", + "logs:DeleteLogStream", + "logs:PutRetentionPolicy", + "logs:CreateExportTask" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# CloudWatch Monitoring (metrics) +data 
"aws_iam_policy_document" "monitoring_endpoint_policy" { + statement { + sid = "AllowCloudWatchMetrics" + effect = "Allow" + actions = [ + "cloudwatch:PutMetricData", + "cloudwatch:ListMetrics" + "cloudwatch:PutMetricAlarm", + "cloudwatch:DeleteAlarms", + "cloudwatch:Describe*", + "cloudwatch:ListTagsForResource", + "cloudwatch:TagResource", + "cloudwatch:UntagResource", + "cloudwatch:Get*" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Kinesis Firehose +data "aws_iam_policy_document" "firehose_endpoint_policy" { + statement { + sid = "AllowFirehoseAccess" + effect = "Allow" + actions = [ + "firehose:CreateDeliveryStream", + "firehose:DeleteDeliveryStream", + "firehose:Describe*", + "firehose:UpdateDestination", + "firehose:PutRecord", + "firehose:PutRecordBatch", + "firehose:TagDeliveryStream", + "firehose:List*", + "firehose:UntagDeliveryStream", + "firehose:StartDeliveryStreamEncryption", + "firehose:StopDeliveryStreamEncryption" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# EventBridge +data "aws_iam_policy_document" "events_endpoint_policy" { + statement { + sid = "AllowEventBridgeAccess" + effect = "Allow" + actions = [ + "events:PutRule", + "events:PutTargets", + "events:DeleteRule", + "events:RemoveTargets", + "events:Describe*", + "events:List*", + "events:TagResource", + "events:UntagResource" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Security Hub +data "aws_iam_policy_document" "securityhub_endpoint_policy" { + statement { + sid = "AllowSecurityHubAccess" + effect = "Allow" + actions = [ + 
"securityhub:Get*", + "securityhub:BatchImportFindings", + "securityhub:BatchUpdateFindings", + "securityhub:Describe*" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Inspector +data "aws_iam_policy_document" "inspector_endpoint_policy" { + statement { + sid = "AllowInspectorAccess" + effect = "Allow" + actions = [ + "inspector2:List*", + "inspector2:Get*", + "inspector2:BatchGetAccountStatus" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Access Analyzer +data "aws_iam_policy_document" "access_analyzer_endpoint_policy" { + statement { + sid = "AllowAccessAnalyzerAccess" + effect = "Allow" + actions = [ + "access-analyzer:List*", + "access-analyzer:Get*" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# S3 +data "aws_iam_policy_document" "s3_endpoint_policy" { + statement { + sid = "AllowS3Access" + effect = "Allow" + actions = [ + "s3:PutLifecycleConfiguration", + "s3:PutEncryptionConfiguration", + "s3:List*", + "s3:Get*", + "s3:PutObject*", + "s3:DeleteObject", + "s3:CreateBucket", + "s3:DeleteBucket", + "s3:PutBucket*" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# Dynamodb +data "aws_iam_policy_document" "dynamodb_endpoint_policy" { + statement { + sid = "AllowDynamodbAccess" + effect = "Allow" + actions = [ + "dynamodb:Describe*", + "dynamodb:Get*", + "dynamodb:List*", + "dynamodb:DeleteTable", + "dynamodb:DeleteItem", + "dynamodb:CreateTable", + "dynamodb:TagResource", 
+ "dynamodb:UntagResource", + "dynamodb:UpdateTable", + "dynamodb:UpdateContinuousBackups", + "dynamodb:PutItem" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} + +# wafv2 +data "aws_iam_policy_document" "wafv2_endpoint_policy" { + statement { + sid = "AllowWafv2Access" + effect = "Allow" + actions = [ + "wafv2:CreateWebACL", + "wafv2:DeleteWebACL", + "wafv2:UpdateWebACL", + "wafv2:TagResource", + "wafv2:UntagResource", + "wafv2:List*", + "wafv2:AssociateWebACL", + "wafv2:DisassociateWebACL", + "wafv2:PutLoggingConfiguration", + "wafv2:Get*", + "wafv2:DeleteLoggingConfiguration" + ] + resources = ["*"] + principals { + type = "*" + identifiers = ["*"] + } + condition { + test = "StringEquals" + variable = "aws:PrincipalAccount" + values = [local.account_id] + } + } +} From 06402de191627ab79a04e00216154d67722bc3b8 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 10:59:40 +0000 Subject: [PATCH 03/46] NPT-937 Add Policies to VPC Endpoints --- infrastructure/stacks/account_wide/vpce.tf | 18 ++++++++++++++++++ .../github_runner/vpc_endpoint_policies.tf | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/infrastructure/stacks/account_wide/vpce.tf b/infrastructure/stacks/account_wide/vpce.tf index 241be56a..9855a557 100644 --- a/infrastructure/stacks/account_wide/vpce.tf +++ b/infrastructure/stacks/account_wide/vpce.tf @@ -16,6 +16,7 @@ module "vpc_endpoints" { service = "s3" service_type = "Gateway" route_table_ids = module.vpc.private_route_table_ids + policy = data.aws_iam_policy_document.s3_endpoint_policy.json tags = { Name = "${local.resource_prefix}-s3-gateway-vpc-endpoint" } } @@ -23,6 +24,7 @@ module "vpc_endpoints" { service = "dynamodb" service_type = "Gateway" route_table_ids = module.vpc.private_route_table_ids + policy = 
data.aws_iam_policy_document.dynamodb_endpoint_policy.json tags = { Name = "${local.resource_prefix}-dynamodb-gateway-vpc-endpoint" } } @@ -34,6 +36,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.lambda_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-lambda-vpc-endpoint" } } @@ -43,6 +46,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.execute_api_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-execute-api-vpc-endpoint" } } @@ -52,6 +56,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.apigateway_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-apigateway-vpc-endpoint" } } @@ -61,6 +66,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.secretsmanager_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-secretsmanager-vpc-endpoint" } } @@ -71,6 +77,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.logs_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-cloudwatch-logs-vpc-endpoint" } } @@ -81,6 +88,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = 
[aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.monitoring_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-cloudwatch-monitoring-vpc-endpoint" } } @@ -90,6 +98,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.ssm_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-ssm-vpc-endpoint" } } @@ -99,6 +108,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.ssm_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-ssmmessages-vpc-endpoint" } } @@ -108,6 +118,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.kms_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-kms-vpc-endpoint" } } @@ -117,6 +128,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.sts_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-sts-vpc-endpoint" } } @@ -127,6 +139,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.firehose_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-kinesis-firehose-vpc-endpoint" } } @@ -136,6 +149,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets 
security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.access_analyzer_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-access-analyzer-vpc-endpoint" } } @@ -145,6 +159,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.inspector_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-inspector2-vpc-endpoint" } } @@ -154,6 +169,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.securityhub_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-securityhub-vpc-endpoint" } } @@ -164,6 +180,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.events_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-eventbridge-vpc-endpoint" } } @@ -173,6 +190,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] + policy = data.aws_iam_policy_document.wafv2_endpoint_policy.json private_dns_enabled = true tags = { Name = "${local.resource_prefix}-wafv2-vpc-endpoint" } } diff --git a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf index a1c93114..fdc066d6 100644 --- a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf +++ b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf @@ -242,7 +242,7 @@ data "aws_iam_policy_document" "monitoring_endpoint_policy" { effect = "Allow" 
actions = [ "cloudwatch:PutMetricData", - "cloudwatch:ListMetrics" + "cloudwatch:ListMetrics", "cloudwatch:PutMetricAlarm", "cloudwatch:DeleteAlarms", "cloudwatch:Describe*", From 64e0cce911e5dbe915809bb0e75fd4532b4dabc1 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 13:22:21 +0000 Subject: [PATCH 04/46] NPT-937 Add policies to VPC Endpoints --- .github/workflows/cicd-2-publish.yaml | 2 +- .github/workflows/cicd-3-deploy.yaml | 1 + .github/workflows/cicd-4-deploy-sandbox.yaml | 1 + .github/workflows/infrastructure-cleardown.yaml | 1 + infrastructure/stacks/account_wide/vpce_security_group.tf | 2 +- infrastructure/stacks/triage/restapi.tf | 5 ++++- infrastructure/stacks/triage/variables.tf | 2 +- 7 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml index 172f9ce7..1b4a21dd 100644 --- a/.github/workflows/cicd-2-publish.yaml +++ b/.github/workflows/cicd-2-publish.yaml @@ -1,5 +1,5 @@ name: "CI/CD publish" - +# checkov:skip=CKV2_GHA_1: "Ensure top-level permissions are not set to write-all. 
TODO- NPT-1102" on: pull_request: types: [closed] diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml index cdff9e43..f2fed222 100644 --- a/.github/workflows/cicd-3-deploy.yaml +++ b/.github/workflows/cicd-3-deploy.yaml @@ -2,6 +2,7 @@ name: "CI/CD deploy" on: workflow_dispatch: + # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 inputs: tag: description: "This is the tag that is going to be deployed" diff --git a/.github/workflows/cicd-4-deploy-sandbox.yaml b/.github/workflows/cicd-4-deploy-sandbox.yaml index 2ffd12d4..aa736608 100644 --- a/.github/workflows/cicd-4-deploy-sandbox.yaml +++ b/.github/workflows/cicd-4-deploy-sandbox.yaml @@ -2,6 +2,7 @@ name: "CI/CD deploy sandbox API" on: workflow_dispatch: + # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 inputs: tag: description: "This is the tag that is going to be deployed" diff --git a/.github/workflows/infrastructure-cleardown.yaml b/.github/workflows/infrastructure-cleardown.yaml index c85786cf..8c4e4872 100644 --- a/.github/workflows/infrastructure-cleardown.yaml +++ b/.github/workflows/infrastructure-cleardown.yaml @@ -44,6 +44,7 @@ on: jobs: destroy-application-infrastructure: + # checkov:skip=CKV2_GHA_1: "Ensure top-level permissions are not set to write-all. 
TODO- NPT-1102" permissions: id-token: write contents: read diff --git a/infrastructure/stacks/account_wide/vpce_security_group.tf b/infrastructure/stacks/account_wide/vpce_security_group.tf index eff2b17e..469d6132 100644 --- a/infrastructure/stacks/account_wide/vpce_security_group.tf +++ b/infrastructure/stacks/account_wide/vpce_security_group.tf @@ -1,6 +1,6 @@ # Generate Security Group used by all VPC Endpoints: -# checkov:skip=CKV2_AWS_5: "Security Group is attached to the VPC interface endpoints" resource "aws_security_group" "vpc_endpoints" { + # checkov:skip=CKV2_AWS_5: "Security Group is attached to the VPC interface endpoints" name = "vpc-endpoints-sg" description = "Allows all internal to VPC resources access to the endpoints and the endpoints to access the AWS Services" vpc_id = module.vpc.vpc_id diff --git a/infrastructure/stacks/triage/restapi.tf b/infrastructure/stacks/triage/restapi.tf index cbbebb5e..d54936c4 100644 --- a/infrastructure/stacks/triage/restapi.tf +++ b/infrastructure/stacks/triage/restapi.tf @@ -13,6 +13,7 @@ resource "aws_api_gateway_rest_api" "triage" { # CloudWatch Log Group for API Gateway access logs resource "aws_cloudwatch_log_group" "api_gateway_access_logs" { + # checkov:skip=CKV_AWS_158: "Ensure that CloudWatch Log Group is encrypted by KMS - TODO NPT-1102" name = "/aws/apigateway/${local.resource_prefix}-${var.api_name}${local.workspace_suffix}/access-logs" retention_in_days = var.log_retention_days } @@ -109,10 +110,12 @@ resource "aws_api_gateway_deployment" "deployment" { # Create a stage (environment) for the API resource "aws_api_gateway_stage" "stage" { - # checkov:skip=CKV_AWS_51: NPT-1102 Ticket NPT-1102 has been raised + # checkov:skip=CKV2_AWS_51: "Ensure AWS API Gateway endpoints uses client certificate auth TODO-NPT-1102" # checkov:skip=CKV_AWS_73: NPT-1102 Ticket NPT-1102 has been raised # checkov:skip=CKV_AWS_76: NPT-1102 Ticket NPT-1102 has been raised # checkov:skip=CKV_AWS_120: NPT-1102 Ticket NPT-1102 has 
been raised + # checkov:skip=CKV2_AWS_4: NPT-1102 Ticket NPT-1102 has been raised + # checkov:skip=CKV2_AWS_29: "Ensure public API gateway are protected by WAF - TODO NPT-1102" deployment_id = aws_api_gateway_deployment.deployment.id rest_api_id = aws_api_gateway_rest_api.triage.id stage_name = var.stage_name diff --git a/infrastructure/stacks/triage/variables.tf b/infrastructure/stacks/triage/variables.tf index 27651c5f..9a29e52f 100644 --- a/infrastructure/stacks/triage/variables.tf +++ b/infrastructure/stacks/triage/variables.tf @@ -50,7 +50,7 @@ variable "authorization" { variable "log_retention_days" { description = "Number of days to retain CloudWatch logs for API Gateway access logs" type = number - default = 30 + default = 365 } # API Gateway X-Ray Tracing From d4b4787746532f20191a8458c8145ef047e545ff Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 13:34:52 +0000 Subject: [PATCH 05/46] NPT-937 Add policies to VPC Endpoints --- infrastructure/stacks/artefact_management/data.tf | 15 --------------- infrastructure/stacks/artefact_management/s3.tf | 4 ++-- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/infrastructure/stacks/artefact_management/data.tf b/infrastructure/stacks/artefact_management/data.tf index ad9cabff..6d6f3c1f 100644 --- a/infrastructure/stacks/artefact_management/data.tf +++ b/infrastructure/stacks/artefact_management/data.tf @@ -1,18 +1,3 @@ data "aws_iam_role" "app_github_runner_iam_role" { name = "${var.repo_name}-${var.app_github_runner_role_name}" } - -variable "aws_dev_account_id" { - description = "AWS Account ID for dev environment" - type = string -} - -# variable "aws_account_id_test" { -# description = "AWS Account ID for test environment" -# type = string -# } - -# variable "aws_account_id_prod" { -# description = "AWS Account ID for prod environment" -# type = string -# } diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index 
f0d9418b..e836d01a 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -15,7 +15,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "arn:aws:iam::${var.aws_dev_account_id}:role/${var.repo_name}-dev-${var.app_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" ] } actions = [ @@ -31,7 +31,7 @@ principals { type = "AWS" identifiers = [ "${data.aws_iam_role.app_github_runner_iam_role.arn}", - "arn:aws:iam::${var.aws_dev_account_id}:role/${var.repo_name}-dev-${var.app_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" ] } actions = [ From a62360189138fa39d3e3e5d4bcf969143c7b02d9 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 13:42:25 +0000 Subject: [PATCH 06/46] NPT-937 Add policies to VPC Endpoints --- .github/actions/action-infrastructure-stack/action.yaml | 1 - .github/workflows/deploy-application-infrastructure.yaml | 5 ----- .github/workflows/deploy-infrastructure.yaml | 4 ---- .../pipeline-deploy-account-infrastructure.yaml | 6 ------ .github/workflows/pipeline-deploy-application.yaml | 2 -- .github/workflows/pipeline-deploy-policies.yaml | 3 --- .github/workflows/quality-checks.yaml | 4 ---- scripts/workflow/action-infra-stack.sh | 9 --------- 8 files changed, 34 deletions(-) diff --git a/.github/actions/action-infrastructure-stack/action.yaml b/.github/actions/action-infrastructure-stack/action.yaml index 52f6d3b1..44d74747 100644 --- a/.github/actions/action-infrastructure-stack/action.yaml +++ b/.github/actions/action-infrastructure-stack/action.yaml @@ -62,7 +62,6 @@ runs: APPLICATION_TAG: ${{ inputs.application_tag }} RELEASE_TAG: ${{ inputs.release_tag }} COMMIT_HASH: ${{ inputs.commit_hash }} - 
AWS_DEV_ACCOUNT_ID: ${{ inputs.dev_account_id }} id: "action_stack" shell: bash run: | diff --git a/.github/workflows/deploy-application-infrastructure.yaml b/.github/workflows/deploy-application-infrastructure.yaml index d4c3601f..a537e54f 100644 --- a/.github/workflows/deploy-application-infrastructure.yaml +++ b/.github/workflows/deploy-application-infrastructure.yaml @@ -58,9 +58,6 @@ on: MGMT_ACCOUNT_ID: description: "AWS management account ID for credentials" required: true - AWS_DEV_ACCOUNT_ID: - description: "AWS dev account ID for credentials" - required: true outputs: plan_result: description: "The Terraform plan output" @@ -90,7 +87,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} manual-approval-application-infra: name: "Manual approval for deployment of application infrastructure to the ${{ inputs.environment }} environment" @@ -132,7 +128,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} deploy_summary: name: "Summarise deployment of application infrastructure to ${{ inputs.environment }} environment" diff --git a/.github/workflows/deploy-infrastructure.yaml b/.github/workflows/deploy-infrastructure.yaml index e676ee7d..4bf69922 100644 --- a/.github/workflows/deploy-infrastructure.yaml +++ b/.github/workflows/deploy-infrastructure.yaml @@ -66,9 +66,6 @@ on: MGMT_ACCOUNT_ID: description: "AWS management account ID for credentials" required: true - AWS_DEV_ACCOUNT_ID: - description: "AWS dev account ID for credentials" - required: true outputs: plan_result: description: "The Terraform plan output" @@ -120,7 +117,6 @@ jobs: release_tag: ${{ inputs.release_tag }} commit_hash: ${{ inputs.commit_hash }} mgmt_account_id: ${{ secrets.MGMT_ACCOUNT_ID }} - dev_account_id: ${{ secrets.AWS_DEV_ACCOUNT_ID }} - name: "Upload Terraform Plan 
Artifact" uses: actions/upload-artifact@v7 diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index 153ed3d9..23107597 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -55,9 +55,6 @@ on: MGMT_ACCOUNT_ID: description: "Management AWS account ID for credentials" required: true - AWS_DEV_ACCOUNT_ID: - description: "AWS dev account ID" - required: true concurrency: group: account-infrastructure-${{ github.ref }} @@ -84,7 +81,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} plan-infrastructure: name: "Plan ${{ matrix.name }} infrastructure deployment for ${{ matrix.environment }}" @@ -118,7 +114,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} manual-approval: name: "Manual approval for ${{ needs.metadata.outputs.environment }} infrastructure deployment" @@ -164,4 +159,3 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} diff --git a/.github/workflows/pipeline-deploy-application.yaml b/.github/workflows/pipeline-deploy-application.yaml index 27b945cd..e960219f 100644 --- a/.github/workflows/pipeline-deploy-application.yaml +++ b/.github/workflows/pipeline-deploy-application.yaml @@ -51,7 +51,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} perform-code-analysis: name: "Perform static code analysis" @@ -77,7 +76,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ 
secrets.AWS_DEV_ACCOUNT_ID }} check-pipeline-status: name: "Check Pipeline Status" diff --git a/.github/workflows/pipeline-deploy-policies.yaml b/.github/workflows/pipeline-deploy-policies.yaml index 7418ebfe..6435e32d 100644 --- a/.github/workflows/pipeline-deploy-policies.yaml +++ b/.github/workflows/pipeline-deploy-policies.yaml @@ -59,7 +59,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} plan-permissions-infrastructure: name: "Plan ${{ matrix.name }} permissions infrastructure deployment for ${{ matrix.environment }}" @@ -90,7 +89,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} manual-approval-permissions: name: "Manual approval for ${{ needs.metadata.outputs.environment }} permissions infrastructure deployment" @@ -134,4 +132,3 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} - AWS_DEV_ACCOUNT_ID: ${{ secrets.AWS_DEV_ACCOUNT_ID }} diff --git a/.github/workflows/quality-checks.yaml b/.github/workflows/quality-checks.yaml index 36b7ae51..266c5ab5 100644 --- a/.github/workflows/quality-checks.yaml +++ b/.github/workflows/quality-checks.yaml @@ -47,9 +47,6 @@ on: MGMT_ACCOUNT_ID: description: "Management AWS account ID for credentials" required: true - AWS_DEV_ACCOUNT_ID: - description: "AWS dev account ID" - required: true jobs: scan-secrets: @@ -175,7 +172,6 @@ jobs: action: validate project: saet mgmt_account_id: ${{ secrets.MGMT_ACCOUNT_ID }} - dev_account_id: ${{ secrets.AWS_DEV_ACCOUNT_ID }} check-terraform-format: name: "Check Terraform format" diff --git a/scripts/workflow/action-infra-stack.sh b/scripts/workflow/action-infra-stack.sh index 834421da..df9ac757 100644 --- a/scripts/workflow/action-infra-stack.sh +++ b/scripts/workflow/action-infra-stack.sh @@ -154,15 +154,6 @@ 
terraform-initialise terraform workspace select -or-create "$WORKSPACE" -if [ "$STACK" = "artefact_management" ] ; then - echo "Exporting account ID for artefact_management stack" - if [ -z "$AWS_DEV_ACCOUNT_ID" ] ; then - echo "AWS_DEV_ACCOUNT_ID environment variable must be set for artefact_management stack" - exit 1 - fi - export TF_VAR_aws_dev_account_id=$AWS_DEV_ACCOUNT_ID -fi - # plan if [ -n "$ACTION" ] && [ "$ACTION" = 'plan' ] ; then terraform plan -out $STACK.tfplan \ From e12dcdfc86fafc75646dff795bfd3014a365c546 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 13:49:40 +0000 Subject: [PATCH 07/46] NPT-937 Add policies to VPC Endpoints --- infrastructure/stacks/artefact_management/s3.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index e836d01a..d4ff7e18 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -31,7 +31,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { type = "AWS" identifiers = [ "${data.aws_iam_role.app_github_runner_iam_role.arn}", - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${environment}-${var.app_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" ] } actions = [ From 356c087b7d806fde5b77a245358ce7ce70f4ed34 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 17:12:45 +0000 Subject: [PATCH 08/46] NPT-937 Add policies to VPC Endpoints --- infrastructure/stacks/account_wide/data.tf | 8 + infrastructure/stacks/account_wide/vpce.tf | 36 +- infrastructure/stacks/github_runner/output.tf | 5 + .../github_runner/vpc_endpoint_policies.tf | 696 ++++++------------ 4 files changed, 256 insertions(+), 489 deletions(-) create mode 100644 infrastructure/stacks/github_runner/output.tf diff --git 
a/infrastructure/stacks/account_wide/data.tf b/infrastructure/stacks/account_wide/data.tf index 25b85408..84ee4102 100644 --- a/infrastructure/stacks/account_wide/data.tf +++ b/infrastructure/stacks/account_wide/data.tf @@ -19,3 +19,11 @@ data "aws_prefix_list" "dynamodb" { name = "com.amazonaws.${var.aws_region}.dynamodb" } +data "terraform_remote_state" "github_runner" { + backend = "s3" + config = { + bucket = "nhse-${var.environment}-${var.repo_name}-terraform-state" + key = github_runner/terraform.state + region = var.aws_region + } +} diff --git a/infrastructure/stacks/account_wide/vpce.tf b/infrastructure/stacks/account_wide/vpce.tf index 9855a557..c2c57577 100644 --- a/infrastructure/stacks/account_wide/vpce.tf +++ b/infrastructure/stacks/account_wide/vpce.tf @@ -16,7 +16,7 @@ module "vpc_endpoints" { service = "s3" service_type = "Gateway" route_table_ids = module.vpc.private_route_table_ids - policy = data.aws_iam_policy_document.s3_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["s3"] tags = { Name = "${local.resource_prefix}-s3-gateway-vpc-endpoint" } } @@ -24,7 +24,7 @@ module "vpc_endpoints" { service = "dynamodb" service_type = "Gateway" route_table_ids = module.vpc.private_route_table_ids - policy = data.aws_iam_policy_document.dynamodb_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["dynamodb"] tags = { Name = "${local.resource_prefix}-dynamodb-gateway-vpc-endpoint" } } @@ -36,7 +36,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.lambda_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["lambda"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-lambda-vpc-endpoint" } } @@ -46,7 +46,7 @@ module "vpc_endpoints" { service_type = 
"Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.execute_api_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["execute_api"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-execute-api-vpc-endpoint" } } @@ -56,7 +56,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.apigateway_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["apigateway"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-apigateway-vpc-endpoint" } } @@ -66,7 +66,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.secretsmanager_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["secretsmanager"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-secretsmanager-vpc-endpoint" } } @@ -77,7 +77,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.logs_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["logs"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-cloudwatch-logs-vpc-endpoint" } } @@ -88,7 +88,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.monitoring_endpoint_policy.json + policy = 
data.terraform_remote_state.github_runner.outputs.endpoint_policies["monitoring"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-cloudwatch-monitoring-vpc-endpoint" } } @@ -98,7 +98,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.ssm_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["ssm"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-ssm-vpc-endpoint" } } @@ -108,7 +108,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.ssm_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["ssm"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-ssmmessages-vpc-endpoint" } } @@ -118,7 +118,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.kms_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["kms"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-kms-vpc-endpoint" } } @@ -128,7 +128,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.sts_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["sts"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-sts-vpc-endpoint" } } @@ -139,7 +139,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = 
[aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.firehose_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["firehose"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-kinesis-firehose-vpc-endpoint" } } @@ -149,7 +149,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.access_analyzer_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["access_analyzer"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-access-analyzer-vpc-endpoint" } } @@ -159,7 +159,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.inspector_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["inspector"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-inspector2-vpc-endpoint" } } @@ -169,7 +169,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.securityhub_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["securityhub"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-securityhub-vpc-endpoint" } } @@ -180,7 +180,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.events_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["events"] private_dns_enabled = true tags = { Name = 
"${local.resource_prefix}-eventbridge-vpc-endpoint" } } @@ -190,7 +190,7 @@ module "vpc_endpoints" { service_type = "Interface" subnet_ids = module.vpc.private_subnets security_group_ids = [aws_security_group.vpc_endpoints.id] - policy = data.aws_iam_policy_document.wafv2_endpoint_policy.json + policy = data.terraform_remote_state.github_runner.outputs.endpoint_policies["wafv2"] private_dns_enabled = true tags = { Name = "${local.resource_prefix}-wafv2-vpc-endpoint" } } diff --git a/infrastructure/stacks/github_runner/output.tf b/infrastructure/stacks/github_runner/output.tf new file mode 100644 index 00000000..c6ce6cfc --- /dev/null +++ b/infrastructure/stacks/github_runner/output.tf @@ -0,0 +1,5 @@ +output "endpoint_policies" { + value = { + for k, v in data.aws_iam_policy_document.endpoint_policies : k => v.json + } +} diff --git a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf index fdc066d6..34dbd836 100644 --- a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf +++ b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf @@ -1,475 +1,229 @@ - -# secretsmanager -data "aws_iam_policy_document" "secretsmanager_endpoint_policy" { - statement { - sid = "AllowSecretAccess" - effect = "Allow" - actions = [ - "secretsmanager:CreateSecret", - "secretsmanager:DeleteSecret", - "secretsmanager:Get*", - "secretsmanager:UpdateSecret", - "secretsmanager:DescribeSecret", - "secretsmanager:List*", - "secretsmanager:TagResource", - "secretsmanager:UntagResource", - "secretsmanager:PutSecretValue" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# KMS -data "aws_iam_policy_document" "kms_endpoint_policy" { - statement { - sid = "AllowKMSAccess" - effect = "Allow" - actions = [ - "kms:CreateKey", - "kms:Describe*", - "kms:CreateAlias", - 
"kms:List*", - "kms:Get*", - "kms:DeleteAlias", - "kms:UpdateKeyDescription", - "kms:CreateGrant", - "kms:TagResource", - "kms:UntagResource", - "kms:EnableKeyRotation", - "kms:ScheduleKeyDeletion", - "kms:PutKeyPolicy", - "kms:Encrypt", - "kms:Decrypt*", - "kms:ReEncrypt*", - "kms:GenerateDataKey" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# SSM -data "aws_iam_policy_document" "ssm_endpoint_policy" { - statement { - sid = "AllowSSMAccess" - effect = "Allow" - actions = [ - "ssm:Describe*", - "ssm:GetParameter*", - "ssm:List*", - "ssm:PutParameter", - "ssm:AddTagsToResource", - "ssm:DeleteParameter" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# STS -data "aws_iam_policy_document" "sts_endpoint_policy" { - statement { - sid = "AllowSTSAccess" - effect = "Allow" - actions = [ - "sts:AssumeRole", - "sts:AssumeRoleWithWebIdentity", - "sts:GetCallerIdentity" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Lambda -data "aws_iam_policy_document" "lambda_endpoint_policy" { - statement { - sid = "AllowLambdaAccess" - effect = "Allow" - actions = [ - "lambda:CreateFunction", - "lambda:UpdateFunction*", - "lambda:DeleteFunction", - "lambda:Get*", - "lambda:List*", - "lambda:TagResource", - "lambda:UntagResource", - "lambda:PublishVersion", - "lambda:CreateAlias", - "lambda:UpdateAlias", - "lambda:DeleteAlias", - "lambda:AddPermission", - "lambda:RemovePermission", - "lambda:PutProvisionedConcurrencyConfig", - "lambda:DeleteProvisionedConcurrencyConfig", - "lambda:PutFunctionConcurrency" - ] - resources = ["*"] - principals { - 
type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# API Gateway -data "aws_iam_policy_document" "apigateway_endpoint_policy" { - statement { - sid = "AllowAPIGatewayAccess" - effect = "Allow" - actions = [ - "apigateway:CreateRestApi", - "apigateway:Delete*", - "apigateway:Get*", - "apigateway:Put*", - "apigateway:UpdateRestApi", - "apigateway:List*", - "apigateway:Patch*", - "apigateway:Post*", - "apigateway:CreateDeployment", - "apigateway:UpdateStage", - "apigateway:CreateStage", - "apigateway:TagResource", - "apigateway:UntagResource" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# API Gateway (execute-api) -data "aws_iam_policy_document" "execute_api_endpoint_policy" { - statement { - sid = "AllowAPIGatewayInvoke" - effect = "Allow" - actions = [ - "execute-api:Invoke" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - - -# CloudWatch Logs -data "aws_iam_policy_document" "logs_endpoint_policy" { - statement { - sid = "AllowCloudWatchLogsAccess" - effect = "Allow" - actions = [ - "logs:CreateLogGroup", - "logs:DeleteLogGroup", - "logs:Describe*", - "logs:List*", - "logs:Tag*", - "logs:Untag*", - "logs:CreateLogStream", - "logs:DeleteLogStream", - "logs:PutRetentionPolicy", - "logs:CreateExportTask" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# CloudWatch Monitoring (metrics) -data "aws_iam_policy_document" "monitoring_endpoint_policy" { - statement { - sid = "AllowCloudWatchMetrics" - effect = "Allow" - actions = [ - 
"cloudwatch:PutMetricData", - "cloudwatch:ListMetrics", - "cloudwatch:PutMetricAlarm", - "cloudwatch:DeleteAlarms", - "cloudwatch:Describe*", - "cloudwatch:ListTagsForResource", - "cloudwatch:TagResource", - "cloudwatch:UntagResource", - "cloudwatch:Get*" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Kinesis Firehose -data "aws_iam_policy_document" "firehose_endpoint_policy" { - statement { - sid = "AllowFirehoseAccess" - effect = "Allow" - actions = [ - "firehose:CreateDeliveryStream", - "firehose:DeleteDeliveryStream", - "firehose:Describe*", - "firehose:UpdateDestination", - "firehose:PutRecord", - "firehose:PutRecordBatch", - "firehose:TagDeliveryStream", - "firehose:List*", - "firehose:UntagDeliveryStream", - "firehose:StartDeliveryStreamEncryption", - "firehose:StopDeliveryStreamEncryption" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# EventBridge -data "aws_iam_policy_document" "events_endpoint_policy" { - statement { - sid = "AllowEventBridgeAccess" - effect = "Allow" - actions = [ - "events:PutRule", - "events:PutTargets", - "events:DeleteRule", - "events:RemoveTargets", - "events:Describe*", - "events:List*", - "events:TagResource", - "events:UntagResource" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Security Hub -data "aws_iam_policy_document" "securityhub_endpoint_policy" { - statement { - sid = "AllowSecurityHubAccess" - effect = "Allow" - actions = [ - "securityhub:Get*", - "securityhub:BatchImportFindings", - "securityhub:BatchUpdateFindings", - "securityhub:Describe*" - ] - resources = ["*"] - 
principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Inspector -data "aws_iam_policy_document" "inspector_endpoint_policy" { - statement { - sid = "AllowInspectorAccess" - effect = "Allow" - actions = [ - "inspector2:List*", - "inspector2:Get*", - "inspector2:BatchGetAccountStatus" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Access Analyzer -data "aws_iam_policy_document" "access_analyzer_endpoint_policy" { - statement { - sid = "AllowAccessAnalyzerAccess" - effect = "Allow" - actions = [ - "access-analyzer:List*", - "access-analyzer:Get*" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# S3 -data "aws_iam_policy_document" "s3_endpoint_policy" { - statement { - sid = "AllowS3Access" - effect = "Allow" - actions = [ - "s3:PutLifecycleConfiguration", - "s3:PutEncryptionConfiguration", - "s3:List*", - "s3:Get*", - "s3:PutObject*", - "s3:DeleteObject", - "s3:CreateBucket", - "s3:DeleteBucket", - "s3:PutBucket*" - ] - resources = ["*"] - principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# Dynamodb -data "aws_iam_policy_document" "dynamodb_endpoint_policy" { - statement { - sid = "AllowDynamodbAccess" - effect = "Allow" - actions = [ - "dynamodb:Describe*", - "dynamodb:Get*", - "dynamodb:List*", - "dynamodb:DeleteTable", - "dynamodb:DeleteItem", - "dynamodb:CreateTable", - "dynamodb:TagResource", - "dynamodb:UntagResource", - "dynamodb:UpdateTable", - "dynamodb:UpdateContinuousBackups", - "dynamodb:PutItem" - ] - resources = ["*"] - 
principals { - type = "*" - identifiers = ["*"] - } - condition { - test = "StringEquals" - variable = "aws:PrincipalAccount" - values = [local.account_id] - } - } -} - -# wafv2 -data "aws_iam_policy_document" "wafv2_endpoint_policy" { - statement { - sid = "AllowWafv2Access" +locals { + endpoint_policies = { + + secretsmanager = { + actions = [ + "secretsmanager:CreateSecret", + "secretsmanager:DeleteSecret", + "secretsmanager:Get*", + "secretsmanager:UpdateSecret", + "secretsmanager:DescribeSecret", + "secretsmanager:List*", + "secretsmanager:TagResource", + "secretsmanager:UntagResource", + "secretsmanager:PutSecretValue" + ] + } + kms = { + actions = [ + "kms:CreateKey", + "kms:Describe*", + "kms:CreateAlias", + "kms:List*", + "kms:Get*", + "kms:DeleteAlias", + "kms:UpdateKeyDescription", + "kms:CreateGrant", + "kms:TagResource", + "kms:UntagResource", + "kms:EnableKeyRotation", + "kms:ScheduleKeyDeletion", + "kms:PutKeyPolicy", + "kms:Encrypt", + "kms:Decrypt*", + "kms:ReEncrypt*", + "kms:GenerateDataKey" + ] + } + ssm = { + actions = [ + "ssm:Describe*", + "ssm:GetParameter*", + "ssm:List*", + "ssm:PutParameter", + "ssm:AddTagsToResource", + "ssm:DeleteParameter" + ] + } + sts = { + actions = [ + "sts:AssumeRole", + "sts:AssumeRoleWithWebIdentity", + "sts:GetCallerIdentity" + ] + } + lambda = { + + actions = [ + "lambda:CreateFunction", + "lambda:UpdateFunction*", + "lambda:DeleteFunction", + "lambda:Get*", + "lambda:List*", + "lambda:TagResource", + "lambda:UntagResource", + "lambda:PublishVersion", + "lambda:CreateAlias", + "lambda:UpdateAlias", + "lambda:DeleteAlias", + "lambda:AddPermission", + "lambda:RemovePermission", + "lambda:PutProvisionedConcurrencyConfig", + "lambda:DeleteProvisionedConcurrencyConfig", + "lambda:PutFunctionConcurrency" + ] + } + apigateway = { + actions = [ + "apigateway:CreateRestApi", + "apigateway:Delete*", + "apigateway:Get*", + "apigateway:Put*", + "apigateway:UpdateRestApi", + "apigateway:List*", + "apigateway:Patch*", + 
"apigateway:Post*", + "apigateway:CreateDeployment", + "apigateway:UpdateStage", + "apigateway:CreateStage", + "apigateway:TagResource", + "apigateway:UntagResource" + ] + } + execute_api = { + actions = [ + "execute-api:Invoke" + ] + } + logs = { + actions = [ + "logs:CreateLogGroup", + "logs:DeleteLogGroup", + "logs:Describe*", + "logs:List*", + "logs:Tag*", + "logs:Untag*", + "logs:CreateLogStream", + "logs:DeleteLogStream", + "logs:PutRetentionPolicy", + "logs:CreateExportTask" + ] + } + monitoring = { + actions = [ + "cloudwatch:PutMetricData", + "cloudwatch:ListMetrics", + "cloudwatch:PutMetricAlarm", + "cloudwatch:DeleteAlarms", + "cloudwatch:Describe*", + "cloudwatch:ListTagsForResource", + "cloudwatch:TagResource", + "cloudwatch:UntagResource", + "cloudwatch:Get*" + ] + } + firehose = { + actions = [ + "firehose:CreateDeliveryStream", + "firehose:DeleteDeliveryStream", + "firehose:Describe*", + "firehose:UpdateDestination", + "firehose:PutRecord", + "firehose:PutRecordBatch", + "firehose:TagDeliveryStream", + "firehose:List*", + "firehose:UntagDeliveryStream", + "firehose:StartDeliveryStreamEncryption", + "firehose:StopDeliveryStreamEncryption" + ] + } + events = { + actions = [ + "events:PutRule", + "events:PutTargets", + "events:DeleteRule", + "events:RemoveTargets", + "events:Describe*", + "events:List*", + "events:TagResource", + "events:UntagResource" + ] + } + + securityhub = { + actions = [ + "securityhub:Get*", + "securityhub:BatchImportFindings", + "securityhub:BatchUpdateFindings", + "securityhub:Describe*" + ] + } + inspector = { + actions = [ + "inspector2:List*", + "inspector2:Get*", + "inspector2:BatchGetAccountStatus" + ] + } + access_analyzer = { + actions = [ + "access-analyzer:List*", + "access-analyzer:Get*" + ] + } + s3 = { + actions = [ + "s3:PutLifecycleConfiguration", + "s3:PutEncryptionConfiguration", + "s3:List*", + "s3:Get*", + "s3:PutObject*", + "s3:DeleteObject", + "s3:CreateBucket", + "s3:DeleteBucket", + "s3:PutBucket*" + ] + 
} + dynamodb = { + actions = [ + "dynamodb:Describe*", + "dynamodb:Get*", + "dynamodb:List*", + "dynamodb:DeleteTable", + "dynamodb:DeleteItem", + "dynamodb:CreateTable", + "dynamodb:TagResource", + "dynamodb:UntagResource", + "dynamodb:UpdateTable", + "dynamodb:UpdateContinuousBackups", + "dynamodb:PutItem" + ] + } + wafv2 = { + actions = [ + "wafv2:CreateWebACL", + "wafv2:DeleteWebACL", + "wafv2:UpdateWebACL", + "wafv2:TagResource", + "wafv2:UntagResource", + "wafv2:List*", + "wafv2:AssociateWebACL", + "wafv2:DisassociateWebACL", + "wafv2:PutLoggingConfiguration", + "wafv2:Get*", + "wafv2:DeleteLoggingConfiguration" + ] + } + } +} + + +data "aws_iam_policy_document" "endpoint_policies" { + for_each = local.endpoint_policies + statement { + sid = "Allow${title(each.key)}Access" effect = "Allow" - actions = [ - "wafv2:CreateWebACL", - "wafv2:DeleteWebACL", - "wafv2:UpdateWebACL", - "wafv2:TagResource", - "wafv2:UntagResource", - "wafv2:List*", - "wafv2:AssociateWebACL", - "wafv2:DisassociateWebACL", - "wafv2:PutLoggingConfiguration", - "wafv2:Get*", - "wafv2:DeleteLoggingConfiguration" - ] + actions = each.value.actions resources = ["*"] principals { type = "*" From ceb22d9fc3411c9e0fbbf0a232b4c585f0a0d577 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 17:18:19 +0000 Subject: [PATCH 09/46] NPT-937 Add policies to VPC Endpoints --- infrastructure/stacks/account_wide/data.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/stacks/account_wide/data.tf b/infrastructure/stacks/account_wide/data.tf index 84ee4102..e27c0813 100644 --- a/infrastructure/stacks/account_wide/data.tf +++ b/infrastructure/stacks/account_wide/data.tf @@ -23,7 +23,7 @@ data "terraform_remote_state" "github_runner" { backend = "s3" config = { bucket = "nhse-${var.environment}-${var.repo_name}-terraform-state" - key = github_runner/terraform.state + key = "github_runner/terraform.state" region = var.aws_region } } From 
d17a7c7941fdfef0d486aa21b2fb53cf20d740b2 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 17:53:50 +0000 Subject: [PATCH 10/46] NPT-1102 Resolve checkov top level permission error --- .github/workflows/cicd-1-pull-request.yaml | 3 ++- .github/workflows/cicd-2-publish.yaml | 4 +++- .github/workflows/cicd-3-deploy.yaml | 2 ++ .github/workflows/cicd-4-deploy-sandbox.yaml | 2 ++ .github/workflows/infrastructure-cleardown.yaml | 2 ++ .github/workflows/stage-1-commit.yaml | 2 ++ .github/workflows/stage-2-test.yaml | 4 ++++ .github/workflows/stage-3-build.yaml | 2 ++ .github/workflows/stage-4-acceptance.yaml | 2 ++ .github/workflows/stage-5-notification.yaml | 2 ++ 10 files changed, 23 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cicd-1-pull-request.yaml b/.github/workflows/cicd-1-pull-request.yaml index d94f424e..1293691e 100644 --- a/.github/workflows/cicd-1-pull-request.yaml +++ b/.github/workflows/cicd-1-pull-request.yaml @@ -1,7 +1,8 @@ name: "CI/CD pull request" # The total recommended execution time for the "CI/CD Pull Request" workflow is around 20 minutes. - +permissions: + contents: read on: push: branches: diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml index 1b4a21dd..083edbdb 100644 --- a/.github/workflows/cicd-2-publish.yaml +++ b/.github/workflows/cicd-2-publish.yaml @@ -1,5 +1,7 @@ name: "CI/CD publish" -# checkov:skip=CKV2_GHA_1: "Ensure top-level permissions are not set to write-all. 
TODO- NPT-1102" + +permissions: + contents: read on: pull_request: types: [closed] diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml index f2fed222..3ce62d30 100644 --- a/.github/workflows/cicd-3-deploy.yaml +++ b/.github/workflows/cicd-3-deploy.yaml @@ -1,5 +1,7 @@ name: "CI/CD deploy" +permissions: + contents: read on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 diff --git a/.github/workflows/cicd-4-deploy-sandbox.yaml b/.github/workflows/cicd-4-deploy-sandbox.yaml index aa736608..69cf6a06 100644 --- a/.github/workflows/cicd-4-deploy-sandbox.yaml +++ b/.github/workflows/cicd-4-deploy-sandbox.yaml @@ -1,5 +1,7 @@ name: "CI/CD deploy sandbox API" +permissions: + contents: read on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 diff --git a/.github/workflows/infrastructure-cleardown.yaml b/.github/workflows/infrastructure-cleardown.yaml index 8c4e4872..298226b7 100644 --- a/.github/workflows/infrastructure-cleardown.yaml +++ b/.github/workflows/infrastructure-cleardown.yaml @@ -1,5 +1,7 @@ name: Cleardown Infrastructure +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/stage-1-commit.yaml b/.github/workflows/stage-1-commit.yaml index 41e5c4c0..0bc6281f 100644 --- a/.github/workflows/stage-1-commit.yaml +++ b/.github/workflows/stage-1-commit.yaml @@ -1,5 +1,7 @@ name: "Commit stage" +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml index 815d01f0..5bbb8365 100644 --- a/.github/workflows/stage-2-test.yaml +++ b/.github/workflows/stage-2-test.yaml @@ -1,5 +1,7 @@ name: "Test stage" +permissions: + contents: read on: workflow_call: inputs: @@ -32,6 +34,8 @@ jobs: test-unit: name: "Unit tests" runs-on: ubuntu-latest + permissions: + contents: read timeout-minutes: 5 steps: - name: "Checkout code" diff --git 
a/.github/workflows/stage-3-build.yaml b/.github/workflows/stage-3-build.yaml index 45a5645a..35bc8aae 100644 --- a/.github/workflows/stage-3-build.yaml +++ b/.github/workflows/stage-3-build.yaml @@ -1,5 +1,7 @@ name: "Build stage" +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/stage-4-acceptance.yaml b/.github/workflows/stage-4-acceptance.yaml index 80c85ab6..bee57568 100644 --- a/.github/workflows/stage-4-acceptance.yaml +++ b/.github/workflows/stage-4-acceptance.yaml @@ -1,5 +1,7 @@ name: "Acceptance stage" +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/stage-5-notification.yaml b/.github/workflows/stage-5-notification.yaml index 3dc0dbe2..1b2ba9c4 100644 --- a/.github/workflows/stage-5-notification.yaml +++ b/.github/workflows/stage-5-notification.yaml @@ -1,5 +1,7 @@ name: "Notification stage" +permissions: + contents: read on: workflow_call: secrets: From 3f6b9497e45730e78f83203f75c9711956548227 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Wed, 4 Mar 2026 17:57:18 +0000 Subject: [PATCH 11/46] NPT-1102 Resolve checkov top level permission error --- .github/workflows/cicd-1-pull-request.yaml | 1 + .github/workflows/cicd-2-publish.yaml | 1 + .github/workflows/cicd-3-deploy.yaml | 1 + .github/workflows/cicd-4-deploy-sandbox.yaml | 1 + 4 files changed, 4 insertions(+) diff --git a/.github/workflows/cicd-1-pull-request.yaml b/.github/workflows/cicd-1-pull-request.yaml index 1293691e..41fa3c8b 100644 --- a/.github/workflows/cicd-1-pull-request.yaml +++ b/.github/workflows/cicd-1-pull-request.yaml @@ -3,6 +3,7 @@ name: "CI/CD pull request" # The total recommended execution time for the "CI/CD Pull Request" workflow is around 20 minutes. 
permissions: contents: read + id-token: write on: push: branches: diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml index 083edbdb..7b42221e 100644 --- a/.github/workflows/cicd-2-publish.yaml +++ b/.github/workflows/cicd-2-publish.yaml @@ -2,6 +2,7 @@ name: "CI/CD publish" permissions: contents: read + id-token: write on: pull_request: types: [closed] diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml index 3ce62d30..914780dc 100644 --- a/.github/workflows/cicd-3-deploy.yaml +++ b/.github/workflows/cicd-3-deploy.yaml @@ -2,6 +2,7 @@ name: "CI/CD deploy" permissions: contents: read + id-token: write on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 diff --git a/.github/workflows/cicd-4-deploy-sandbox.yaml b/.github/workflows/cicd-4-deploy-sandbox.yaml index 69cf6a06..b3407f0e 100644 --- a/.github/workflows/cicd-4-deploy-sandbox.yaml +++ b/.github/workflows/cicd-4-deploy-sandbox.yaml @@ -2,6 +2,7 @@ name: "CI/CD deploy sandbox API" permissions: contents: read + id-token: write on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 From 5dfae6fe5d0281356c07e5265fca9f9eb8a255f9 Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Thu, 5 Mar 2026 12:09:38 +0000 Subject: [PATCH 12/46] NPT-1135: Fixing the cleardown script --- .github/workflows/artefacts-cleardown.yaml | 7 +++++++ .github/workflows/infrastructure-cleardown.yaml | 7 +++++++ .github/workflows/pipeline-infrastructure-cleardown.yaml | 3 +-- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/workflows/artefacts-cleardown.yaml b/.github/workflows/artefacts-cleardown.yaml index 2204b116..cd467b1a 100644 --- a/.github/workflows/artefacts-cleardown.yaml +++ b/.github/workflows/artefacts-cleardown.yaml @@ -27,6 +27,13 @@ on: description: "The type of permissions (e.g., account, app)" required: true type: string + secrets: + ACCOUNT_ID: + description: "AWS 
account ID for credentials" + required: true + MGMT_ACCOUNT_ID: + description: "AWS management account ID for credentials" + required: false jobs: cleardown-artefacts: diff --git a/.github/workflows/infrastructure-cleardown.yaml b/.github/workflows/infrastructure-cleardown.yaml index 298226b7..b66bb403 100644 --- a/.github/workflows/infrastructure-cleardown.yaml +++ b/.github/workflows/infrastructure-cleardown.yaml @@ -43,6 +43,13 @@ on: description: "The type of permissions (e.g., account, app)" required: true type: string + secrets: + ACCOUNT_ID: + description: "AWS account ID for credentials" + required: true + MGMT_ACCOUNT_ID: + description: "AWS management account ID for credentials" + required: true jobs: destroy-application-infrastructure: diff --git a/.github/workflows/pipeline-infrastructure-cleardown.yaml b/.github/workflows/pipeline-infrastructure-cleardown.yaml index dde62a50..7aa7448e 100644 --- a/.github/workflows/pipeline-infrastructure-cleardown.yaml +++ b/.github/workflows/pipeline-infrastructure-cleardown.yaml @@ -32,7 +32,6 @@ on: description: "Specify the workspace to cleardown" required: true type: string - jobs: metadata: if: github.actor != 'github-merge-queue[bot]' @@ -47,7 +46,7 @@ jobs: with: environment: ${{ github.event.client_payload.environment || inputs.environment || needs.metadata.outputs.environment }} workspace: ${{ github.event.client_payload.workspace || inputs.workspace || needs.metadata.outputs.workspace }} - stacks: "['triage]" + stacks: "['triage']" application_tag: ${{ inputs.application_tag || github.event.client_payload.application_tag || 'latest' }} commit_hash: ${{ needs.metadata.outputs.commit_hash }} workflow_timeout: 30 From ebbe151251a0d038af0ee86573308c0c2e33d858 Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Thu, 5 Mar 2026 14:30:04 +0000 Subject: [PATCH 13/46] NPT-1135: Fixing the cleardown script --- scripts/workflow/cleardown-terraform-state.sh | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create 
mode 100755 scripts/workflow/cleardown-terraform-state.sh diff --git a/scripts/workflow/cleardown-terraform-state.sh b/scripts/workflow/cleardown-terraform-state.sh new file mode 100755 index 00000000..344120c6 --- /dev/null +++ b/scripts/workflow/cleardown-terraform-state.sh @@ -0,0 +1,68 @@ +#! /bin/bash + +# fail on first error +set -e +EXPORTS_SET=0 + +# check necessary environment variables are set +if [[ -z "$WORKSPACE" ]] ; then + echo Set WORKSPACE + EXPORTS_SET=1 +fi + +if [[ -z "$ENVIRONMENT" ]] ; then + echo Set ENVIRONMENT + EXPORTS_SET=1 +fi + +if [[ -z "$STACK" ]] ; then + echo Set STACK + EXPORTS_SET=1 +fi + +if [[ $EXPORTS_SET = 1 ]] ; then + echo One or more exports not set + exit 1 +fi + +# set additional environment variable +export TF_VAR_repo_name="${REPOSITORY:-"$(basename -s .git "$(git config --get remote.origin.url)")"}" +# required for terraform management stack +export TERRAFORM_BUCKET_NAME="nhse-$ENVIRONMENT-$TF_VAR_repo_name-terraform-state" # globally unique name +export TERRAFORM_LOCK_TABLE="nhse-$ENVIRONMENT-$TF_VAR_repo_name-terraform-state-lock" + +echo "Current terraform workspace is --> $WORKSPACE" +echo "Terraform state S3 bucket name is --> $TERRAFORM_BUCKET_NAME" +echo "Terraform state lock DynamoDB table is --> $TERRAFORM_LOCK_TABLE" + +# Delete Terraform state and lock entries for each stack +echo "Stack to have terraform state deleted is: $STACK" + + # Delete terraform state for current terraform workspace & echo results following deletion + deletion_output=$(aws s3 rm s3://$TERRAFORM_BUCKET_NAME/env:/$WORKSPACE/$STACK/terraform.state 2>&1) + + if [[ -n "$deletion_output" ]]; then + echo "Successfully deleted Terraform State file for the following workspace --> $WORKSPACE" + + existing_item=$(aws dynamodb get-item \ + --table-name "$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": "'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' \ + 2>&1) + + aws dynamodb delete-item \ + --table-name 
"$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": "'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' \ + + after_deletion=$(aws dynamodb get-item \ + --table-name "$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": "'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' \ + 2>&1) + if [[ -n "$existing_item" && -z "$after_deletion" ]]; then + echo "Successfully deleted Terraform State Lock file for the following stack --> $STACK" + else + echo "Terraform state Lock file not found for deletion or deletion failed for the following stack --> $STACK" + exit 1 + fi + else + echo "Terraform State file not found for deletion or deletion failed for the following workspace --> $WORKSPACE" + fi From 304237ac2101b5edde796ab470f7db4cd5199f81 Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Thu, 5 Mar 2026 14:46:58 +0000 Subject: [PATCH 14/46] NPT-1135: Fixing the cleardown script --- .../actions/cleardown-tf-state/action.yaml | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/actions/cleardown-tf-state/action.yaml diff --git a/.github/actions/cleardown-tf-state/action.yaml b/.github/actions/cleardown-tf-state/action.yaml new file mode 100644 index 00000000..3005c91f --- /dev/null +++ b/.github/actions/cleardown-tf-state/action.yaml @@ -0,0 +1,25 @@ +name: "Cleardown terraform state action" +description: "Delete the terraform state" +inputs: + workspace: + description: "The name of the workspace to action the infrastructure into." + required: true + environment: + description: "The name of the environment to action the infrastructure into." + required: true + stack: + description: "A single variable for the stack to be cleared." 
+ required: true + +runs: + using: composite + steps: + - name: Delete terraform state + id: delete_tf_state + shell: bash + env: + WORKSPACE: ${{ inputs.workspace }} + ENVIRONMENT: ${{ inputs.environment }} + STACK: ${{ inputs.stack }} + run: | + ./scripts/workflow/cleardown-terraform-state.sh From fb7d75eeaa1e2817ae2cd0acd17b7dce9c04267e Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Thu, 5 Mar 2026 15:24:09 +0000 Subject: [PATCH 15/46] NPT-1135: Fixing the cleardown script --- .../workflows/pipeline-infrastructure-cleardown.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pipeline-infrastructure-cleardown.yaml b/.github/workflows/pipeline-infrastructure-cleardown.yaml index 7aa7448e..383e4cb6 100644 --- a/.github/workflows/pipeline-infrastructure-cleardown.yaml +++ b/.github/workflows/pipeline-infrastructure-cleardown.yaml @@ -34,7 +34,15 @@ on: type: string jobs: metadata: - if: github.actor != 'github-merge-queue[bot]' + if: >- + github.actor != 'github-merge-queue[bot]' && + ( + github.event_name != 'delete' || + ( + github.event.ref_type == 'branch' && + (startsWith(github.event.ref, 'task/') || startsWith(github.event.ref, 'dependabot/')) + ) + ) name: "Get Metadata" uses: ./.github/workflows/metadata.yaml From 5d612da6546d562d86502cf833a0e20e896e8e4e Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Thu, 5 Mar 2026 16:11:26 +0000 Subject: [PATCH 16/46] NPT-1135: Fixing the cleardown script --- scripts/workflow/cleardown-artefacts.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/workflow/cleardown-artefacts.sh b/scripts/workflow/cleardown-artefacts.sh index 90ca6045..8e18af3f 100755 --- a/scripts/workflow/cleardown-artefacts.sh +++ b/scripts/workflow/cleardown-artefacts.sh @@ -27,7 +27,7 @@ fi echo "Clearing down artefacts at or below $ARTEFACT_BUCKET_NAME/$WORKSPACE" -deletion_output=$(aws s3 rm --recursive s3://$ARTEFACT_BUCKET_NAME/$WORKSPACE/ 2>&1) +#deletion_output=$(aws s3 
rm --recursive s3://$ARTEFACT_BUCKET_NAME/$WORKSPACE/ 2>&1) if [[ -n "$deletion_output" ]]; then echo "Sucessfully deleted following artefacts from $ARTEFACT_BUCKET_NAME/$WORKSPACE" From 9c70964018d8bc2be13f08314c0eff1613e94db3 Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Fri, 6 Mar 2026 13:53:11 +0000 Subject: [PATCH 17/46] NPT-951: Adding monitoring to the Dynamodb --- infrastructure/stacks/triage/alarms.tf | 49 +++++++++++++++++++++++ infrastructure/stacks/triage/variables.tf | 43 ++++++++++++++++++++ 2 files changed, 92 insertions(+) create mode 100644 infrastructure/stacks/triage/alarms.tf diff --git a/infrastructure/stacks/triage/alarms.tf b/infrastructure/stacks/triage/alarms.tf new file mode 100644 index 00000000..6f429d14 --- /dev/null +++ b/infrastructure/stacks/triage/alarms.tf @@ -0,0 +1,49 @@ +locals { + dynamodb_alarm_table_names = { + starting_coords = module.starting_coords.dynamodb_table_name + triage_nodes = module.triage_nodes.dynamodb_table_name + bodymaps = module.bodymaps.dynamodb_table_name + } +} + +resource "aws_cloudwatch_metric_alarm" "dynamodb_system_errors" { + for_each = var.enable_dynamodb_basic_alarms ? local.dynamodb_alarm_table_names : {} + + alarm_name = "${local.resource_prefix}-${each.key}-dynamodb-system-errors${local.workspace_suffix}" + alarm_description = "DynamoDB system errors detected for ${each.value}" + namespace = "AWS/DynamoDB" + metric_name = "SystemErrors" + statistic = "Sum" + period = var.dynamodb_alarm_period_seconds + evaluation_periods = var.dynamodb_alarm_evaluation_periods + threshold = var.dynamodb_system_errors_threshold + comparison_operator = "GreaterThanThreshold" + treat_missing_data = "notBreaching" + alarm_actions = var.dynamodb_alarm_actions + ok_actions = var.dynamodb_alarm_ok_actions + + dimensions = { + TableName = each.value + } +} + +resource "aws_cloudwatch_metric_alarm" "dynamodb_throttled_requests" { + for_each = var.enable_dynamodb_basic_alarms ? 
local.dynamodb_alarm_table_names : {} + + alarm_name = "${local.resource_prefix}-${each.key}-dynamodb-throttled-requests${local.workspace_suffix}" + alarm_description = "DynamoDB throttled requests detected for ${each.value}" + namespace = "AWS/DynamoDB" + metric_name = "ThrottledRequests" + statistic = "Sum" + period = var.dynamodb_alarm_period_seconds + evaluation_periods = var.dynamodb_alarm_evaluation_periods + threshold = var.dynamodb_throttled_requests_threshold + comparison_operator = "GreaterThanThreshold" + treat_missing_data = "notBreaching" + alarm_actions = var.dynamodb_alarm_actions + ok_actions = var.dynamodb_alarm_ok_actions + + dimensions = { + TableName = each.value + } +} diff --git a/infrastructure/stacks/triage/variables.tf b/infrastructure/stacks/triage/variables.tf index 9a29e52f..1a3a2c65 100644 --- a/infrastructure/stacks/triage/variables.tf +++ b/infrastructure/stacks/triage/variables.tf @@ -59,3 +59,46 @@ variable "enable_xray_tracing" { type = bool default = true } + +# DynamoDB basic alarms +variable "enable_dynamodb_basic_alarms" { + description = "Enable basic CloudWatch alarms for DynamoDB table health metrics" + type = bool + default = true +} + +variable "dynamodb_alarm_actions" { + description = "List of ARNs for CloudWatch alarm actions (for example SNS topic ARNs)" + type = list(string) + default = [] +} + +variable "dynamodb_alarm_ok_actions" { + description = "List of ARNs for CloudWatch OK actions" + type = list(string) + default = [] +} + +variable "dynamodb_alarm_period_seconds" { + description = "Period in seconds over which DynamoDB alarm metrics are evaluated" + type = number + default = 300 +} + +variable "dynamodb_alarm_evaluation_periods" { + description = "Number of periods over which data is compared to alarm threshold" + type = number + default = 1 +} + +variable "dynamodb_system_errors_threshold" { + description = "Threshold for the DynamoDB SystemErrors alarm" + type = number + default = 0 +} + +variable 
"dynamodb_throttled_requests_threshold" { + description = "Threshold for the DynamoDB ThrottledRequests alarm" + type = number + default = 0 +} From 705ef25c1488b6216e8a1ac7101b7b8ba6977e2e Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 09:45:00 +0000 Subject: [PATCH 18/46] NPT-1102 Resolve Checkov top level write permission issues --- .github/workflows/artefacts-cleardown.yaml | 4 +++- .github/workflows/build-project.yaml | 4 +++- .github/workflows/cicd-2-publish.yaml | 7 ++++++- .github/workflows/cicd-3-deploy.yaml | 7 ++++++- .github/workflows/cicd-4-deploy-sandbox.yaml | 7 ++++++- .../deploy-application-infrastructure.yaml | 13 ++++++++++++- .github/workflows/deploy-infrastructure.yaml | 4 +++- .github/workflows/metadata.yaml | 3 ++- ...pipeline-deploy-account-infrastructure.yaml | 14 ++++++++++++-- .../workflows/pipeline-deploy-application.yaml | 13 ++++++++++--- .../workflows/pipeline-deploy-policies.yaml | 14 ++++++++++++-- .../pipeline-infrastructure-cleardown.yaml | 8 ++++++-- .github/workflows/pipeline-status-check.yaml | 5 ++++- .github/workflows/quality-checks.yaml | 18 +++++++++++++++++- 14 files changed, 102 insertions(+), 19 deletions(-) diff --git a/.github/workflows/artefacts-cleardown.yaml b/.github/workflows/artefacts-cleardown.yaml index cd467b1a..9e65a5b1 100644 --- a/.github/workflows/artefacts-cleardown.yaml +++ b/.github/workflows/artefacts-cleardown.yaml @@ -1,7 +1,6 @@ name: Cleardown Artefacts permissions: - id-token: write contents: read on: workflow_call: @@ -41,6 +40,9 @@ jobs: runs-on: ubuntu-latest timeout-minutes: ${{ inputs.workflow_timeout }} environment: ${{ inputs.environment }} + permissions: + id-token: write + contents: read steps: - name: "Checkout code" diff --git a/.github/workflows/build-project.yaml b/.github/workflows/build-project.yaml index 8b5e2cfa..7b83388b 100644 --- a/.github/workflows/build-project.yaml +++ b/.github/workflows/build-project.yaml @@ -2,7 +2,6 @@ name: Build project 
workflow run-name: Build ${{ inputs.type }} - ${{ inputs.name }} permissions: - id-token: write contents: read on: workflow_call: @@ -54,6 +53,9 @@ jobs: name: "Build ${{ inputs.build_type }} - ${{ inputs.name }}" runs-on: ubuntu-latest environment: ${{ inputs.environment }} + permissions: + id-token: write + contents: read steps: - name: "Checkout code" diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml index 7b42221e..88311a47 100644 --- a/.github/workflows/cicd-2-publish.yaml +++ b/.github/workflows/cicd-2-publish.yaml @@ -2,7 +2,6 @@ name: "CI/CD publish" permissions: contents: read - id-token: write on: pull_request: types: [closed] @@ -13,6 +12,9 @@ jobs: metadata: name: "Set CI/CD metadata" runs-on: ubuntu-latest + permissions: + contents: read + id-token: write if: github.event.pull_request.merged == true timeout-minutes: 1 outputs: @@ -49,6 +51,9 @@ jobs: name: "Publish packages" runs-on: ubuntu-latest needs: [metadata] + permissions: + contents: read + id-token: write if: github.event.pull_request.merged == true timeout-minutes: 3 steps: diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml index 914780dc..aa55437d 100644 --- a/.github/workflows/cicd-3-deploy.yaml +++ b/.github/workflows/cicd-3-deploy.yaml @@ -2,7 +2,6 @@ name: "CI/CD deploy" permissions: contents: read - id-token: write on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 @@ -33,6 +32,9 @@ jobs: metadata: name: "Set CI/CD metadata" runs-on: ubuntu-latest + permissions: + contents: read + id-token: write timeout-minutes: 1 outputs: build_datetime: ${{ steps.variables.outputs.build_datetime }} @@ -76,6 +78,9 @@ jobs: deploy: name: "Deploy to an environment" runs-on: ubuntu-latest + permissions: + contents: read + id-token: write needs: [metadata] timeout-minutes: 10 steps: diff --git a/.github/workflows/cicd-4-deploy-sandbox.yaml b/.github/workflows/cicd-4-deploy-sandbox.yaml index 
b3407f0e..45b758d5 100644 --- a/.github/workflows/cicd-4-deploy-sandbox.yaml +++ b/.github/workflows/cicd-4-deploy-sandbox.yaml @@ -2,7 +2,6 @@ name: "CI/CD deploy sandbox API" permissions: contents: read - id-token: write on: workflow_dispatch: # checkov:skip=CKV_GHA_7:Inputs to be reviewed TODO- NPT-1102 @@ -33,6 +32,9 @@ jobs: metadata: name: "Set CI/CD metadata" runs-on: ubuntu-latest + permissions: + contents: read + id-token: write timeout-minutes: 1 outputs: build_datetime: ${{ steps.variables.outputs.build_datetime }} @@ -76,6 +78,9 @@ jobs: deploy: name: "Deploy sandbox API" runs-on: ubuntu-latest + permissions: + contents: read + id-token: write needs: [metadata] timeout-minutes: 10 steps: diff --git a/.github/workflows/deploy-application-infrastructure.yaml b/.github/workflows/deploy-application-infrastructure.yaml index a537e54f..d79b1f3c 100644 --- a/.github/workflows/deploy-application-infrastructure.yaml +++ b/.github/workflows/deploy-application-infrastructure.yaml @@ -1,7 +1,6 @@ name: Deploy application infrastructure workflow permissions: - id-token: write contents: read on: workflow_call: @@ -69,6 +68,9 @@ on: jobs: plan-application-infrastructure: name: "Plan application infrastructure deployment to ${{ inputs.environment }} " + permissions: + id-token: write + contents: read concurrency: group: "${{ inputs.environment }}-${{ inputs.tag || inputs.workspace}}" cancel-in-progress: false @@ -90,6 +92,9 @@ jobs: manual-approval-application-infra: name: "Manual approval for deployment of application infrastructure to the ${{ inputs.environment }} environment" + permissions: + id-token: write + contents: read if: ${{ needs.plan-application-infrastructure.outputs.plan_result == 'true' }} needs: - plan-application-infrastructure @@ -108,6 +113,9 @@ jobs: apply-application-infrastructure: name: "Apply application infrastructure deployment to ${{ inputs.environment }}" + permissions: + id-token: write + contents: read concurrency: group: "${{ 
inputs.environment }}-${{ inputs.tag || inputs.workspace}}" cancel-in-progress: false @@ -131,6 +139,9 @@ jobs: deploy_summary: name: "Summarise deployment of application infrastructure to ${{ inputs.environment }} environment" + permissions: + id-token: write + contents: read needs: - plan-application-infrastructure - manual-approval-application-infra diff --git a/.github/workflows/deploy-infrastructure.yaml b/.github/workflows/deploy-infrastructure.yaml index 4bf69922..3c377b39 100644 --- a/.github/workflows/deploy-infrastructure.yaml +++ b/.github/workflows/deploy-infrastructure.yaml @@ -1,7 +1,6 @@ name: Deploy Infrastructure Workflow permissions: - id-token: write contents: read on: workflow_call: @@ -74,6 +73,9 @@ on: jobs: deploy-infrastructure: name: "Deploy infrastructure" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest environment: ${{ inputs.environment }} timeout-minutes: ${{ inputs.workflow_timeout }} diff --git a/.github/workflows/metadata.yaml b/.github/workflows/metadata.yaml index 9a31de8f..6f4d849d 100644 --- a/.github/workflows/metadata.yaml +++ b/.github/workflows/metadata.yaml @@ -1,6 +1,7 @@ name: Metadata Workflow -permissions: {} +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index 23107597..f93236b8 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -1,9 +1,7 @@ name: Pipeline Deploy Account Level Infrastructures Pipeline permissions: - id-token: write contents: read - on: push: branches: @@ -68,6 +66,9 @@ jobs: quality-checks: name: "Quality checks for ${{ needs.metadata.outputs.environment }} deployment" + permissions: + id-token: write + contents: read needs: - metadata uses: ./.github/workflows/quality-checks.yaml @@ -84,6 +85,9 @@ jobs: plan-infrastructure: name: "Plan 
${{ matrix.name }} infrastructure deployment for ${{ matrix.environment }}" + permissions: + id-token: write + contents: read concurrency: group: "${{ matrix.environment }}-default-${{ matrix.name }}-${{matrix.stacks}}" cancel-in-progress: false @@ -117,6 +121,9 @@ jobs: manual-approval: name: "Manual approval for ${{ needs.metadata.outputs.environment }} infrastructure deployment" + permissions: + id-token: write + contents: read if: ${{ github.ref == 'refs/heads/main' && (needs.plan-infrastructure.outputs.plan_result == 'true') }} needs: - metadata @@ -129,6 +136,9 @@ jobs: apply-infrastructure: name: "Apply ${{ matrix.name }} infrastructure deployment for ${{ matrix.environment }}" + permissions: + id-token: write + contents: read concurrency: group: "${{ matrix.environment }}-default-${{ matrix.name }}-${{matrix.stacks}}" cancel-in-progress: false diff --git a/.github/workflows/pipeline-deploy-application.yaml b/.github/workflows/pipeline-deploy-application.yaml index e960219f..6d98b631 100644 --- a/.github/workflows/pipeline-deploy-application.yaml +++ b/.github/workflows/pipeline-deploy-application.yaml @@ -1,9 +1,7 @@ name: Application Deployment Pipeline permissions: - id-token: write - contents: write - + contents: read on: push: branches: @@ -39,6 +37,9 @@ jobs: quality-checks: name: "Quality checks for ${{ needs.metadata.outputs.environment }} deployment" + permissions: + id-token: write + contents: write needs: - metadata uses: ./.github/workflows/quality-checks.yaml @@ -54,6 +55,9 @@ jobs: perform-code-analysis: name: "Perform static code analysis" + permissions: + id-token: write + contents: write needs: - metadata uses: ./.github/workflows/static-code-analysis.yaml @@ -62,6 +66,9 @@ jobs: deploy-application-infrastructure: name: "Deploy application infrastructure to the ${{ needs.metadata.outputs.environment }} environment" + permissions: + id-token: write + contents: write needs: - metadata - perform-code-analysis diff --git 
a/.github/workflows/pipeline-deploy-policies.yaml b/.github/workflows/pipeline-deploy-policies.yaml index 6435e32d..6aebc3c1 100644 --- a/.github/workflows/pipeline-deploy-policies.yaml +++ b/.github/workflows/pipeline-deploy-policies.yaml @@ -1,9 +1,7 @@ name: Deploy Policies Infrastructure Pipeline permissions: - id-token: write contents: read - on: push: branches: @@ -46,6 +44,9 @@ jobs: quality-checks: name: "Quality checks for ${{ needs.metadata.outputs.environment }} deployment" + permissions: + id-token: write + contents: read needs: - metadata uses: ./.github/workflows/quality-checks.yaml @@ -62,6 +63,9 @@ jobs: plan-permissions-infrastructure: name: "Plan ${{ matrix.name }} permissions infrastructure deployment for ${{ matrix.environment }}" + permissions: + id-token: write + contents: read concurrency: group: "${{ matrix.environment }}-default-${{ matrix.name }}-permissions-${{matrix.stacks}}" cancel-in-progress: false @@ -92,6 +96,9 @@ jobs: manual-approval-permissions: name: "Manual approval for ${{ needs.metadata.outputs.environment }} permissions infrastructure deployment" + permissions: + id-token: write + contents: read if: ${{ (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') && (needs.plan-permissions-infrastructure.outputs.plan_result == 'true') }} needs: - metadata @@ -104,6 +111,9 @@ jobs: apply-permissions-infrastructure: name: "Apply ${{ matrix.name }} permissions infrastructure deployment for ${{ matrix.environment }}" + permissions: + id-token: write + contents: read concurrency: group: "${{ matrix.environment }}-default-${{ matrix.name }}-permissions-${{matrix.stacks}}" cancel-in-progress: false diff --git a/.github/workflows/pipeline-infrastructure-cleardown.yaml b/.github/workflows/pipeline-infrastructure-cleardown.yaml index 383e4cb6..562113b0 100644 --- a/.github/workflows/pipeline-infrastructure-cleardown.yaml +++ b/.github/workflows/pipeline-infrastructure-cleardown.yaml @@ -5,9 +5,7 @@ name: Cleardown 
Application Infrastructure Pipeline # a task branch or # a dependabot branch permissions: - id-token: write contents: read - on: delete: repository_dispatch: @@ -48,6 +46,9 @@ jobs: cleardown-infrastructure: name: "Cleardown Infrastructure" + permissions: + id-token: write + contents: read needs: - metadata uses: ./.github/workflows/infrastructure-cleardown.yaml @@ -66,6 +67,9 @@ jobs: cleardown-artefacts: if: github.actor != 'github-actions[bot]' name: "Cleardown Artefacts" + permissions: + id-token: write + contents: read needs: - metadata - cleardown-infrastructure diff --git a/.github/workflows/pipeline-status-check.yaml b/.github/workflows/pipeline-status-check.yaml index fc777d5c..b12c6136 100644 --- a/.github/workflows/pipeline-status-check.yaml +++ b/.github/workflows/pipeline-status-check.yaml @@ -1,6 +1,7 @@ name: Pipeline outcome check -permissions: {} +permissions: + contents: read on: workflow_call: @@ -8,6 +9,8 @@ on: jobs: check-pipeline-status: name: "Check Pipeline Status" + permissions: + contents: read runs-on: ubuntu-latest steps: - uses: martialonline/workflow-status@fe13c6a4716673e224038aa1b02387352fb35e13 diff --git a/.github/workflows/quality-checks.yaml b/.github/workflows/quality-checks.yaml index 266c5ab5..627e284e 100644 --- a/.github/workflows/quality-checks.yaml +++ b/.github/workflows/quality-checks.yaml @@ -1,7 +1,6 @@ name: Code Quality Checks Workflow permissions: - id-token: write contents: read on: workflow_call: @@ -51,6 +50,9 @@ on: jobs: scan-secrets: name: "Scan secrets" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest timeout-minutes: 2 steps: @@ -103,6 +105,9 @@ jobs: count-lines-of-code: name: "Count lines of code" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest timeout-minutes: 2 @@ -123,6 +128,9 @@ jobs: scan-dependencies: name: "Scan dependencies" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest timeout-minutes: 2 steps: @@ -143,6 +151,9 @@ jobs: 
validate-terraform: name: "Validate Terraform" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest timeout-minutes: ${{ inputs.workflow_timeout }} environment: ${{ inputs.environment }} @@ -175,6 +186,9 @@ jobs: check-terraform-format: name: "Check Terraform format" + permissions: + id-token: write + contents: read runs-on: ubuntu-latest timeout-minutes: ${{ inputs.workflow_timeout }} steps: @@ -188,6 +202,8 @@ jobs: checkov-scan: name: "Checkov scan" + permissions: + contents: read runs-on: ubuntu-latest timeout-minutes: ${{ inputs.workflow_timeout }} permissions: From 52025ee4c6492ec5560858f488c15e6fe150d42c Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 09:48:10 +0000 Subject: [PATCH 19/46] NPT-1102 Resolve Checkov top level write permission issues --- .github/workflows/quality-checks.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/quality-checks.yaml b/.github/workflows/quality-checks.yaml index 627e284e..8b9f9d63 100644 --- a/.github/workflows/quality-checks.yaml +++ b/.github/workflows/quality-checks.yaml @@ -206,8 +206,6 @@ jobs: contents: read runs-on: ubuntu-latest timeout-minutes: ${{ inputs.workflow_timeout }} - permissions: - contents: read steps: - name: "Checkout code" uses: actions/checkout@v6 From cbfb7ddaeb750f6aadeafdbc542030fbad709bc2 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 10:47:00 +0000 Subject: [PATCH 20/46] NPT-1102 Resolve Checkov which sets maxItem to requestBody array --- docs/user-guides/api-spec.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/user-guides/api-spec.yaml b/docs/user-guides/api-spec.yaml index c1e57138..34b98c14 100644 --- a/docs/user-guides/api-spec.yaml +++ b/docs/user-guides/api-spec.yaml @@ -712,6 +712,7 @@ components: description: Must contain age, gender, party and skillset, at minimum. Also includes the selected answers to any previously received questions. 
items: $ref: "#/components/schemas/FHIRTaskInput" + maxItems: 30 minItems: 4 required: [id,resourceType,status,intent,input] From 68da6af8710ae955bbbd1a53cf6f6d473990033e Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 10:55:20 +0000 Subject: [PATCH 21/46] NPT-1102 Resolve Checkov which sets maxItem to requestBody array --- docs/user-guides/api-spec.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/user-guides/api-spec.yaml b/docs/user-guides/api-spec.yaml index 34b98c14..c8a4c04e 100644 --- a/docs/user-guides/api-spec.yaml +++ b/docs/user-guides/api-spec.yaml @@ -917,6 +917,8 @@ components: - $ref: "#/components/schemas/FHIRBundleResourceQuestionnaire" - $ref: "#/components/schemas/FHIRBundleResourceServiceRequest" - $ref: "#/components/schemas/FHIRBundleResourceCommunication" + minItems: 1 + maxItems: 4 FHIRBundleResourceTask: type: object @@ -1055,6 +1057,8 @@ components: type: string description: "DEPRECATED. Contains the coordinate of the question" example: "PW1827.100" + maxItems: 1 + minItems: 1 status: type: string @@ -1085,6 +1089,8 @@ components: oneOf: - $ref: "#/components/schemas/FHIRBundleResourceQuestionnaireQuestionRationale" - $ref: "#/components/schemas/FHIRBundleResourceQuestionnaireAnswer" + minItems: 1 + maxItems: 2 FHIRBundleResourceQuestionnaireQuestionRationale: type: object @@ -1178,6 +1184,8 @@ components: type: array items: $ref: "#/components/schemas/FHIRBundleResourceQuestionnaireBodyMapGroup" + minItems: 1 + maxItems: 1 FHIRBundleResourceQuestionnaireBodyMapGroup: type: object @@ -1194,6 +1202,7 @@ components: items: $ref: "#/components/schemas/FHIRBundleResourceQuestionnaireBodyMapArea" minItems: 1 + maxItems: 10 FHIRBundleResourceQuestionnaireBodyMapArea: description: Areas within the parent body area group. 
@@ -1215,6 +1224,7 @@ components: items: $ref: "#/components/schemas/FHIRBundleResourceQuestionnaireBodyMapPathway" minItems: 1 + maxItems: 10 FHIRBundleResourceQuestionnaireBodyMapPathway: type: object @@ -1285,6 +1295,10 @@ components: type: string description: Identifiers to the problematic values in the input. example: "input[2].valueString" + minItems: 1 + maxItems: 10 + minItems: 1 + maxItems: 10 FHIRBundleResourceServiceRequest: type: object @@ -1337,6 +1351,7 @@ components: type: string example: "Sunburn" minItems: 1 + maxItems: 10 FHIRBundleResourceCommunication: type: object @@ -1360,6 +1375,7 @@ components: type: string example: "Have plenty of non-alcoholic drinks to help cool down and to prevent dehydration." minItems: 1 + maxItems: 10 security: - app-level0: [] From cbb5b48b0f4b39faad0a37fc300c9ca79607efad Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 10:57:40 +0000 Subject: [PATCH 22/46] NPT-1102 Resolve Checkov which sets maxItem to requestBody array --- docs/user-guides/api-spec.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user-guides/api-spec.yaml b/docs/user-guides/api-spec.yaml index c8a4c04e..60945484 100644 --- a/docs/user-guides/api-spec.yaml +++ b/docs/user-guides/api-spec.yaml @@ -1296,7 +1296,7 @@ components: description: Identifiers to the problematic values in the input. example: "input[2].valueString" minItems: 1 - maxItems: 10 + maxItems: 10 minItems: 1 maxItems: 10 @@ -1375,7 +1375,7 @@ components: type: string example: "Have plenty of non-alcoholic drinks to help cool down and to prevent dehydration." 
minItems: 1 - maxItems: 10 + maxItems: 15 security: - app-level0: [] From 1a4193e4f0831820649aabc2b5061d936509c149 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 11:26:06 +0000 Subject: [PATCH 23/46] NPT-1102 Allow temporary deployment from develop branch to dev environment --- .github/workflows/pipeline-deploy-account-infrastructure.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index f93236b8..7a2f83be 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -124,7 +124,7 @@ jobs: permissions: id-token: write contents: read - if: ${{ github.ref == 'refs/heads/main' && (needs.plan-infrastructure.outputs.plan_result == 'true') }} + if: ${{ (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') && (needs.plan-infrastructure.outputs.plan_result == 'true') }} needs: - metadata - plan-infrastructure From 1faaee835cab385b66213aa21776948cc37315f4 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 11:42:34 +0000 Subject: [PATCH 24/46] NPT-1102 Allow temporary deployment from develop branch to dev environment --- .github/workflows/pipeline-deploy-account-infrastructure.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index 7a2f83be..f3bec738 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -142,7 +142,7 @@ jobs: concurrency: group: "${{ matrix.environment }}-default-${{ matrix.name }}-${{matrix.stacks}}" cancel-in-progress: false - if: ${{ github.ref == 'refs/heads/main' }} + if: ${{ github.ref == 'refs/heads/main' || github.ref == 
'refs/heads/develop' }} needs: - metadata - manual-approval From 196dd6ce5f21b3edc6130b661fa451665669c84a Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 12:27:42 +0000 Subject: [PATCH 25/46] NPT-937 Add appropriate matrix stacks for apply account infra job --- .github/workflows/pipeline-deploy-account-infrastructure.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index f3bec738..bf6cbd39 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -155,6 +155,7 @@ jobs: stacks: "['account_security']" - name: "env" environment: ${{ needs.metadata.outputs.environment }} + stacks: "['terraform_management','account_wide']" - name: "mgmt" environment: ${{ needs.metadata.outputs.mgmt_environment }} stacks: "['terraform_management','account_security','artefact_management']" From 3c51b4bff5d942d8f0dc3015405d582a0327839b Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 13:39:06 +0000 Subject: [PATCH 26/46] NPT-937 Amend github runner permission policies --- infrastructure/stacks/artefact_management/s3.tf | 1 - .../account_github_runner_compute.policy.json.tpl | 2 ++ .../account_github_runner_data.policy.json.tpl | 1 + .../account_github_runner_security.policy.json.tpl | 1 + .../app_github_runner_security.policy.json.tpl | 3 ++- .../github_runner_role_permissions_boundary.tf | 8 +++++++- .../stacks/github_runner/vpc_endpoint_policies.tf | 3 ++- 7 files changed, 15 insertions(+), 4 deletions(-) diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index d4ff7e18..40d457d2 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -30,7 +30,6 @@ data "aws_iam_policy_document" 
"artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "${data.aws_iam_role.app_github_runner_iam_role.arn}", "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" ] } diff --git a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl index eb5f4a3f..72cf59fb 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl @@ -59,6 +59,7 @@ "ec2:DeleteSecurityGroup", "ec2:DeleteTags", "ec2:DeleteVpc", + "ec2:DeleteVpcEndpoints", "ec2:ModifyVpcAttribute", "ec2:ReplaceNetworkAclAssociation", "ec2:RevokeSecurityGroupEgress", @@ -98,6 +99,7 @@ "apigateway:ListTagsForResource", "ec2:Describe*", "ec2:DescribeVpcs", + "ec2:CreateTags", "ec2:ModifyVpcBlockPublicAccessOptions", "lambda:Get*", "lambda:List*" diff --git a/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl index f5942187..9ebb1a10 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl @@ -130,6 +130,7 @@ "logs:DeleteLogDelivery", "logs:DeleteResourcePolicy", "logs:Describe*", + "logs:Tag*", "logs:DescribeResourcePolicies", "logs:ListTagsForResource", "logs:PutResourcePolicy", diff --git a/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl index 551a04fb..43b45a54 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl @@ -38,6 +38,7 @@ 
"access-analyzer:ListAnalyzers", "access-analyzer:ListArchiveRules", "access-analyzer:ListFindings", + "access-analyzer:Tag*", "acm:DescribeCertificate", "acm:GetCertificate", "acm:ListCertificates", diff --git a/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl b/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl index 8cad015e..52e8ec61 100644 --- a/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl +++ b/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl @@ -138,7 +138,8 @@ "access-analyzer:GetFinding", "access-analyzer:ListAnalyzers", "access-analyzer:ListArchiveRules", - "access-analyzer:ListFindings" + "access-analyzer:ListFindings", + "access-analyzer:Tag*" ], "Resource": "*" }, diff --git a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf index 8a55152a..bbd7b6bc 100644 --- a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf +++ b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf @@ -7,6 +7,7 @@ data "aws_iam_policy_document" "permissions_boundary" { actions = [ "access-analyzer:Get*", "access-analyzer:List*", + "access-analyzer:Tag*", "apigateway:CreateRestApi", "apigateway:Delete*", "apigateway:Get*", @@ -50,6 +51,7 @@ data "aws_iam_policy_document" "permissions_boundary" { "ec2:CreateVpc*", "ec2:ModifyVpcAttribute", "ec2:DeleteVpc", + "ec2:DeleteVpcEndpoints", "ec2:CreateRouteTable", "ec2:CreateSubnet", "ec2:RevokeSecurityGroupIngress", @@ -208,7 +210,12 @@ data "aws_iam_policy_document" "permissions_boundary" { "iam:Get*", "iam:List*", "iam:CreateRole", + "iam:DeleteInstanceProfile", "iam:DeleteRole", + "iam:DeleteRolePolicy", + "iam:DeleteServiceLinkedRole", + "iam:DeletePolicy*", + "iam:DeleteServiceLinkedRole", "iam:UpdateRole", "iam:UpdateAssumeRolePolicy", 
"iam:PutRolePolicy", @@ -216,7 +223,6 @@ data "aws_iam_policy_document" "permissions_boundary" { "iam:AttachRolePolicy", "iam:DetachRolePolicy", "iam:CreatePolicy*", - "iam:DeletePolicy*", "iam:TagRole", "iam:UntagPolicy", "iam:PassRole", diff --git a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf index 34dbd836..cdba6297 100644 --- a/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf +++ b/infrastructure/stacks/github_runner/vpc_endpoint_policies.tf @@ -168,7 +168,8 @@ locals { access_analyzer = { actions = [ "access-analyzer:List*", - "access-analyzer:Get*" + "access-analyzer:Get*", + "access-analyzer:Tag*" ] } s3 = { From 9fc84d2598ff02ad9e1296e80a372b70f8330736 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Thu, 5 Mar 2026 14:31:35 +0000 Subject: [PATCH 27/46] NPT-937 Amend github runner permission policies --- .../app_github_runner_compute.policy.json.tpl | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl b/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl index 8daff869..8f6a97b4 100644 --- a/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl +++ b/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl @@ -45,13 +45,25 @@ "ec2:AssociateRouteTable", "ec2:AuthorizeSecurityGroupEgress", "ec2:AuthorizeSecurityGroupIngress", + "ec2:CreateFlowLogs", + "ec2:CreateNetworkAcl", + "ec2:CreateNetworkAclEntry", + "ec2:CreateRouteTable", + "ec2:CreateSecurityGroup", + "ec2:CreateSubnet", "ec2:CreateTags", + "ec2:CreateVpc", + "ec2:CreateVpcEndpoint", + "ec2:DeleteNetworkAcl", + "ec2:DeleteNetworkAclEntry", + "ec2:DeleteSecurityGroup", "ec2:DeleteTags", + "ec2:DeleteVpc", + "ec2:DeleteVpcEndpoints", "ec2:ModifyVpcAttribute", "ec2:ReplaceNetworkAclAssociation", "ec2:RevokeSecurityGroupEgress", 
"ec2:RevokeSecurityGroupIngress", - "ec2:CreateSecurityGroup", "ec2:UpdateSecurityGroupRuleDescriptionsEgress" ], "Resource": [ From e0f4e462050b9210cdbfe1e6cee4fb433e0dd203 Mon Sep 17 00:00:00 2001 From: Maciej Kaczor Date: Fri, 6 Mar 2026 11:00:10 +0100 Subject: [PATCH 28/46] _ping endpoint implementation --- src/deploy.sh | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/src/deploy.sh b/src/deploy.sh index 930fb6a2..647395de 100755 --- a/src/deploy.sh +++ b/src/deploy.sh @@ -289,6 +289,23 @@ for part in "${parts[@]}"; do parent_id="$resource_id" done +# Create _ping endpoint +ping_resource_id=$($AWS apigateway create-resource \ + --rest-api-id "$rest_api_id" \ + --parent-id "$root_resource_id" \ + --path-part "_ping" \ + --query 'id' \ + --output text +) + +$AWS apigateway put-method \ + --rest-api-id "$rest_api_id" \ + --resource-id "$ping_resource_id" \ + --http-method GET \ + --authorization-type "NONE" > /dev/null + + + # Create POST method $AWS apigateway put-method \ --rest-api-id "$rest_api_id" \ @@ -299,6 +316,29 @@ $AWS apigateway put-method \ echo "✅ $API_NAME POST method created: $rest_api_id" # Integrate with Lambda +$AWS apigateway put-integration \ + --rest-api-id "$rest_api_id" \ + --resource-id "$ping_resource_id" \ + --http-method GET \ + --type MOCK \ + --request-templates '{"application/json":"{}"}' + +$AWS apigateway put-integration-response \ + --rest-api-id "$rest_api_id" \ + --resource-id "$ping_resource_id" \ + --http-method GET \ + --status-code 200 \ + --selection-pattern "" \ + --response-templates '{"application/json":"{\"status\":\"ok\"}"}' + +$AWS apigateway put-method-response \ + --rest-api-id "$rest_api_id" \ + --resource-id "$ping_resource_id" \ + --http-method GET \ + --status-code 200 \ + --response-models '{"application/json":"Empty"}' \ + --response-parameters '{"method.response.header.Content-Type": true}' + $AWS apigateway put-integration \ --rest-api-id "$rest_api_id" \ 
--resource-id "$resource_id" \ @@ -345,6 +385,15 @@ $AWS apigateway update-stage \ echo "✅ X-Ray tracing enabled on API Gateway stage: $STAGE_NAME" +echo "STAGES:" +$AWS apigateway get-stages --rest-api-id "$rest_api_id" + +echo "RESOURCES:" +$AWS apigateway get-resources --rest-api-id "$rest_api_id" + +echo "DEPLOYMENTS:" +$AWS apigateway get-deployments --rest-api-id "$rest_api_id" + # Output endpoints if [[ "$target" == "$LOCALSTACK" ]]; then echo "ℹ️ LocalStack API endpoint: http://localhost:4566/restapis/${rest_api_id}/${STAGE_NAME}/_user_request_/${ENDPOINT_PATH}" From e1c9f27f2fb220cee16a7b5e1fe6e9e03bfebf57 Mon Sep 17 00:00:00 2001 From: Maciej Kaczor Date: Fri, 6 Mar 2026 12:37:07 +0100 Subject: [PATCH 29/46] NPT-1107: update _ping endpoint to working version --- src/deploy.sh | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/src/deploy.sh b/src/deploy.sh index 647395de..a218e482 100755 --- a/src/deploy.sh +++ b/src/deploy.sh @@ -31,6 +31,7 @@ TRIAGE_NODE_TABLE="TriageNodes" TBRANCH="T1" CONCURRENT_EXEC_NUM=5 LAMBDA_APIG_ALIAS_NAME="live" +PING_ENDPOINT="_ping" # Create IAM role role_arn=$($AWS iam create-role \ @@ -293,7 +294,7 @@ done ping_resource_id=$($AWS apigateway create-resource \ --rest-api-id "$rest_api_id" \ --parent-id "$root_resource_id" \ - --path-part "_ping" \ + --path-part "$PING_ENDPOINT" \ --query 'id' \ --output text ) @@ -304,7 +305,7 @@ $AWS apigateway put-method \ --http-method GET \ --authorization-type "NONE" > /dev/null - +echo "✅ $API_NAME GET method created: $rest_api_id" # Create POST method $AWS apigateway put-method \ @@ -321,7 +322,7 @@ $AWS apigateway put-integration \ --resource-id "$ping_resource_id" \ --http-method GET \ --type MOCK \ - --request-templates '{"application/json":"{}"}' + --request-templates '{"application/json":"{\"statusCode\": 200}"}' > /dev/null $AWS apigateway put-integration-response \ --rest-api-id "$rest_api_id" \ @@ -329,7 +330,7 @@ $AWS apigateway 
put-integration-response \ --http-method GET \ --status-code 200 \ --selection-pattern "" \ - --response-templates '{"application/json":"{\"status\":\"ok\"}"}' + --response-templates '{"application/json":"{\"status\":\"ok\"}"}' > /dev/null $AWS apigateway put-method-response \ --rest-api-id "$rest_api_id" \ @@ -337,7 +338,7 @@ $AWS apigateway put-method-response \ --http-method GET \ --status-code 200 \ --response-models '{"application/json":"Empty"}' \ - --response-parameters '{"method.response.header.Content-Type": true}' + --response-parameters '{"method.response.header.Content-Type": true}' > /dev/null $AWS apigateway put-integration \ --rest-api-id "$rest_api_id" \ @@ -385,18 +386,10 @@ $AWS apigateway update-stage \ echo "✅ X-Ray tracing enabled on API Gateway stage: $STAGE_NAME" -echo "STAGES:" -$AWS apigateway get-stages --rest-api-id "$rest_api_id" - -echo "RESOURCES:" -$AWS apigateway get-resources --rest-api-id "$rest_api_id" - -echo "DEPLOYMENTS:" -$AWS apigateway get-deployments --rest-api-id "$rest_api_id" - # Output endpoints if [[ "$target" == "$LOCALSTACK" ]]; then echo "ℹ️ LocalStack API endpoint: http://localhost:4566/restapis/${rest_api_id}/${STAGE_NAME}/_user_request_/${ENDPOINT_PATH}" + echo "ℹ️ LocalStack ping endpoint: http://localhost:4566/restapis/${rest_api_id}/${STAGE_NAME}/_user_request_/${PING_ENDPOINT}" else echo "ℹ️ AWS API endpoint: https://${rest_api_id}.execute-api.${REGION}.amazonaws.com/${STAGE_NAME}/${ENDPOINT_PATH}" fi From dfb6c1fe9001ae24ec3fc4e52456a424b99be206 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 15:54:05 +0000 Subject: [PATCH 30/46] NPT-1140 Amend NHS pathway file uploader s3 bucket name --- infrastructure/stacks/account_wide/outputs.tf | 9 ++++++ infrastructure/stacks/triage/data.tf | 30 ++++--------------- infrastructure/stacks/triage/lambda.tf | 7 ++--- infrastructure/stacks/triage/s3.tf | 4 +-- infrastructure/stacks/triage/trigger.tf | 2 +- 5 files changed, 21 insertions(+), 31 
deletions(-) create mode 100644 infrastructure/stacks/account_wide/outputs.tf diff --git a/infrastructure/stacks/account_wide/outputs.tf b/infrastructure/stacks/account_wide/outputs.tf new file mode 100644 index 00000000..7f624980 --- /dev/null +++ b/infrastructure/stacks/account_wide/outputs.tf @@ -0,0 +1,9 @@ +output "vpc_id" { + description = "VPC ID" + values = module.vpc.vpc_id +} + +output "private_subnet_ids" { + description = "List of private subnet IDs" + values = module.vpc.private_subnets +} diff --git a/infrastructure/stacks/triage/data.tf b/infrastructure/stacks/triage/data.tf index d20342c1..a8345ce1 100644 --- a/infrastructure/stacks/triage/data.tf +++ b/infrastructure/stacks/triage/data.tf @@ -1,31 +1,13 @@ -data "aws_vpc" "vpc" { - filter { - name = "tag:Name" - values = ["${local.account_prefix}-vpc"] +data "terraform_remote_state" "vpc" { + backend = "s3" + config = { + bucket = "nhse-${var.environment}-${var.repo_name}-terraform-state" + key = "account_wide/terraform.state" + region = var.aws_region } } -data "aws_subnets" "private_subnets" { - filter { - name = "vpc-id" - values = [data.aws_vpc.vpc.id] - } - - filter { - name = "tag:Name" - values = ["${local.account_prefix}-vpc-private-*"] - } - filter { - name = "tag:CidrRange" - values = [var.vpc_private_subnet_cidr_range] - } -} - -data "aws_subnet" "private_subnets_details" { - for_each = toset(data.aws_subnets.private_subnets.ids) - id = each.value -} data "aws_prefix_list" "dynamodb" { name = "com.amazonaws.${var.aws_region}.dynamodb" } diff --git a/infrastructure/stacks/triage/lambda.tf b/infrastructure/stacks/triage/lambda.tf index b5129d96..ff23a3ba 100644 --- a/infrastructure/stacks/triage/lambda.tf +++ b/infrastructure/stacks/triage/lambda.tf @@ -12,14 +12,14 @@ module "s3lambda" { timeout = 60 description = "Lambda function for S3 event processing in SAET Triage API" - subnet_ids = [for subnet in data.aws_subnet.private_subnets_details : subnet.id] + subnet_ids = 
data.terraform_remote_state.vpc.outputs.private_subnet_ids security_group_ids = [aws_security_group.triage_api_lambda_security_group.id] vpc_id = data.aws_vpc.vpc.id environment_variables = { "ENVIRONMENT" = var.environment "WORKSPACE" = terraform.workspace == "default" ? "" : terraform.workspace - "DATA_RELEASE_BUCKET" = module.pathway_artefact_bucket.s3_bucket_id + "DATA_RELEASE_BUCKET" = module.clinical_data_uploader_bucket.s3_bucket_id "BODY_MAP_NODE_TABLE" = module.bodymaps.dynamodb_table_name "STARTING_NODE_TABLE" = module.starting_coords.dynamodb_table_name "TRIAGE_NODE_TABLE" = module.triage_nodes.dynamodb_table_name @@ -39,14 +39,13 @@ module "apiglambda" { memory_size = var.apig_mem_size description = "Lambda function for App gateway processing event in SAET Triage API" - subnet_ids = [for subnet in data.aws_subnet.private_subnets_details : subnet.id] + subnet_ids = data.terraform_remote_state.vpc.outputs.private_subnet_ids security_group_ids = [aws_security_group.triage_api_lambda_security_group.id] vpc_id = data.aws_vpc.vpc.id environment_variables = { "ENVIRONMENT" = var.environment "WORKSPACE" = terraform.workspace == "default" ? 
"" : terraform.workspace - "DATA_RELEASE_BUCKET" = module.pathway_artefact_bucket.s3_bucket_id "BODY_MAP_NODE_TABLE" = module.bodymaps.dynamodb_table_name "STARTING_NODE_TABLE" = module.starting_coords.dynamodb_table_name "TRIAGE_NODE_TABLE" = module.triage_nodes.dynamodb_table_name diff --git a/infrastructure/stacks/triage/s3.tf b/infrastructure/stacks/triage/s3.tf index ca8ed716..407c87f1 100644 --- a/infrastructure/stacks/triage/s3.tf +++ b/infrastructure/stacks/triage/s3.tf @@ -1,4 +1,4 @@ -module "pathway_artefact_bucket" { +module "clinical_data_uploader_bucket" { source = "../../modules/s3" - bucket_name = "${local.resource_prefix}-artefact${local.workspace_suffix}" + bucket_name = "${local.account_prefix}-clinical-data-uploader" } diff --git a/infrastructure/stacks/triage/trigger.tf b/infrastructure/stacks/triage/trigger.tf index fee45627..da42bb5e 100644 --- a/infrastructure/stacks/triage/trigger.tf +++ b/infrastructure/stacks/triage/trigger.tf @@ -4,7 +4,7 @@ resource "aws_lambda_permission" "allows3" { action = "lambda:InvokeFunction" function_name = module.s3lambda.lambda_function_name principal = "s3.amazonaws.com" - source_arn = module.pathway_artefact_bucket.s3_bucket_arn + source_arn = module.clinical_data_uploader_bucket.s3_bucket_arn } resource "aws_s3_bucket_notification" "bucket_notification" { From 65202dbe71c8d9f1a2b01cf47b6c9a6d3c43cd82 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 15:57:08 +0000 Subject: [PATCH 31/46] NPT-1140 Amend NHS pathway file uploader s3 bucket name --- infrastructure/stacks/account_wide/outputs.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/stacks/account_wide/outputs.tf b/infrastructure/stacks/account_wide/outputs.tf index 7f624980..ff190d15 100644 --- a/infrastructure/stacks/account_wide/outputs.tf +++ b/infrastructure/stacks/account_wide/outputs.tf @@ -1,9 +1,9 @@ output "vpc_id" { description = "VPC ID" - values = module.vpc.vpc_id + value = 
module.vpc.vpc_id } output "private_subnet_ids" { description = "List of private subnet IDs" - values = module.vpc.private_subnets + value = module.vpc.private_subnets } From 1f1305eb184b8dec917317335f74a98f91929e79 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 16:11:47 +0000 Subject: [PATCH 32/46] NPT-1140 Amend NHS pathway file uploader s3 bucket name --- infrastructure/stacks/triage/iam.tf | 4 ++-- infrastructure/stacks/triage/lambda.tf | 4 ++-- infrastructure/stacks/triage/security_group.tf | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/infrastructure/stacks/triage/iam.tf b/infrastructure/stacks/triage/iam.tf index 4b050e78..d8b3b250 100644 --- a/infrastructure/stacks/triage/iam.tf +++ b/infrastructure/stacks/triage/iam.tf @@ -26,9 +26,9 @@ resource "aws_iam_policy" "s3_access" { ], Resource = [ # bucket itself - "arn:aws:s3:::${module.pathway_artefact_bucket.s3_bucket_id}", + "arn:aws:s3:::${module.clinical_data_uploader_bucket.s3_bucket_id}", # all objects inside - "arn:aws:s3:::${module.pathway_artefact_bucket.s3_bucket_id}/*" + "arn:aws:s3:::${module.clinical_data_uploader_bucket.s3_bucket_id}/*" ] }, ] diff --git a/infrastructure/stacks/triage/lambda.tf b/infrastructure/stacks/triage/lambda.tf index ff23a3ba..5ffe2803 100644 --- a/infrastructure/stacks/triage/lambda.tf +++ b/infrastructure/stacks/triage/lambda.tf @@ -14,7 +14,7 @@ module "s3lambda" { subnet_ids = data.terraform_remote_state.vpc.outputs.private_subnet_ids security_group_ids = [aws_security_group.triage_api_lambda_security_group.id] - vpc_id = data.aws_vpc.vpc.id + vpc_id = data.terraform_remote_state.vpc.outputs.vpc_id environment_variables = { "ENVIRONMENT" = var.environment @@ -41,7 +41,7 @@ module "apiglambda" { subnet_ids = data.terraform_remote_state.vpc.outputs.private_subnet_ids security_group_ids = [aws_security_group.triage_api_lambda_security_group.id] - vpc_id = data.aws_vpc.vpc.id + vpc_id = 
data.terraform_remote_state.vpc.outputs.vpc_id environment_variables = { "ENVIRONMENT" = var.environment diff --git a/infrastructure/stacks/triage/security_group.tf b/infrastructure/stacks/triage/security_group.tf index ad3faa11..649dd21c 100644 --- a/infrastructure/stacks/triage/security_group.tf +++ b/infrastructure/stacks/triage/security_group.tf @@ -3,7 +3,7 @@ resource "aws_security_group" "triage_api_lambda_security_group" { name = "${local.resource_prefix}-${var.processor_lambda_name}${local.workspace_suffix}-sg" description = "Security group for processor lambda" - vpc_id = data.aws_vpc.vpc.id + vpc_id = data.terraform_remote_state.vpc.outputs.vpc_id } resource "aws_vpc_security_group_egress_rule" "allow_dynamodb_access_from_organisation_api" { From 599c28d1a8154187059542a2be3bacba8cd9bbf2 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 16:15:01 +0000 Subject: [PATCH 33/46] NPT-1140 Amend NHS pathway file uploader s3 bucket name --- infrastructure/stacks/triage/trigger.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/stacks/triage/trigger.tf b/infrastructure/stacks/triage/trigger.tf index da42bb5e..fea25ecc 100644 --- a/infrastructure/stacks/triage/trigger.tf +++ b/infrastructure/stacks/triage/trigger.tf @@ -8,7 +8,7 @@ resource "aws_lambda_permission" "allows3" { } resource "aws_s3_bucket_notification" "bucket_notification" { - bucket = module.pathway_artefact_bucket.s3_bucket_id + bucket = module.clinical_data_uploader_bucket.s3_bucket_id lambda_function { lambda_function_arn = module.s3lambda.lambda_function_arn From f269d96cd1fdca8b446d235ccf716cd360aa7525 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 16:50:34 +0000 Subject: [PATCH 34/46] NPT-1140 Amend artefact management bucket policy principal to account github runner role --- infrastructure/stacks/artefact_management/s3.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index 40d457d2..2674d48e 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -15,7 +15,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}" ] } actions = [ @@ -30,7 +30,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}" ] } actions = [ From 5dc8a706853e1eb6b1ce352b020313ff593235ec Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Mon, 9 Mar 2026 18:39:19 +0000 Subject: [PATCH 35/46] NPT-1140 Amend artefact management bucket policy principal to account github runner role --- infrastructure/stacks/artefact_management/s3.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index 2674d48e..937dad16 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -15,7 +15,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}" ] } actions = [ @@ -30,7 +30,7 @@ data "aws_iam_policy_document" 
"artefacts_bucket_policy" { principals { type = "AWS" identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}" + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}" ] } actions = [ From 14751b3293ff3bc7ca9954ac4313c47402a92f74 Mon Sep 17 00:00:00 2001 From: Maciej Kaczor Date: Tue, 10 Mar 2026 09:52:53 +0100 Subject: [PATCH 36/46] NPT-1107: create _status endpoint --- src/deploy.sh | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/src/deploy.sh b/src/deploy.sh index a218e482..4f7de89f 100755 --- a/src/deploy.sh +++ b/src/deploy.sh @@ -32,6 +32,7 @@ TBRANCH="T1" CONCURRENT_EXEC_NUM=5 LAMBDA_APIG_ALIAS_NAME="live" PING_ENDPOINT="_ping" +STATUS_ENDPOINT="_status" # Create IAM role role_arn=$($AWS iam create-role \ @@ -299,13 +300,25 @@ ping_resource_id=$($AWS apigateway create-resource \ --output text ) +status_resource_id=$($AWS apigateway create-resource \ + --rest-api-id "$rest_api_id" \ + --parent-id "$root_resource_id" \ + --path-part "$STATUS_ENDPOINT" \ + --query 'id' \ + --output text +) + $AWS apigateway put-method \ --rest-api-id "$rest_api_id" \ --resource-id "$ping_resource_id" \ --http-method GET \ --authorization-type "NONE" > /dev/null -echo "✅ $API_NAME GET method created: $rest_api_id" +$AWS apigateway put-method \ + --rest-api-id "$rest_api_id" \ + --resource-id "$status_resource_id" \ + --http-method POST \ + --authorization-type "NONE" > /dev/null # Create POST method $AWS apigateway put-method \ @@ -348,6 +361,14 @@ $AWS apigateway put-integration \ --integration-http-method POST \ --uri "arn:aws:apigateway:$REGION:lambda:path/2015-03-31/functions/$lambda_apig_alias_arn/invocations" > /dev/null +$AWS apigateway put-integration \ + --rest-api-id "$rest_api_id" \ + --resource-id "$status_resource_id" \ + --http-method POST \ + --type AWS_PROXY \ + --integration-http-method POST \ + --uri 
"arn:aws:apigateway:$REGION:lambda:path/2015-03-31/functions/$lambda_apig_alias_arn/invocations" > /dev/null + if [[ "$target" == "$LOCALSTACK" ]]; then echo "✅ $API_NAME triage resource linked to lambda: $LAMBDA_NAME-apig" else From 9d4712cba6cd2307464fbd5ddbcd8cc8c9ba8501 Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Tue, 10 Mar 2026 09:31:16 +0000 Subject: [PATCH 37/46] NPT-951: Adding monitoring to the Dynamodb --- infrastructure/stacks/triage/s3.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/stacks/triage/s3.tf b/infrastructure/stacks/triage/s3.tf index 407c87f1..72451c6c 100644 --- a/infrastructure/stacks/triage/s3.tf +++ b/infrastructure/stacks/triage/s3.tf @@ -1,4 +1,4 @@ module "clinical_data_uploader_bucket" { source = "../../modules/s3" - bucket_name = "${local.account_prefix}-clinical-data-uploader" + bucket_name = "${local.account_prefix}-clinical-data-uploader-${local.workspace_suffix}" } From 4b0d29780410b26de07117bda1d0ed690a1346ec Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 10:06:00 +0000 Subject: [PATCH 38/46] NPT-1140 Deploy infra in INT env and amend artefact s3 bucket principals --- ...ipeline-deploy-account-infrastructure.yaml | 1 + .../pipeline-deploy-application.yaml | 1 + .../workflows/pipeline-deploy-policies.yaml | 1 + .../pipeline-infrastructure-cleardown.yaml | 1 + infrastructure/environments/int/.gitkeep | 0 .../environments/int/account_security.tfvars | 1 + .../environments/int/acount_wide.tfvars | 20 ++++++++++++++++ .../environments/int/environment.tfvars | 2 ++ infrastructure/environments/int/triage.tfvars | 0 .../stacks/artefact_management/s3.tf | 24 ++++++++++++++----- src/deploy.sh | 2 +- ... 
API (42.2.0-111O).postman_collection.json | 2 +- ...riage API (48.2.0).postman_collection.json | 2 +- 13 files changed, 48 insertions(+), 9 deletions(-) create mode 100644 infrastructure/environments/int/.gitkeep create mode 100644 infrastructure/environments/int/account_security.tfvars create mode 100644 infrastructure/environments/int/acount_wide.tfvars create mode 100644 infrastructure/environments/int/environment.tfvars create mode 100644 infrastructure/environments/int/triage.tfvars diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index bf6cbd39..0ecdb85a 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -34,6 +34,7 @@ on: type: choice options: - dev + - int - test - prod workflow_call: diff --git a/.github/workflows/pipeline-deploy-application.yaml b/.github/workflows/pipeline-deploy-application.yaml index 6d98b631..332150e4 100644 --- a/.github/workflows/pipeline-deploy-application.yaml +++ b/.github/workflows/pipeline-deploy-application.yaml @@ -27,6 +27,7 @@ on: type: choice options: - dev + - int - test - prod diff --git a/.github/workflows/pipeline-deploy-policies.yaml b/.github/workflows/pipeline-deploy-policies.yaml index 6aebc3c1..67f692d3 100644 --- a/.github/workflows/pipeline-deploy-policies.yaml +++ b/.github/workflows/pipeline-deploy-policies.yaml @@ -24,6 +24,7 @@ on: type: choice options: - dev + - int - test - prod workflow_call: diff --git a/.github/workflows/pipeline-infrastructure-cleardown.yaml b/.github/workflows/pipeline-infrastructure-cleardown.yaml index 562113b0..ab93fd15 100644 --- a/.github/workflows/pipeline-infrastructure-cleardown.yaml +++ b/.github/workflows/pipeline-infrastructure-cleardown.yaml @@ -25,6 +25,7 @@ on: type: choice options: - dev + - int - test workspace: description: "Specify the workspace to cleardown" diff --git 
a/infrastructure/environments/int/.gitkeep b/infrastructure/environments/int/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/infrastructure/environments/int/account_security.tfvars b/infrastructure/environments/int/account_security.tfvars new file mode 100644 index 00000000..cd2da3ef --- /dev/null +++ b/infrastructure/environments/int/account_security.tfvars @@ -0,0 +1 @@ +enable_iam_analyzer = true diff --git a/infrastructure/environments/int/acount_wide.tfvars b/infrastructure/environments/int/acount_wide.tfvars new file mode 100644 index 00000000..78d7ac24 --- /dev/null +++ b/infrastructure/environments/int/acount_wide.tfvars @@ -0,0 +1,20 @@ +vpc = { + name = "vpc" + cidr = "10.171.0.0/16" + + public_subnet_a = "10.171.0.0/21" + public_subnet_b = "10.171.8.0/21" + public_subnet_c = "10.171.16.0/21" + + private_subnet_a = "10.171.24.0/21" + private_subnet_b = "10.171.32.0/21" + private_subnet_c = "10.171.40.0/21" +} + +enable_flow_log = false +flow_log_s3_force_destroy = true + +# Single NAT Gateway +enable_nat_gateway = true +single_nat_gateway = true +one_nat_gateway_per_az = false diff --git a/infrastructure/environments/int/environment.tfvars b/infrastructure/environments/int/environment.tfvars new file mode 100644 index 00000000..585012c5 --- /dev/null +++ b/infrastructure/environments/int/environment.tfvars @@ -0,0 +1,2 @@ +# environment specific values that are applicable to more than one stack +environment = "int" diff --git a/infrastructure/environments/int/triage.tfvars b/infrastructure/environments/int/triage.tfvars new file mode 100644 index 00000000..e69de29b diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index 937dad16..2f87aacc 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -1,3 +1,19 @@ +locals { + mgmt_principals = [ + 
"arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}", + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.app_github_runner_role_name}" + ] + + other_env_principals = [ + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}", + "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" + ] + + bucket_principals = var.environment == "mgmt" ? local.mgmt_principals : local.other_env_principals +} + + + module "artefacts_bucket" { source = "../../modules/s3" bucket_name = local.artefacts_bucket @@ -14,9 +30,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { statement { principals { type = "AWS" - identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}" - ] + identifiers = local.bucket_principals } actions = [ "s3:ListBucket", @@ -29,9 +43,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { statement { principals { type = "AWS" - identifiers = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}" - ] + identifiers = local.bucket_principals } actions = [ "s3:GetObject", diff --git a/src/deploy.sh b/src/deploy.sh index 4f7de89f..acc93f4c 100755 --- a/src/deploy.sh +++ b/src/deploy.sh @@ -352,7 +352,7 @@ $AWS apigateway put-method-response \ --status-code 200 \ --response-models '{"application/json":"Empty"}' \ --response-parameters '{"method.response.header.Content-Type": true}' > /dev/null - + $AWS apigateway put-integration \ --rest-api-id "$rest_api_id" \ --resource-id "$resource_id" \ diff --git a/tests/integration/Patient Triage API (42.2.0-111O).postman_collection.json b/tests/integration/Patient Triage API (42.2.0-111O).postman_collection.json index f811d4f1..f2375413 100644 --- a/tests/integration/Patient Triage API (42.2.0-111O).postman_collection.json +++ 
b/tests/integration/Patient Triage API (42.2.0-111O).postman_collection.json @@ -1005,4 +1005,4 @@ } } ] -} \ No newline at end of file +} diff --git a/tests/integration/Patient Triage API (48.2.0).postman_collection.json b/tests/integration/Patient Triage API (48.2.0).postman_collection.json index 0c4a059b..5da754a1 100644 --- a/tests/integration/Patient Triage API (48.2.0).postman_collection.json +++ b/tests/integration/Patient Triage API (48.2.0).postman_collection.json @@ -8739,4 +8739,4 @@ } } ] -} \ No newline at end of file +} From 6e93517253b3d848b9803cc054eca8e501e67c0a Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 11:58:54 +0000 Subject: [PATCH 39/46] NPT-1140 Amend artefact mgmt S3 bucket principal identifier to contain roles across all env --- .github/workflows/deploy-infrastructure.yaml | 13 ++++++ ...ipeline-deploy-account-infrastructure.yaml | 12 +++++ .../stacks/artefact_management/s3.tf | 45 ++++++++++++++----- .../stacks/artefact_management/variables.tf | 14 ++++++ 4 files changed, 72 insertions(+), 12 deletions(-) create mode 100644 infrastructure/stacks/artefact_management/variables.tf diff --git a/.github/workflows/deploy-infrastructure.yaml b/.github/workflows/deploy-infrastructure.yaml index 3c377b39..99386c0f 100644 --- a/.github/workflows/deploy-infrastructure.yaml +++ b/.github/workflows/deploy-infrastructure.yaml @@ -65,6 +65,15 @@ on: MGMT_ACCOUNT_ID: description: "AWS management account ID for credentials" required: true + DEV_ACCOUNT_ID: + description: "DEV AWS account ID" + required: true + TEST_ACCOUNT_ID: + description: "TEST AWS account ID" + required: true + PROD_ACCOUNT_ID: + description: "PROD AWS account ID" + required: true outputs: plan_result: description: "The Terraform plan output" @@ -109,6 +118,10 @@ jobs: - name: "Deploy infrastructure stack" id: deploy_stack uses: ./.github/actions/action-infrastructure-stack + env: + TF_VAR_dev_account_id: ${{ secrets.DEV_ACCOUNT_ID }} + 
TF_VAR_test_account_id: ${{ secrets.TEST_ACCOUNT_ID }} + TF_VAR_prod_account_id: ${{ secrets.PROD_ACCOUNT_ID }} with: environment: ${{ inputs.environment }} workspace: ${{ inputs.workspace }} diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index 0ecdb85a..abc3b648 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -54,6 +54,15 @@ on: MGMT_ACCOUNT_ID: description: "Management AWS account ID for credentials" required: true + DEV_ACCOUNT_ID: + description: "DEV AWS account ID" + required: true + TEST_ACCOUNT_ID: + description: "TEST AWS account ID" + required: true + PROD_ACCOUNT_ID: + description: "PROD AWS account ID" + required: true concurrency: group: account-infrastructure-${{ github.ref }} @@ -171,3 +180,6 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} + DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} + TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} + PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index 2f87aacc..bb827e27 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -1,17 +1,38 @@ locals { - mgmt_principals = [ - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}", - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.app_github_runner_role_name}" - ] + principals_by_environment = { + mgmt = [ + "arn:aws:iam::${var.mgmt_account_id}:role/${var.repo_name}-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.mgmt_account_id}:role/${var.repo_name}-${var.app_github_runner_role_name}" + ] - other_env_principals = [ - 
"arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.account_github_runner_role_name}", - "arn:aws:iam::${local.account_id}:role/${var.repo_name}-${var.environment}-${var.app_github_runner_role_name}" - ] + dev = [ + "arn:aws:iam::${var.dev_account_id}:role/${var.repo_name}-dev-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.dev_account_id}:role/${var.repo_name}-dev-${var.app_github_runner_role_name}" + ] - bucket_principals = var.environment == "mgmt" ? local.mgmt_principals : local.other_env_principals -} + int = [ + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-int-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-int-${var.app_github_runner_role_name}" + ] + + test = [ + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.app_github_runner_role_name}" + ] + ref = [ + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.app_github_runner_role_name}" + ] + + prod = [ + "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.account_github_runner_role_name}", + "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.app_github_runner_role_name}" + ] + } + + all_bucket_principals = flatten(values(local.principals_by_environment)) +} module "artefacts_bucket" { @@ -30,7 +51,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { statement { principals { type = "AWS" - identifiers = local.bucket_principals + identifiers = local.all_bucket_principals } actions = [ "s3:ListBucket", @@ -43,7 +64,7 @@ data "aws_iam_policy_document" "artefacts_bucket_policy" { statement { principals { type = "AWS" - identifiers = local.bucket_principals + identifiers = local.all_bucket_principals } 
actions = [ "s3:GetObject", diff --git a/infrastructure/stacks/artefact_management/variables.tf b/infrastructure/stacks/artefact_management/variables.tf new file mode 100644 index 00000000..694d6d37 --- /dev/null +++ b/infrastructure/stacks/artefact_management/variables.tf @@ -0,0 +1,14 @@ +variable "dev_account_id" { + description = "DEV account ID" + type = string +} + +variable "test_account_id" { + description = "TEST account ID" + type = string +} + +variable "prod_account_id" { + description = "PROD account ID" + type = string +} From a5dbcbf03d20a39f7acf07a1190116b223224bdb Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 12:03:31 +0000 Subject: [PATCH 40/46] NPT-1140 Amend artefact mgmt S3 bucket principal identifier to contain roles across all env --- .github/workflows/deploy-application-infrastructure.yaml | 6 ++++++ .../workflows/pipeline-deploy-account-infrastructure.yaml | 3 +++ 2 files changed, 9 insertions(+) diff --git a/.github/workflows/deploy-application-infrastructure.yaml b/.github/workflows/deploy-application-infrastructure.yaml index d79b1f3c..715d0080 100644 --- a/.github/workflows/deploy-application-infrastructure.yaml +++ b/.github/workflows/deploy-application-infrastructure.yaml @@ -89,6 +89,9 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} + DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} + TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} + PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} manual-approval-application-infra: name: "Manual approval for deployment of application infrastructure to the ${{ inputs.environment }} environment" @@ -136,6 +139,9 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} + DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} + TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} + PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} deploy_summary: name: "Summarise deployment of application 
infrastructure to ${{ inputs.environment }} environment" diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index abc3b648..0c76b487 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -128,6 +128,9 @@ jobs: secrets: ACCOUNT_ID: ${{ secrets.ACCOUNT_ID }} MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} + DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} + TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} + PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} manual-approval: name: "Manual approval for ${{ needs.metadata.outputs.environment }} infrastructure deployment" From f7b20a7a06d8b59b35f3fb2f6bce71ad547c81ec Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 12:31:44 +0000 Subject: [PATCH 41/46] NPT-1140 Amend artefact mgmt S3 bucket principal identifier to contain roles across env excluding PROD --- .../deploy-application-infrastructure.yaml | 2 -- .github/workflows/deploy-infrastructure.yaml | 4 --- ...ipeline-deploy-account-infrastructure.yaml | 5 ---- .../stacks/artefact_management/s3.tf | 26 ++++++++++--------- .../stacks/artefact_management/variables.tf | 5 ---- 5 files changed, 14 insertions(+), 28 deletions(-) diff --git a/.github/workflows/deploy-application-infrastructure.yaml b/.github/workflows/deploy-application-infrastructure.yaml index 715d0080..5fae5299 100644 --- a/.github/workflows/deploy-application-infrastructure.yaml +++ b/.github/workflows/deploy-application-infrastructure.yaml @@ -91,7 +91,6 @@ jobs: MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} - PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} manual-approval-application-infra: name: "Manual approval for deployment of application infrastructure to the ${{ inputs.environment }} environment" @@ -141,7 
+140,6 @@ jobs: MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} - PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} deploy_summary: name: "Summarise deployment of application infrastructure to ${{ inputs.environment }} environment" diff --git a/.github/workflows/deploy-infrastructure.yaml b/.github/workflows/deploy-infrastructure.yaml index 99386c0f..d83a1169 100644 --- a/.github/workflows/deploy-infrastructure.yaml +++ b/.github/workflows/deploy-infrastructure.yaml @@ -71,9 +71,6 @@ on: TEST_ACCOUNT_ID: description: "TEST AWS account ID" required: true - PROD_ACCOUNT_ID: - description: "PROD AWS account ID" - required: true outputs: plan_result: description: "The Terraform plan output" @@ -121,7 +118,6 @@ jobs: env: TF_VAR_dev_account_id: ${{ secrets.DEV_ACCOUNT_ID }} TF_VAR_test_account_id: ${{ secrets.TEST_ACCOUNT_ID }} - TF_VAR_prod_account_id: ${{ secrets.PROD_ACCOUNT_ID }} with: environment: ${{ inputs.environment }} workspace: ${{ inputs.workspace }} diff --git a/.github/workflows/pipeline-deploy-account-infrastructure.yaml b/.github/workflows/pipeline-deploy-account-infrastructure.yaml index 0c76b487..e1c0d298 100644 --- a/.github/workflows/pipeline-deploy-account-infrastructure.yaml +++ b/.github/workflows/pipeline-deploy-account-infrastructure.yaml @@ -60,9 +60,6 @@ on: TEST_ACCOUNT_ID: description: "TEST AWS account ID" required: true - PROD_ACCOUNT_ID: - description: "PROD AWS account ID" - required: true concurrency: group: account-infrastructure-${{ github.ref }} @@ -130,7 +127,6 @@ jobs: MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} - PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} manual-approval: name: "Manual approval for ${{ needs.metadata.outputs.environment }} infrastructure deployment" @@ -185,4 +181,3 @@ jobs: MGMT_ACCOUNT_ID: ${{ secrets.MGMT_ACCOUNT_ID }} 
DEV_ACCOUNT_ID: ${{ secrets.DEV_ACCOUNT_ID }} TEST_ACCOUNT_ID: ${{ secrets.TEST_ACCOUNT_ID }} - PROD_ACCOUNT_ID: ${{ secrets.PROD_ACCOUNT_ID }} diff --git a/infrastructure/stacks/artefact_management/s3.tf b/infrastructure/stacks/artefact_management/s3.tf index bb827e27..a8e933b2 100644 --- a/infrastructure/stacks/artefact_management/s3.tf +++ b/infrastructure/stacks/artefact_management/s3.tf @@ -15,20 +15,22 @@ locals { "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-int-${var.app_github_runner_role_name}" ] - test = [ - "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.account_github_runner_role_name}", - "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.app_github_runner_role_name}" - ] + # The below environments have been commented out until their environments and roles have been bootstrapped - ref = [ - "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.account_github_runner_role_name}", - "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.app_github_runner_role_name}" - ] + # test = [ + # "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.account_github_runner_role_name}", + # "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-test-${var.app_github_runner_role_name}" + # ] - prod = [ - "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.account_github_runner_role_name}", - "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.app_github_runner_role_name}" - ] + # ref = [ + # "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.account_github_runner_role_name}", + # "arn:aws:iam::${var.test_account_id}:role/${var.repo_name}-ref-${var.app_github_runner_role_name}" + # ] + + # prod = [ + # "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.account_github_runner_role_name}", + # "arn:aws:iam::${var.prod_account_id}:role/${var.repo_name}-prod-${var.app_github_runner_role_name}" + # ] } 
all_bucket_principals = flatten(values(local.principals_by_environment)) diff --git a/infrastructure/stacks/artefact_management/variables.tf b/infrastructure/stacks/artefact_management/variables.tf index 694d6d37..f7b44626 100644 --- a/infrastructure/stacks/artefact_management/variables.tf +++ b/infrastructure/stacks/artefact_management/variables.tf @@ -7,8 +7,3 @@ variable "test_account_id" { description = "TEST account ID" type = string } - -variable "prod_account_id" { - description = "PROD account ID" - type = string -} From ee96bc3cde20dc969e9b56d88f2ac0576536582a Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 13:31:57 +0000 Subject: [PATCH 42/46] NPT-1140 Amend action infra stack script to remove reference to unused environments --- .sonarlint/connectedMode.json | 2 +- docs/developer-guides/Scripting_Docker.md | 4 ++-- scripts/workflow/action-infra-stack.sh | 16 +++------------- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/.sonarlint/connectedMode.json b/.sonarlint/connectedMode.json index 2914f710..1223ab25 100644 --- a/.sonarlint/connectedMode.json +++ b/.sonarlint/connectedMode.json @@ -2,4 +2,4 @@ "sonarCloudOrganization": "nhsdigital", "projectKey": "NHSDigital_patient-triage-api", "region": "EU" -} \ No newline at end of file +} diff --git a/docs/developer-guides/Scripting_Docker.md b/docs/developer-guides/Scripting_Docker.md index 9869bd21..ebde6a56 100644 --- a/docs/developer-guides/Scripting_Docker.md +++ b/docs/developer-guides/Scripting_Docker.md @@ -153,8 +153,8 @@ It is usually the case that there is a specific image that you will most often w ```make build: # Build the project artefact @Pipeline - DOCKER_IMAGE=my-shiny-app - make docker-build + DOCKER_IMAGE=my-shiny-app + make docker-build ``` Now when you run `make build`, it will do the right thing. 
Keeping this convention consistent across projects means that new starters can be on-boarded quickly, without needing to learn a new set of conventions each time. diff --git a/scripts/workflow/action-infra-stack.sh b/scripts/workflow/action-infra-stack.sh index df9ac757..690b5f73 100644 --- a/scripts/workflow/action-infra-stack.sh +++ b/scripts/workflow/action-infra-stack.sh @@ -33,16 +33,6 @@ TF_VAR_stack_name=$(echo "$STACK" | tr '_' '-' ) export TF_VAR_stack_name export TF_VAR_mgmt_account_id="${MGMT_ACCOUNT_ID:-""}" -# Override ENVIRONMENT to non-prod for account_policies or account_security stack -if { [ "$STACK" = "account_policies" ] || [ "$STACK" = "account_security" ]; } && { [ "$ENVIRONMENT" = "ref" ] || [ "$ENVIRONMENT" = "sandpit" ] || [ "$ENVIRONMENT" = "int" ] ; } then - export ENVIRONMENT="non-prod" - echo "Stack is $STACK - overriding ENVIRONMENT to non-prod" -fi - -if { [ "$STACK" = "account_policies" ] || [ "$STACK" = "account_security" ]; } && { [ "$ENVIRONMENT" = "dev" ] || [ "$ENVIRONMENT" = "test" ] ; } then - export ENVIRONMENT="dev" - echo "Stack is $STACK - overriding ENVIRONMENT to dev" -fi # needed for terraform management stack export TF_VAR_terraform_state_bucket_name="nhse-$ENVIRONMENT-$TF_VAR_repo_name-terraform-state" # globally unique name @@ -62,11 +52,11 @@ if [ -z "$STACK" ] ; then fi if [ -z "$ENVIRONMENT" ] ; then - echo Set ENVIRONMENT to the environment to action the terraform in - one of dev, test, preprod, prod + echo Set ENVIRONMENT to the environment to action the terraform in - one of dev, test, int, ref, prod EXPORTS_SET=1 else - if [[ ! $ENVIRONMENT =~ ^(mgmt|dev|test|sandpit|int|ref|non-prod|preprod|prod|prototype) ]]; then - echo ENVIRONMENT should be mgmt, dev, test, sandpit, int, ref, non-prod, preprod or prod + if [[ ! 
$ENVIRONMENT =~ ^(mgmt|dev|test|int|ref|prod) ]]; then + echo ENVIRONMENT should be mgmt, dev, test, int, ref, prod EXPORTS_SET=1 fi fi From 8fa16d8aef98843d6dde4fe7a1b52e9d56d0362e Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 15:00:21 +0000 Subject: [PATCH 43/46] NPT-1140 Amend action infra stack script to remove reference to unused environments --- .../environments/int/{acount_wide.tfvars => account_wide.tfvars} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename infrastructure/environments/int/{acount_wide.tfvars => account_wide.tfvars} (100%) diff --git a/infrastructure/environments/int/acount_wide.tfvars b/infrastructure/environments/int/account_wide.tfvars similarity index 100% rename from infrastructure/environments/int/acount_wide.tfvars rename to infrastructure/environments/int/account_wide.tfvars From ddf99df7464c2dafa86f0a8427179531067ffac5 Mon Sep 17 00:00:00 2001 From: Andrew Walker Date: Fri, 6 Mar 2026 09:27:38 +0000 Subject: [PATCH 44/46] Integrates with Odin via a new lambda reading log events from the APIG lambda --- .github/workflows/build-project.yaml | 9 +- .github/workflows/cicd-3-deploy.yaml | 3 + .gitignore | 1 + .vscode/settings.json | 3 + Makefile | 25 +- infrastructure/modules/lambda/variables.tf | 2 +- .../stacks/account_policies/iam_roles.tf | 28 +- .../templates/performance_user_data.sh.tmpl | 2 +- infrastructure/stacks/triage/iam.tf | 23 + infrastructure/stacks/triage/lambda.tf | 32 ++ infrastructure/stacks/triage/trigger.tf | 10 + infrastructure/stacks/triage/variables.tf | 10 + scripts/docker/dgoss.sh | 2 +- scripts/docker/docker.lib.sh | 2 +- scripts/docker/dockerfile-linter.sh | 2 +- scripts/docker/tests/docker.test.sh | 2 +- scripts/githooks/check-branch-name.sh | 2 +- scripts/githooks/check-commit-message.sh | 9 +- scripts/githooks/check-english-usage.sh | 2 +- scripts/githooks/check-file-format.sh | 2 +- scripts/githooks/check-markdown-format.sh | 2 +- 
scripts/githooks/check-terraform-format.sh | 2 +- scripts/githooks/scan-secrets.sh | 2 +- .../reports/create-lines-of-code-report.sh | 2 +- scripts/reports/create-sbom-report.sh | 2 +- scripts/reports/perform-static-analysis.sh | 2 +- scripts/reports/scan-vulnerabilities.sh | 2 +- scripts/shellscript-linter.sh | 2 +- scripts/terraform/terraform.lib.sh | 2 +- scripts/terraform/terraform.sh | 2 +- scripts/tests/coverage.sh | 2 +- scripts/tests/integration.sh | 2 +- scripts/tests/lint.sh | 2 +- scripts/tests/unit.sh | 2 +- scripts/workflow/generate-feature-flags.sh | 2 +- .../transformer.mjs | 164 +++++++ src/delete-apis.sh | 2 +- src/delete-sandbox-api.sh | 2 +- src/deploy-sandbox.sh | 2 +- src/deploy.sh | 409 ++++++------------ src/deployment/api-gateway.sh | 96 ++++ src/deployment/aws-env.sh | 24 + src/deployment/dynamo_db.sh | 30 ++ src/deployment/iam-roles.sh | 67 +++ src/deployment/lambda.sh | 104 +++++ src/update.sh | 2 +- 46 files changed, 790 insertions(+), 313 deletions(-) create mode 100644 src/cloudwatch_to_splunk_transformer/transformer.mjs create mode 100755 src/deployment/api-gateway.sh create mode 100755 src/deployment/aws-env.sh create mode 100755 src/deployment/dynamo_db.sh create mode 100755 src/deployment/iam-roles.sh create mode 100755 src/deployment/lambda.sh diff --git a/.github/workflows/build-project.yaml b/.github/workflows/build-project.yaml index 7b83388b..987783f9 100644 --- a/.github/workflows/build-project.yaml +++ b/.github/workflows/build-project.yaml @@ -98,9 +98,16 @@ jobs: APPLICATION_TAG: ${{ inputs.application_tag }} RELEASE_BUILD: ${{ inputs.release_build }} - - name: "Publish artefacts to GitHub" + - name: "Publish triage lambdas to GitHub" uses: actions/upload-artifact@v7 with: name: ${{ inputs.name }}-${{ inputs.build_type }}-artefacts path: src/lambda_function.zip if-no-files-found: error + + - name: "Publish splunk lambda to GitHub" + uses: actions/upload-artifact@v7 + with: + name: ${{ inputs.name }}-${{ inputs.build_type 
}}-splunk-artefact + path: src/splunk_function.zip + if-no-files-found: error diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml index aa55437d..89d6170d 100644 --- a/.github/workflows/cicd-3-deploy.yaml +++ b/.github/workflows/cicd-3-deploy.yaml @@ -99,6 +99,9 @@ jobs: - name: "Clean up previous builds" run: make clean - name: "Deploy" + env: + SPLUNK_HEC_URL: ${{ secrets.SPLUNK_HEC_URL }} + SPLUNK_HEC_TOKEN: ${{ secrets.SPLUNK_HEC_TOKEN }} run: make deploy - name: "Update APIM" env: diff --git a/.gitignore b/.gitignore index aef45e57..77cbd879 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ .venv **/__pycache__ **/lambda_function.zip +**/splunk_function.zip .coverage coverage_report.txt coverage.xml diff --git a/.vscode/settings.json b/.vscode/settings.json index 3d32f0cd..e34314d5 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -14,8 +14,11 @@ "Initialise", "licence", "LOCALSTACK", + "MGMT", "organisation", "personalisation", + "saet", + "Summarise", "Triagelogic", "Unrecognised" ], diff --git a/Makefile b/Makefile index 03e44749..1510f70e 100644 --- a/Makefile +++ b/Makefile @@ -11,10 +11,11 @@ dependencies: # Install dependencies needed to build and test the project @Pipel python3 -m venv .venv source .venv/bin/activate && pip install --upgrade pip && pip install -r dependencies.txt -r dev_dependencies.txt -build: # Build the project artefact @Pipeline +build: # Build the project artefacts @Pipeline python3 -m venv .venv rm -rf package rm -f src/lambda_function.zip || true + rm -f src/splunk_function.zip || true mkdir package source .venv/bin/activate && \ pip install -r dependencies.txt --target ./package && \ @@ -31,13 +32,19 @@ build: # Build the project artefact @Pipeline pushd src/lambda_function zip -r ../lambda_function.zip . -x __pycache__ > /dev/null popd + pushd src/cloudwatch_to_splunk_transformer + zip -r ../splunk_function.zip . 
> /dev/null + popd -publish: # Publish the project artefact @Pipeline - @echo "Publishing lambda_function.zip to S3 artifact bucket..." - @BUCKET_NAME="$${REPO_NAME}-$${ENVIRONMENT}-triage-artifact"; \ - echo "Target S3 bucket: $$BUCKET_NAME"; \ +publish: # Publish the project artefacts @Pipeline + echo "Publishing lambda_function.zip to S3 artifact bucket..." + export BUCKET_NAME="$${REPO_NAME}-$${ENVIRONMENT}-triage-artifact" + echo "Target S3 bucket: $$BUCKET_NAME" aws s3 cp src/lambda_function.zip s3://$$BUCKET_NAME/lambda_function.zip - @echo "Successfully published lambda_function.zip" + echo "Successfully published lambda_function.zip" + echo "Publishing splunk_function.zip to S3 artifact bucket..." + aws s3 cp src/splunk_function.zip s3://$$BUCKET_NAME/splunk_function.zip + echo "Successfully published splunk_function.zip" deploy-local: build # Deploy the project artefact to the target environment @Pipeline echo "Deploying to localstack" @@ -47,7 +54,7 @@ deploy-local: build # Deploy the project artefact to the target environment @Pip deploy: build echo "Deploying to AWS" echo "Pre-requisites: AWS credentials are configured" - (cd src; ./deploy.sh) + (cd src; ./deploy.sh aws) update-local: build echo "Updating local lambdas" @@ -65,6 +72,8 @@ clean-slate: clean-local SKIP_DEPS=true make test-coverage test-lint clean:: # Clean-up project resources (main) @Operations + echo "Deleting subscription filter..." + aws logs delete-subscription-filter --log-group-name /aws/lambda/triageApiLambda-apig --filter-name triageApiLambda-apig-filter || echo "Subscription filter not found" echo "Deleting LambdaS3ReadAccess role policy..." aws iam delete-role-policy --role-name triageApiLambda-ex-role --policy-name LambdaS3ReadAccess || echo "LambdaS3ReadAccess role policy not found" echo "Deleting AWSLambdaBasicExecutionRole role policy..." 
@@ -81,6 +90,8 @@ clean:: # Clean-up project resources (main) @Operations aws lambda delete-function --function-name triageApiLambda-apig || echo "Function not found" echo "Deleting triageApiLambda-s3 function..." aws lambda delete-function --function-name triageApiLambda-s3 || echo "Function not found" + echo "Deleting triageApiLambda-log-stream function..." + aws lambda delete-function --function-name triageApiLambda-log-stream || echo "Function not found" echo "Deleting pathways-data-bucket bucket..." aws s3 rm 's3://pathways-data-bucket' --recursive || echo "Bucket not found" aws s3api delete-bucket --bucket pathways-data-bucket || echo "Bucket not found" diff --git a/infrastructure/modules/lambda/variables.tf b/infrastructure/modules/lambda/variables.tf index a8f602b4..42a954a5 100644 --- a/infrastructure/modules/lambda/variables.tf +++ b/infrastructure/modules/lambda/variables.tf @@ -101,7 +101,7 @@ variable "security_group_ids" { } variable "s3_bucket_name" { - description = "Name of the S3 bucket where the Lambda package is stored" + description = "Name of the S3 bucket where the Lambda packages are stored" } variable "s3_key" { diff --git a/infrastructure/stacks/account_policies/iam_roles.tf b/infrastructure/stacks/account_policies/iam_roles.tf index 53a306f0..61010fe4 100644 --- a/infrastructure/stacks/account_policies/iam_roles.tf +++ b/infrastructure/stacks/account_policies/iam_roles.tf @@ -9,10 +9,10 @@ resource "aws_iam_service_linked_role" "shield" { # in the account resource "aws_iam_role" "cloudwatch_api_gateway_role" { name = "${var.project}-api-gateway-cloudwatch" - assume_role_policy = data.aws_iam_policy_document.assume_role.json + assume_role_policy = data.aws_iam_policy_document.assume_apig_role.json } -data "aws_iam_policy_document" "assume_role" { +data "aws_iam_policy_document" "assume_apig_role" { statement { effect = "Allow" @@ -29,3 +29,27 @@ resource "aws_iam_role_policy_attachment" "api_gateway_cloudwatch_policy_attachm role = 
aws_iam_role.cloudwatch_api_gateway_role.id policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs" } + +# Role and policy for allowing the Lambda function to write logs to CloudWatch Logs +resource "aws_iam_role" "cloudwatch_lambda_role" { + name = "${var.project}-lambda-cloudwatch" + assume_role_policy = data.aws_iam_policy_document.assume_lambda_role.json +} + +data "aws_iam_policy_document" "assume_lambda_role" { + statement { + effect = "Allow" + + principals { + type = "Service" + identifiers = ["lambda.amazonaws.com"] + } + + actions = ["sts:AssumeRole"] + } +} + +resource "aws_iam_role_policy_attachment" "lambda_cloudwatch_policy_attachment" { + role = aws_iam_role.cloudwatch_lambda_role.id + policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" +} diff --git a/infrastructure/stacks/account_wide/templates/performance_user_data.sh.tmpl b/infrastructure/stacks/account_wide/templates/performance_user_data.sh.tmpl index f91b2ca3..a5c94e11 100644 --- a/infrastructure/stacks/account_wide/templates/performance_user_data.sh.tmpl +++ b/infrastructure/stacks/account_wide/templates/performance_user_data.sh.tmpl @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # Log user-data to file as in gp_search diff --git a/infrastructure/stacks/triage/iam.tf b/infrastructure/stacks/triage/iam.tf index d8b3b250..7d8b8659 100644 --- a/infrastructure/stacks/triage/iam.tf +++ b/infrastructure/stacks/triage/iam.tf @@ -82,3 +82,26 @@ resource "aws_iam_role_policy_attachment" "ddb_aatach" { # policy_arn = each.value # } +data "aws_iam_policy_document" "logs_assume_role" { + statement { + actions = ["sts:AssumeRole"] + effect = "Allow" + principals { + type = "Service" + identifiers = ["logs.amazonaws.com"] + } + } +} + +resource "aws_iam_role" "log_export" { + name = "${local.processor_lambda_name}-lambda-log-export-role" + assume_role_policy = data.aws_iam_policy_document.logs_assume_role.json +} + +resource 
"aws_cloudwatch_log_subscription_filter" "lambda_log_export" { + name = "${local.processor_lambda_name}-filter" + log_group_name = aws_cloudwatch_log_group.export.name + filter_pattern = "" + destination_arn = aws_lambda.cw_to_splunk_transformer_lambda.arn + role_arn = aws_iam_role.log_export.arn +} diff --git a/infrastructure/stacks/triage/lambda.tf b/infrastructure/stacks/triage/lambda.tf index 5ffe2803..c1c616da 100644 --- a/infrastructure/stacks/triage/lambda.tf +++ b/infrastructure/stacks/triage/lambda.tf @@ -53,8 +53,40 @@ module "apiglambda" { account_id = data.aws_caller_identity.current.account_id } +resource "aws_cloudwatch_log_group" "apig_lambda_log_group" { + name = "/aws/lambda/${module.apiglambda.lambda_function_name}" + retention_in_days = 30 +} + resource "aws_lambda_alias" "apiglambda_live" { name = "live" function_name = module.apiglambda.lambda_function_name function_version = module.apiglambda.lambda_function_version } + +module "cw_to_splunk_transformer_lambda" { + source = "../../modules/lambda" + aws_region = var.aws_region + function_name = "${local.resource_prefix}-log-transformer-Lambda" + policy_jsons = [aws_iam_policy.s3_access.policy] + handler = "api_gateway_configurator.handler" + s3_bucket_name = local.artefacts_bucket + runtime = var.runtime + s3_key = var.s3_key # TODO: should be changed because it's a different package + memory_size = var.apig_mem_size + description = "Lambda function to transform cloudwatch logs and stream them to splunk" + + subnet_ids = [for subnet in data.aws_subnet.private_subnets_details : subnet.id] + security_group_ids = [aws_security_group.triage_api_lambda_security_group.id] + vpc_id = data.aws_vpc.vpc.id + + environment_variables = { + "ENVIRONMENT" = var.environment + "WORKSPACE" = terraform.workspace == "default" ? 
"" : terraform.workspace + "SPLUNK_HEC_URL" = var.splunk_hec_url + "SPLUNK_HEC_TOKEN" = var.splunk_hec_token + } + account_id = data.aws_caller_identity.current.account_id +} + + diff --git a/infrastructure/stacks/triage/trigger.tf b/infrastructure/stacks/triage/trigger.tf index fea25ecc..c8267aea 100644 --- a/infrastructure/stacks/triage/trigger.tf +++ b/infrastructure/stacks/triage/trigger.tf @@ -27,3 +27,13 @@ resource "aws_lambda_permission" "allowapig" { source_arn = "${aws_api_gateway_rest_api.triage.execution_arn}/*/${local.api_method}/${join("/", local.path_parts)}" qualifier = "live" } + +# Odin Lambda trigger for cloudwatch logs +resource "aws_lambda_permission" "allowodins" { + statement_id = "AllowExecutionFromCloudWatch" + action = "lambda:InvokeFunction" + function_name = module.cw_to_splunk_transformer_lambda.lambda_function_name + principal = "logs.amazonaws.com" + source_arn = "${aws_api_gateway_rest_api.triage.execution_arn}/*/${local.api_method}/${join("/", local.path_parts)}" + qualifier = "live" +} diff --git a/infrastructure/stacks/triage/variables.tf b/infrastructure/stacks/triage/variables.tf index 1a3a2c65..bfa691fe 100644 --- a/infrastructure/stacks/triage/variables.tf +++ b/infrastructure/stacks/triage/variables.tf @@ -60,6 +60,16 @@ variable "enable_xray_tracing" { default = true } +variable "splunk_hec_url" { + description = "The URL for the Splunk HEC endpoint to send logs to" + type = string +} + +variable "splunk_hec_token" { + description = "The token to send logs to the Splunk HEC endpoint" + type = string +} + # DynamoDB basic alarms variable "enable_dynamodb_basic_alarms" { description = "Enable basic CloudWatch alarms for DynamoDB table health metrics" diff --git a/scripts/docker/dgoss.sh b/scripts/docker/dgoss.sh index 8c5ce432..acefea92 100644 --- a/scripts/docker/dgoss.sh +++ b/scripts/docker/dgoss.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # shellcheck disable=SC2016,SC2154,SC2166 # WARNING: Please DO NOT edit this 
file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/docker/docker.lib.sh b/scripts/docker/docker.lib.sh index 561ad47f..b60adbc4 100644 --- a/scripts/docker/docker.lib.sh +++ b/scripts/docker/docker.lib.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # shellcheck disable=SC2155 # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/docker/dockerfile-linter.sh b/scripts/docker/dockerfile-linter.sh index 7ce37266..326e56d7 100755 --- a/scripts/docker/dockerfile-linter.sh +++ b/scripts/docker/dockerfile-linter.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/docker/tests/docker.test.sh b/scripts/docker/tests/docker.test.sh index 46129b77..2f2d4081 100755 --- a/scripts/docker/tests/docker.test.sh +++ b/scripts/docker/tests/docker.test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # shellcheck disable=SC1091,SC2034,SC2317 # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
diff --git a/scripts/githooks/check-branch-name.sh b/scripts/githooks/check-branch-name.sh index a3b9b95f..e0e40e1f 100755 --- a/scripts/githooks/check-branch-name.sh +++ b/scripts/githooks/check-branch-name.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e exit_code=0 diff --git a/scripts/githooks/check-commit-message.sh b/scripts/githooks/check-commit-message.sh index 5d6248a2..99bb3d8c 100755 --- a/scripts/githooks/check-commit-message.sh +++ b/scripts/githooks/check-commit-message.sh @@ -20,18 +20,17 @@ function check_jira_ref { fi echo $COMMIT_MESSAGE - return + return 0 } function check_commit_message_format { COMMIT_MESSAGE="$1" - local regex='^(feat|fix|chore|docs|style|refactor|perf|test|ci|build|revert|style)(\([a-z0-9_-]+\))?: (SAET)-[0-9]+ .+' + local REGEX='^(feat|fix|chore|docs|style|refactor|perf|test|ci|build|revert|style)(\([a-z0-9_-]+\))?: (SAET)-[0-9]+ .+' - if ! [[ $COMMIT_MESSAGE =~ $regex ]]; then + if ! [[ $COMMIT_MESSAGE =~ $REGEX ]]; then echo -e "\033[0;31mInvalid conventional commit message format! Expected: (): \033[0m" return 1 fi - return } function check_commit_message_length { @@ -41,7 +40,6 @@ function check_commit_message_length { if [[ "$COMMIT_MESSAGE_LENGTH" -gt $GIT_COMMIT_MESSAGE_MAX_LENGTH ]] ; then echo "At $COMMIT_MESSAGE_LENGTH characters the commit message exceeds limit of $GIT_COMMIT_MESSAGE_MAX_LENGTH" fi - return } function check_git_commit_message { @@ -55,7 +53,6 @@ function check_git_commit_message { [[ ! -z "$VALID_LENGTH" ]] && echo $VALID_LENGTH return 1 fi - return } # ---- MAIN EXECUTION ---- diff --git a/scripts/githooks/check-english-usage.sh b/scripts/githooks/check-english-usage.sh index f5b7933c..cb9710c4 100755 --- a/scripts/githooks/check-english-usage.sh +++ b/scripts/githooks/check-english-usage.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). 
Raise a PR instead. diff --git a/scripts/githooks/check-file-format.sh b/scripts/githooks/check-file-format.sh index df1a4fa8..a07d1637 100755 --- a/scripts/githooks/check-file-format.sh +++ b/scripts/githooks/check-file-format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/githooks/check-markdown-format.sh b/scripts/githooks/check-markdown-format.sh index 8d04ff1e..65858f65 100755 --- a/scripts/githooks/check-markdown-format.sh +++ b/scripts/githooks/check-markdown-format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/githooks/check-terraform-format.sh b/scripts/githooks/check-terraform-format.sh index e4ddd9fc..1899159d 100755 --- a/scripts/githooks/check-terraform-format.sh +++ b/scripts/githooks/check-terraform-format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/githooks/scan-secrets.sh b/scripts/githooks/scan-secrets.sh index 829ced69..1135b968 100755 --- a/scripts/githooks/scan-secrets.sh +++ b/scripts/githooks/scan-secrets.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
diff --git a/scripts/reports/create-lines-of-code-report.sh b/scripts/reports/create-lines-of-code-report.sh index b1c475b4..3486ac47 100755 --- a/scripts/reports/create-lines-of-code-report.sh +++ b/scripts/reports/create-lines-of-code-report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/reports/create-sbom-report.sh b/scripts/reports/create-sbom-report.sh index 4882f5aa..23ac0012 100755 --- a/scripts/reports/create-sbom-report.sh +++ b/scripts/reports/create-sbom-report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/reports/perform-static-analysis.sh b/scripts/reports/perform-static-analysis.sh index e27af68b..34b9257b 100755 --- a/scripts/reports/perform-static-analysis.sh +++ b/scripts/reports/perform-static-analysis.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/reports/scan-vulnerabilities.sh b/scripts/reports/scan-vulnerabilities.sh index e331cab5..ae519e9f 100755 --- a/scripts/reports/scan-vulnerabilities.sh +++ b/scripts/reports/scan-vulnerabilities.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
diff --git a/scripts/shellscript-linter.sh b/scripts/shellscript-linter.sh index 123a8437..609475d0 100755 --- a/scripts/shellscript-linter.sh +++ b/scripts/shellscript-linter.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/terraform/terraform.lib.sh b/scripts/terraform/terraform.lib.sh index 1bd6399f..f337ac95 100644 --- a/scripts/terraform/terraform.lib.sh +++ b/scripts/terraform/terraform.lib.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. diff --git a/scripts/terraform/terraform.sh b/scripts/terraform/terraform.sh index 310edc52..4cf1862c 100755 --- a/scripts/terraform/terraform.sh +++ b/scripts/terraform/terraform.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
diff --git a/scripts/tests/coverage.sh b/scripts/tests/coverage.sh index a503fd29..62111385 100755 --- a/scripts/tests/coverage.sh +++ b/scripts/tests/coverage.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/scripts/tests/integration.sh b/scripts/tests/integration.sh index 35fc385a..6877a76c 100755 --- a/scripts/tests/integration.sh +++ b/scripts/tests/integration.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail target=${1:-aws} diff --git a/scripts/tests/lint.sh b/scripts/tests/lint.sh index 70e76b0e..03f582d0 100755 --- a/scripts/tests/lint.sh +++ b/scripts/tests/lint.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh index 19ab3e7b..828737e1 100755 --- a/scripts/tests/unit.sh +++ b/scripts/tests/unit.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/scripts/workflow/generate-feature-flags.sh b/scripts/workflow/generate-feature-flags.sh index 5cef8e54..34feb6b6 100644 --- a/scripts/workflow/generate-feature-flags.sh +++ b/scripts/workflow/generate-feature-flags.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to generate AWS AppConfig Feature Flags JSON from the toggle registry # This script reads the toggle registry YAML and generates environment-specific feature flags diff --git a/src/cloudwatch_to_splunk_transformer/transformer.mjs b/src/cloudwatch_to_splunk_transformer/transformer.mjs new file mode 100644 index 00000000..64744782 --- /dev/null +++ b/src/cloudwatch_to_splunk_transformer/transformer.mjs @@ -0,0 +1,164 @@ +/** + * Stream events from AWS CloudWatch Logs to Splunk + * + * This function streams AWS CloudWatch Logs to Splunk using + * Splunk's HTTP event collector API. + * + * Define the following Environment Variables in the console below to configure + * this function to stream logs to your Splunk host: + * + * 1. 
SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. + * Default port for event collector is 8088. Example: https://host.com:8088/services/collector + * + * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. + * To create a new token for this Lambda function, refer to Splunk Docs: + * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token + */ +import * as zlib from 'node:zlib'; +import * as url from 'node:url'; +import { createRequire } from 'node:module'; + +const require = createRequire(import.meta.url); + +const Logger = function Logger(config) { + this.url = config.url; + this.token = config.token; + + this.addMetadata = true; + this.setSource = true; + + this.parsedUrl = url.parse(this.url); + // eslint-disable-next-line import/no-dynamic-require + this.requester = require(this.parsedUrl.protocol.substring(0, this.parsedUrl.protocol.length - 1)); + // Initialize request options which can be overridden & extended by consumer as needed + this.requestOptions = { + hostname: this.parsedUrl.hostname, + path: this.parsedUrl.path, + port: this.parsedUrl.port, + method: 'POST', + headers: { + Authorization: `Splunk ${this.token}`, + }, + rejectUnauthorized: false, + }; + + this.payloads = []; +}; + +// Simple logging API for Lambda functions +Logger.prototype.log = function log(message, context) { + this.logWithTime(Date.now(), message, context); +}; + +Logger.prototype.logWithTime = function logWithTime(time, message, context) { + const payload = {}; + + if (Object.prototype.toString.call(message) === '[object Array]') { + throw new Error('message argument must be a string or a JSON object.'); + } + payload.event = message; + + // Add Lambda metadata + if (typeof context !== 'undefined') { + if (this.addMetadata) { + // Enrich event only if it is an object + if (message === Object(message)) { + payload.event = JSON.parse(JSON.stringify(message)); // deep copy + 
payload.event.awsRequestId = context.awsRequestId; + } + } + if (this.setSource) { + payload.source = `lambda:${context.functionName}`; + } + } + + payload.time = new Date(time).getTime() / 1000; + + this.logEvent(payload); +}; + +Logger.prototype.logEvent = function logEvent(payload) { + this.payloads.push(JSON.stringify(payload)); +}; + +Logger.prototype.flushAsync = function flushAsync(callback) { + callback = callback || (() => {}); // eslint-disable-line no-param-reassign + + console.log('Sending event(s)'); + const req = this.requester.request(this.requestOptions, (res) => { + res.setEncoding('utf8'); + + console.log('Response received'); + res.on('data', (data) => { + let error = null; + if (res.statusCode !== 200) { + error = new Error(`error: statusCode=${res.statusCode}\n\n${data}`); + console.error(error); + } + this.payloads.length = 0; + callback(error, data); + }); + }); + + req.on('error', (error) => { + callback(error); + }); + + req.end(this.payloads.join(''), 'utf8'); +}; + +const loggerConfig = { + url: process.env.SPLUNK_HEC_URL, + token: process.env.SPLUNK_HEC_TOKEN, +}; +const logger = new Logger(loggerConfig); + +export const handler = (event, context, callback) => { + console.log('Received event:', JSON.stringify(event, null, 2)); + + // CloudWatch Logs data is base64 encoded so decode here + const payload = Buffer.from(event.awslogs.data, 'base64'); + // CloudWatch Logs are gzip compressed so expand here + zlib.gunzip(payload, (err, result) => { + if (err) { + callback(err); + } else { + const parsed = JSON.parse(result.toString('utf8')); + console.log('Decoded payload:', JSON.stringify(parsed, null, 2)); + let count = 0; + if (parsed.logEvents) { + parsed.logEvents.forEach((item) => { + /* Log event to Splunk with explicit event timestamp. + - Use optional 'context' argument to send Lambda metadata e.g. awsRequestId, functionName. + - Change "item.timestamp" below if time is specified in another field in the event. 
+ - Change to "logger.log(item.message, context)" if no time field is present in event. */ + logger.logWithTime(item.timestamp, item.message, context); + + /* Alternatively, UNCOMMENT logger call below if you want to override Splunk input settings */ + /* Log event to Splunk with any combination of explicit timestamp, index, source, sourcetype, and host. + - Complete list of input settings available at http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ + // logger.logEvent({ + // time: new Date(item.timestamp).getTime() / 1000, + // host: 'serverless', + // source: `lambda:${context.functionName}`, + // sourcetype: 'httpevent', + // index: 'main', + // event: item.message, + // }); + + count += 1; + }); + } + // Send all the events in a single batch to Splunk + logger.flushAsync((error, response) => { + if (error) { + callback(error); + } else { + console.log(`Response from Splunk:\n${response}`); + console.log(`Successfully processed ${count} log event(s).`); + callback(null, count); // Return number of log events + } + }); + } + }); +}; diff --git a/src/delete-apis.sh b/src/delete-apis.sh index 4f40e3a3..64f0941a 100755 --- a/src/delete-apis.sh +++ b/src/delete-apis.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # Use AWS CLI to get all REST API IDs named 'triageAPI' diff --git a/src/delete-sandbox-api.sh b/src/delete-sandbox-api.sh index ec86f987..ceea02c5 100755 --- a/src/delete-sandbox-api.sh +++ b/src/delete-sandbox-api.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # Use AWS CLI to get all REST API IDs named 'triageAPI' diff --git a/src/deploy-sandbox.sh b/src/deploy-sandbox.sh index 6a22e268..5e01d499 100755 --- a/src/deploy-sandbox.sh +++ b/src/deploy-sandbox.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail target=${1:-aws} diff --git a/src/deploy.sh b/src/deploy.sh index acc93f4c..8f350a24 100755 --- a/src/deploy.sh +++ b/src/deploy.sh @@ -1,18 
+1,14 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail -target=${1:-aws} -readonly LOCALSTACK="localstack" +target="$1" +source ./deployment/aws-env.sh "$target" +source ./deployment/iam-roles.sh +source ./deployment/dynamo_db.sh +source ./deployment/lambda.sh +source ./deployment/api-gateway.sh -if [[ "$target" == "$LOCALSTACK" ]]; then - AWS="aws --endpoint-url=http://localhost:4566" - REGION="us-east-1" - ACCOUNT_ID="000000000000" -else - AWS="aws" - REGION=$($AWS configure get region) - ACCOUNT_ID=$($AWS sts get-caller-identity --query Account --output text) -fi +readonly IS_LOCAL=$(is_localstack) # Constants API_NAME="triageAPI" @@ -33,52 +29,28 @@ CONCURRENT_EXEC_NUM=5 LAMBDA_APIG_ALIAS_NAME="live" PING_ENDPOINT="_ping" STATUS_ENDPOINT="_status" +LOG_STREAM_LAMBDA_NAME="${LAMBDA_NAME}-log-stream" +LOG_STREAM_ZIP_FILE="splunk_function.zip" +LOG_STREAM_RUNTIME="nodejs22.x" # Create IAM role -role_arn=$($AWS iam create-role \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --assume-role-policy-document '{ - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "Service": "lambda.amazonaws.com" - }, - "Action": "sts:AssumeRole" - } - ] - }' \ - --query 'Role.Arn' --output text) +role_arn=$(create_iam_role "$LAMBDA_EX_ROLE_NAME" "lambda.amazonaws.com") echo "✅ IAM role created: $role_arn" -# Attach basic execution policy -$AWS iam attach-role-policy \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole - +attach_role_policy "$LAMBDA_EX_ROLE_NAME" "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" echo "✅ Basic execution policy attached to role: $LAMBDA_EX_ROLE_NAME" # Attach X-Ray write access policy -$AWS iam attach-role-policy \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --policy-arn arn:aws:iam::aws:policy/AWSXRayDaemonWriteAccess - +attach_role_policy "$LAMBDA_EX_ROLE_NAME" "arn:aws:iam::aws:policy/AWSXRayDaemonWriteAccess" echo "✅ X-Ray write access 
policy attached to role: $LAMBDA_EX_ROLE_NAME" # Attach DynamoDB execution policy -$AWS iam attach-role-policy \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaDynamoDBExecutionRole - +attach_role_policy "$LAMBDA_EX_ROLE_NAME" "arn:aws:iam::aws:policy/service-role/AWSLambdaDynamoDBExecutionRole" echo "✅ DynamoDB execution policy attached to role: $LAMBDA_EX_ROLE_NAME" # Attach custom S3 access policy for the Lambda to access the bucket -$AWS iam put-role-policy \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --policy-name LambdaS3ReadAccess \ - --policy-document '{ +put_role_policy "$LAMBDA_EX_ROLE_NAME" "LambdaS3ReadAccess" '{ "Version": "2012-10-17", "Statement": [ { @@ -88,9 +60,23 @@ $AWS iam put-role-policy \ } ] }' - echo "✅ Custom S3 access policy attached to role: $LAMBDA_EX_ROLE_NAME" +# Attach custom KMS access policy for the Lambda to access the key store +LAMBDA_KEY=$($AWS kms list-aliases --query 'Aliases[?AliasName==`alias/aws/lambda`].TargetKeyId' --output text) + +put_role_policy "$LAMBDA_EX_ROLE_NAME" "LambdaKMSAccess" "{ + \"Version\": \"2012-10-17\", + \"Statement\": [ + { + \"Effect\": \"Allow\", + \"Action\": \"kms:Decrypt\", + \"Resource\": \"arn:aws:kms:eu-west-2:$ACCOUNT_ID:key/$LAMBDA_KEY\" + } + ] + }" +echo "✅ Custom KMS access policy attached to role: $LAMBDA_EX_ROLE_NAME" + # Attach custom DynamoDB access policy DYNAMODB_LAMBDA_ACCESS_POLICY="{ \"Version\": \"2012-10-17\", @@ -131,125 +117,48 @@ DYNAMODB_LAMBDA_ACCESS_POLICY="{ ] } " - -$AWS iam put-role-policy \ - --role-name "$LAMBDA_EX_ROLE_NAME" \ - --policy-name DynamoDBLambdaAccess \ - --policy-document "$DYNAMODB_LAMBDA_ACCESS_POLICY" - +put_role_policy "$LAMBDA_EX_ROLE_NAME" "DynamoDBLambdaAccess" "$DYNAMODB_LAMBDA_ACCESS_POLICY" echo "✅ Custom DynamoDB access policy attached to role: $LAMBDA_EX_ROLE_NAME" -# Create DynamoDB table -ddb_starting_nodes_table_arn=$($AWS dynamodb create-table \ - --table-name $STARTING_NODE_TABLE \ - 
--region "$REGION" \ - --attribute-definitions AttributeName=Skillset,AttributeType=S AttributeName=GenderAgeParty,AttributeType=S\ - --key-schema AttributeName=Skillset,KeyType=HASH AttributeName=GenderAgeParty,KeyType=RANGE\ - --billing-mode PAY_PER_REQUEST \ - --sse-specification Enabled=true \ - --query 'TableDescription.TableArn' --output text) - -# Wait for the table to be created -$AWS dynamodb wait table-exists --table-name $STARTING_NODE_TABLE --region "$REGION" - +# Create DynamoDB tables +ddb_starting_nodes_table_arn=$(create_table $STARTING_NODE_TABLE \ + "$REGION" \ + "AttributeName=Skillset,AttributeType=S AttributeName=GenderAgeParty,AttributeType=S" \ + "AttributeName=Skillset,KeyType=HASH AttributeName=GenderAgeParty,KeyType=RANGE") echo "✅ DynamoDB table \"$STARTING_NODE_TABLE\" created with X-Ray tracing: $ddb_starting_nodes_table_arn" -ddb_triage_nodes_table_arn=$($AWS dynamodb create-table \ - --table-name $TRIAGE_NODE_TABLE \ - --region "$REGION" \ - --attribute-definitions AttributeName=Coordinate,AttributeType=S \ - --key-schema AttributeName=Coordinate,KeyType=HASH \ - --billing-mode PAY_PER_REQUEST \ - --sse-specification Enabled=true \ - --query 'TableDescription.TableArn' --output text) - - # Wait for the table to be created -$AWS dynamodb wait table-exists --table-name $TRIAGE_NODE_TABLE --region "$REGION" - +ddb_triage_nodes_table_arn=$(create_table $TRIAGE_NODE_TABLE \ + "$REGION" \ + "AttributeName=Coordinate,AttributeType=S" \ + "AttributeName=Coordinate,KeyType=HASH") echo "✅ DynamoDB table \"$TRIAGE_NODE_TABLE\" created with X-Ray tracing: $ddb_triage_nodes_table_arn" -ddb_body_map_nodes_table_arn=$($AWS dynamodb create-table \ - --table-name $BODY_MAP_NODE_TABLE \ - --region "$REGION" \ - --attribute-definitions AttributeName=id,AttributeType=S \ - --key-schema AttributeName=id,KeyType=HASH \ - --billing-mode PAY_PER_REQUEST \ - --sse-specification Enabled=true \ - --query 'TableDescription.TableArn' --output text) - - # Wait for 
the table to be created -$AWS dynamodb wait table-exists --table-name $BODY_MAP_NODE_TABLE --region "$REGION" - +ddb_body_map_nodes_table_arn=$(create_table $BODY_MAP_NODE_TABLE \ + "$REGION" \ + "AttributeName=id,AttributeType=S" \ + "AttributeName=id,KeyType=HASH") echo "✅ DynamoDB table \"$BODY_MAP_NODE_TABLE\" created with X-Ray tracing: $ddb_body_map_nodes_table_arn" # Create Lambda function -lambda_apig_arn=$($AWS lambda create-function \ - --function-name "$LAMBDA_NAME-apig" \ - --zip-file fileb://$ZIP_FILE \ - --runtime $RUNTIME \ - --role "$role_arn" \ - --handler "$API_HANDLER" \ - --region "$REGION" \ - --memory-size 512 \ - --tracing-config Mode=Active \ - --environment "Variables={BODY_MAP_NODE_TABLE=$BODY_MAP_NODE_TABLE,STARTING_NODE_TABLE=$STARTING_NODE_TABLE,TRIAGE_NODE_TABLE=$TRIAGE_NODE_TABLE,DATA_RELEASE_BUCKET=$S3_BUCKET_NAME,TBRANCH=$TBRANCH}" \ - --query 'FunctionArn' --output text) - +lambda_apig_arn=$(create_lambda "$LAMBDA_NAME-apig" \ + "$ZIP_FILE" \ + "$RUNTIME" \ + "$role_arn" \ + "$API_HANDLER" \ + "$REGION" \ + "BODY_MAP_NODE_TABLE=$BODY_MAP_NODE_TABLE,STARTING_NODE_TABLE=$STARTING_NODE_TABLE,TRIAGE_NODE_TABLE=$TRIAGE_NODE_TABLE,DATA_RELEASE_BUCKET=$S3_BUCKET_NAME,TBRANCH=$TBRANCH") echo "✅ APIG Lambda function created with X-Ray tracing enabled: $lambda_apig_arn" -$AWS lambda wait function-active-v2 --function-name "$LAMBDA_NAME-apig" --region "$REGION" - -$AWS lambda publish-version \ - --function-name "$LAMBDA_NAME-apig" \ - --region "$REGION" > /dev/null - -latest_version=$($AWS lambda list-versions-by-function \ - --function-name "$LAMBDA_NAME-apig" \ - --region "$REGION" \ - --query 'Versions[-1].Version' --output text) +if [[ "$IS_LOCAL" != "true" ]]; then + publish_lambda_version "$LAMBDA_NAME-apig" "$REGION" -if [[ "$target" != "$LOCALSTACK" ]]; then - $AWS lambda put-provisioned-concurrency-config \ - --function-name "$LAMBDA_NAME-apig" \ - --provisioned-concurrent-executions $CONCURRENT_EXEC_NUM \ - --qualifier 
"$latest_version" \ - --region "$REGION" > /dev/null + latest_version=$(list_lambda_versions "$LAMBDA_NAME-apig" "$REGION") + add_provisioned_concurrency "$LAMBDA_NAME-apig" "$REGION" "$latest_version" "$CONCURRENT_EXEC_NUM" echo "✅ Provisioned concurrency ($CONCURRENT_EXEC_NUM) configured for $LAMBDA_NAME-apig version $latest_version" - # Wait for provisioned concurrency to be ready - echo "⏳ Waiting for provisioned concurrency to be ready..." - max_wait=60 - elapsed=0 - while [[ $elapsed -lt $max_wait ]]; do - status=$($AWS lambda get-provisioned-concurrency-config \ - --function-name "$LAMBDA_NAME-apig" \ - --qualifier "$latest_version" \ - --region "$REGION" \ - --query 'Status' --output text 2>/dev/null || echo "FAILED") - - if [[ "$status" = "READY" ]]; then - echo "✅ Provisioned concurrency is ready" - break - fi - - echo " Status: $status (waiting...)" - sleep 3 - elapsed=$((elapsed + 3)) - done - - if [[ $elapsed -ge $max_wait ]]; then - echo "⚠️ Warning: Provisioned concurrency may not be fully ready yet" - fi - # Create alias pointing to the versioned function - $AWS lambda create-alias \ - --function-name "$LAMBDA_NAME-apig" \ - --name "$LAMBDA_APIG_ALIAS_NAME" \ - --function-version "$latest_version" \ - --description "Live alias pointing to version $latest_version, used for provisioned concurrency. 
API Gateway to point to this lambda version" \ - --region "$REGION" > /dev/null - + create_lambda_alias "$LAMBDA_NAME-apig" "$REGION" "$LAMBDA_APIG_ALIAS_NAME" "$latest_version" echo "✅ Created alias '$LAMBDA_APIG_ALIAS_NAME' pointing to version $latest_version" # Update lambda_apig_arn to use the alias @@ -261,105 +170,29 @@ else fi # Create REST API -rest_api_id=$($AWS apigateway create-rest-api \ - --name "$API_NAME" \ - --query 'id' --output text) - +rest_api_id=$(create_rest_api "$API_NAME") echo "✅ $API_NAME Rest API created: $rest_api_id" # Get root resource ID -root_resource_id=$($AWS apigateway get-resources \ - --rest-api-id "$rest_api_id" \ - --query 'items[?path==`/`].id' --output text) - -echo "✅ $API_NAME Root resource ID retrieved: $root_resource_id" - -# Iteratively create resource (e.g., /FHIR/R4/triage) - -IFS='/' read -ra parts <<< "$ENDPOINT_PATH" -parent_id="$root_resource_id" - -for part in "${parts[@]}"; do - [[ -z "$part" ]] && continue - resource_id=$($AWS apigateway create-resource \ - --rest-api-id "$rest_api_id" \ - --parent-id "$parent_id" \ - --path-part "$part" \ - --query 'id' --output text) - - echo "✅ $API_NAME triage resource created for $part: $resource_id" - parent_id="$resource_id" -done +root_resource_id=$(get_root_resource_id "$rest_api_id") +echo "✅ $API_NAME Root / resource ID retrieved: $root_resource_id" # Create _ping endpoint -ping_resource_id=$($AWS apigateway create-resource \ - --rest-api-id "$rest_api_id" \ - --parent-id "$root_resource_id" \ - --path-part "$PING_ENDPOINT" \ - --query 'id' \ - --output text -) - -status_resource_id=$($AWS apigateway create-resource \ - --rest-api-id "$rest_api_id" \ - --parent-id "$root_resource_id" \ - --path-part "$STATUS_ENDPOINT" \ - --query 'id' \ - --output text -) - -$AWS apigateway put-method \ - --rest-api-id "$rest_api_id" \ - --resource-id "$ping_resource_id" \ - --http-method GET \ - --authorization-type "NONE" > /dev/null +ping_resource_id=$(create_resource 
"$rest_api_id" "$root_resource_id" "$PING_ENDPOINT") +echo "✅ $API_NAME /$PING_ENDPOINT resource created: $ping_resource_id" -$AWS apigateway put-method \ - --rest-api-id "$rest_api_id" \ - --resource-id "$status_resource_id" \ - --http-method POST \ - --authorization-type "NONE" > /dev/null +create_method "$rest_api_id" "$ping_resource_id" "GET" +echo "✅ $API_NAME GET /$PING_ENDPOINT method created: $rest_api_id" -# Create POST method -$AWS apigateway put-method \ - --rest-api-id "$rest_api_id" \ - --resource-id "$resource_id" \ - --http-method POST \ - --authorization-type "NONE" > /dev/null +# Define mock integration for _ping endpoint +put_mock_integration "$rest_api_id" "$ping_resource_id" -echo "✅ $API_NAME POST method created: $rest_api_id" +# Create _status endpoint +status_resource_id=$(create_resource "$rest_api_id" "$root_resource_id" "$STATUS_ENDPOINT") +echo "✅ $API_NAME /$STATUS_ENDPOINT resource created: $status_resource_id" -# Integrate with Lambda -$AWS apigateway put-integration \ - --rest-api-id "$rest_api_id" \ - --resource-id "$ping_resource_id" \ - --http-method GET \ - --type MOCK \ - --request-templates '{"application/json":"{\"statusCode\": 200}"}' > /dev/null - -$AWS apigateway put-integration-response \ - --rest-api-id "$rest_api_id" \ - --resource-id "$ping_resource_id" \ - --http-method GET \ - --status-code 200 \ - --selection-pattern "" \ - --response-templates '{"application/json":"{\"status\":\"ok\"}"}' > /dev/null - -$AWS apigateway put-method-response \ - --rest-api-id "$rest_api_id" \ - --resource-id "$ping_resource_id" \ - --http-method GET \ - --status-code 200 \ - --response-models '{"application/json":"Empty"}' \ - --response-parameters '{"method.response.header.Content-Type": true}' > /dev/null - -$AWS apigateway put-integration \ - --rest-api-id "$rest_api_id" \ - --resource-id "$resource_id" \ - --http-method POST \ - --type AWS_PROXY \ - --integration-http-method POST \ - --uri 
"arn:aws:apigateway:$REGION:lambda:path/2015-03-31/functions/$lambda_apig_alias_arn/invocations" > /dev/null +create_method "$rest_api_id" "$status_resource_id" "POST" +echo "✅ $API_NAME POST /$STATUS_ENDPOINT method created: $rest_api_id" $AWS apigateway put-integration \ --rest-api-id "$rest_api_id" \ @@ -369,19 +202,34 @@ $AWS apigateway put-integration \ --integration-http-method POST \ --uri "arn:aws:apigateway:$REGION:lambda:path/2015-03-31/functions/$lambda_apig_alias_arn/invocations" > /dev/null -if [[ "$target" == "$LOCALSTACK" ]]; then - echo "✅ $API_NAME triage resource linked to lambda: $LAMBDA_NAME-apig" -else - echo "✅ $API_NAME triage resource linked to lambda: $LAMBDA_NAME-apig:$LAMBDA_APIG_ALIAS_NAME" -fi +# Iteratively create resource (e.g., /FHIR/R4/triage) +IFS='/' read -ra parts <<< "$ENDPOINT_PATH" +parent_id="$root_resource_id" -# Add permission for API Gateway to invoke Lambda -if [[ "$target" == "$LOCALSTACK" ]]; then +for part in "${parts[@]}"; do + [[ -z "$part" ]] && continue + resource_id=$(create_resource "$rest_api_id" "$parent_id" "$part") + + echo "✅ $API_NAME triage resource created for .../$part: $resource_id" + parent_id="$resource_id" +done + +# Create POST method +create_method "$rest_api_id" "$resource_id" "POST" +echo "✅ $API_NAME POST /$ENDPOINT_PATH method created: $rest_api_id" + +put_lambda_integration "$rest_api_id" "$resource_id" "POST" "$lambda_apig_alias_arn" "$REGION" +echo "✅ $API_NAME POST /$ENDPOINT_PATH method integrated with Lambda: $rest_api_id" + +if [[ "$IS_LOCAL" == "true" ]]; then + echo "✅ $API_NAME triage resource linked to lambda: $LAMBDA_NAME-apig" lambda_permission_name="$LAMBDA_NAME-apig" else + echo "✅ $API_NAME triage resource linked to lambda: $LAMBDA_NAME-apig:$LAMBDA_APIG_ALIAS_NAME" lambda_permission_name="$LAMBDA_NAME-apig:$LAMBDA_APIG_ALIAS_NAME" fi +# Add permission for API Gateway to invoke Lambda $AWS lambda add-permission \ --function-name "$lambda_permission_name" \ --statement-id 
"apigatewayv1-access" \ @@ -393,10 +241,7 @@ $AWS lambda add-permission \ echo "✅ Permission for APIG to invoke $lambda_permission_name lambda granted" # Deploy the API -$AWS apigateway create-deployment \ - --rest-api-id "$rest_api_id" \ - --stage-name "$STAGE_NAME" > /dev/null - +create_deployment "$rest_api_id" "$STAGE_NAME" echo "✅ API $API_NAME deployed" # Enable X-Ray tracing on API Gateway stage @@ -408,28 +253,22 @@ $AWS apigateway update-stage \ echo "✅ X-Ray tracing enabled on API Gateway stage: $STAGE_NAME" # Output endpoints -if [[ "$target" == "$LOCALSTACK" ]]; then +if [[ "$IS_LOCAL" == "true" ]]; then echo "ℹ️ LocalStack API endpoint: http://localhost:4566/restapis/${rest_api_id}/${STAGE_NAME}/_user_request_/${ENDPOINT_PATH}" echo "ℹ️ LocalStack ping endpoint: http://localhost:4566/restapis/${rest_api_id}/${STAGE_NAME}/_user_request_/${PING_ENDPOINT}" else echo "ℹ️ AWS API endpoint: https://${rest_api_id}.execute-api.${REGION}.amazonaws.com/${STAGE_NAME}/${ENDPOINT_PATH}" + echo "ℹ️ AWS ping endpoint: https://${rest_api_id}.execute-api.${REGION}.amazonaws.com/${STAGE_NAME}/${PING_ENDPOINT}" fi # Create ingress Lambda function -lambda_s3_arn=$($AWS lambda create-function \ - --function-name "$LAMBDA_NAME-s3" \ - --zip-file fileb://$ZIP_FILE \ - --runtime $RUNTIME \ - --role "$role_arn" \ - --handler "$S3_HANDLER" \ - --timeout 900 \ - --memory-size 1024 \ - --region "$REGION" \ - --snap-start ApplyOn=PublishedVersions \ - --tracing-config Mode=Active \ - --environment "Variables={BODY_MAP_NODE_TABLE=$BODY_MAP_NODE_TABLE,STARTING_NODE_TABLE=$STARTING_NODE_TABLE,TRIAGE_NODE_TABLE=$TRIAGE_NODE_TABLE,DATA_RELEASE_BUCKET=$S3_BUCKET_NAME,TBRANCH=$TBRANCH}" \ - --query 'FunctionArn' --output text) - +lambda_s3_arn=$(create_lambda "$LAMBDA_NAME-s3" \ + "$ZIP_FILE" \ + "$RUNTIME" \ + "$role_arn" \ + "$S3_HANDLER" \ + "$REGION" \ + 
"BODY_MAP_NODE_TABLE=$BODY_MAP_NODE_TABLE,STARTING_NODE_TABLE=$STARTING_NODE_TABLE,TRIAGE_NODE_TABLE=$TRIAGE_NODE_TABLE,DATA_RELEASE_BUCKET=$S3_BUCKET_NAME,TBRANCH=$TBRANCH") echo "✅ S3-ingress Lambda function created with X-Ray tracing enabled: $lambda_s3_arn" $AWS s3api create-bucket \ @@ -440,17 +279,18 @@ $AWS s3api create-bucket \ echo "✅ S3 bucket created: $S3_BUCKET_NAME" -$AWS lambda add-permission --function-name "$LAMBDA_NAME-s3" \ - --principal s3.amazonaws.com --statement-id s3invoke --action "lambda:InvokeFunction" \ - --source-arn arn:aws:s3:::$S3_BUCKET_NAME \ +$AWS lambda add-permission \ + --function-name "$LAMBDA_NAME-s3" \ + --principal s3.amazonaws.com \ + --statement-id s3invoke \ + --action "lambda:InvokeFunction" \ + --source-arn "arn:aws:s3:::$S3_BUCKET_NAME" \ --region "$REGION" \ --source-account $ACCOUNT_ID \ > /dev/null echo "✅ $S3_BUCKET_NAME bucket granted permission to invoke lambda" -$AWS lambda wait function-active-v2 --function-name triageApiLambda-s3 --region "$REGION" - NOTIFICATION_CONFIG="{ \"LambdaFunctionConfigurations\": [ { @@ -468,6 +308,37 @@ $AWS s3api put-bucket-notification-configuration \ > /dev/null echo "✅ $S3_BUCKET_NAME bucket PathwaysReleaseEventConfiguration configured" + +if [[ "$IS_LOCAL" != "true" ]]; then + # create log stream Lambda function + lambda_log_stream_arn=$(create_lambda "$LOG_STREAM_LAMBDA_NAME" \ + "$LOG_STREAM_ZIP_FILE" \ + "$LOG_STREAM_RUNTIME" \ + "$role_arn" \ + "transformer.handler" \ + "$REGION" \ + "API_NAME=$API_NAME,STAGE_NAME=$STAGE_NAME,SPLUNK_HEC_URL=$SPLUNK_HEC_URL,SPLUNK_HEC_TOKEN=$SPLUNK_HEC_TOKEN") + echo "✅ Log stream Lambda function created: $lambda_log_stream_arn" + + # give lambda permission to be invoked by CloudWatch Logs + $AWS lambda add-permission \ + --function-name "$LOG_STREAM_LAMBDA_NAME" \ + --statement-id "logs-invoke-permission" \ + --action "lambda:InvokeFunction" \ + --principal logs.amazonaws.com \ + --source-arn 
"arn:aws:logs:$REGION:$ACCOUNT_ID:log-group:/aws/lambda/${LAMBDA_NAME}-apig:*" \ + --region "$REGION" > /dev/null + + $AWS logs put-subscription-filter \ + --log-group-name "/aws/lambda/${LAMBDA_NAME}-apig" \ + --filter-name "${LAMBDA_NAME}-apig-filter" \ + --filter-pattern "" \ + --destination-arn "$lambda_log_stream_arn" \ + --region "$REGION" > /dev/null +else + echo "ℹ️ LocalStack: Skipping log stream Lambda" +fi + echo echo Done! echo diff --git a/src/deployment/api-gateway.sh b/src/deployment/api-gateway.sh new file mode 100755 index 00000000..5998af44 --- /dev/null +++ b/src/deployment/api-gateway.sh @@ -0,0 +1,96 @@ +#!/usr/bin/env bash +set -euo pipefail + +create_rest_api() { + local api_name="$1" + echo "$($AWS apigateway create-rest-api --name "$api_name" --query 'id' --output text)" + return +} + +get_root_resource_id() { + local api_id="$1" + echo "$($AWS apigateway get-resources --rest-api-id "$api_id" --query 'items[?path==`/`].id' --output text)" + return +} + +create_resource() { + local api_id="$1" + local parent_id="$2" + local path_part="$3" + echo "$($AWS apigateway create-resource --rest-api-id "$api_id" --parent-id "$parent_id" --path-part "$path_part" --query 'id' --output text)" + return +} + +create_method() { + local api_id="$1" + local resource_id="$2" + local http_method="$3" + local auth_type="${4:-NONE}" + + $AWS apigateway put-method \ + --rest-api-id "$api_id" \ + --resource-id "$resource_id" \ + --http-method "$http_method" \ + --authorization-type "$auth_type" > /dev/null + + return +} + +put_mock_integration() { + local api_id="$1" + local resource_id="$2" + + $AWS apigateway put-integration \ + --rest-api-id "$api_id" \ + --resource-id "$resource_id" \ + --http-method "GET" \ + --type MOCK \ + --request-templates '{"application/json":"{\"statusCode\": 200}"}' > /dev/null + + $AWS apigateway put-integration-response \ + --rest-api-id "$api_id" \ + --resource-id "$resource_id" \ + --http-method "GET" \ + --status-code 200 \ + 
--selection-pattern "" \ + --response-templates '{"application/json":"{\"status\":\"ok\"}"}' > /dev/null + + $AWS apigateway put-method-response \ + --rest-api-id "$api_id" \ + --resource-id "$resource_id" \ + --http-method "GET" \ + --status-code 200 \ + --response-models '{"application/json":"Empty"}' \ + --response-parameters '{"method.response.header.Content-Type": true}' > /dev/null + + return +} + +put_lambda_integration() { + local api_id="$1" + local resource_id="$2" + local http_method="$3" + local lambda_arn="$4" + local region="$5" + + $AWS apigateway put-integration \ + --rest-api-id "$api_id" \ + --resource-id "$resource_id" \ + --http-method "$http_method" \ + --type AWS_PROXY \ + --integration-http-method POST \ + --uri "arn:aws:apigateway:$region:lambda:path/2015-03-31/functions/$lambda_arn/invocations" > /dev/null + + return +} + +create_deployment() { + local api_id="$1" + local stage_name="$2" + + $AWS apigateway create-deployment \ + --rest-api-id "$api_id" \ + --stage-name "$stage_name" > /dev/null + + return +} diff --git a/src/deployment/aws-env.sh b/src/deployment/aws-env.sh new file mode 100755 index 00000000..d73c8a11 --- /dev/null +++ b/src/deployment/aws-env.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +set -euo pipefail + +target=${1:-aws} + +case "$target" in + localstack) + export AWS="aws --endpoint-url=http://localhost:4566" + export REGION="us-east-1" + export ACCOUNT_ID="000000000000" + localstack=true + ;; + *) + export AWS="aws" + export REGION=$($AWS configure get region) + export ACCOUNT_ID=$($AWS sts get-caller-identity --query Account --output text) + localstack=false + ;; +esac + +is_localstack() { + echo $localstack + return +} diff --git a/src/deployment/dynamo_db.sh b/src/deployment/dynamo_db.sh new file mode 100755 index 00000000..f0d5ed3d --- /dev/null +++ b/src/deployment/dynamo_db.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -euo pipefail + +_split() { + local str="$1" + local delimiter="$2" + IFS="$delimiter" read -ra 
parts <<< "$str" + echo "${parts[@]}" +} + +create_table() { + local table_name="$1" + local region="$2" + local attribute_definitions="$3" + local key_schema="$4" + + local arn=$($AWS dynamodb create-table \ + --table-name "$table_name" \ + --region "$region" \ + --attribute-definitions $(_split "$attribute_definitions" " ") \ + --key-schema $(_split "$key_schema" " ") \ + --billing-mode PAY_PER_REQUEST \ + --sse-specification Enabled=true \ + --query 'TableDescription.TableArn' --output text) + + $AWS dynamodb wait table-exists --table-name "$table_name" --region "$region" + echo "$arn" + + return +} diff --git a/src/deployment/iam-roles.sh b/src/deployment/iam-roles.sh new file mode 100755 index 00000000..268463b3 --- /dev/null +++ b/src/deployment/iam-roles.sh @@ -0,0 +1,67 @@ +#!/usr/bin/env bash +set -euo pipefail + +create_iam_role() { + local role_name="$1" + local service="$2" + local existing_arn + local new_arn + + if existing_arn=$($AWS iam get-role \ + --role-name "$role_name" \ + --query 'Role.Arn' \ + --output text 2>/dev/null + ); then + echo "$existing_arn" + else + new_arn=$($AWS iam create-role \ + --role-name "$role_name" \ + --query 'Role.Arn' \ + --output text \ + --assume-role-policy-document "{ + \"Version\": \"2012-10-17\", + \"Statement\": [ + { + \"Effect\": \"Allow\", + \"Principal\": { + \"Service\": \"$service\" + }, + \"Action\": \"sts:AssumeRole\" + } + ] +}" + ) + echo "$new_arn" + fi + return +} + +attach_role_policy() { + local role_name="$1" + local policy_arn="$2" + + if !
$AWS iam list-attached-role-policies \ + --role-name "$role_name" \ + --query "AttachedPolicies[?PolicyArn=='$policy_arn'].PolicyArn" \ + --output text | grep -q "$policy_arn"; then + + $AWS iam attach-role-policy \ + --role-name "$role_name" \ + --policy-arn "$policy_arn" + fi + + return +} + +put_role_policy() { + local role_name="$1" + local policy_name="$2" + local policy_document="$3" + + $AWS iam put-role-policy \ + --role-name "$role_name" \ + --policy-name "$policy_name" \ + --policy-document "$policy_document" + + return +} diff --git a/src/deployment/lambda.sh b/src/deployment/lambda.sh new file mode 100755 index 00000000..8cb45942 --- /dev/null +++ b/src/deployment/lambda.sh @@ -0,0 +1,104 @@ +#!/usr/bin/env bash +set -euo pipefail + +create_lambda() { + local lambda_name="$1" + local zip_file="$2" + local runtime="$3" + local role_arn="$4" + local api_handler="$5" + local region="$6" + local environment="$7" + echo $($AWS lambda create-function \ + --function-name "$lambda_name" \ + --zip-file fileb://$zip_file \ + --runtime $runtime \ + --role "$role_arn" \ + --handler "$api_handler" \ + --region "$region" \ + --environment "Variables={$environment}" \ + --memory-size 512 \ + --tracing-config Mode=Active \ + --query 'FunctionArn' --output text) + $AWS lambda wait function-active-v2 --function-name "$lambda_name" --region "$region" + return +} + +publish_lambda_version() { + local lambda_name="$1" + local region="$2" + + $AWS lambda publish-version \ + --function-name "$lambda_name" \ + --region "$region" \ + --query 'Version' --output text + + return +} + +list_lambda_versions() { + local lambda_name="$1" + local region="$2" + + echo $($AWS lambda list-versions-by-function \ + --function-name "$lambda_name" \ + --region "$region" \ + --query 'Versions[-1].Version' --output text) + return +} + +add_provisioned_concurrency() { + local lambda_name="$1" + local region="$2" + local version="$3" + local concurrent_executions="$4" + + $AWS lambda 
put-provisioned-concurrency-config \ + --function-name "$lambda_name" \ + --region "$region" \ + --provisioned-concurrent-executions $concurrent_executions \ + --qualifier "$version" > /dev/null + + # Wait for provisioned concurrency to be ready + echo "⏳ Waiting for provisioned concurrency to be ready..." + local max_wait=60 + local elapsed=0 + while [[ $elapsed -lt $max_wait ]]; do + local status=$($AWS lambda get-provisioned-concurrency-config \ + --function-name "$lambda_name" \ + --qualifier "$version" \ + --region "$region" \ + --query 'Status' --output text 2>/dev/null || echo "FAILED") + + if [[ "$status" = "READY" ]]; then + echo "✅ Provisioned concurrency is ready" + break + fi + + echo " Status: $status (waiting...)" + sleep 3 + elapsed=$((elapsed + 3)) + done + + if [[ $elapsed -ge $max_wait ]]; then + echo "⚠️ Warning: Provisioned concurrency may not be fully ready yet" + fi + + return +} + +create_lambda_alias() { + local lambda_name="$1" + local region="$2" + local alias_name="$3" + local version="$4" + + $AWS lambda create-alias \ + --function-name "$lambda_name" \ + --region "$region" \ + --name "$alias_name" \ + --description "Live alias pointing to version $version, used for provisioned concurrency. 
API Gateway to point to this lambda version" \ + --function-version "$version" > /dev/null + + return +} diff --git a/src/update.sh b/src/update.sh index ca7c3aa5..d58d1aef 100755 --- a/src/update.sh +++ b/src/update.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail AWS="aws --endpoint-url=http://localhost:4566" From d91785fac1aa95f58f945a43d146db30ba4857f9 Mon Sep 17 00:00:00 2001 From: soji-kainos-nhs-temp Date: Tue, 10 Mar 2026 17:15:28 +0000 Subject: [PATCH 45/46] NPT-1140 Include missing Github runner policy actions --- .../account_github_runner_compute.policy.json.tpl | 6 +++++- .../account_github_runner_data.policy.json.tpl | 2 ++ .../account_github_runner_security.policy.json.tpl | 4 ++++ .../app_github_runner_compute.policy.json.tpl | 6 +++++- .../app_github_runner_data.policy.json.tpl | 1 + .../app_github_runner_security.policy.json.tpl | 10 +++++++--- .../github_runner_role_permissions_boundary.tf | 11 +++++++++-- 7 files changed, 33 insertions(+), 7 deletions(-) diff --git a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl index 72cf59fb..bf9dd08a 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_compute.policy.json.tpl @@ -45,9 +45,11 @@ "ec2:AssociateRouteTable", "ec2:AuthorizeSecurityGroupEgress", "ec2:AuthorizeSecurityGroupIngress", + "ec2:AttachInternetGateway", "ec2:CreateFlowLogs", "ec2:CreateNetworkAcl", "ec2:CreateNetworkAclEntry", + "ec2:CreateInternetGateway", "ec2:CreateRouteTable", "ec2:CreateSecurityGroup", "ec2:CreateSubnet", @@ -60,6 +62,7 @@ "ec2:DeleteTags", "ec2:DeleteVpc", "ec2:DeleteVpcEndpoints", + "ec2:DeleteInternetGateway", "ec2:ModifyVpcAttribute", "ec2:ReplaceNetworkAclAssociation", "ec2:RevokeSecurityGroupEgress", @@ -74,7 +77,8 @@ 
"arn:aws:ec2:${aws_region}:${account_id}:route-table/*", "arn:aws:ec2:${aws_region}:${account_id}:network-acl/*", "arn:aws:ec2:${aws_region}:${account_id}:security-group/*", - "arn:aws:ec2:${aws_region}:${account_id}:security-group-rule/*" + "arn:aws:ec2:${aws_region}:${account_id}:security-group-rule/*", + "arn:aws:ec2:${aws_region}:${account_id}:internet-gateway/*" ] }, { diff --git a/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl index 9ebb1a10..e50dc05c 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_data.policy.json.tpl @@ -126,6 +126,7 @@ "Effect": "Allow", "Action": [ "cloudwatch:GetMetricWidgetImage", + "logs:CreateLogGroup", "logs:CreateLogDelivery", "logs:DeleteLogDelivery", "logs:DeleteResourcePolicy", @@ -224,6 +225,7 @@ "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/lambda/${resource_prefix}-*${workspace_suffix}", "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/apigateway/${resource_prefix}-*${workspace_suffix}", "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/stepfunctions/${resource_prefix}-*${workspace_suffix}", + "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/accessanalyzer/*", "arn:aws:cloudwatch:${aws_region}:${account_id}:alarm:*", "arn:aws:cloudwatch::${account_id}:dashboard/*", "arn:aws:sns:${aws_region}:${account_id}:cloudwatch*", diff --git a/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl b/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl index 43b45a54..ff962755 100644 --- a/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl +++ b/infrastructure/stacks/github_runner/account_github_runner_security.policy.json.tpl @@ -32,6 +32,7 @@ "Sid": "ManagementAccess", "Effect": "Allow", "Action": [ 
+ "access-analyzer:CreateAnalyzer", "access-analyzer:GetAnalyzer", "access-analyzer:GetArchiveRule", "access-analyzer:GetFinding", @@ -47,6 +48,7 @@ "iam:List*", "inspector2:BatchGetAccountStatus", "inspector2:GetConfiguration", + "inspector2:Enable*", "kms:CreateAlias", "kms:CreateKey", "kms:List*", @@ -57,8 +59,10 @@ "securityhub:DescribeHub", "securityhub:GetFindings", "securityhub:GetInsights", + "securityhub:BatchEnable*", "shield:Describe*", "shield:List*", + "shield:CreateSubscription", "sts:GetCallerIdentity", "wafv2:List*" ], diff --git a/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl b/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl index 8f6a97b4..da9398fd 100644 --- a/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl +++ b/infrastructure/stacks/github_runner/app_github_runner_compute.policy.json.tpl @@ -45,11 +45,13 @@ "ec2:AssociateRouteTable", "ec2:AuthorizeSecurityGroupEgress", "ec2:AuthorizeSecurityGroupIngress", + "ec2:AttachInternetGateway", "ec2:CreateFlowLogs", "ec2:CreateNetworkAcl", "ec2:CreateNetworkAclEntry", "ec2:CreateRouteTable", "ec2:CreateSecurityGroup", + "ec2:CreateInternetGateway", "ec2:CreateSubnet", "ec2:CreateTags", "ec2:CreateVpc", @@ -57,6 +59,7 @@ "ec2:DeleteNetworkAcl", "ec2:DeleteNetworkAclEntry", "ec2:DeleteSecurityGroup", + "ec2:DeleteInternetGateway", "ec2:DeleteTags", "ec2:DeleteVpc", "ec2:DeleteVpcEndpoints", @@ -74,7 +77,8 @@ "arn:aws:ec2:${aws_region}:${account_id}:route-table/*", "arn:aws:ec2:${aws_region}:${account_id}:network-acl/*", "arn:aws:ec2:${aws_region}:${account_id}:security-group/*", - "arn:aws:ec2:${aws_region}:${account_id}:security-group-rule/*" + "arn:aws:ec2:${aws_region}:${account_id}:security-group-rule/*", + "arn:aws:ec2:${aws_region}:${account_id}:internet-gateway/*" ] }, { diff --git a/infrastructure/stacks/github_runner/app_github_runner_data.policy.json.tpl 
b/infrastructure/stacks/github_runner/app_github_runner_data.policy.json.tpl index 7a33ec40..95e39bba 100644 --- a/infrastructure/stacks/github_runner/app_github_runner_data.policy.json.tpl +++ b/infrastructure/stacks/github_runner/app_github_runner_data.policy.json.tpl @@ -198,6 +198,7 @@ "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/lambda/${resource_prefix}-*${workspace_suffix}", "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/apigateway/${resource_prefix}-*${workspace_suffix}", "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/stepfunctions/${resource_prefix}-*${workspace_suffix}", + "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/accessanalyzer/*", "arn:aws:cloudwatch:${aws_region}:${account_id}:alarm:*", "arn:aws:cloudwatch::${account_id}:dashboard/*", "arn:aws:sns:${aws_region}:${account_id}:cloudwatch*", diff --git a/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl b/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl index 52e8ec61..6fb107e2 100644 --- a/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl +++ b/infrastructure/stacks/github_runner/app_github_runner_security.policy.json.tpl @@ -133,6 +133,7 @@ "Sid": "IAMAccessAnalyzerReadOnly", "Effect": "Allow", "Action": [ + "access-analyzer:CreateAnalyzer", "access-analyzer:GetAnalyzer", "access-analyzer:GetArchiveRule", "access-analyzer:GetFinding", @@ -234,7 +235,8 @@ "Effect": "Allow", "Action": [ "shield:Describe*", - "shield:List*" + "shield:List*", + "shield:CreateSubscription" ], "Resource": "*" }, @@ -245,7 +247,8 @@ "securityhub:GetEnabledStandards", "securityhub:DescribeHub", "securityhub:GetFindings", - "securityhub:GetInsights" + "securityhub:GetInsights", + "securityhub:BatchEnable*" ], "Resource": "*" }, @@ -254,7 +257,8 @@ "Effect": "Allow", "Action": [ "inspector2:BatchGetAccountStatus", - "inspector2:GetConfiguration" + "inspector2:GetConfiguration", + 
"inspector2:Enable*" ], "Resource": "*" } diff --git a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf index bbd7b6bc..dddf8716 100644 --- a/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf +++ b/infrastructure/stacks/github_runner/github_runner_role_permissions_boundary.tf @@ -8,6 +8,7 @@ data "aws_iam_policy_document" "permissions_boundary" { "access-analyzer:Get*", "access-analyzer:List*", "access-analyzer:Tag*", + "access-analyzer:Create*", "apigateway:CreateRestApi", "apigateway:Delete*", "apigateway:Get*", @@ -53,14 +54,17 @@ data "aws_iam_policy_document" "permissions_boundary" { "ec2:DeleteVpc", "ec2:DeleteVpcEndpoints", "ec2:CreateRouteTable", + "ec2:CreateInternetGateway", "ec2:CreateSubnet", "ec2:RevokeSecurityGroupIngress", "ec2:CreateSecurityGroup", "ec2:RevokeSecurityGroupEgress", + "ec2:AttachInternetGateway", "ec2:AuthorizeSecurityGroup*", "ec2:CreateFlowLogs", "ec2:ReplaceNetworkAclAssociation", "ec2:DeleteSecurityGroup", + "ec2:DeleteInternetGateway", "ec2:UpdateSecurityGroupRuleDescriptionsEgress", "events:PutRule", "events:PutTargets", @@ -84,6 +88,7 @@ data "aws_iam_policy_document" "permissions_boundary" { "inspector2:List*", "inspector2:Get*", "inspector2:BatchGetAccountStatus", + "inspector2:Enable*", "kms:CreateKey", "kms:Describe*", "kms:CreateAlias", @@ -121,12 +126,12 @@ data "aws_iam_policy_document" "permissions_boundary" { "logs:DeleteLogGroup", "logs:Describe*", "logs:List*", + "logs:Put*", "logs:Tag*", "logs:Untag*", - "logs:CreateLogStream", + "logs:Create*", "logs:DeleteLogStream", "logs:PutRetentionPolicy", - "logs:CreateExportTask", "s3:PutLifecycleConfiguration", "s3:PutEncryptionConfiguration", "s3:List*", @@ -139,6 +144,7 @@ data "aws_iam_policy_document" "permissions_boundary" { "securityhub:Get*", "securityhub:BatchImportFindings", "securityhub:BatchUpdateFindings", + 
"securityhub:BatchEnable*", "securityhub:Describe*", "secretsmanager:CreateSecret", "secretsmanager:DeleteSecret", @@ -245,6 +251,7 @@ data "aws_iam_policy_document" "permissions_boundary" { "route53domains:ListDomains", "shield:List*", "shield:Describe*", + "shield:Create*", "sts:AssumeRole", "sts:AssumeRoleWithWebIdentity", "sts:GetCallerIdentity", From c55670718558eb008dae9e87735468a018a712df Mon Sep 17 00:00:00 2001 From: Jack Cullen Date: Wed, 11 Mar 2026 14:57:08 +0000 Subject: [PATCH 46/46] NPT-951: DynamoDB_Logging --- scripts/workflow/boostrapper.sh | 289 ++++++++++++++++++++++++++++++++ 1 file changed, 289 insertions(+) create mode 100644 scripts/workflow/boostrapper.sh diff --git a/scripts/workflow/boostrapper.sh b/scripts/workflow/boostrapper.sh new file mode 100644 index 00000000..95da98a5 --- /dev/null +++ b/scripts/workflow/boostrapper.sh @@ -0,0 +1,289 @@ +#! /bin/bash + +# This bootstrapper script initialises various resources necessary for Terraform and Github Actions to build +# fail on first error +set -e +# Before running this bootstrapper script: +# - Login to an appropriate AWS account as appropriate user via command-line AWS-cli +# - Export the following variables appropriate for your account and github setup prior to calling this script +# - They are NOT set in this script to avoid details being stored in repo +export ACTION="${ACTION:-"apply"}" # default action is apply +export AWS_REGION="${AWS_REGION:-"eu-west-2"}" # The AWS region into which you intend to deploy the application (where the terraform bucket will be created) eg eu-west-2 +export ENVIRONMENT="${ENVIRONMENT:-"mgmt"}" # Identify the environment (one of mgmt, dev, test, sandpit, int, ref, non-prod, preprod, prod or prototype) usually part of the account name +export PROJECT="${PROJECT:-"saet"}" +export TF_VAR_REPO_NAME="${REPOSITORY:-"$(basename -s .git "$(git config --get remote.origin.url)")"}" +export TF_VAR_TERRAFORM_STATE_BUCKET_NAME="nhse-$ENVIRONMENT-$TF_VAR_REPO_NAME-terraform-state" # globally unique name 
+export TF_VAR_TERRAFORM_LOCK_TABLE_NAME="nhse-$ENVIRONMENT-$TF_VAR_REPO_NAME-terraform-state-lock" + +export WORKSPACE="${WORKSPACE:-"default"}" + +# These used by both stacks to be bootstrapped +ROOT_DIR=$PWD +COMMON_TF_VARS_FILE="common.tfvars" +INFRASTRUCTURE_DIR="${INFRASTRUCTURE_DIR:-"infrastructure"}" +TERRAFORM_DIR="${TERRAFORM_DIR:-"$INFRASTRUCTURE_DIR/stacks"}" +ENVIRONMENTS_DIR="$ROOT_DIR/$INFRASTRUCTURE_DIR/environments" + +# check exports have been done +EXPORTS_SET=0 +# Check key variables have been exported - see above +if [[ ! "$ACTION" =~ ^(plan|apply|destroy) ]]; then + echo ACTION must be one of following terraform actions - plan, apply or destroy + EXPORTS_SET=1 +fi + +if [[ -z "$AWS_REGION" ]] ; then + echo Set AWS_REGION to name of the AWS region to host the terraform state bucket + EXPORTS_SET=1 +fi + +if [[ -z "$PROJECT" ]] ; then + echo Set PROJECT to identify if account is for saet + EXPORTS_SET=1 +else + if [[ ! "$PROJECT" =~ ^(saet) ]]; then + echo PROJECT should be saet + EXPORTS_SET=1 + fi +fi + +if [[ -z "$ENVIRONMENT" ]] ; then + echo Set ENVIRONMENT to identify if account is for mgmt, dev, test, sandpit, int, ref, non-prod, preprod or prod + EXPORTS_SET=1 +else + if [[ ! $ENVIRONMENT =~ ^(mgmt|dev|test|sandpit|int|ref|non-prod|preprod|prod|prototype) ]]; then + echo ENVIRONMENT should be mgmt, dev, test, sandpit, int, ref, non-prod, preprod or prod + EXPORTS_SET=1 + fi +fi + +if [[ $EXPORTS_SET = 1 ]] ; then + echo One or more required exports not correctly set + exit 1 +fi + +ENV_TF_VARS_FILE="$ENVIRONMENT/environment.tfvars" +if ! 
# Fail fast when the requested environment has no tfvars file defined.
# (NOTE(review): the `if !` head of this guard sits just above this chunk;
# reconstructed from the visible then/echo/exit/fi — confirm against full file.)
if ! [[ -f "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" ]] ; then
  echo "No environment variables defined for $ENVIRONMENT environment"
  exit 1
fi


# -------------
# First time thru we haven't built the remote state bucket or lock table - so assume it doesn't exist.
# If the remote state bucket does exist we are going to use it.
if aws s3api head-bucket --bucket "$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" 2>/dev/null; then
  echo "Terraform S3 State Bucket Name: ${TF_VAR_TERRAFORM_STATE_BUCKET_NAME} already bootstrapped"
  export USE_REMOTE_STATE_STORE=true
else
  export USE_REMOTE_STATE_STORE=false
fi

# ------------- Step one create tf state bucket, state locks and account alias -----------
export ACTION=$ACTION
export STACK=terraform_management
# Stack names use hyphens where the stack directory uses underscores.
TF_VAR_STACK_NAME=$(echo "$STACK" | tr '_' '-')
export TF_VAR_STACK_NAME

# Migrate terraform state from the local backend to the remote S3/DynamoDB backend.
# Relies on: STACK, TF_VAR_TERRAFORM_STATE_BUCKET_NAME, TF_VAR_TERRAFORM_LOCK_TABLE_NAME, AWS_REGION.
function terraform-init-migrate {
  TERRAFORM_STATE_KEY=$STACK/terraform.state

  terraform init -migrate-state -force-copy \
    -backend-config="bucket=$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" \
    -backend-config="dynamodb_table=$TF_VAR_TERRAFORM_LOCK_TABLE_NAME" \
    -backend-config="encrypt=true" \
    -backend-config="key=$TERRAFORM_STATE_KEY" \
    -backend-config="region=$AWS_REGION"
}

# Initialise terraform for the current stack, choosing the local backend on first
# bootstrap (USE_REMOTE_STATE_STORE falsey) or the remote S3 backend otherwise.
function terraform-initialise {
  echo "Terraform S3 State Bucket Name: ${TF_VAR_TERRAFORM_STATE_BUCKET_NAME}"
  echo "Terraform Lock Table Name: ${TF_VAR_TERRAFORM_LOCK_TABLE_NAME}"

  if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then
    terraform init
  else
    terraform init \
      -backend-config="bucket=$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" \
      -backend-config="dynamodb_table=$TF_VAR_TERRAFORM_LOCK_TABLE_NAME" \
      -backend-config="encrypt=true" \
      -backend-config="key=$STACK/terraform.state" \
      -backend-config="region=$AWS_REGION"
  fi
}

# Provision the github_runner stack: derive the GitHub Actions OIDC thumbprint,
# then init and plan/apply the stack with the shared tf files copied in.
function github_runner_stack {
  # now do account_wide stack for github runner and for oidc provider
  # ------------- Step three create thumbprint for github actions -----------
  # FIX: -sf so a failed HTTP request aborts instead of capturing an error body
  # that would silently produce a garbage thumbprint downstream.
  export HOST=$(curl -sf https://token.actions.githubusercontent.com/.well-known/openid-configuration)
  # FIX: quote the here-string so the JSON payload is not word-split/glob-expanded.
  export CERT_URL=$(jq -r '.jwks_uri | split("/")[2]' <<< "$HOST")
  # Extract the last certificate in the chain and fingerprint it (lowercase, no colons).
  export THUMBPRINT=$(echo | openssl s_client -servername "$CERT_URL" -showcerts -connect "$CERT_URL":443 2> /dev/null | tac | sed -n '/-----END CERTIFICATE-----/,/-----BEGIN CERTIFICATE-----/p; /-----BEGIN CERTIFICATE-----/q' | tac | openssl x509 -sha1 -fingerprint -noout | sed 's/://g' | awk -F= '{print tolower($2)}')
  # ------------- Step four create oidc identity provider, github runner role and policies for that role -----------
  export TF_VAR_oidc_provider_url="https://token.actions.githubusercontent.com"
  export TF_VAR_oidc_thumbprint=$THUMBPRINT
  export TF_VAR_oidc_client="sts.amazonaws.com"
  export STACK=github_runner
  TF_VAR_STACK_NAME=$(echo "$STACK" | tr '_' '-')
  export TF_VAR_STACK_NAME

  # specific to stack
  STACK_TF_VARS_FILE="$STACK.tfvars"
  # the directory that holds the stack to terraform
  STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK

  if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then
    echo "Bootstrapping the $STACK stack (terraform $ACTION) to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT"
  else
    echo "Preparing to run terraform $ACTION for $STACK stack to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT"
  fi

  # remove any previous local backend for stack
  rm -rf "$STACK_DIR"/.terraform
  rm -f "$STACK_DIR"/.terraform.lock.hcl
  # copy shared tf files to stack
  cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR"
  cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR"
  cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/common-variables.tf "$STACK_DIR"
  # versions.tf declares the backend: remote (s3) once bootstrapped, local before.
  if [[ "$USE_REMOTE_STATE_STORE" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON) ]]; then
    cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR"
  else
    cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/local/versions.tf "$STACK_DIR"
  fi
  # switch to target stack directory ahead of tf init/plan/apply
  cd "$STACK_DIR" || exit
  # if no stack tfvars create temporary one
  TEMP_STACK_TF_VARS_FILE=0
  if [[ ! -f "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" ]] ; then
    touch "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE"
    TEMP_STACK_TF_VARS_FILE=1
  fi

  # init terraform
  terraform-initialise

  # FIX: var-file paths fully quoted (original left the filename component unquoted).
  if [ -n "$ACTION" ] && [ "$ACTION" = 'plan' ] ; then
    terraform plan \
      -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$COMMON_TF_VARS_FILE" \
      -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" \
      -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE"
  fi

  if [ -n "$ACTION" ] && [ "$ACTION" = 'apply' ] ; then
    terraform apply -auto-approve \
      -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$COMMON_TF_VARS_FILE" \
      -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" \
      -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE"
  fi
  # cleardown temp files
  rm -f "$STACK_DIR"/common-variables.tf
  rm -f "$STACK_DIR"/locals.tf
  rm -f "$STACK_DIR"/provider.tf
  rm -f "$STACK_DIR"/versions.tf
  # FIX: use [[ ]] (consistent with the rest of the script) instead of `[ x == y ]`,
  # whose `==` operator is non-POSIX inside single brackets.
  if [[ $TEMP_STACK_TF_VARS_FILE == 1 ]]; then
    rm "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE"
  fi
}

if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then
  echo "Bootstrapping the $STACK stack (terraform $ACTION) to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT"
else
  echo "Preparing to run terraform $ACTION for $STACK stack to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT"
fi

# specific to stack
STACK_TF_VARS_FILE="$STACK.tfvars"
# the directory that holds the stack to terraform
STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK
# remove any previous local backend for stack
rm -rf "$STACK_DIR"/.terraform
rm -f "$STACK_DIR"/.terraform.lock.hcl
# copy shared tf files to stack
cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR"
cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR"
cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/common-variables.tf "$STACK_DIR"

# versions.tf declares the backend: remote (s3) once bootstrapped, local before.
if [[ "$USE_REMOTE_STATE_STORE" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON) ]]; then
  cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR"
else
  cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/local/versions.tf "$STACK_DIR"
fi
# switch to target stack directory ahead of tf init/plan/apply
cd "$STACK_DIR" || exit
# if no stack tfvars create temporary one
TEMP_STACK_TF_VARS_FILE=0
if [[ ! -f "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" ]] ; then
  touch "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE"
  TEMP_STACK_TF_VARS_FILE=1
fi

# init terraform
terraform-initialise

# FIX: var-file paths fully quoted (original left the filename component unquoted).
if [ -n "$ACTION" ] && [ "$ACTION" = 'plan' ] ; then
  terraform plan \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$COMMON_TF_VARS_FILE" \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" \
    -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE"
fi
if [ -n "$ACTION" ] && [ "$ACTION" = 'apply' ] ; then
  terraform apply -auto-approve \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$COMMON_TF_VARS_FILE" \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" \
    -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE"
fi
if [ -n "$ACTION" ] && [ "$ACTION" = 'destroy' ] ; then
  terraform destroy -auto-approve \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$COMMON_TF_VARS_FILE" \
    -var-file "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" \
    -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE"
fi
# cleardown temp files
rm -f "$STACK_DIR"/common-variables.tf
rm -f "$STACK_DIR"/locals.tf
rm -f "$STACK_DIR"/provider.tf
rm -f "$STACK_DIR"/versions.tf

if [[ $TEMP_STACK_TF_VARS_FILE == 1 ]]; then
  rm "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE"
fi

# back to root
cd "$ROOT_DIR" || exit

# having built the stack using a local backend we need to migrate the state held
# locally to the newly built remote backend.
# FIX: the original `if ! $USE_REMOTE_STATE_STORE` executed the variable's VALUE
# as a command — unsafe and broken for any value other than literal true/false.
# Use the same falsey-regex test the rest of this script uses for this flag.
if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then
  # check if remote state bucket exists we are okay to migrate state to it
  if aws s3api head-bucket --bucket "$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" 2>/dev/null; then
    export USE_REMOTE_STATE_STORE=true
    echo "Preparing to migrate stack from local backend to remote backend"
    # the directory that holds the stack to terraform
    ROOT_DIR=$PWD
    STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK
    cd "$STACK_DIR" || exit
    cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR"
    cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR"
    cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR"
    # run terraform init with migrate flag set
    terraform-init-migrate
    # now push local state to remote
    terraform state push "$STACK_DIR"/terraform.tfstate
    rm -f "$STACK_DIR"/locals.tf
    rm -f "$STACK_DIR"/provider.tf
    rm -f "$STACK_DIR"/versions.tf
    # remove local terraform state to prevent clash when re-running eg to plan
    rm -f "$STACK_DIR"/terraform.tfstate
    cd "$ROOT_DIR" || exit
  else
    export USE_REMOTE_STATE_STORE=false
  fi
fi

# back to root
cd "$ROOT_DIR" || exit
echo "Preparing the $TF_VAR_REPO_NAME repo github-runner in $ENVIRONMENT environment"
github_runner_stack