diff --git a/.github/actions/cleardown-tf-state/action.yaml b/.github/actions/cleardown-tf-state/action.yaml new file mode 100644 index 00000000..3005c91f --- /dev/null +++ b/.github/actions/cleardown-tf-state/action.yaml @@ -0,0 +1,25 @@ +name: "Cleardown terraform state action" +description: "Delete the terraform state" +inputs: + workspace: + description: "The name of the workspace to action the infrastructure into." + required: true + environment: + description: "The name of the environment to action the infrastructure into." + required: true + stack: + description: "A single variable for the stack to be cleared." + required: true + +runs: + using: composite + steps: + - name: Delete terraform state + id: delete_tf_state + shell: bash + env: + WORKSPACE: ${{ inputs.workspace }} + ENVIRONMENT: ${{ inputs.environment }} + STACK: ${{ inputs.stack }} + run: | + ./scripts/workflow/cleardown-terraform-state.sh diff --git a/.github/workflows/artefacts-cleardown.yaml b/.github/workflows/artefacts-cleardown.yaml index a45dd6f0..9e65a5b1 100644 --- a/.github/workflows/artefacts-cleardown.yaml +++ b/.github/workflows/artefacts-cleardown.yaml @@ -26,6 +26,13 @@ on: description: "The type of permissions (e.g., account, app)" required: true type: string + secrets: + ACCOUNT_ID: + description: "AWS account ID for credentials" + required: true + MGMT_ACCOUNT_ID: + description: "AWS management account ID for credentials" + required: false jobs: cleardown-artefacts: diff --git a/.github/workflows/infrastructure-cleardown.yaml b/.github/workflows/infrastructure-cleardown.yaml index 298226b7..b66bb403 100644 --- a/.github/workflows/infrastructure-cleardown.yaml +++ b/.github/workflows/infrastructure-cleardown.yaml @@ -43,6 +43,13 @@ on: description: "The type of permissions (e.g., account, app)" required: true type: string + secrets: + ACCOUNT_ID: + description: "AWS account ID for credentials" + required: true + MGMT_ACCOUNT_ID: + description: "AWS management account ID for 
credentials" + required: true jobs: destroy-application-infrastructure: diff --git a/.github/workflows/pipeline-infrastructure-cleardown.yaml b/.github/workflows/pipeline-infrastructure-cleardown.yaml index 57938486..ab93fd15 100644 --- a/.github/workflows/pipeline-infrastructure-cleardown.yaml +++ b/.github/workflows/pipeline-infrastructure-cleardown.yaml @@ -31,10 +31,17 @@ on: description: "Specify the workspace to cleardown" required: true type: string - jobs: metadata: - if: github.actor != 'github-merge-queue[bot]' + if: >- + github.actor != 'github-merge-queue[bot]' && + ( + github.event_name != 'delete' || + ( + github.event.ref_type == 'branch' && + (startsWith(github.event.ref, 'task/') || startsWith(github.event.ref, 'dependabot/')) + ) + ) name: "Get Metadata" uses: ./.github/workflows/metadata.yaml @@ -49,7 +56,7 @@ jobs: with: environment: ${{ github.event.client_payload.environment || inputs.environment || needs.metadata.outputs.environment }} workspace: ${{ github.event.client_payload.workspace || inputs.workspace || needs.metadata.outputs.workspace }} - stacks: "['triage]" + stacks: "['triage']" application_tag: ${{ inputs.application_tag || github.event.client_payload.application_tag || 'latest' }} commit_hash: ${{ needs.metadata.outputs.commit_hash }} workflow_timeout: 30 diff --git a/infrastructure/stacks/triage/alarms.tf b/infrastructure/stacks/triage/alarms.tf new file mode 100644 index 00000000..6f429d14 --- /dev/null +++ b/infrastructure/stacks/triage/alarms.tf @@ -0,0 +1,49 @@ +locals { + dynamodb_alarm_table_names = { + starting_coords = module.starting_coords.dynamodb_table_name + triage_nodes = module.triage_nodes.dynamodb_table_name + bodymaps = module.bodymaps.dynamodb_table_name + } +} + +resource "aws_cloudwatch_metric_alarm" "dynamodb_system_errors" { + for_each = var.enable_dynamodb_basic_alarms ? 
local.dynamodb_alarm_table_names : {} + + alarm_name = "${local.resource_prefix}-${each.key}-dynamodb-system-errors${local.workspace_suffix}" + alarm_description = "DynamoDB system errors detected for ${each.value}" + namespace = "AWS/DynamoDB" + metric_name = "SystemErrors" + statistic = "Sum" + period = var.dynamodb_alarm_period_seconds + evaluation_periods = var.dynamodb_alarm_evaluation_periods + threshold = var.dynamodb_system_errors_threshold + comparison_operator = "GreaterThanThreshold" + treat_missing_data = "notBreaching" + alarm_actions = var.dynamodb_alarm_actions + ok_actions = var.dynamodb_alarm_ok_actions + + dimensions = { + TableName = each.value + } +} + +resource "aws_cloudwatch_metric_alarm" "dynamodb_throttled_requests" { + for_each = var.enable_dynamodb_basic_alarms ? local.dynamodb_alarm_table_names : {} + + alarm_name = "${local.resource_prefix}-${each.key}-dynamodb-throttled-requests${local.workspace_suffix}" + alarm_description = "DynamoDB throttled requests detected for ${each.value}" + namespace = "AWS/DynamoDB" + metric_name = "ThrottledRequests" + statistic = "Sum" + period = var.dynamodb_alarm_period_seconds + evaluation_periods = var.dynamodb_alarm_evaluation_periods + threshold = var.dynamodb_throttled_requests_threshold + comparison_operator = "GreaterThanThreshold" + treat_missing_data = "notBreaching" + alarm_actions = var.dynamodb_alarm_actions + ok_actions = var.dynamodb_alarm_ok_actions + + dimensions = { + TableName = each.value + } +} diff --git a/infrastructure/stacks/triage/s3.tf b/infrastructure/stacks/triage/s3.tf index 407c87f1..72451c6c 100644 --- a/infrastructure/stacks/triage/s3.tf +++ b/infrastructure/stacks/triage/s3.tf @@ -1,4 +1,4 @@ module "clinical_data_uploader_bucket" { source = "../../modules/s3" - bucket_name = "${local.account_prefix}-clinical-data-uploader" + bucket_name = "${local.account_prefix}-clinical-data-uploader-${local.workspace_suffix}" } diff --git a/infrastructure/stacks/triage/variables.tf 
b/infrastructure/stacks/triage/variables.tf index dcfe6caa..cedfe71a 100644 --- a/infrastructure/stacks/triage/variables.tf +++ b/infrastructure/stacks/triage/variables.tf @@ -71,3 +71,46 @@ variable "splunk_hec_token" { type = string default = "" } + +# DynamoDB basic alarms +variable "enable_dynamodb_basic_alarms" { + description = "Enable basic CloudWatch alarms for DynamoDB table health metrics" + type = bool + default = true +} + +variable "dynamodb_alarm_actions" { + description = "List of ARNs for CloudWatch alarm actions (for example SNS topic ARNs)" + type = list(string) + default = [] +} + +variable "dynamodb_alarm_ok_actions" { + description = "List of ARNs for CloudWatch OK actions" + type = list(string) + default = [] +} + +variable "dynamodb_alarm_period_seconds" { + description = "Period in seconds over which DynamoDB alarm metrics are evaluated" + type = number + default = 300 +} + +variable "dynamodb_alarm_evaluation_periods" { + description = "Number of periods over which data is compared to alarm threshold" + type = number + default = 1 +} + +variable "dynamodb_system_errors_threshold" { + description = "Threshold for the DynamoDB SystemErrors alarm" + type = number + default = 0 +} + +variable "dynamodb_throttled_requests_threshold" { + description = "Threshold for the DynamoDB ThrottledRequests alarm" + type = number + default = 0 +} diff --git a/scripts/githooks/check-commit-message.sh b/scripts/githooks/check-commit-message.sh index e6256109..4ec5fa2f 100755 --- a/scripts/githooks/check-commit-message.sh +++ b/scripts/githooks/check-commit-message.sh @@ -20,18 +20,17 @@ function check_jira_ref { fi echo $COMMIT_MESSAGE - return + return 0 } function check_commit_message_format { COMMIT_MESSAGE="$1" - local 
regex='^(feat|fix|chore|docs|style|refactor|perf|test|ci|build|revert|style)(\([a-z0-9_-]+\))?: (SAET)-[0-9]+ .+' + local REGEX='^(feat|fix|chore|docs|style|refactor|perf|test|ci|build|revert)(\([a-z0-9_-]+\))?: (SAET)-[0-9]+ .+' - if ! [[ $COMMIT_MESSAGE =~ $regex ]]; then + if ! [[ $COMMIT_MESSAGE =~ $REGEX ]]; then echo -e "\033[0;31mInvalid conventional commit message format! Expected: (): \033[0m" return 1 fi - return } function check_commit_message_length { @@ -41,7 +40,6 @@ function check_commit_message_length { if [[ "$COMMIT_MESSAGE_LENGTH" -gt $GIT_COMMIT_MESSAGE_MAX_LENGTH ]] ; then echo "At $COMMIT_MESSAGE_LENGTH characters the commit message exceeds limit of $GIT_COMMIT_MESSAGE_MAX_LENGTH" fi - return } function check_git_commit_message { @@ -55,7 +53,6 @@ function check_git_commit_message { [[ ! -z "$VALID_LENGTH" ]] && echo $VALID_LENGTH return 1 fi - return } # ---- MAIN EXECUTION ---- diff --git a/scripts/workflow/boostrapper.sh b/scripts/workflow/boostrapper.sh new file mode 100644 index 00000000..95da98a5 --- /dev/null +++ b/scripts/workflow/boostrapper.sh @@ -0,0 +1,289 @@ +#! 
/bin/bash + +# This bootstrapper script initialises various resources necessary for Terraform and Github Actions to build +# fail on first error +set -e +# Before running this bootstrapper script: +# - Login to an appropriate AWS account as appropriate user via command-line AWS-cli +# - Export the following variables appropriate for your account and github setup prior to calling this script +# - They are NOT set in this script to avoid details being stored in repo +export ACTION="${ACTION:-"apply"}" # default action is apply +export AWS_REGION="${AWS_REGION:-"eu-west-2"}" # The AWS region into which you intend to deploy the application (where the terraform bucket will be created) eg eu-west-2 +export ENVIRONMENT="${ENVIRONMENT:-"mgmt"}" # Identify the environment (one of mgmt, dev, test, sandpit, int, ref, non-prod, preprod, prod or prototype) usually part of the account name +export PROJECT="${PROJECT:-"saet"}" +export TF_VAR_REPO_NAME="${REPOSITORY:-"$(basename -s .git "$(git config --get remote.origin.url)")"}" +export TF_VAR_TERRAFORM_STATE_BUCKET_NAME="nhse-$ENVIRONMENT-$TF_VAR_REPO_NAME-terraform-state" # globally unique name +export TF_VAR_TERRAFORM_LOCK_TABLE_NAME="nhse-$ENVIRONMENT-$TF_VAR_REPO_NAME-terraform-state-lock" + +export WORKSPACE="${WORKSPACE:-"default"}" + +# These used by both stacks to be bootstrapped +ROOT_DIR=$PWD +COMMON_TF_VARS_FILE="common.tfvars" +INFRASTRUCTURE_DIR="${INFRASTRUCTURE_DIR:-"infrastructure"}" +TERRAFORM_DIR="${TERRAFORM_DIR:-"$INFRASTRUCTURE_DIR/stacks"}" +ENVIRONMENTS_DIR="$ROOT_DIR/$INFRASTRUCTURE_DIR/environments" + +# check exports have been done +EXPORTS_SET=0 +# Check key variables have been exported - see above +if [[ ! 
"$ACTION" =~ ^(plan|apply|destroy) ]]; then + echo ACTION must be one of following terraform actions - plan, apply or destroy + EXPORTS_SET=1 +fi + +if [[ -z "$AWS_REGION" ]] ; then + echo Set AWS_REGION to name of the AWS region to host the terraform state bucket + EXPORTS_SET=1 +fi + +if [[ -z "$PROJECT" ]] ; then + echo Set PROJECT to identify if account is for saet + EXPORTS_SET=1 +else + if [[ ! "$PROJECT" =~ ^(saet) ]]; then + echo PROJECT should be saet + EXPORTS_SET=1 + fi +fi + +if [[ -z "$ENVIRONMENT" ]] ; then + echo Set ENVIRONMENT to identify if account is for mgmt, dev, test, sandpit, int, ref, non-prod, preprod or prod + EXPORTS_SET=1 +else + if [[ ! $ENVIRONMENT =~ ^(mgmt|dev|test|sandpit|int|ref|non-prod|preprod|prod|prototype) ]]; then + echo ENVIRONMENT should be mgmt, dev, test, sandpit, int, ref, non-prod, preprod or prod + EXPORTS_SET=1 + fi +fi + +if [[ $EXPORTS_SET = 1 ]] ; then + echo One or more required exports not correctly set + exit 1 +fi + +ENV_TF_VARS_FILE="$ENVIRONMENT/environment.tfvars" +if ! 
[[ -f "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" ]] ; then + echo "No environment variables defined for $ENVIRONMENT environment" + exit 1 +fi + + +# ------------- +# First time thru we haven't build the remote state bucket or lock table - so assume it doesn't exist to use +# if remote state bucket does exist we are going to use it +if aws s3api head-bucket --bucket "$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" 2>/dev/null; then + echo "Terraform S3 State Bucket Name: ${TF_VAR_TERRAFORM_STATE_BUCKET_NAME} already bootstrapped" + export USE_REMOTE_STATE_STORE=true +else + export USE_REMOTE_STATE_STORE=false +fi + +# ------------- Step one create tf state bucket, state locks and account alias ----------- +export ACTION=$ACTION +export STACK=terraform_management +TF_VAR_STACK_NAME=$(echo "$STACK" | tr '_' '-' ) +export TF_VAR_STACK_NAME + +# function to migrate state from local to remote +function terraform-init-migrate { + TERRAFORM_STATE_KEY=$STACK/terraform.state + + terraform init -migrate-state -force-copy \ + -backend-config="bucket=$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" \ + -backend-config="dynamodb_table=$TF_VAR_TERRAFORM_LOCK_TABLE_NAME" \ + -backend-config="encrypt=true" \ + -backend-config="key=$TERRAFORM_STATE_KEY" \ + -backend-config="region=$AWS_REGION" + +} +# function to determine if state is held locally or remote +function terraform-initialise { + + echo "Terraform S3 State Bucket Name: ${TF_VAR_TERRAFORM_STATE_BUCKET_NAME}" + echo "Terraform Lock Table Name: ${TF_VAR_TERRAFORM_LOCK_TABLE_NAME}" + + if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then + terraform init + else + terraform init \ + -backend-config="bucket=$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" \ + -backend-config="dynamodb_table=$TF_VAR_TERRAFORM_LOCK_TABLE_NAME" \ + -backend-config="encrypt=true" \ + -backend-config="key=$STACK/terraform.state" \ + -backend-config="region=$AWS_REGION" + fi +} + +function github_runner_stack { + # now do account_wide stack for github runner and 
for oidc provider + # ------------- Step three create thumbprint for github actions ----------- + export HOST=$(curl https://token.actions.githubusercontent.com/.well-known/openid-configuration) + export CERT_URL=$(jq -r '.jwks_uri | split("/")[2]' <<< $HOST) + export THUMBPRINT=$(echo | openssl s_client -servername "$CERT_URL" -showcerts -connect "$CERT_URL":443 2> /dev/null | tac | sed -n '/-----END CERTIFICATE-----/,/-----BEGIN CERTIFICATE-----/p; /-----BEGIN CERTIFICATE-----/q' | tac | openssl x509 -sha1 -fingerprint -noout | sed 's/://g' | awk -F= '{print tolower($2)}') + # ------------- Step four create oidc identity provider, github runner role and policies for that role ----------- + export TF_VAR_oidc_provider_url="https://token.actions.githubusercontent.com" + export TF_VAR_oidc_thumbprint=$THUMBPRINT + export TF_VAR_oidc_client="sts.amazonaws.com" + export STACK=github_runner + TF_VAR_STACK_NAME=$(echo "$STACK" | tr '_' '-' ) + export TF_VAR_STACK_NAME + + # specific to stack + STACK_TF_VARS_FILE="$STACK.tfvars" + # the directory that holds the stack to terraform + STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK + + if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then + echo "Bootstrapping the $STACK stack (terraform $ACTION) to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT" + else + echo "Preparing to run terraform $ACTION for $STACK stack to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT" + fi + + # remove any previous local backend for stack + rm -rf "$STACK_DIR"/.terraform + rm -f "$STACK_DIR"/.terraform.lock.hcl + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR" + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR" + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/common-variables.tf "$STACK_DIR" + # copy shared tf files to stack + if [[ "$USE_REMOTE_STATE_STORE" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON) ]]; then + cp 
"$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR" + else + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/local/versions.tf "$STACK_DIR" + fi + # switch to target stack directory ahead of tf init/plan/apply + cd "$STACK_DIR" || exit + # if no stack tfvars create temporary one + TEMP_STACK_TF_VARS_FILE=0 + if [[ ! -f "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" ]] ; then + touch "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" + TEMP_STACK_TF_VARS_FILE=1 + fi + + # init terraform + terraform-initialise + + if [ -n "$ACTION" ] && [ "$ACTION" = 'plan' ] ; then + terraform plan \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$COMMON_TF_VARS_FILE \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$STACK_TF_VARS_FILE \ + -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" + fi + + if [ -n "$ACTION" ] && [ "$ACTION" = 'apply' ] ; then + terraform apply -auto-approve \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$COMMON_TF_VARS_FILE \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$STACK_TF_VARS_FILE \ + -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" + fi + # cleardown temp files + rm -f "$STACK_DIR"/common-variables.tf + rm -f "$STACK_DIR"/locals.tf + rm -f "$STACK_DIR"/provider.tf + rm -f "$STACK_DIR"/versions.tf + if [ $TEMP_STACK_TF_VARS_FILE == 1 ]; then + rm "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" + fi + +} + +if [[ "$USE_REMOTE_STATE_STORE" =~ ^(false|no|n|off|0|FALSE|NO|N|OFF) ]]; then + echo "Bootstrapping the $STACK stack (terraform $ACTION) to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT" +else + echo "Preparing to run terraform $ACTION for $STACK stack to terraform workspace $WORKSPACE for environment $ENVIRONMENT and project $PROJECT" +fi + +# specific to stack +STACK_TF_VARS_FILE="$STACK.tfvars" +# the directory that holds the stack to terraform +STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK +# remove any previous local backend for stack +rm -rf "$STACK_DIR"/.terraform +rm -f "$STACK_DIR"/.terraform.lock.hcl 
+# copy shared tf files to stack +cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR" +cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR" +cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/common-variables.tf "$STACK_DIR" + +if [[ "$USE_REMOTE_STATE_STORE" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON) ]]; then + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR" +else + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/local/versions.tf "$STACK_DIR" +fi +# switch to target stack directory ahead of tf init/plan/apply +cd "$STACK_DIR" || exit +# if no stack tfvars create temporary one +TEMP_STACK_TF_VARS_FILE=0 +if [[ ! -f "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" ]] ; then + touch "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" + TEMP_STACK_TF_VARS_FILE=1 +fi + +# init terraform +terraform-initialise + +if [ -n "$ACTION" ] && [ "$ACTION" = 'plan' ] ; then + terraform plan \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$COMMON_TF_VARS_FILE \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$STACK_TF_VARS_FILE \ + -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" +fi +if [ -n "$ACTION" ] && [ "$ACTION" = 'apply' ] ; then + terraform apply -auto-approve \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$COMMON_TF_VARS_FILE \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$STACK_TF_VARS_FILE \ + -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" +fi +if [ -n "$ACTION" ] && [ "$ACTION" = 'destroy' ] ; then + terraform destroy -auto-approve \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$COMMON_TF_VARS_FILE \ + -var-file "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/$STACK_TF_VARS_FILE \ + -var-file "$ENVIRONMENTS_DIR/$ENV_TF_VARS_FILE" +fi +# cleardown temp files +rm -f "$STACK_DIR"/common-variables.tf +rm -f "$STACK_DIR"/locals.tf +rm -f "$STACK_DIR"/provider.tf +rm -f "$STACK_DIR"/versions.tf + +if [[ $TEMP_STACK_TF_VARS_FILE == 1 ]]; then + rm "$ROOT_DIR/$INFRASTRUCTURE_DIR/$STACK_TF_VARS_FILE" +fi + +# back to root +cd "$ROOT_DIR" || exit + +# having 
build the stack using a local backend we need to migrate the state held locally to newly build remote + +if ! $USE_REMOTE_STATE_STORE ; then + # check if remote state bucket exists we are okay to migrate state to it + if aws s3api head-bucket --bucket "$TF_VAR_TERRAFORM_STATE_BUCKET_NAME" 2>/dev/null; then + export USE_REMOTE_STATE_STORE=true + echo Preparing to migrate stack from local backend to remote backend + # the directory that holds the stack to terraform + ROOT_DIR=$PWD + STACK_DIR=$PWD/$TERRAFORM_DIR/$STACK + cd "$STACK_DIR" || exit + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/remote/versions.tf "$STACK_DIR" + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/locals.tf "$STACK_DIR" + cp "$ROOT_DIR"/"$INFRASTRUCTURE_DIR"/common/provider.tf "$STACK_DIR" + # run terraform init with migrate flag set + terraform-init-migrate + # now push local state to remote + terraform state push "$STACK_DIR"/terraform.tfstate + rm -f "$STACK_DIR"/locals.tf + rm -f "$STACK_DIR"/provider.tf + rm -f "$STACK_DIR"/versions.tf + # remove local terraform state to prevent clash when re-running eg to plan + rm -f "$STACK_DIR"/terraform.tfstate + cd "$ROOT_DIR" || exit + else + export USE_REMOTE_STATE_STORE=false + fi +fi + +# back to root +cd "$ROOT_DIR" || exit +echo "Preparing the $TF_VAR_REPO_NAME repo github-runner in $ENVIRONMENT environment" +github_runner_stack diff --git a/scripts/workflow/cleardown-artefacts.sh b/scripts/workflow/cleardown-artefacts.sh index 90ca6045..8e18af3f 100755 --- a/scripts/workflow/cleardown-artefacts.sh +++ b/scripts/workflow/cleardown-artefacts.sh @@ -27,7 +27,7 @@ fi echo "Clearing down artefacts at or below $ARTEFACT_BUCKET_NAME/$WORKSPACE" -deletion_output=$(aws s3 rm --recursive s3://$ARTEFACT_BUCKET_NAME/$WORKSPACE/ 2>&1) +deletion_output=$(aws s3 rm --recursive "s3://$ARTEFACT_BUCKET_NAME/$WORKSPACE/" 2>&1) if [[ -n "$deletion_output" ]]; then echo "Sucessfully deleted following artefacts from $ARTEFACT_BUCKET_NAME/$WORKSPACE" diff --git 
a/scripts/workflow/cleardown-terraform-state.sh b/scripts/workflow/cleardown-terraform-state.sh new file mode 100755 index 00000000..344120c6 --- /dev/null +++ b/scripts/workflow/cleardown-terraform-state.sh @@ -0,0 +1,68 @@ +#! /bin/bash + +# fail on first error +set -e +EXPORTS_SET=0 + +# check necessary environment variables are set +if [[ -z "$WORKSPACE" ]] ; then + echo Set WORKSPACE + EXPORTS_SET=1 +fi + +if [[ -z "$ENVIRONMENT" ]] ; then + echo Set ENVIRONMENT + EXPORTS_SET=1 +fi + +if [[ -z "$STACK" ]] ; then + echo Set STACK + EXPORTS_SET=1 +fi + +if [[ $EXPORTS_SET = 1 ]] ; then + echo One or more exports not set + exit 1 +fi + +# set additional environment variable +export TF_VAR_repo_name="${REPOSITORY:-"$(basename -s .git "$(git config --get remote.origin.url)")"}" +# required for terraform management stack +export TERRAFORM_BUCKET_NAME="nhse-$ENVIRONMENT-$TF_VAR_repo_name-terraform-state" # globally unique name +export TERRAFORM_LOCK_TABLE="nhse-$ENVIRONMENT-$TF_VAR_repo_name-terraform-state-lock" + +echo "Current terraform workspace is --> $WORKSPACE" +echo "Terraform state S3 bucket name is --> $TERRAFORM_BUCKET_NAME" +echo "Terraform state lock DynamoDB table is --> $TERRAFORM_LOCK_TABLE" + +# Delete Terraform state and lock entries for each stack +echo "Stack to have terraform state deleted is: $STACK" + + # Delete terraform state for current terraform workspace & echo results following deletion + deletion_output=$(aws s3 rm s3://$TERRAFORM_BUCKET_NAME/env:/$WORKSPACE/$STACK/terraform.state 2>&1) + + if [[ -n "$deletion_output" ]]; then + echo "Successfully deleted Terraform State file for the following workspace --> $WORKSPACE" + + existing_item=$(aws dynamodb get-item \ + --table-name "$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": "'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' \ + 2>&1) + + aws dynamodb delete-item \ + --table-name "$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": 
"'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' + + after_deletion=$(aws dynamodb get-item \ + --table-name "$TERRAFORM_LOCK_TABLE" \ + --key '{"LockID": {"S": "'${TERRAFORM_BUCKET_NAME}'/env:/'${WORKSPACE}'/'${STACK}'/terraform.state-md5"}}' \ + 2>&1) + if [[ -n "$existing_item" && -z "$after_deletion" ]]; then + echo "Successfully deleted Terraform State Lock file for the following stack --> $STACK" + else + echo "Terraform state Lock file not found for deletion or deletion failed for the following stack --> $STACK" + exit 1 + fi + else + echo "Terraform State file not found for deletion or deletion failed for the following workspace --> $WORKSPACE" + fi