diff --git a/.gitignore b/.gitignore index 1e69fee..f4f157e 100644 --- a/.gitignore +++ b/.gitignore @@ -61,3 +61,6 @@ tests/test-team/test-results/ tests/test-team/playwright-report/ tests/test-team/blob-report/ tests/test-team/playwright/.cache/ + +#plugin-cache +plugin-cache/ diff --git a/.tool-versions b/.tool-versions index 739c42d..b5dd39d 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,6 +1,6 @@ # This file is for you! Please, updated to the versions agreed by your team. -terraform 1.7.0 +terraform 1.9.2 pre-commit 3.6.0 nodejs 20.13.1 direnv 2.32.1 diff --git a/README.md b/README.md index 4b39d13..38722e8 100644 --- a/README.md +++ b/README.md @@ -116,3 +116,13 @@ Provide a way to contact the owners of this project. It can be a team, an indivi Unless stated otherwise, the codebase is released under the MIT License. This covers both the codebase and any sample code in the documentation. Any HTML or Markdown documentation is [© Crown Copyright](https://www.nationalarchives.gov.uk/information-management/re-using-public-sector-information/uk-government-licensing-framework/crown-copyright/) and available under the terms of the [Open Government Licence v3.0](https://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/). + +### Shared Terraform Modules + +Before you set up a new module in this repository, check `https://github.com/NHSDigital/nhs-notify-shared-modules/` to see whether a suitable module already exists. If the module you are writing could be reused elsewhere, add it to that repository in a separate PR, and once it has been tested and merged, tag the commit so that there is a stable release that all Notify repositories can consume. You can then point your module call at that reference, as below: + +```hcl +module "amp_branch" { + source = "git::https://github.com/NHSDigital/nhs-notify-shared-modules.git//infrastructure/modules/amp_branch?ref=v1.0.0" + ... 
+} diff --git a/infrastructure/terraform/bin/terraform.sh b/infrastructure/terraform/bin/terraform.sh new file mode 100755 index 0000000..756b4ef --- /dev/null +++ b/infrastructure/terraform/bin/terraform.sh @@ -0,0 +1,807 @@ +#!/bin/bash +# Terraform Scaffold +# +# A wrapper for running terraform projects +# - handles remote state +# - uses consistent .tfvars files for each environment + +## +# Set Script Version +## +readonly script_ver="1.8.1"; + +## +# Standardised failure function +## +function error_and_die { + echo -e "ERROR: ${1}" >&2; + exit 1; +}; + +## +# Print Script Version +## +function version() { + echo "${script_ver}"; +} + +## +# Print Usage Text +## +function usage() { + +cat <<EOF + +action: + - Special actions: + * plan / plan-destroy + * apply / destroy + * graph + * taint / untaint + * shell + - Generic actions: + * See https://www.terraform.io/docs/commands/ + +bucket_prefix (optional): + Defaults to: "\${project_name}-tfscaffold" + - myproject-terraform + - terraform-yourproject + - my-first-tfscaffold-project + +build_id (optional): + - testing + - \$BUILD_ID (jenkins) + +component_name: + - the name of the terraform component module in the components directory + +environment: + - dev + - test + - prod + - management + +group: + - dev + - live + - mytestgroup + +project: + - The name of the project being deployed + +region (optional): + Defaults to value of \$AWS_DEFAULT_REGION + - the AWS region name unique to all components and terraform processes + +detailed-exitcode (optional): + When not provided, false. + Changes the plan operation to exit 0 only when there are no changes. + Will be ignored for actions other than plan. + +no-color (optional): + Append -no-color to all terraform calls + +compact-warnings (optional): + Append -compact-warnings to all terraform calls + +lockfile: + Append -lockfile=MODE to calls to terraform init + +additional arguments: + Any arguments provided after "--" will be passed directly to terraform as its own arguments +EOF +}; + +## +# Test for GNU getopt +## +getopt_out=$(getopt -T) +if (( $? != 4 )) && [[ -n $getopt_out ]]; then + error_and_die "Non GNU getopt detected. If you're using a Mac then try \"brew install gnu-getopt\""; +fi + +## +# Execute getopt and process script arguments +## +readonly raw_arguments="${*}"; +ARGS=$(getopt \ + -o dhnvwa:b:c:e:g:i:l:p:r: \ + -l "help,version,bootstrap,action:,bucket-prefix:,build-id:,component:,environment:,group:,project:,region:,lockfile:,detailed-exitcode,no-color,compact-warnings" \ + -n "${0}" \ + -- \ + "$@"); + +#Bad arguments +if [ $?
-ne 0 ]; then + usage; + error_and_die "command line argument parse failure"; +fi; + +eval set -- "${ARGS}"; + +declare bootstrap="false"; +declare component_arg; +declare region_arg; +declare environment_arg; +declare group; +declare action; +declare bucket_prefix; +declare build_id; +declare project; +declare detailed_exitcode; +declare no_color; +declare compact_warnings; +declare lockfile; + +while true; do + case "${1}" in + -h|--help) + usage; + exit 0; + ;; + -v|--version) + version; + exit 0; + ;; + -c|--component) + shift; + if [ -n "${1}" ]; then + component_arg="${1}"; + shift; + fi; + ;; + -r|--region) + shift; + if [ -n "${1}" ]; then + region_arg="${1}"; + shift; + fi; + ;; + -e|--environment) + shift; + if [ -n "${1}" ]; then + environment_arg="${1}"; + shift; + fi; + ;; + -g|--group) + shift; + if [ -n "${1}" ]; then + group="${1}"; + shift; + fi; + ;; + -a|--action) + shift; + if [ -n "${1}" ]; then + action="${1}"; + shift; + fi; + ;; + -b|--bucket-prefix) + shift; + if [ -n "${1}" ]; then + bucket_prefix="${1}"; + shift; + fi; + ;; + -i|--build-id) + shift; + if [ -n "${1}" ]; then + build_id="${1}"; + shift; + fi; + ;; + -l|--lockfile) + shift; + if [ -n "${1}" ]; then + lockfile="-lockfile=${1}"; + shift; + fi; + ;; + -p|--project) + shift; + if [ -n "${1}" ]; then + project="${1}"; + shift; + fi; + ;; + --bootstrap) + shift; + bootstrap="true"; + ;; + -d|--detailed-exitcode) + shift; + detailed_exitcode="true"; + ;; + -n|--no-color) + shift; + no_color="-no-color"; + ;; + -w|--compact-warnings) + shift; + compact_warnings="-compact-warnings"; + ;; + --) + shift; + break; + ;; + esac; +done; + +declare extra_args="${@} ${no_color} ${compact_warnings}"; # All arguments supplied after "--" + +## +# Script Set-Up +## + +# Determine where I am and from that derive basepath and project name +script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"; +base_path="${script_path%%\/bin}"; +project_name_default="${base_path##*\/}"; + +status=0; + +echo "Args ${raw_arguments}"; + +# Ensure script console output is separated by blank line at top and bottom to improve readability +trap echo EXIT; +echo; + +## +# Munge Params +## + +# Set Region from args or environment. Exit if unset. +readonly region="${region_arg:-${AWS_DEFAULT_REGION}}"; +[ -n "${region}" ] \ + || error_and_die "No AWS region specified. No -r/--region argument supplied and AWS_DEFAULT_REGION undefined"; + +[ -n "${project}" ] \ + || error_and_die "Required argument -p/--project not specified"; + +# Bootstrapping is special +if [ "${bootstrap}" == "true" ]; then + [ -n "${component_arg}" ] \ + && error_and_die "The --bootstrap parameter and the -c/--component parameter are mutually exclusive"; + [ -n "${build_id}" ] \ + && error_and_die "The --bootstrap parameter and the -i/--build-id parameter are mutually exclusive. 
We do not currently support plan files for bootstrap"; + [ -n "${environment_arg}" ] && readonly environment="${environment_arg}"; +else + # Validate component to work with + [ -n "${component_arg}" ] \ + || error_and_die "Required argument missing: -c/--component"; + readonly component="${component_arg}"; + + # Validate environment to work with + [ -n "${environment_arg}" ] \ + || error_and_die "Required argument missing: -e/--environment"; + readonly environment="${environment_arg}"; +fi; + +[ -n "${action}" ] \ + || error_and_die "Required argument missing: -a/--action"; + +# Validate AWS Credentials Available +iam_iron_man="$(aws sts get-caller-identity --query 'Arn' --output text)"; +if [ -n "${iam_iron_man}" ]; then + echo -e "AWS Credentials Found. Using ARN '${iam_iron_man}'"; +else + error_and_die "No AWS Credentials Found. \"aws sts get-caller-identity --query 'Arn' --output text\" responded with ARN '${iam_iron_man}'"; +fi; + +# Query canonical AWS Account ID +aws_account_id="$(aws sts get-caller-identity --query 'Account' --output text)"; +if [ -n "${aws_account_id}" ]; then + echo -e "AWS Account ID: ${aws_account_id}"; +else + error_and_die "Couldn't determine AWS Account ID. \"aws sts get-caller-identity --query 'Account' --output text\" provided no output"; +fi; + +# Validate S3 bucket. Set default if undefined +if [ -n "${bucket_prefix}" ]; then + readonly bucket="${bucket_prefix}-${aws_account_id}-${region}" + echo -e "Using S3 bucket s3://${bucket}"; +else + readonly bucket="${project}-tfscaffold-${aws_account_id}-${region}"; + echo -e "No bucket prefix specified. Using S3 bucket s3://${bucket}"; +fi; + +declare component_path; +if [ "${bootstrap}" == "true" ]; then + component_path="${base_path}/bootstrap"; +else + component_path="${base_path}/components/${component}"; +fi; + +# Get the absolute path to the component +if [[ "${component_path}" != /* ]]; then + component_path="$(cd "$(pwd)/${component_path}" && pwd)"; +else + component_path="$(cd "${component_path}" && pwd)"; +fi; + +[ -d "${component_path}" ] || error_and_die "Component path ${component_path} does not exist"; + +## Debug +#echo $component_path; + +## +# Begin parameter-dependent logic +## + +case "${action}" in + apply) + refresh="-refresh=true"; + ;; + destroy) + destroy='-destroy'; + refresh="-refresh=true"; + ;; + plan) + refresh="-refresh=true"; + ;; + plan-destroy) + action="plan"; + destroy="-destroy"; + refresh="-refresh=true"; + ;; + *) + ;; +esac; + +# Tell terraform to moderate its output to be a little +# more friendly to automation wrappers +# Value is irrelavant, just needs to be non-null +export TF_IN_AUTOMATION="true"; + +for rc_path in "${base_path}" "${base_path}/etc" "${component_path}"; do + if [ -f "${rc_path}/.terraformrc" ]; then + echo "Found .terraformrc at ${rc_path}/.terraformrc. 
Overriding."; + export TF_CLI_CONFIG_FILE="${rc_path}/.terraformrc"; + fi; +done; + +# Configure the plugin-cache location so plugins are not +# downloaded to individual components +declare default_plugin_cache_dir="$(pwd)/plugin-cache"; +export TF_PLUGIN_CACHE_DIR="${TF_PLUGIN_CACHE_DIR:-${default_plugin_cache_dir}}" +mkdir -p "${TF_PLUGIN_CACHE_DIR}" \ + || error_and_die "Failed to created the plugin-cache directory (${TF_PLUGIN_CACHE_DIR})"; +[ -w "${TF_PLUGIN_CACHE_DIR}" ] \ + || error_and_die "plugin-cache directory (${TF_PLUGIN_CACHE_DIR}) not writable"; + +# Clear cache, safe enough as we enforce plugin cache +rm -rf ${component_path}/.terraform; + +# Run global pre.sh +if [ -f "pre.sh" ]; then + source pre.sh "${region}" "${environment}" "${action}" \ + || error_and_die "Global pre script execution failed with exit code ${?}"; +fi; + +# Make sure we're running in the component directory +pushd "${component_path}"; +readonly component_name=$(basename ${component_path}); + +# install terraform +# verify terraform version matches .tool-versions +echo ${PWD} +tool_version=$(grep "terraform " .tool-versions | cut -d ' ' -f 2) +asdf plugin-add terraform && asdf install terraform "${tool_version}" +current_version=$(terraform --version | head -n 1 | cut -d 'v' -f 2) + +if [ -z "${current_version}" ] || [ "${current_version}" != "${tool_version}" ]; then + error_and_die "Terraform version mismatch. Expected: ${tool_version}, Actual: ${current_version}" +fi + +# Regardless of bootstrapping or not, we'll be using this string. +# If bootstrapping, we will fill it with variables, +# if not we will fill it with variable file parameters +declare tf_var_params; + +if [ "${bootstrap}" == "true" ]; then + if [ "${action}" == "destroy" ]; then + error_and_die "You cannot destroy a bootstrap bucket using tfscaffold, it's just too dangerous. If you're absolutely certain that you want to delete the bucket and all contents, including any possible state files environments and components within this project, then you will need to do it from the AWS Console. Note you cannot do this from the CLI because the bootstrap bucket is versioned, and even the --force CLI parameter will not empty the bucket of versions"; + fi; + + # Bootstrap requires this parameter as explicit as it is constructed here + # for multiple uses, so we cannot just depend on it being set in tfvars + tf_var_params+=" -var bucket_name=${bucket}"; +fi; + +# Run pre.sh +if [ -f "pre.sh" ]; then + source pre.sh "${region}" "${environment}" "${action}" \ + || error_and_die "Component pre script execution failed with exit code ${?}"; +fi; + +# Pull down secret TFVAR file from S3 +# Anti-pattern and security warning: This secrets mechanism provides very little additional security. +# It permits you to inject secrets directly into terraform without storing them in source control or unencrypted in S3. +# Secrets will still be stored in all copies of your state file - which will be stored on disk wherever this script is run and in S3. +# This script does not currently support encryption of state files. +# Use this feature only if you're sure it's the right pattern for your use case. +declare -a secrets=(); +readonly secrets_file_name="secret.tfvars.enc"; +readonly secrets_file_path="build/${secrets_file_name}"; +aws s3 ls s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name} >/dev/null 2>&1; +if [ $? 
-eq 0 ]; then + mkdir -p build; + aws s3 cp s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name} ${secrets_file_path} \ + || error_and_die "S3 secrets file is present, but inaccessible. Ensure you have permission to read s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${secrets_file_name}"; + if [ -f "${secrets_file_path}" ]; then + secrets=($(aws kms decrypt --ciphertext-blob fileb://${secrets_file_path} --output text --query Plaintext | base64 --decode)); + fi; +fi; + +if [ -n "${secrets[0]}" ]; then + secret_regex='^[A-Za-z0-9_-]+=.+$'; + secret_count=1; + for secret_line in "${secrets[@]}"; do + if [[ "${secret_line}" =~ ${secret_regex} ]]; then + var_key="${secret_line%=*}"; + var_val="${secret_line##*=}"; + export TF_VAR_${var_key}="${var_val}"; + ((secret_count++)); + else + echo "Malformed secret on line ${secret_count} - ignoring"; + fi; + done; +fi; + +# Pull down additional dynamic plaintext tfvars file from S3 +# Anti-pattern warning: Your variables should almost always be in source control. +# There are a very few use cases where you need constant variability in input variables, +# and even in those cases you should probably pass additional -var parameters to this script +# from your automation mechanism. +# Use this feature only if you're sure it's the right pattern for your use case. +readonly dynamic_file_name="dynamic.tfvars"; +readonly dynamic_file_path="build/${dynamic_file_name}"; +aws s3 ls s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name} >/dev/null 2>&1; +if [ $? -eq 0 ]; then + aws s3 cp s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name} ${dynamic_file_path} \ + || error_and_die "S3 tfvars file is present, but inaccessible. 
Ensure you have permission to read s3://${bucket}/${project}/${aws_account_id}/${region}/${environment}/${dynamic_file_name}"; +fi; + +# Use versions TFVAR files if exists +readonly versions_file_name="versions_${region}_${environment}.tfvars"; +readonly versions_file_path="${base_path}/etc/${versions_file_name}"; + +# Check for presence of an environment variables file, and use it if readable +if [ -n "${environment}" ]; then + readonly env_file_path="${base_path}/etc/env_${region}_${environment}.tfvars"; +fi; + +# Check for presence of a global variables file, and use it if readable +readonly global_vars_file_name="global.tfvars"; +readonly global_vars_file_path="${base_path}/etc/${global_vars_file_name}"; + +# Check for presence of a region variables file, and use it if readable +readonly region_vars_file_name="${region}.tfvars"; +readonly region_vars_file_path="${base_path}/etc/${region_vars_file_name}"; + +# Check for presence of a group variables file if specified, and use it if readable +if [ -n "${group}" ]; then + readonly group_vars_file_name="group_${group}.tfvars"; + readonly group_vars_file_path="${base_path}/etc/${group_vars_file_name}"; +fi; + +# Collect the paths of the variables files to use +declare -a tf_var_file_paths; + +# Use Global and Region first, to allow potential for terraform to do the +# honourable thing and override global and region settings with environment +# specific ones; however we do not officially support the same variable +# being declared in multiple locations, and we warn when we find any duplicates +[ -f "${global_vars_file_path}" ] && tf_var_file_paths+=("${global_vars_file_path}"); +[ -f "${region_vars_file_path}" ] && tf_var_file_paths+=("${region_vars_file_path}"); + +# If a group has been specified, load the vars for the group. If we are to assume +# terraform correctly handles override-ordering (which to be fair we don't hence +# the warning about duplicate variables below) we add this to the list after +# global and region-global variables, but before the environment variables +# so that the environment can explicitly override variables defined in the group. +if [ -n "${group}" ]; then + if [ -f "${group_vars_file_path}" ]; then + tf_var_file_paths+=("${group_vars_file_path}"); + else + echo -e "[WARNING] Group \"${group}\" has been specified, but no group variables file is available at ${group_vars_file_path}"; + fi; +fi; + +# Environment is normally expected, but in bootstrapping it may not be provided +if [ -n "${environment}" ]; then + if [ -f "${env_file_path}" ]; then + tf_var_file_paths+=("${env_file_path}"); + else + echo -e "[WARNING] Environment \"${environment}\" has been specified, but no environment variables file is available at ${env_file_path}"; + fi; +fi; + +# If present and readable, use versions and dynamic variables too +[ -f "${versions_file_path}" ] && tf_var_file_paths+=("${versions_file_path}"); +[ -f "${dynamic_file_path}" ] && tf_var_file_paths+=("${dynamic_file_path}"); + +# Warn on duplication +duplicate_variables="$(cat "${tf_var_file_paths[@]}" | sed -n -e 's/\(^[a-zA-Z0-9_\-]\+\)\s*=.*$/\1/p' | sort | uniq -d)"; +[ -n "${duplicate_variables}" ] \ + && echo -e " +################################################################### +# WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING # +################################################################### +The following input variables appear to be duplicated: + +${duplicate_variables} + +This could lead to unexpected behaviour. 
Overriding of variables +has previously been unpredictable and is not currently supported, +but it may work. + +Recent changes to terraform might give you useful overriding and +map-merging functionality, please use with caution and report back +on your successes & failures. +###################################################################"; + +# Build up the tfvars arguments for terraform command line +for file_path in "${tf_var_file_paths[@]}"; do + tf_var_params+=" -var-file=${file_path}"; +done; + +## +# Start Doing Real Things +## + +# Really Hashicorp? Really?! +# +# In order to work with terraform >=0.9.2 (I say 0.9.2 because 0.9 prior +# to 0.9.2 is barely usable due to key bugs and missing features) +# we now need to do some ugly things to our terraform remote backend configuration. +# The long term hope is that they will fix this, and maybe remove the need for it +# altogether by supporting interpolation in the backend config stanza. +# +# For now we're left with this garbage, and no more support for <0.9.0. +if [ -f backend_tfscaffold.tf ]; then + echo -e "WARNING: backend_tfscaffold.tf exists and will be overwritten!" >&2; +fi; + +declare backend_prefix; +declare backend_filename; + +if [ "${bootstrap}" == "true" ]; then + backend_prefix="${project}/${aws_account_id}/${region}/bootstrap"; + backend_filename="bootstrap.tfstate"; +else + backend_prefix="${project}/${aws_account_id}/${region}/${environment}"; + backend_filename="${component_name}.tfstate"; +fi; + +readonly backend_key="${backend_prefix}/${backend_filename}"; +readonly backend_config="terraform { + backend \"s3\" { + region = \"${region}\" + bucket = \"${bucket}\" + key = \"${backend_key}\" + dynamodb_table = \"${bucket}\" + } +}"; + +# We're now all ready to go. All that's left is to: +# * Write the backend config +# * terraform init +# * terraform ${action} +# +# But if we're dealing with the special bootstrap component +# we can't remotely store the backend until we've bootstrapped it +# +# So IF the S3 bucket already exists, we will continue as normal +# because we want to be able to manage changes to an existing +# bootstrap bucket. But if it *doesn't* exist, then we need to be +# able to plan and apply it with a local state, and *then* configure +# the remote state. + +# In default operations we assume we are already bootstrapped +declare bootstrapped="true"; + +# If we are in bootstrap mode, we need to know if we have already bootstrapped +# or we are working with or modifying an existing bootstrap bucket +if [ "${bootstrap}" == "true" ]; then + # For this exist check we could do many things, but we explicitly perform + # an ls against the key we will be working with so as to not require + # permissions to, for example, list all buckets, or the bucket root keyspace + aws s3 ls s3://${bucket}/${backend_prefix}/${backend_filename} >/dev/null 2>&1; + [ $? 
-eq 0 ] || bootstrapped="false"; +fi; + +if [ "${bootstrapped}" == "true" ]; then + echo -e "${backend_config}" > backend_tfscaffold.tf \ + || error_and_die "Failed to write backend config to $(pwd)/backend_tfscaffold.tf"; + + # Nix the horrible hack on exit + trap "rm -f $(pwd)/backend_tfscaffold.tf" EXIT; + + # Use the requested -lockfile mode if one was given, otherwise default to -upgrade + declare lockfile_or_upgrade; + [ -n "${lockfile}" ] && lockfile_or_upgrade="${lockfile}" || lockfile_or_upgrade='-upgrade'; + + # Configure remote state storage + echo "Setting up S3 remote state from s3://${bucket}/${backend_key}"; + terraform init ${no_color} ${compact_warnings} ${lockfile_or_upgrade} \ + || error_and_die "Terraform init failed"; +else + # We are bootstrapping. Download the providers, skip the backend config. + terraform init \ + -backend=false \ + ${no_color} \ + ${compact_warnings} \ + ${lockfile} \ + || error_and_die "Terraform init failed"; +fi; + +case "${action}" in + 'plan') + if [ -n "${build_id}" ]; then + mkdir -p build; + + plan_file_name="${component_name}_${build_id}.tfplan"; + plan_file_remote_key="${backend_prefix}/plans/${plan_file_name}"; + + out="-out=build/${plan_file_name}"; + fi; + + if [ "${detailed_exitcode}" == "true" ]; then + detailed="-detailed-exitcode"; + fi; + + terraform "${action}" \ + -input=false \ + ${refresh} \ + ${tf_var_params} \ + ${extra_args} \ + ${destroy} \ + ${out} \ + ${detailed} \ + -parallelism=300; + + status="${?}"; + + # Even when detailed exitcode is set, a 1 is still a fail, + # so exit + # (detailed exit codes are 0 and 2) + if [ "${status}" -eq 1 ]; then + error_and_die "Terraform plan failed"; + fi; + + if [ -n "${build_id}" ]; then + aws s3 cp build/${plan_file_name} s3://${bucket}/${plan_file_remote_key} \ + || error_and_die "Plan file upload to S3 failed (s3://${bucket}/${plan_file_remote_key})"; + fi; + + exit ${status}; + ;; + 'graph') + mkdir -p build || error_and_die "Failed to create output directory '$(pwd)/build'"; + terraform graph ${extra_args} -draw-cycles | dot -Tpng > build/${project}-${aws_account_id}-${region}-${environment}.png \ + || error_and_die "Terraform simple graph generation failed"; + terraform graph ${extra_args} -draw-cycles -verbose | dot -Tpng > build/${project}-${aws_account_id}-${region}-${environment}-verbose.png \ + || error_and_die "Terraform verbose graph generation failed"; + exit 0; + ;; + 'apply'|'destroy'|'refresh') + + # Support for terraform <0.10 is now deprecated + if [ "${action}" == "apply" ]; then + echo "Compatibility: Adding to terraform arguments: -auto-approve=true"; + extra_args+=" -auto-approve=true"; + else # action is `destroy` or `refresh` + # Check terraform version - if pre-0.15, need to add `-force`; 0.15 and above instead use `-auto-approve` + if [ $(terraform version | head -n1 | cut -d" " -f2 | cut -d"." -f1) == "v0" ] && [ $(terraform version | head -n1 | cut -d" " -f2 | cut -d"."
-f2) -lt 15 ]; then + echo "Compatibility: Adding to terraform arguments: -force"; + force='-force'; + elif [ "${action}" != "refresh" ]; then + extra_args+=" -auto-approve"; + fi; + fi; + + if [ -n "${build_id}" ]; then + mkdir -p build; + plan_file_name="${component_name}_${build_id}.tfplan"; + plan_file_remote_key="${backend_prefix}/plans/${plan_file_name}"; + + aws s3 cp s3://${bucket}/${plan_file_remote_key} build/${plan_file_name} \ + || error_and_die "Plan file download from S3 failed (s3://${bucket}/${plan_file_remote_key})"; + + apply_plan="build/${plan_file_name}"; + + terraform "${action}" \ + -input=false \ + ${refresh} \ + -parallelism=300 \ + ${extra_args} \ + ${force} \ + ${apply_plan}; + exit_code=$?; + else + terraform "${action}" \ + -input=false \ + ${refresh} \ + ${tf_var_params} \ + -parallelism=300 \ + ${extra_args} \ + ${force}; + exit_code=$?; + + if [ "${bootstrapped}" == "false" ]; then + # If we are here, and we are in bootstrap mode, and not already bootstrapped, + # Then we have just bootstrapped for the first time! Congratulations. + # Now we need to copy our state file into the bootstrap bucket + echo -e "${backend_config}" > backend_tfscaffold.tf \ + || error_and_die "Failed to write backend config to $(pwd)/backend_tfscaffold.tf"; + + # Nix the horrible hack on exit + trap "rm -f $(pwd)/backend_tfscaffold.tf" EXIT; + + # Push Terraform Remote State to S3 + # TODO: Add -upgrade to init when we drop support for <0.10 + echo "yes" | terraform init ${lockfile} || error_and_die "Terraform init failed"; + + # Hard cleanup + rm -f backend_tfscaffold.tf; + rm -f terraform.tfstate # Prime not the backup + rm -rf .terraform; + + # This doesn't mean anything here, we're just celebrating! + bootstrapped="true"; + fi; + + fi; + + if [ ${exit_code} -ne 0 ]; then + error_and_die "Terraform ${action} failed with exit code ${exit_code}"; + fi; + + if [ -f "post.sh" ]; then + source post.sh "${region}" "${environment}" "${action}" \ + || error_and_die "Component post script execution failed with exit code ${?}"; + fi; + ;; + '*taint') + terraform "${action}" ${extra_args} || error_and_die "Terraform ${action} failed."; + ;; + 'import') + terraform "${action}" ${tf_var_params} ${extra_args} || error_and_die "Terraform ${action} failed."; + ;; + 'shell') + echo -e "Here's a shell for the ${component} component.\nIf you want to run terraform actions specific to the ${environment}, pass the following options:\n\n${tf_var_params} ${extra_args}\n\n'exit 0' / 'Ctrl-D' to continue, other exit codes will abort tfscaffold with the same code."; + bash -l || exit "${?}"; + ;; + *) + echo -e "Generic action case invoked. 
Only the additional arguments will be passed to terraform, you break it you fix it:"; + echo -e "\tterraform ${action} ${extra_args}"; + terraform "${action}" ${extra_args} \ + || error_and_die "Terraform ${action} failed."; + ;; +esac; + +popd + +if [ -f "post.sh" ]; then + source post.sh "${region}" "${environment}" "${action}" \ + || error_and_die "Global post script execution failed with exit code ${?}"; +fi; + +exit 0; diff --git a/infrastructure/terraform/components/branch/module_amplify_branch.tf b/infrastructure/terraform/components/branch/module_amplify_branch.tf index 669c8d6..ddb1099 100644 --- a/infrastructure/terraform/components/branch/module_amplify_branch.tf +++ b/infrastructure/terraform/components/branch/module_amplify_branch.tf @@ -1,6 +1,5 @@ module "amplify_branch" { - source = "../../modules/amp_branch" - + source = "git::https://github.com/NHSDigital/nhs-notify-shared-modules.git//infrastructure/modules/amp_branch?ref=v1.0.0" name = local.normalised_branch_name aws_account_id = var.aws_account_id component = var.component @@ -8,12 +7,13 @@ module "amplify_branch" { project = var.project region = var.region group = var.group - - cognito_user_pool_client_id = local.iam.cognito_user_pool_client["id"] - cognito_user_pool_identity_provider_names = local.iam.cognito_user_pool["identity_providers"] - amplify_app_id = local.iam.amplify["id"] - branch = var.branch_name - domain_name = local.root_domain_name - subdomain = var.environment - base_path = "/auth~${local.normalised_branch_name}" + description = "Amplify branch for ${local.normalised_branch_name}" + amplify_app_id = local.iam.amplify["id"] + branch = var.branch_name + display_name = local.normalised_branch_name + environment_variables = { + USER_POOL_CLIENT_ID = local.iam.cognito_user_pool_client["id"] + NOTIFY_SUBDOMAIN = var.environment + NEXT_PUBLIC_BASE_PATH = "/auth~${local.normalised_branch_name}" + } } diff --git a/infrastructure/terraform/components/iam/module_amplify_branch.tf b/infrastructure/terraform/components/iam/module_amplify_branch.tf index 1377b38..7b936a8 100644 --- a/infrastructure/terraform/components/iam/module_amplify_branch.tf +++ b/infrastructure/terraform/components/iam/module_amplify_branch.tf @@ -1,21 +1,21 @@ module "amplify_branch" { - source = "../../modules/amp_branch" - - name = "main" - aws_account_id = var.aws_account_id - component = var.component - environment = var.environment - project = var.project - region = var.region - group = var.group - - cognito_user_pool_client_id = aws_cognito_user_pool_client.main.id - cognito_user_pool_identity_provider_names = aws_cognito_user_pool_client.main.supported_identity_providers - amplify_app_id = aws_amplify_app.main.id - branch = "main" - domain_name = local.root_domain_name - subdomain = var.environment - base_path = "/auth" - enable_auto_deploy = true - stage = "PRODUCTION" + source = "git::https://github.com/NHSDigital/nhs-notify-shared-modules.git//infrastructure/modules/amp_branch?ref=v1.0.0" + name = "main" + display_name = "main" + aws_account_id = var.aws_account_id + component = var.component + environment = var.environment + project = var.project + region = var.region + group = var.group + description = "Amplify branch for main" + amplify_app_id = aws_amplify_app.main.id + branch = "main" + stage = "PRODUCTION" + enable_auto_build = true + environment_variables = { + USER_POOL_CLIENT_ID = aws_cognito_user_pool_client.main.id + NOTIFY_SUBDOMAIN = var.environment + NEXT_PUBLIC_BASE_PATH = "/auth" + } } diff --git 
a/infrastructure/terraform/modules/amp_branch/amplify_branch.tf b/infrastructure/terraform/modules/amp_branch/amplify_branch.tf deleted file mode 100644 index fba0af0..0000000 --- a/infrastructure/terraform/modules/amp_branch/amplify_branch.tf +++ /dev/null @@ -1,14 +0,0 @@ -resource "aws_amplify_branch" "main" { - app_id = var.amplify_app_id - branch_name = var.branch - display_name = var.name - enable_pull_request_preview = false - enable_auto_build = var.enable_auto_deploy - stage = var.stage != "NONE" ? var.stage : null - - environment_variables = { - USER_POOL_CLIENT_ID = var.cognito_user_pool_client_id - NOTIFY_SUBDOMAIN = var.subdomain - NEXT_PUBLIC_BASE_PATH = var.base_path - } -} diff --git a/infrastructure/terraform/modules/amp_branch/locals.tf b/infrastructure/terraform/modules/amp_branch/locals.tf deleted file mode 100644 index 8d1bc72..0000000 --- a/infrastructure/terraform/modules/amp_branch/locals.tf +++ /dev/null @@ -1,33 +0,0 @@ -locals { - csi = format( - "%s-%s-%s-%s-%s", - var.project, - var.environment, - var.component, - var.module, - var.name, - ) - - # CSI for use in resources with an account namespace, eg IAM roles - csi_account = replace( - format( - "%s-%s-%s-%s-%s-%s", - var.project, - var.region, - var.environment, - var.component, - var.module, - var.name, - ), - "_", - "", - ) - - default_tags = merge( - var.default_tags, - { - Module = var.module - Name = local.csi - }, - ) -} diff --git a/infrastructure/terraform/modules/amp_branch/outputs.tf b/infrastructure/terraform/modules/amp_branch/outputs.tf deleted file mode 100644 index 2e300ce..0000000 --- a/infrastructure/terraform/modules/amp_branch/outputs.tf +++ /dev/null @@ -1,3 +0,0 @@ -output "name" { - value = aws_amplify_branch.main.branch_name -} diff --git a/infrastructure/terraform/modules/amp_branch/variables.tf b/infrastructure/terraform/modules/amp_branch/variables.tf deleted file mode 100644 index 26c64f6..0000000 --- a/infrastructure/terraform/modules/amp_branch/variables.tf +++ /dev/null @@ -1,115 +0,0 @@ -## -# Basic inherited variables for terraformscaffold modules -## - -variable "project" { - type = string - description = "The name of the terraformscaffold project calling the module" -} - -variable "environment" { - type = string - description = "The name of the terraformscaffold environment the module is called for" -} - -variable "component" { - type = string - description = "The name of the terraformscaffold component calling this module" -} - -variable "aws_account_id" { - type = string - description = "The AWS Account ID (numeric)" -} - -variable "group" { - type = string - description = "The group variables are being inherited from (often synonmous with account short-name)" -} - -## -# Module self-identification -## - -variable "module" { - type = string - description = "The name of this module. This is a special variable, it should be set only here and never overridden." - default = "kms" -} - -## -# Variable specific to the module -## - -# We presume this will always be specified. The default of {} will cause an error if a valid map is not specified. -# If we ever want to define this but allow it to not be specified, then we must provide a default tag keypair will be applied -# as the true default. In any other case default_tags should be removed from the module. 
-variable "default_tags" { - type = map(string) - description = "Default tag map for application to all taggable resources in the module" - default = {} -} - -variable "region" { - type = string - description = "The AWS Region" -} - -variable "name" { - type = string - description = "A unique name to distinguish this module invocation from others within the same CSI scope" -} - -variable "cognito_user_pool_client_id" { - type = string - description = "Cognito User Pool client ID" -} - -variable "cognito_user_pool_identity_provider_names" { - type = list(string) - description = "A list of Cognito IDP names" -} - -variable "amplify_app_id" { - type = string - description = "Amplify application ID" -} - -variable "branch" { - description = "The name of the branch being deployed" - type = string -} - -variable "domain_name" { - type = string - description = "Root domain name for this Amplify app" -} - -variable "subdomain" { - type = string - default = "main" - description = "Subdomain used as the branch alias" -} - -variable "enable_auto_deploy" { - type = bool - description = "Enable the auto deployment of the branch code as well as just the resources for it" - default = false -} - -variable "base_path" { - type = string - default = "/" - description = "Default base path to override NEXT_PUBLIC_BASE_PATH" -} - -variable "stage" { - type = string - default = "NONE" - description = "Optional branch stage" - - validation { - condition = contains(["NONE", "PRODUCTION", "BETA", "DEVELOPMENT", "EXPERIMENTAL", "PULL_REQUEST"], var.stage) - error_message = "The branch stage is optional but needs to be one of the valid options if provided" - } -} diff --git a/infrastructure/terraform/modules/amp_branch/versions.tf b/infrastructure/terraform/modules/amp_branch/versions.tf deleted file mode 100644 index f8dc86e..0000000 --- a/infrastructure/terraform/modules/amp_branch/versions.tf +++ /dev/null @@ -1,9 +0,0 @@ - -terraform { - required_providers { - aws = { - source = "hashicorp/aws" - } - } - required_version = ">= 1.9.0" -}