diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..02e8abcc5 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +indent_size = unset + +[*.py] +indent_size = 4 + +[{Dockerfile,Dockerfile.}*] +indent_size = 4 + +[{Makefile,*.mk,go.mod,go.sum,*.go,.gitmodules}] +indent_style = tab diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..1f1600805 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,7 @@ +scripts/docker/** linguist-vendored +scripts/githooks/** linguist-vendored +scripts/reports/** linguist-vendored +scripts/terraform/** linguist-vendored +scripts/tests/test.mk linguist-vendored +scripts/init.mk linguist-vendored +scripts/shellscript-linter.sh linguist-vendored diff --git a/.github/ISSUE_TEMPLATE/1_support_request.yaml b/.github/ISSUE_TEMPLATE/1_support_request.yaml new file mode 100644 index 000000000..eb2440186 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1_support_request.yaml @@ -0,0 +1,52 @@ +# See: +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/syntax-for-issue-forms +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/common-validation-errors-when-creating-issue-forms + +name: 🔧 Support Request +description: Get help +labels: ["support"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to file a support request. Please fill out this form as completely as possible. + - type: textarea + attributes: + label: What exactly are you trying to do? + description: Describe in as much detail as possible. 
+ validations: + required: true + - type: textarea + attributes: + label: What have you tried so far? + description: Describe what you have tried so far. + validations: + required: true + - type: textarea + attributes: + label: Output of any commands you have tried + description: Please copy and paste any relevant output. This will be automatically formatted into a code block. + render: Shell + validations: + required: false + - type: textarea + attributes: + label: Additional context + description: Add any other context about the problem here. + validations: + required: false + - type: checkboxes + attributes: + label: Code of Conduct + description: By submitting this issue you agree to follow our [Code of Conduct](../../docs/CODE_OF_CONDUCT.md) + options: + - label: I agree to follow this project's Code of Conduct + required: true + - type: checkboxes + attributes: + label: Sensitive Information Declaration + description: To ensure the utmost confidentiality and protect your privacy, we kindly ask you to NOT include [PII (Personal Identifiable Information) / PID (Personal Identifiable Data)](https://digital.nhs.uk/data-and-information/keeping-data-safe-and-benefitting-the-public) or any other sensitive data in this form. We appreciate your cooperation in maintaining the security of your information.
+ options: + - label: I confirm that neither PII/PID nor sensitive data are included in this form + required: true diff --git a/.github/ISSUE_TEMPLATE/2_feature_request.yaml b/.github/ISSUE_TEMPLATE/2_feature_request.yaml new file mode 100644 index 000000000..705d0839c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2_feature_request.yaml @@ -0,0 +1,42 @@ +# See: +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/syntax-for-issue-forms +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/common-validation-errors-when-creating-issue-forms + +name: 🚀 Feature Request +description: Suggest an idea for this project +labels: ["feature request"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to file a feature request. Please fill out this form as completely as possible. + - type: textarea + attributes: + label: What is the problem this feature will solve? + description: Tell us why this change is needed or helpful and what problems it may help solve. + validations: + required: true + - type: textarea + attributes: + label: What is the feature that you are proposing to solve the problem? + description: Provide detailed information for what we should add. + validations: + required: true + - type: textarea + attributes: + label: What alternatives have you considered? 
+ - type: checkboxes + attributes: + label: Code of Conduct + description: By submitting this issue you agree to follow our [Code of Conduct](../../docs/CODE_OF_CONDUCT.md) + options: + - label: I agree to follow this project's Code of Conduct + required: true + - type: checkboxes + attributes: + label: Sensitive Information Declaration + description: To ensure the utmost confidentiality and protect your privacy, we kindly ask you to NOT include [PII (Personal Identifiable Information) / PID (Personal Identifiable Data)](https://digital.nhs.uk/data-and-information/keeping-data-safe-and-benefitting-the-public) or any other sensitive data in this form. We appreciate your cooperation in maintaining the security of your information. + options: + - label: I confirm that neither PII/PID nor sensitive data are included in this form + required: true diff --git a/.github/ISSUE_TEMPLATE/3_bug_report.yaml b/.github/ISSUE_TEMPLATE/3_bug_report.yaml new file mode 100644 index 000000000..12a8c6e28 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/3_bug_report.yaml @@ -0,0 +1,63 @@ +# See: +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/syntax-for-issue-forms +# - https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/common-validation-errors-when-creating-issue-forms + +name: 🐞 Bug Report +description: File a bug report +labels: ["bug", "triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to file a bug report. Please fill out this form as completely as possible. + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search the Issues to see if an issue already exists for the bug you have encountered.
+ options: + - label: I have searched the existing Issues + required: true + - type: textarea + attributes: + label: Current Behavior + description: A concise description of what you are experiencing. + validations: + required: true + - type: textarea + attributes: + label: Expected Behavior + description: A concise description of what you expect to happen. + validations: + required: true + - type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior + placeholder: | + 1. In this environment... + 2. With this config... + 3. Run `...` + validations: + required: false + - type: textarea + attributes: + label: Output + description: Please copy and paste any relevant output. This will be automatically formatted into a code block. + render: Shell + validations: + required: false + - type: checkboxes + attributes: + label: Code of Conduct + description: By submitting this issue you agree to follow our [Code of Conduct](../../docs/CODE_OF_CONDUCT.md) + options: + - label: I agree to follow this project's Code of Conduct + required: true + - type: checkboxes + attributes: + label: Sensitive Information Declaration + description: To ensure the utmost confidentiality and protect your privacy, we kindly ask you to NOT include [PII (Personal Identifiable Information) / PID (Personal Identifiable Data)](https://digital.nhs.uk/data-and-information/keeping-data-safe-and-benefitting-the-public) or any other sensitive data in this form. We appreciate your cooperation in maintaining the security of your information.
+ options: + - label: I confirm that neither PII/PID nor sensitive data are included in this form + required: true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..c00ff413f --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,35 @@ + +## Description + + + +## Context + + + +## Type of changes + + + +- [ ] Refactoring (non-breaking change) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would change existing functionality) +- [ ] Bug fix (non-breaking change which fixes an issue) + +## Checklist + + + +- [ ] I am familiar with the [contributing guidelines](../docs/CONTRIBUTING.md) +- [ ] I have followed the code style of the project +- [ ] I have added tests to cover my changes +- [ ] I have updated the documentation accordingly +- [ ] This PR is a result of pair or mob programming + +--- + +## Sensitive Information Declaration + +To ensure the utmost confidentiality and protect your and others' privacy, we kindly ask you to NOT include [PII (Personal Identifiable Information) / PID (Personal Identifiable Data)](https://digital.nhs.uk/data-and-information/keeping-data-safe-and-benefitting-the-public) or any other sensitive data in this PR (Pull Request) and the codebase changes. We will remove any PR that does contain any sensitive information. We really appreciate your cooperation in this matter. + +- [ ] I confirm that neither PII/PID nor sensitive data are included in this PR and the codebase changes. diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 000000000..241f1e337 --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,35 @@ +# Security + +NHS England takes security and the protection of private data extremely seriously.
If you believe you have found a vulnerability or other issue which has compromised or could compromise the security of any of our systems and/or private data managed by our systems, please do not hesitate to contact us using the methods outlined below. + +## Table of Contents + +- [Security](#security) + - [Table of Contents](#table-of-contents) + - [Reporting a vulnerability](#reporting-a-vulnerability) + - [Email](#email) + - [NCSC](#ncsc) + - [General Security Enquiries](#general-security-enquiries) + +## Reporting a vulnerability + +Please note, email is our preferred method of receiving reports. + +### Email + +If you wish to notify us of a vulnerability via email, please include detailed information on the nature of the vulnerability and any steps required to reproduce it. + +You can reach us at: + +- _[ A product team email address ]_ +- [cybersecurity@nhs.net](cybersecurity@nhs.net) + +### NCSC + +You can send your report to the National Cyber Security Centre, who will assess your report and pass it on to NHS England if necessary. 
+ +You can report vulnerabilities here: [https://www.ncsc.gov.uk/information/vulnerability-reporting](https://www.ncsc.gov.uk/information/vulnerability-reporting) + +## General Security Enquiries + +If you have general enquiries regarding our cybersecurity, please reach out to us at [cybersecurity@nhs.net](cybersecurity@nhs.net) diff --git a/.github/actions/check-english-usage/action.yaml b/.github/actions/check-english-usage/action.yaml new file mode 100644 index 000000000..9953bcc70 --- /dev/null +++ b/.github/actions/check-english-usage/action.yaml @@ -0,0 +1,10 @@ +name: "Check English usage" +description: "Check English usage" +runs: + using: "composite" + steps: + - name: "Check English usage" + shell: bash + run: | + export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} + check=branch ./scripts/githooks/check-english-usage.sh diff --git a/.github/actions/check-file-format/action.yaml b/.github/actions/check-file-format/action.yaml new file mode 100644 index 000000000..bd0929a82 --- /dev/null +++ b/.github/actions/check-file-format/action.yaml @@ -0,0 +1,10 @@ +name: "Check file format" +description: "Check file format" +runs: + using: "composite" + steps: + - name: "Check file format" + shell: bash + run: | + export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} + check=branch ./scripts/githooks/check-file-format.sh diff --git a/.github/actions/check-markdown-format/action.yaml b/.github/actions/check-markdown-format/action.yaml new file mode 100644 index 000000000..53a715b48 --- /dev/null +++ b/.github/actions/check-markdown-format/action.yaml @@ -0,0 +1,10 @@ +name: "Check Markdown format" +description: "Check Markdown format" +runs: + using: "composite" + steps: + - name: "Check Markdown format" + shell: bash + run: | + export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} + check=branch ./scripts/githooks/check-markdown-format.sh diff --git a/.github/actions/create-lines-of-code-report/action.yaml 
b/.github/actions/create-lines-of-code-report/action.yaml new file mode 100644 index 000000000..452b432bd --- /dev/null +++ b/.github/actions/create-lines-of-code-report/action.yaml @@ -0,0 +1,57 @@ +name: "Count lines of code" +description: "Count lines of code" +inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + idp_aws_report_upload_account_id: + description: "IDP AWS account ID" + required: true + idp_aws_report_upload_region: + description: "IDP AWS account region" + required: true + idp_aws_report_upload_role_name: + description: "Role to upload the report" + required: true + idp_aws_report_upload_bucket_endpoint: + description: "Bucket endpoint for the report" + required: true +runs: + using: "composite" + steps: + - name: "Create CLOC report" + shell: bash + run: | + export BUILD_DATETIME=${{ inputs.build_datetime }} + ./scripts/reports/create-lines-of-code-report.sh + - name: "Compress CLOC report" + shell: bash + run: zip lines-of-code-report.json.zip lines-of-code-report.json + - name: "Upload CLOC report as an artefact" + if: ${{ !env.ACT }} + uses: actions/upload-artifact@v3 + with: + name: lines-of-code-report.json.zip + path: ./lines-of-code-report.json.zip + retention-days: 21 + - name: "Check prerequisites for sending the report" + shell: bash + id: check + run: | + echo "secrets_exist=${{ inputs.idp_aws_report_upload_role_name != '' && inputs.idp_aws_report_upload_bucket_endpoint != '' }}" >> $GITHUB_OUTPUT + - name: "Authenticate to send the report" + if: steps.check.outputs.secrets_exist == 'true' + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: arn:aws:iam::${{ inputs.idp_aws_report_upload_account_id }}:role/${{ inputs.idp_aws_report_upload_role_name }} + aws-region: ${{ inputs.idp_aws_report_upload_region }} + - name: "Send the CLOC report to the central 
location" + shell: bash + if: steps.check.outputs.secrets_exist == 'true' + run: | + aws s3 cp \ + ./lines-of-code-report.json.zip \ + ${{ inputs.idp_aws_report_upload_bucket_endpoint }}/${{ inputs.build_timestamp }}-lines-of-code-report.json.zip diff --git a/.github/actions/lint-terraform/action.yaml b/.github/actions/lint-terraform/action.yaml new file mode 100644 index 000000000..d5dfe35d1 --- /dev/null +++ b/.github/actions/lint-terraform/action.yaml @@ -0,0 +1,20 @@ +name: "Lint Terraform" +description: "Lint Terraform" +inputs: + root-modules: + description: "Comma separated list of root module directories to validate, content of the 'infrastructure/environments' is checked by default" + required: false +runs: + using: "composite" + steps: + - name: "Check Terraform format" + shell: bash + run: | + check_only=true scripts/githooks/check-terraform-format.sh + - name: "Validate Terraform" + shell: bash + run: | + stacks=${{ inputs.root-modules }} + for dir in $(find infrastructure/environments -maxdepth 1 -mindepth 1 -type d; echo ${stacks//,/$'\n'}); do + dir=$dir make terraform-validate + done diff --git a/.github/actions/perform-static-analysis/action.yaml b/.github/actions/perform-static-analysis/action.yaml new file mode 100644 index 000000000..a619e9d29 --- /dev/null +++ b/.github/actions/perform-static-analysis/action.yaml @@ -0,0 +1,28 @@ +name: "Perform static analysis" +description: "Perform static analysis" +inputs: + sonar_organisation_key: + description: "Sonar organisation key, used to identify the project" + required: false + sonar_project_key: + description: "Sonar project key, used to identify the project" + required: false + sonar_token: + description: "Sonar token, the API key" + required: false +runs: + using: "composite" + steps: + - name: "Check prerequisites for performing static analysis" + shell: bash + id: check + run: echo "secret_exist=${{ inputs.sonar_token != '' }}" >> $GITHUB_OUTPUT + - name: "Perform static analysis" + shell: 
bash + if: steps.check.outputs.secret_exist == 'true' + run: | + export BRANCH_NAME=${GITHUB_HEAD_REF:-$(echo $GITHUB_REF | sed 's#refs/heads/##')} + export SONAR_ORGANISATION_KEY=${{ inputs.sonar_organisation_key }} + export SONAR_PROJECT_KEY=${{ inputs.sonar_project_key }} + export SONAR_TOKEN=${{ inputs.sonar_token }} + ./scripts/reports/perform-static-analysis.sh diff --git a/.github/actions/scan-dependencies/action.yaml b/.github/actions/scan-dependencies/action.yaml new file mode 100644 index 000000000..925ba0966 --- /dev/null +++ b/.github/actions/scan-dependencies/action.yaml @@ -0,0 +1,74 @@ +name: "Scan dependencies" +description: "Scan dependencies" +inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + idp_aws_report_upload_account_id: + description: "IDP AWS report upload account ID to upload the report to" + required: false + idp_aws_report_upload_region: + description: "IDP AWS report upload account region to upload the report to" + required: false + idp_aws_report_upload_role_name: + description: "IDP AWS report upload role name for OIDC authentication" + required: false + idp_aws_report_upload_bucket_endpoint: + description: "IDP AWS report upload endpoint to upload the report to" + required: false +runs: + using: "composite" + steps: + - name: "Generate SBOM" + shell: bash + run: | + export BUILD_DATETIME=${{ inputs.build_datetime }} + ./scripts/reports/create-sbom-report.sh + - name: "Compress SBOM report" + shell: bash + run: zip sbom-repository-report.json.zip sbom-repository-report.json + - name: "Upload SBOM report as an artefact" + if: ${{ !env.ACT }} + uses: actions/upload-artifact@v3 + with: + name: sbom-repository-report.json.zip + path: ./sbom-repository-report.json.zip + retention-days: 21 + - name: "Scan vulnerabilities" + shell: bash + run: | + export BUILD_DATETIME=${{ 
inputs.build_datetime }} + ./scripts/reports/scan-vulnerabilities.sh + - name: "Compress vulnerabilities report" + shell: bash + run: zip vulnerabilities-repository-report.json.zip vulnerabilities-repository-report.json + - name: "Upload vulnerabilities report as an artefact" + if: ${{ !env.ACT }} + uses: actions/upload-artifact@v3 + with: + name: vulnerabilities-repository-report.json.zip + path: ./vulnerabilities-repository-report.json.zip + retention-days: 21 + - name: "Check prerequisites for sending the reports" + shell: bash + id: check + run: echo "secrets_exist=${{ inputs.idp_aws_report_upload_role_name != '' && inputs.idp_aws_report_upload_bucket_endpoint != '' }}" >> $GITHUB_OUTPUT + - name: "Authenticate to send the reports" + if: steps.check.outputs.secrets_exist == 'true' + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: arn:aws:iam::${{ inputs.idp_aws_report_upload_account_id }}:role/${{ inputs.idp_aws_report_upload_role_name }} + aws-region: ${{ inputs.idp_aws_report_upload_region }} + - name: "Send the SBOM and vulnerabilities reports to the central location" + shell: bash + if: steps.check.outputs.secrets_exist == 'true' + run: | + aws s3 cp \ + ./sbom-repository-report.json.zip \ + ${{ inputs.idp_aws_report_upload_bucket_endpoint }}/${{ inputs.build_timestamp }}-sbom-repository-report.json.zip + aws s3 cp \ + ./vulnerabilities-repository-report.json.zip \ + ${{ inputs.idp_aws_report_upload_bucket_endpoint }}/${{ inputs.build_timestamp }}-vulnerabilities-repository-report.json.zip diff --git a/.github/actions/scan-secrets/action.yaml b/.github/actions/scan-secrets/action.yaml new file mode 100644 index 000000000..1ed8bac27 --- /dev/null +++ b/.github/actions/scan-secrets/action.yaml @@ -0,0 +1,10 @@ +name: "Scan secrets" +description: "Scan secrets" +runs: + using: "composite" + steps: + - name: "Scan secrets" + shell: bash + run: | + # Please do not change this `check=whole-history` setting, as new patterns may be added or 
history may be rewritten. + check=whole-history ./scripts/githooks/scan-secrets.sh diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 000000000..17ef311a3 --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,28 @@ +version: 2 + +updates: + + - package-ecosystem: "docker" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "terraform" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/cicd-1-pull-request.yaml b/.github/workflows/cicd-1-pull-request.yaml new file mode 100644 index 000000000..cd068ec06 --- /dev/null +++ b/.github/workflows/cicd-1-pull-request.yaml @@ -0,0 +1,122 @@ +name: "CI/CD pull request" + +# The total recommended execution time for the "CI/CD Pull Request" workflow is around 20 minutes. 
+ +on: + push: + branches: + - "**" + pull_request: + types: [opened, reopened] + +jobs: + metadata: + name: "Set CI/CD metadata" + runs-on: ubuntu-latest + timeout-minutes: 1 + outputs: + build_datetime_london: ${{ steps.variables.outputs.build_datetime_london }} + build_datetime: ${{ steps.variables.outputs.build_datetime }} + build_timestamp: ${{ steps.variables.outputs.build_timestamp }} + build_epoch: ${{ steps.variables.outputs.build_epoch }} + nodejs_version: ${{ steps.variables.outputs.nodejs_version }} + python_version: ${{ steps.variables.outputs.python_version }} + terraform_version: ${{ steps.variables.outputs.terraform_version }} + version: ${{ steps.variables.outputs.version }} + does_pull_request_exist: ${{ steps.pr_exists.outputs.does_pull_request_exist }} + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Set CI/CD variables" + id: variables + run: | + datetime=$(date -u +'%Y-%m-%dT%H:%M:%S%z') + BUILD_DATETIME=$datetime make version-create-effective-file + echo "build_datetime_london=$(TZ=Europe/London date --date=$datetime +'%Y-%m-%dT%H:%M:%S%z')" >> $GITHUB_OUTPUT + echo "build_datetime=$datetime" >> $GITHUB_OUTPUT + echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT + echo "nodejs_version=$(grep "^nodejs" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "python_version=$(grep "^python" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "terraform_version=$(grep "^terraform" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT + - name: "Check if pull request exists for this branch" + id: pr_exists + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + branch_name=${GITHUB_HEAD_REF:-$(echo $GITHUB_REF | sed 's#refs/heads/##')} + echo "Current branch is '$branch_name'" + if gh pr list --head $branch_name | grep -q
.; then + echo "Pull request exists" + echo "does_pull_request_exist=true" >> $GITHUB_OUTPUT + else + echo "Pull request doesn't exist" + echo "does_pull_request_exist=false" >> $GITHUB_OUTPUT + fi + - name: "List variables" + run: | + export BUILD_DATETIME_LONDON="${{ steps.variables.outputs.build_datetime_london }}" + export BUILD_DATETIME="${{ steps.variables.outputs.build_datetime }}" + export BUILD_TIMESTAMP="${{ steps.variables.outputs.build_timestamp }}" + export BUILD_EPOCH="${{ steps.variables.outputs.build_epoch }}" + export NODEJS_VERSION="${{ steps.variables.outputs.nodejs_version }}" + export PYTHON_VERSION="${{ steps.variables.outputs.python_version }}" + export TERRAFORM_VERSION="${{ steps.variables.outputs.terraform_version }}" + export VERSION="${{ steps.variables.outputs.version }}" + export DOES_PULL_REQUEST_EXIST="${{ steps.pr_exists.outputs.does_pull_request_exist }}" + make list-variables + commit-stage: # Recommended maximum execution time is 2 minutes + name: "Commit stage" + needs: [metadata] + uses: ./.github/workflows/stage-1-commit.yaml + with: + build_datetime: "${{ needs.metadata.outputs.build_datetime }}" + build_timestamp: "${{ needs.metadata.outputs.build_timestamp }}" + build_epoch: "${{ needs.metadata.outputs.build_epoch }}" + nodejs_version: "${{ needs.metadata.outputs.nodejs_version }}" + python_version: "${{ needs.metadata.outputs.python_version }}" + terraform_version: "${{ needs.metadata.outputs.terraform_version }}" + version: "${{ needs.metadata.outputs.version }}" + secrets: inherit + test-stage: # Recommended maximum execution time is 5 minutes + name: "Test stage" + needs: [metadata, commit-stage] + uses: ./.github/workflows/stage-2-test.yaml + with: + build_datetime: "${{ needs.metadata.outputs.build_datetime }}" + build_timestamp: "${{ needs.metadata.outputs.build_timestamp }}" + build_epoch: "${{ needs.metadata.outputs.build_epoch }}" + nodejs_version: "${{ needs.metadata.outputs.nodejs_version }}" + python_version: 
"${{ needs.metadata.outputs.python_version }}" + terraform_version: "${{ needs.metadata.outputs.terraform_version }}" + version: "${{ needs.metadata.outputs.version }}" + secrets: inherit + build-stage: # Recommended maximum execution time is 3 minutes + name: "Build stage" + needs: [metadata, test-stage] + uses: ./.github/workflows/stage-3-build.yaml + if: needs.metadata.outputs.does_pull_request_exist == 'true' || (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) + with: + build_datetime: "${{ needs.metadata.outputs.build_datetime }}" + build_timestamp: "${{ needs.metadata.outputs.build_timestamp }}" + build_epoch: "${{ needs.metadata.outputs.build_epoch }}" + nodejs_version: "${{ needs.metadata.outputs.nodejs_version }}" + python_version: "${{ needs.metadata.outputs.python_version }}" + terraform_version: "${{ needs.metadata.outputs.terraform_version }}" + version: "${{ needs.metadata.outputs.version }}" + secrets: inherit + acceptance-stage: # Recommended maximum execution time is 10 minutes + name: "Acceptance stage" + needs: [metadata, build-stage] + uses: ./.github/workflows/stage-4-acceptance.yaml + if: needs.metadata.outputs.does_pull_request_exist == 'true' || (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) + with: + build_datetime: "${{ needs.metadata.outputs.build_datetime }}" + build_timestamp: "${{ needs.metadata.outputs.build_timestamp }}" + build_epoch: "${{ needs.metadata.outputs.build_epoch }}" + nodejs_version: "${{ needs.metadata.outputs.nodejs_version }}" + python_version: "${{ needs.metadata.outputs.python_version }}" + terraform_version: "${{ needs.metadata.outputs.terraform_version }}" + version: "${{ needs.metadata.outputs.version }}" + secrets: inherit diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml new file mode 100644 index 000000000..44eebbf2c --- /dev/null +++ 
b/.github/workflows/cicd-2-publish.yaml @@ -0,0 +1,98 @@ +name: "CI/CD publish" + +on: + pull_request: + types: [closed] + branches: + - main + +jobs: + metadata: + name: "Set CI/CD metadata" + runs-on: ubuntu-latest + if: github.event.pull_request.merged == true + timeout-minutes: 1 + outputs: + build_datetime: ${{ steps.variables.outputs.build_datetime }} + build_timestamp: ${{ steps.variables.outputs.build_timestamp }} + build_epoch: ${{ steps.variables.outputs.build_epoch }} + nodejs_version: ${{ steps.variables.outputs.nodejs_version }} + python_version: ${{ steps.variables.outputs.python_version }} + terraform_version: ${{ steps.variables.outputs.terraform_version }} + version: ${{ steps.variables.outputs.version }} + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Set CI/CD variables" + id: variables + run: | + datetime=$(date -u +'%Y-%m-%dT%H:%M:%S%z') + echo "build_datetime=$datetime" >> $GITHUB_OUTPUT + echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT + echo "nodejs_version=$(grep "^nodejs" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "python_version=$(grep "^python" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "terraform_version=$(grep "^terraform" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + # TODO: Get the version, but it may not be the .version file as this should come from the CI/CD Pull Request Workflow + echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT + - name: "List variables" + run: | + export BUILD_DATETIME="${{ steps.variables.outputs.build_datetime }}" + export BUILD_TIMESTAMP="${{ steps.variables.outputs.build_timestamp }}" + export BUILD_EPOCH="${{ steps.variables.outputs.build_epoch }}" + export NODEJS_VERSION="${{ steps.variables.outputs.nodejs_version }}" + export PYTHON_VERSION="${{ steps.variables.outputs.python_version }}" + export
TERRAFORM_VERSION="${{ steps.variables.outputs.terraform_version }}" + export VERSION="${{ steps.variables.outputs.version }}" + make list-variables + publish: + name: "Publish packages" + runs-on: ubuntu-latest + needs: [metadata] + if: github.event.pull_request.merged == true + timeout-minutes: 3 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Get the artefacts" + run: | + echo "Getting the artefacts created by the build stage ..." + # TODO: Use either action/cache or action/upload-artifact + - name: "Create release" + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ needs.metadata.outputs.version }} + release_name: Release ${{ needs.metadata.outputs.version }} + body: | + Release of ${{ needs.metadata.outputs.version }} + draft: false + prerelease: false + # - name: "Upload release asset" + # uses: actions/upload-release-asset@v1 + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # with: + # upload_url: "${{ steps.create_release.outputs.upload_url }}" + # asset_path: ./* + # asset_name: repository-template-${{ needs.metadata.outputs.version }}.tar.gz + # asset_content_type: "application/gzip" + success: + name: "Success notification" + runs-on: ubuntu-latest + needs: [publish] + steps: + - name: "Check prerequisites for notification" + id: check + run: echo "secret_exist=${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL != '' }}" >> $GITHUB_OUTPUT + - name: "Notify on publishing packages" + if: steps.check.outputs.secret_exist == 'true' + uses: nhs-england-tools/notify-msteams-action@v0.0.4 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + teams-webhook-url: ${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL }} + message-title: "Notification title" + message-text: "This is a notification body" + link: ${{ github.event.pull_request.html_url }} diff --git a/.github/workflows/cicd-3-deploy.yaml b/.github/workflows/cicd-3-deploy.yaml new file mode 100644 index 
000000000..2745b3801 --- /dev/null +++ b/.github/workflows/cicd-3-deploy.yaml @@ -0,0 +1,77 @@ +name: "CI/CD deploy" + +on: + workflow_dispatch: + inputs: + tag: + description: "This is the tag that is going to be deployed" + required: true + default: "latest" + +jobs: + metadata: + name: "Set CI/CD metadata" + runs-on: ubuntu-latest + timeout-minutes: 1 + outputs: + build_datetime: ${{ steps.variables.outputs.build_datetime }} + build_timestamp: ${{ steps.variables.outputs.build_timestamp }} + build_epoch: ${{ steps.variables.outputs.build_epoch }} + nodejs_version: ${{ steps.variables.outputs.nodejs_version }} + python_version: ${{ steps.variables.outputs.python_version }} + terraform_version: ${{ steps.variables.outputs.terraform_version }} + version: ${{ steps.variables.outputs.version }} + tag: ${{ steps.variables.outputs.tag }} + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Set CI/CD variables" + id: variables + run: | + datetime=$(date -u +'%Y-%m-%dT%H:%M:%S%z') + echo "build_datetime=$datetime" >> $GITHUB_OUTPUT + echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT + echo "nodejs_version=$(grep "^nodejs" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "python_version=$(grep "^python" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + echo "terraform_version=$(grep "^terraform" .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT + # TODO: Get the version, but it may not be the .version file as this should come from the CI/CD Pull Request Workflow + echo "version=$(head -n 1 .version 2> /dev/null || echo unknown)" >> $GITHUB_OUTPUT + echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT + - name: "List variables" + run: | + export BUILD_DATETIME="${{ steps.variables.outputs.build_datetime }}" + export BUILD_TIMESTAMP="${{ steps.variables.outputs.build_timestamp }}" + export BUILD_EPOCH="${{ 
steps.variables.outputs.build_epoch }}" + export NODEJS_VERSION="${{ steps.variables.outputs.nodejs_version }}" + export PYTHON_VERSION="${{ steps.variables.outputs.python_version }}" + export TERRAFORM_VERSION="${{ steps.variables.outputs.terraform_version }}" + export VERSION="${{ steps.variables.outputs.version }}" + export TAG="${{ steps.variables.outputs.tag }}" + make list-variables + deploy: + name: "Deploy to an environment" + runs-on: ubuntu-latest + needs: [metadata] + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + # TODO: More jobs or/and steps here + # success: + # name: "Success notification" + # runs-on: ubuntu-latest + # needs: [deploy] + # steps: + # - name: "Check prerequisites for notification" + # id: check + # run: echo "secret_exist=${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL != '' }}" >> $GITHUB_OUTPUT + # - name: "Notify on deployment to an environment" + # if: steps.check.outputs.secret_exist == 'true' + # uses: nhs-england-tools/notify-msteams-action@v0.0.4 + # with: + # github-token: ${{ secrets.GITHUB_TOKEN }} + # teams-webhook-url: ${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL }} + # message-title: "Notification title" + # message-text: "This is a notification body" + # link: ${{ github.event.pull_request.html_url }} diff --git a/.github/workflows/stage-1-commit.yaml b/.github/workflows/stage-1-commit.yaml new file mode 100644 index 000000000..a516b7863 --- /dev/null +++ b/.github/workflows/stage-1-commit.yaml @@ -0,0 +1,126 @@ +name: "Commit stage" + +on: + workflow_call: + inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + type: string + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + type: string + build_epoch: + description: "Build epoch, set by the CI/CD pipeline workflow" + required: true + type: string + nodejs_version: + description: "Node.js version, set by the CI/CD pipeline 
workflow" + required: true + type: string + python_version: + description: "Python version, set by the CI/CD pipeline workflow" + required: true + type: string + terraform_version: + description: "Terraform version, set by the CI/CD pipeline workflow" + required: true + type: string + version: + description: "Version of the software, set by the CI/CD pipeline workflow" + required: true + type: string + +jobs: + scan-secrets: + name: "Scan secrets" + runs-on: ubuntu-latest + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Full history is needed to scan all commits + - name: "Scan secrets" + uses: ./.github/actions/scan-secrets + check-file-format: + name: "Check file format" + runs-on: ubuntu-latest + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Full history is needed to compare branches + - name: "Check file format" + uses: ./.github/actions/check-file-format + check-markdown-format: + name: "Check Markdown format" + runs-on: ubuntu-latest + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Full history is needed to compare branches + - name: "Check Markdown format" + uses: ./.github/actions/check-markdown-format + check-english-usage: + name: "Check English usage" + runs-on: ubuntu-latest + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Full history is needed to compare branches + - name: "Check English usage" + uses: ./.github/actions/check-english-usage + lint-terraform: + name: "Lint Terraform" + runs-on: ubuntu-latest + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Lint Terraform" + uses: ./.github/actions/lint-terraform + count-lines-of-code: + name: "Count lines of code" + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + timeout-minutes: 2 + 
steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Count lines of code" + uses: ./.github/actions/create-lines-of-code-report + with: + build_datetime: "${{ inputs.build_datetime }}" + build_timestamp: "${{ inputs.build_timestamp }}" + idp_aws_report_upload_account_id: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID }}" + idp_aws_report_upload_region: "${{ secrets.IDP_AWS_REPORT_UPLOAD_REGION }}" + idp_aws_report_upload_role_name: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ROLE_NAME }}" + idp_aws_report_upload_bucket_endpoint: "${{ secrets.IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT }}" + scan-dependencies: + name: "Scan dependencies" + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + timeout-minutes: 2 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Scan dependencies" + uses: ./.github/actions/scan-dependencies + with: + build_datetime: "${{ inputs.build_datetime }}" + build_timestamp: "${{ inputs.build_timestamp }}" + idp_aws_report_upload_account_id: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID }}" + idp_aws_report_upload_region: "${{ secrets.IDP_AWS_REPORT_UPLOAD_REGION }}" + idp_aws_report_upload_role_name: "${{ secrets.IDP_AWS_REPORT_UPLOAD_ROLE_NAME }}" + idp_aws_report_upload_bucket_endpoint: "${{ secrets.IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT }}" diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml new file mode 100644 index 000000000..efcb2ac46 --- /dev/null +++ b/.github/workflows/stage-2-test.yaml @@ -0,0 +1,94 @@ +name: "Test stage" + +on: + workflow_call: + inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + type: string + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + type: string + build_epoch: + description: "Build epoch, set by the CI/CD pipeline workflow" + required: true + type: string + nodejs_version: + description: "Node.js version, set 
by the CI/CD pipeline workflow" + required: true + type: string + python_version: + description: "Python version, set by the CI/CD pipeline workflow" + required: true + type: string + terraform_version: + description: "Terraform version, set by the CI/CD pipeline workflow" + required: true + type: string + version: + description: "Version of the software, set by the CI/CD pipeline workflow" + required: true + type: string + +jobs: + test-unit: + name: "Unit tests" + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run unit test suite" + run: | + make test-unit + - name: "Save the result of fast test suite" + run: | + echo "Nothing to save" + test-lint: + name: "Linting" + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run linting" + run: | + make test-lint + - name: "Save the linting result" + run: | + echo "Nothing to save" + test-coverage: + name: "Test coverage" + needs: [test-unit] + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run test coverage check" + run: | + make test-coverage + - name: "Save the coverage check result" + run: | + echo "Nothing to save" + perform-static-analysis: + name: "Perform static analysis" + needs: [test-unit] + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Full history is needed to improving relevancy of reporting + - name: "Perform static analysis" + uses: ./.github/actions/perform-static-analysis + with: + sonar_organisation_key: "${{ vars.SONAR_ORGANISATION_KEY }}" + sonar_project_key: "${{ vars.SONAR_PROJECT_KEY }}" + sonar_token: "${{ secrets.SONAR_TOKEN }}" diff --git a/.github/workflows/stage-3-build.yaml b/.github/workflows/stage-3-build.yaml new file mode 100644 index 
000000000..2a53a07a1 --- /dev/null +++ b/.github/workflows/stage-3-build.yaml @@ -0,0 +1,69 @@ +name: "Build stage" + +on: + workflow_call: + inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + type: string + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + type: string + build_epoch: + description: "Build epoch, set by the CI/CD pipeline workflow" + required: true + type: string + nodejs_version: + description: "Node.js version, set by the CI/CD pipeline workflow" + required: true + type: string + python_version: + description: "Python version, set by the CI/CD pipeline workflow" + required: true + type: string + terraform_version: + description: "Terraform version, set by the CI/CD pipeline workflow" + required: true + type: string + version: + description: "Version of the software, set by the CI/CD pipeline workflow" + required: true + type: string + +jobs: + artefact-1: + name: "Artefact 1" + runs-on: ubuntu-latest + timeout-minutes: 3 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Build artefact 1" + run: | + echo "Building artefact 1 ..." + - name: "Check artefact 1" + run: | + echo "Checking artefact 1 ..." + - name: "Upload artefact 1" + run: | + echo "Uploading artefact 1 ..." + # TODO: Use either action/cache or action/upload-artifact + artefact-2: + name: "Artefact 2" + runs-on: ubuntu-latest + timeout-minutes: 3 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Build artefact 2" + run: | + echo "Building artefact 2 ..." + - name: "Check artefact 2" + run: | + echo "Checking artefact 2 ..." + - name: "Upload artefact 2" + run: | + echo "Uploading artefact 2 ..." 
+ # TODO: Use either action/cache or action/upload-artifact diff --git a/.github/workflows/stage-4-acceptance.yaml b/.github/workflows/stage-4-acceptance.yaml new file mode 100644 index 000000000..d554f98af --- /dev/null +++ b/.github/workflows/stage-4-acceptance.yaml @@ -0,0 +1,170 @@ +name: "Acceptance stage" + +on: + workflow_call: + inputs: + build_datetime: + description: "Build datetime, set by the CI/CD pipeline workflow" + required: true + type: string + build_timestamp: + description: "Build timestamp, set by the CI/CD pipeline workflow" + required: true + type: string + build_epoch: + description: "Build epoch, set by the CI/CD pipeline workflow" + required: true + type: string + nodejs_version: + description: "Node.js version, set by the CI/CD pipeline workflow" + required: true + type: string + python_version: + description: "Python version, set by the CI/CD pipeline workflow" + required: true + type: string + terraform_version: + description: "Terraform version, set by the CI/CD pipeline workflow" + required: true + type: string + version: + description: "Version of the software, set by the CI/CD pipeline workflow" + required: true + type: string + +jobs: + environment-set-up: + name: "Environment set up" + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Create infrastructure" + run: | + echo "Creating infrastructure..." + - name: "Update database" + run: | + echo "Updating database..." + - name: "Deploy application" + run: | + echo "Deploying application..." 
+ test-contract: + name: "Contract test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run contract test" + run: | + make test-contract + - name: "Save result" + run: | + echo "Nothing to save" + test-security: + name: "Security test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run security test" + run: | + make test-security + - name: "Save result" + run: | + echo "Nothing to save" + test-ui: + name: "UI test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run UI test" + run: | + make test-ui + - name: "Save result" + run: | + echo "Nothing to save" + test-ui-performance: + name: "UI performance test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run UI performance test" + run: | + make test-ui-performance + - name: "Save result" + run: | + echo "Nothing to save" + test-integration: + name: "Integration test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run integration test" + run: | + make test-integration + - name: "Save result" + run: | + echo "Nothing to save" + test-accessibility: + name: "Accessibility test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run accessibility test" + run: | + make test-accessibility + - name: "Save result" + run: | + echo "Nothing to save" + test-load: + name: "Load test" + runs-on: ubuntu-latest + needs: environment-set-up + timeout-minutes: 10 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Run 
load tests" + run: | + make test-load + - name: "Save result" + run: | + echo "Nothing to save" + environment-tear-down: + name: "Environment tear down" + runs-on: ubuntu-latest + needs: + [ + test-accessibility, + test-contract, + test-integration, + test-load, + test-security, + test-ui-performance, + test-ui, + ] + if: always() + timeout-minutes: 5 + steps: + - name: "Checkout code" + uses: actions/checkout@v4 + - name: "Tear down environment" + run: | + echo "Tearing down environment..." diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..8bdff9a9b --- /dev/null +++ b/.gitignore @@ -0,0 +1,13 @@ +# WARNING: Please, DO NOT edit this section of the file! It is maintained in the repository template. + +.scannerwork +*cloc*report*.json +*sbom*report*.json +*vulnerabilities*report*.json +*report*json.zip +.version + +*.code-workspace +!project.code-workspace + +# Please, add your custom content below! diff --git a/.gitleaksignore b/.gitleaksignore new file mode 100644 index 000000000..cceb449a3 --- /dev/null +++ b/.gitleaksignore @@ -0,0 +1,3 @@ +# SEE: https://github.com/gitleaks/gitleaks/blob/master/README.md#gitleaksignore + +cd9c0efec38c5d63053dd865e5d4e207c0760d91:docs/guides/Perform_static_analysis.md:generic-api-key:37 diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 000000000..a69438dbc --- /dev/null +++ b/.tool-versions @@ -0,0 +1,21 @@ +# This file is for you! Please, updated to the versions agreed by your team. + +terraform 1.5.7 +pre-commit 3.4.0 + +# ============================================================================== +# The section below is reserved for Docker image versions. + +# TODO: Move this section - consider using a different file for the repository template dependencies. 
+# docker/ghcr.io/anchore/grype v0.69.1@sha256:d41fcb371d0af59f311e72123dff46900ebd6d0482391b5a830853ee4f9d1a76 # SEE: https://github.com/anchore/grype/pkgs/container/grype +# docker/ghcr.io/anchore/syft v0.92.0@sha256:63c60f0a21efb13e80aa1359ab243e49213b6cc2d7e0f8179da38e6913b997e0 # SEE: https://github.com/anchore/syft/pkgs/container/syft +# docker/ghcr.io/gitleaks/gitleaks v8.18.0@sha256:fd2b5cab12b563d2cc538b14631764a1c25577780e3b7dba71657d58da45d9d9 # SEE: https://github.com/gitleaks/gitleaks/pkgs/container/gitleaks +# docker/ghcr.io/igorshubovych/markdownlint-cli v0.37.0@sha256:fb3e79946fce78e1cde84d6798c6c2a55f2de11fc16606a40d49411e281d950d # SEE: https://github.com/igorshubovych/markdownlint-cli/pkgs/container/markdownlint-cli +# docker/ghcr.io/make-ops-tools/gocloc latest@sha256:6888e62e9ae693c4ebcfed9f1d86c70fd083868acb8815fe44b561b9a73b5032 # SEE: https://github.com/make-ops-tools/gocloc/pkgs/container/gocloc +# docker/ghcr.io/nhs-england-tools/github-runner-image 20230909-321fd1e-rt@sha256:ce4fd6035dc450a50d3cbafb4986d60e77cb49a71ab60a053bb1b9518139a646 # SEE: https://github.com/nhs-england-tools/github-runner-image/pkgs/container/github-runner-image +# docker/hadolint/hadolint 2.12.0-alpine@sha256:7dba9a9f1a0350f6d021fb2f6f88900998a4fb0aaf8e4330aa8c38544f04db42 # SEE: https://hub.docker.com/r/hadolint/hadolint/tags +# docker/hashicorp/terraform 1.5.6@sha256:180a7efa983386a27b43657ed610e9deed9e6c3848d54f9ea9b6cb8a5c8c25f5 # SEE: https://hub.docker.com/r/hashicorp/terraform/tags +# docker/jdkato/vale v2.29.7@sha256:5ccfac574231b006284513ac3e4e9f38833989d83f2a68db149932c09de85149 # SEE: https://hub.docker.com/r/jdkato/vale/tags +# docker/koalaman/shellcheck latest@sha256:e40388688bae0fcffdddb7e4dea49b900c18933b452add0930654b2dea3e7d5c # SEE: https://hub.docker.com/r/koalaman/shellcheck/tags +# docker/mstruebing/editorconfig-checker 2.7.1@sha256:dd3ca9ea50ef4518efe9be018d669ef9cf937f6bb5cfe2ef84ff2a620b5ddc24 # SEE: 
https://hub.docker.com/r/mstruebing/editorconfig-checker/tags +# docker/sonarsource/sonar-scanner-cli 5.0.1@sha256:494ecc3b5b1ee1625bd377b3905c4284e4f0cc155cff397805a244dee1c7d575 # SEE: https://hub.docker.com/r/sonarsource/sonar-scanner-cli/tags diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..957d73ff7 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,36 @@ +{ + "recommendations": [ + "alefragnani.bookmarks", + "davidanson.vscode-markdownlint", + "dbaeumer.vscode-eslint", + "donjayamanne.githistory", + "eamodio.gitlens", + "editorconfig.editorconfig", + "esbenp.prettier-vscode", + "github.codespaces", + "github.github-vscode-theme", + "github.remotehub", + "github.vscode-github-actions", + "github.vscode-pull-request-github", + "hediet.vscode-drawio", + "johnpapa.vscode-peacock", + "mhutchie.git-graph", + "ms-azuretools.vscode-docker", + "ms-vscode-remote.remote-containers", + "ms-vscode-remote.remote-wsl", + "ms-vscode.hexeditor", + "ms-vscode.live-server", + "ms-vsliveshare.vsliveshare", + "redhat.vscode-xml", + "streetsidesoftware.code-spell-checker-british-english", + "tamasfe.even-better-toml", + "tomoki1207.pdf", + "vscode-icons-team.vscode-icons", + "vstirbu.vscode-mermaid-preview", + "wayou.vscode-todo-highlight", + "yzane.markdown-pdf", + "yzhang.dictionary-completion", + "yzhang.markdown-all-in-one" + ], + "unwantedRecommendations": [] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..423458c12 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "markdownlint.config": { + "MD013": false, + "MD024": { "siblings_only": true }, + "MD033": false + } +} diff --git a/LICENCE.md b/LICENCE.md new file mode 100644 index 000000000..0ead43267 --- /dev/null +++ b/LICENCE.md @@ -0,0 +1,21 @@ +# MIT Licence + +Copyright (c) 2023 Crown Copyright NHS England. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..2c893b23a --- /dev/null +++ b/Makefile @@ -0,0 +1,36 @@ +# This file is for you! Edit it to implement your own hooks (make targets) into +# the project as automated steps to be executed on locally and in the CD pipeline. + +include scripts/init.mk + +# ============================================================================== + +# Example CI/CD targets are: dependencies, build, publish, deploy, clean, etc. 
+ +dependencies: # Install dependencies needed to build and test the project @Pipeline + # TODO: Implement installation of your project dependencies + +build: # Build the project artefact @Pipeline + # TODO: Implement the artefact build step + +publish: # Publish the project artefact @Pipeline + # TODO: Implement the artefact publishing step + +deploy: # Deploy the project artefact to the target environment @Pipeline + # TODO: Implement the artefact deployment step + +clean:: # Clean-up project resources (main) @Operations + # TODO: Implement project resources clean-up step + +config:: # Configure development environment (main) @Configuration + # TODO: Use only 'make' targets that are specific to this project, e.g. you may not need to install Node.js + make _install-dependencies + +# ============================================================================== + +${VERBOSE}.SILENT: \ + build \ + clean \ + config \ + dependencies \ + deploy \ diff --git a/README.md b/README.md new file mode 100644 index 000000000..29f86801f --- /dev/null +++ b/README.md @@ -0,0 +1,112 @@ +# Repository Template + +[![CI/CD Pull Request](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml/badge.svg)](https://github.com/nhs-england-tools/repository-template/actions/workflows/cicd-1-pull-request.yaml) +[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=repository-template&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=repository-template) + +Start with an overview or a brief description of what the project is about and what it does. For example - + +Welcome to our repository template designed to streamline your project setup! This robust template provides a reliable starting point for your new projects, covering an essential tech stack and encouraging best practices in documenting. 
+ +This repository template aims to foster a user-friendly development environment by ensuring that every included file is concise and adequately self-documented. By adhering to this standard, we can promote increased clarity and maintainability throughout your project's lifecycle. Bundled within this template are resources that pave the way for seamless repository creation. Currently supported technologies are: + +- Terraform +- Docker + +Make use of this repository template to expedite your project setup and enhance your productivity right from the get-go. Enjoy the advantage of having a well-structured, self-documented project that reduces overhead and increases focus on what truly matters - coding! + +## Table of Contents + +- [Repository Template](#repository-template) + - [Table of Contents](#table-of-contents) + - [Setup](#setup) + - [Prerequisites](#prerequisites) + - [Configuration](#configuration) + - [Usage](#usage) + - [Testing](#testing) + - [Design](#design) + - [Diagrams](#diagrams) + - [Modularity](#modularity) + - [Contributing](#contributing) + - [Contacts](#contacts) + - [Licence](#licence) + +## Setup + +By including preferably a one-liner or if necessary a set of clear CLI instructions we improve user experience. This should be a frictionless installation process that works on various operating systems (macOS, Linux, Windows WSL) and handles all the dependencies. + +Clone the repository + +```shell +git clone https://github.com/nhs-england-tools/repository-template.git +cd repository-template +``` + +### Prerequisites + +The following software packages, or their equivalents, are expected to be installed and configured: + +- [Docker](https://www.docker.com/) container runtime or a compatible tool, e.g. 
[Podman](https://podman.io/), +- [asdf](https://asdf-vm.com/) version manager, +- [GNU make](https://www.gnu.org/software/make/) 3.82 or later, +- [GNU coreutils](https://www.gnu.org/software/coreutils/) and [GNU binutils](https://www.gnu.org/software/binutils/) may be required to build dependencies like Python, which may need to be compiled during installation. For macOS users, this has been scripted and automated by the `dotfiles` project; please see this [script](https://github.com/nhs-england-tools/dotfiles/blob/main/assets/20-install-base-packages.macos.sh) for details, +- [Python](https://www.python.org/) required to run Git hooks, +- [`jq`](https://jqlang.github.io/jq/) a lightweight and flexible command-line JSON processor. + +> [!NOTE]
+> The version of GNU make available by default on macOS is earlier than 3.82. You will need to upgrade it or certain `make` tasks will fail. On macOS, you will need [Homebrew](https://brew.sh/) installed, then to install `make`, like so: +> +> ```shell +> brew install make +> ``` +> +> You will then see instructions to fix your `$PATH` variable to make the newly installed version available. If you are using [dotfiles](https://github.com/nhs-england-tools/dotfiles), this is all done for you. + +### Configuration + +Installation and configuration of the toolchain dependencies + +```shell +make config +``` + +## Usage + +After a successful installation, provide an informative example of how this project can be used. Additional code snippets, screenshots and demos work well in this space. You may also link to the other documentation resources, e.g. the [User Guide](./docs/user-guide.md) to demonstrate more use cases and to show more features. + +### Testing + +There are `make` tasks for you to configure to run your tests. Run `make test` to see how they work. You should be able to use the same entry points for local development as in your CI pipeline. + +## Design + +### Diagrams + +The [C4 model](https://c4model.com/) is a simple and intuitive way to create software architecture diagrams that are clear, consistent, scalable and most importantly collaborative. This should result in documenting all the system interfaces, external dependencies and integration points. + +![Repository Template](./docs/diagrams/Repository_Template_GitHub_Generic.png) + +### Modularity + +Most of the projects are built with customisability and extendability in mind. At a minimum, this can be achieved by implementing service level configuration options and settings. The intention of this section is to show how this can be used. If the system processes data, you could mention here for example how the input is prepared for testing - anonymised, synthetic or live data. 
+ +## Contributing + +Describe or link templates on how to raise an issue, feature request or make a contribution to the codebase. Reference the other documentation files, like + +- Environment setup for contribution, i.e. `CONTRIBUTING.md` +- Coding standards, branching, linting, practices for development and testing +- Release process, versioning, changelog +- Backlog, board, roadmap, ways of working +- High-level requirements, guiding principles, decision records, etc. + +## Contacts + +Provide a way to contact the owners of this project. It can be a team, an individual or information on the means of getting in touch via active communication channels, e.g. opening a GitHub discussion, raising an issue, etc. + +## Licence + +> The [LICENCE.md](./LICENCE.md) file will need to be updated with the correct year and owner + +Unless stated otherwise, the codebase is released under the MIT License. This covers both the codebase and any sample code in the documentation. + +Any HTML or Markdown documentation is [© Crown Copyright](https://www.nationalarchives.gov.uk/information-management/re-using-public-sector-information/uk-government-licensing-framework/crown-copyright/) and available under the terms of the [Open Government Licence v3.0](https://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/). 
diff --git a/VERSION b/VERSION new file mode 100644 index 000000000..f25af7f1a --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +${yyyy}${mm}${dd} diff --git a/docs/adr/ADR-001_Use_git_hook_and_GitHub_action_to_check_the_editorconfig_compliance.md b/docs/adr/ADR-001_Use_git_hook_and_GitHub_action_to_check_the_editorconfig_compliance.md new file mode 100644 index 000000000..0ddc032b5 --- /dev/null +++ b/docs/adr/ADR-001_Use_git_hook_and_GitHub_action_to_check_the_editorconfig_compliance.md @@ -0,0 +1,148 @@ +# ADR-001: Use git hook and GitHub Action to check the `.editorconfig` compliance + +>| | | +>| ------------ | ---------------------------------------------------- | +>| Date | `04/07/2023` | +>| Status | `RFC` | +>| Deciders | `Engineering` | +>| Significance | `Construction techniques` | +>| Owners | `Dan Stefaniuk, Amaan Ibn-Nasar, Thomas Judd-Cooper` | + +--- + +- [ADR-001: Use git hook and GitHub Action to check the `.editorconfig` compliance](#adr-001-use-git-hook-and-github-action-to-check-the-editorconfig-compliance) + - [Context](#context) + - [Decision](#decision) + - [Assumptions](#assumptions) + - [Drivers](#drivers) + - [Options](#options) + - [Options 1: The pre-commit project](#options-1-the-pre-commit-project) + - [Options 2a: Custom shell script](#options-2a-custom-shell-script) + - [Options 2b: Docker-based custom shell script](#options-2b-docker-based-custom-shell-script) + - [Option 3: A GitHub Action from the Marketplace](#option-3-a-github-action-from-the-marketplace) + - [Outcome](#outcome) + - [Rationale](#rationale) + - [Consequences](#consequences) + - [Compliance](#compliance) + - [Notes](#notes) + - [Actions](#actions) + - [Tags](#tags) + +## Context + +As part of the Repository Template project a need for a simple text formatting feature using the [EditorConfig](https://editorconfig.org/) rules was identified that is accessible and consistent for all contributors. 
To ensure that formatting rules are applied, a compliance check has to be implemented on a developer workstation and as a part of the CI/CD pipeline. This will establish a fast feedback loop and a fallback option, if the former has not worked. + +## Decision + +### Assumptions + +This decision is based on the following assumptions that are used to form a set of generic requirements for the implementation as a guide. A solution should be + +- Cross-platform and portable, supporting systems like + - macOS + - Windows WSL (Ubuntu) + - Ubuntu and potentially other Linux distributions like Alpine +- Configurable + - can run on a file or a directory + - can be turned on/off entirely +- Run locally (aka developer workstation) and remotely (aka CI/CD pipeline) +- Reusable and avoid code duplication + +### Drivers + +Implementation of this compliance check (like text encoding, line endings, tabs vs. spaces etc.) will help with any potential debate or discussion, removing personal preferences and opinions, enabling teams to focus on delivering value to the product they work on. + +Other linting tools like for example [Prettier](https://prettier.io/) and [ESLint](https://eslint.org/) are not considered here as they are code formatting tools dedicated to specific technologies and languages. The main drivers for this decision are the style consistency across all files in the codebase and to eliminate any disruptive changes introduced based on preferences. EditorConfig rules are recognised and supported by most if not all major editors and IDEs. 
+ +Here is the recommended ruleset: + +```console +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true +``` + +### Options + +#### Options 1: The [pre-commit](https://pre-commit.com/) project + +- Pros + - Python is installed on most if not all platforms + - A pythonist friendly tool + - Well-documented +- Cons + - Dependency on Python even for a non-Python project + - Potential versioning issues with Python runtime and dependencies compatibility + - Lack of process isolation, access to resources with user-level privileges + - Dependency on multiple parties and plugins + +#### Options 2a: Custom shell script + +- Pros + - Execution environment is installed everywhere, no setup required + - Ease of maintainability and testability + - It is a simple solution +- Cons + - May potentially require more coding in Bash + - Requires shell scripting skills + +#### Options 2b: Docker-based custom shell script + +This option is an extension built upon option 2a. + +- Pros + - Cross-platform compatibility + - Isolation of the process dependencies and runtime + - Docker is an expected dependency for most/all projects +- Cons + - Requires Docker as a preinstalled dependency + - Requires basic Docker skills + +#### Option 3: A GitHub Action from the Marketplace + +- Pros + - Usage of a GitHub native functionality +- Cons + - Reliance on the GitHub DSL (coding in YAML) may lead to a less portable solution + - Implementation of the functionality has to be duplicated for the git hook + +### Outcome + +The decision is to implement the Option 2b for all the git hooks included in this repository template in conjunction with the Option 1 as the git hook executor. This will allow for flexibility and extensibility of the git hooks used by a repository built upon this template.
+ +### Rationale + +A choice of shell scripting along with Docker offers good support for simplicity, process isolation, portability across the operating systems and reuse of the same code and its configuration. This approach makes it consistent for a local environment and the CI/CD pipeline, where the process can be gated and compliance enforced. + +## Consequences + +As a result of the above decision + +- a single Bash script will be implemented +- it will be placed in the `scripts/githooks` directory +- the name of the file will be `check-file-format.sh` +- there will be a `pre-commit` runner installed by the [pre-commit](https://pre-commit.com/) framework using a make target +- the GitHub Action will call the git hook `check-file-format.sh` script directly +- and a couple of `Makefile` targets like `config`, `githooks-config` will be implemented to bootstrap the project + +The intention of this decision is to guide any other git hook and GitHub Action implementations. + +## Compliance + +Both the git hook and the GitHub Action should be executed automatically as part of the developer workflow. + +## Notes + +There is an emerging practice to use projects like [act](https://github.com/nektos/act) to make GitHub Actions even more portable. ~~The recommendation is for this tool to be assessed at further stages of the [nhs-england-tools/repository-template](https://github.com/nhs-england-tools/repository-template) project implementation, in the context of this decision record.~~ Update: Please see the [Test GitHub Actions locally](../user-guides/Test_GitHub_Actions_locally.md) user guide.
+ +## Actions + +- [x] Ensure the naming convention of the other git hooks follows the pattern set by the `scripts/githooks/check-file-format.sh` script +- [ ] ~~Relocate shell scripts that are called by the GitHub workflow, currently placed in the `scripts` directory, to `.github/workflows/scripts`.~~ Update: Scripts provided along with this repository template are made to be agnostic from any provider's workflow implementation; this is by design. Therefore, the `scripts` directory is the right place for the shell scripts. + +## Tags + +`#maintainability, #testability, #simplicity, #security` diff --git a/docs/adr/ADR-002_Scan_repository_for_hardcoded_secrets.md b/docs/adr/ADR-002_Scan_repository_for_hardcoded_secrets.md new file mode 100644 index 000000000..0cf78d43c --- /dev/null +++ b/docs/adr/ADR-002_Scan_repository_for_hardcoded_secrets.md @@ -0,0 +1,149 @@ +# ADR-002: Scan repository for hard-coded secrets + +>| | | +>| ------------ | ------------------------------------------------------------- | +>| Date | `05/06/2023` | +>| Status | `RFC` | +>| Deciders | `Engineering` | +>| Significance | `Construction techniques` | +>| Owners | `Dan Stefaniuk, Jon Pearce, Tamara Goldschmidt, Tim Rickwood` | + +--- + +- [ADR-002: Scan repository for hard-coded secrets](#adr-002-scan-repository-for-hard-coded-secrets) + - [Context](#context) + - [Decision](#decision) + - [Assumptions](#assumptions) + - [Drivers](#drivers) + - [Options](#options) + - [Outcome](#outcome) + - [Rationale](#rationale) + - [Consequences](#consequences) + - [Compliance](#compliance) + - [Notes](#notes) + - [Actions](#actions) + - [Tags](#tags) + +## Context + +To safeguard sensitive details like passwords, API keys etc. from being incorporated into code repositories, it is imperative that we employ secret scanning of the code. This safeguarding process should be conducted in two key areas. 
Firstly, on the developer's machine, we utilise a git pre-commit hook to halt the inclusion of any secrets within the committed code. Secondly, as a safety net, a similar scan should be integrated into the CI/CD pipeline. Should a secret be detected within this pipeline, it is crucial that the pipeline serves as a gate to fail the build, subsequently blocking any related pull requests. + +## Decision + +### Assumptions + +There is already a well-known and fit-for-purpose tool `git-secrets` in use that was selected as the outcome of a decision made around 4 years ago. The purpose of this document is to review that decision. + +### Drivers + +Within NHS England, we are observing an adoption of the `gitleaks` tool, which is an alternative to `git-secrets`. + +### Options + +There are three options presented in this decision record. + +1. [Git-secrets](https://github.com/awslabs/git-secrets) + + - Repository metadata + - Contributions + - Number of contributors: **28** + - Number of commits: **110** + - Commit dates / frequency: **last commit more than half a year ago, very low frequency** + - Number of Stars & Forks: **11.1k & 1.1k** + - Implementation technologies: **Shell script** + - Licence: **[Apache-2.0](https://choosealicense.com/licenses/apache-2.0/)** + - Features + - [x] Scan whole history + - [x] Scan single commit + - [ ] Predefined set of rules: _A very limited number of rules_ + - [x] Definition of custom rules + - [x] Definition of custom exclusion patterns + - [ ] Entropy detection + - [ ] Pre-baked Docker image + + - Pros + - A well-known tool that has been around for a while + - Cons + - Rules and exclusion patterns are not easy to manage as no comments or metadata are allowed in the definition + - No pre-baked Docker image + - Activity of the repository has dropped (last commit a while ago) + +2.
[Trufflehog](https://github.com/trufflesecurity/trufflehog) + + - Repository metadata + - Contributions + - Number of contributors: **69** + - Number of commits: **2050** + - Commit dates / frequency: **last commit today, high frequency** + - Number of Stars & Forks: **11.3k & 1.3k** + - Implementation technologies: **Go language** + - Licence: **[AGPL-3.0](https://choosealicense.com/licenses/agpl-3.0/)** + - Features + - [x] Scan whole history + - [x] Scan single commit + - [ ] Predefined set of rules + - [x] Definition of custom rules + - [x] Definition of custom exclusion patterns: _Only whole files_ + - [x] Entropy detection + - [x] Pre-baked Docker image + + - Pros + - Entropy detection + - Fast to scan the whole history + - Cons + - [AGPL-3.0](https://choosealicense.com/licenses/agpl-3.0/) licence comes with conditions + +3. [Gitleaks](https://github.com/gitleaks/gitleaks) + + - Repository metadata + + - Contributions + - Number of contributors: **135** + - Number of commits: **929** + - Commit dates / frequency: **last commit three days ago, medium frequency** + - Number of Stars & Forks: **13k & 1.2k** + - Implementation technologies: **Go language** + - Licence: **[MIT](https://choosealicense.com/licenses/mit/)** + - Features + - [x] Scan whole history + - [x] Scan single commit + - [x] Predefined set of rules + - [x] Definition of custom rules + - [x] Definition of custom exclusion patterns + - [x] Entropy detection: _Set against a rule_ + - [x] Pre-baked Docker image + + - Pros + - Ease of managing rules and exclusion patterns as the configuration file uses the `toml` format + - Entropy detection at a rule level + - Fast to scan the whole history + - Cons + - No full entropy detection as an option + +### Outcome + +The decision is to support Option 3 and endorse the usage of the `gitleaks` tool. This decision is reversible, and the state of secret scan tooling will be monitored by means of the NHS England Tech Radar.
+ +### Rationale + +This decision was made with the understanding that the chosen tool must support the NHS England [Coding in the Open](https://github.com/nhsx/open-source-policy) initiative/policy and also be compatible with usage in private repositories. + +## Consequences + +As a result of this decision, any new repository created from the repository template should contain a secret scanning implementation based on `gitleaks` provided as a GitHub Action. + +## Compliance + +Compliance will be checked by the [GitHub Scanning Tool](https://github.com/NHSDigital/github-scanning-utils). + +## Notes + +This is an addition to the [GitHub Secret Scanning](https://docs.github.com/en/code-security/secret-scanning/about-secret-scanning) feature that should be considered to be turned on for any public repository within the NHS England GitHub subscription. + +## Actions + +- [ ] Update the NHS England [Software Engineering Quality Framework](https://github.com/NHSDigital/software-engineering-quality-framework) accordingly + +## Tags + +`#maintainability, #testability, #simplicity, #security` diff --git a/docs/adr/ADR-003_Acceptable_use_of_GitHub_PAT_and_Apps_for_authN_and_authZ.md b/docs/adr/ADR-003_Acceptable_use_of_GitHub_PAT_and_Apps_for_authN_and_authZ.md new file mode 100644 index 000000000..cd82a32c0 --- /dev/null +++ b/docs/adr/ADR-003_Acceptable_use_of_GitHub_PAT_and_Apps_for_authN_and_authZ.md @@ -0,0 +1,244 @@ +# ADR-003: Acceptable use of GitHub authentication and authorisation mechanisms + +>| | | +>| ------------ | --- | +>| Date | `04/09/2023` | +>| Status | `RFC` | +>| Deciders | `Engineering` | +>| Significance | `Construction techniques` | +>| Owners | `Amaan Ibn-Nasar, Jacob Gill, Dan Stefaniuk` | + +--- + +- [ADR-003: Acceptable use of GitHub authentication and authorisation mechanisms](#adr-003-acceptable-use-of-github-authentication-and-authorisation-mechanisms) + - [Context](#context) + - [Decision](#decision) + - [Assumptions](#assumptions) + - 
[Drivers](#drivers) + - [Options](#options) + - [Outcome](#outcome) + - [Built-in authentication using `GITHUB_TOKEN` secret](#built-in-authentication-using-github_token-secret) + - [GitHub PAT (fine-grained Personal Access Token)](#github-pat-fine-grained-personal-access-token) + - [GitHub App](#github-app) + - [Rationale](#rationale) + - [Notes](#notes) + - [GitHub App setup](#github-app-setup) + - [Recommendation for GitHub Admins](#recommendation-for-github-admins) + - [Diagram](#diagram) + - [Context diagram showing the GitHub App setup](#context-diagram-showing-the-github-app-setup) + - [Authentication flow diagram](#authentication-flow-diagram) + - [Limitations](#limitations) + - [Examples of acquiring access token](#examples-of-acquiring-access-token) + - [Actions](#actions) + - [Tags](#tags) + - [Footnotes](#footnotes) + +## Context + +As teams increasingly adopt GitHub and invest in refining development processes, there is a growing need to facilitate automated bot access to repositories, for tasks such as managing Pull Requests or integrating self-hosted runners with preferred Cloud providers. While GitHub's official documentation provides detailed technical instructions, it might not always offer a clear and holistic understanding of the platform's authentication and authorisation mechanisms. This document seeks to bridge that gap. It elucidates not just the "_how_" but also the "_why_", "_when_", and "_what_" behind these mechanisms, aiming to promote both effective and secure usage. + +## Decision + +### Assumptions + +_A **GitHub App** is a type of integration that you can build to interact with and extend the functionality of GitHub. You can build a GitHub App to provide flexibility and reduce friction in your processes, without needing to sign in a user or create a service account._ [^1] + +_**Personal access tokens** are an alternative to using passwords for authentication to GitHub when using the GitHub API or the command line. 
Personal access tokens are intended to access GitHub resources on behalf of yourself._ [^2] + +_When you enable GitHub Actions, GitHub installs a GitHub App on your repository. The **GITHUB_TOKEN** secret is a GitHub App installation access token. You can use the installation access token to authenticate on behalf of the GitHub App installed on your repository._ [^3] + +### Drivers + +The aim of this decision record, or more precisely, this guide, is to provide clear guidelines on the appropriate use of GitHub's authentication and authorisation mechanisms. Our objective is to ensure that any automated process utilises correct authentication when executing GitHub Actions and Workflows. These processes underpin the implementation of the CI/CD (Continuous Integration and Continuous Delivery) pipeline. By adhering to these guidelines, we can maintain robust, secure and effective operations. + +### Options + +There are three options available to support automated GitHub Action and Workflow authentication processes: + +1. [Built-in authentication](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) using `GITHUB_TOKEN` secret + + - ➕ **No set-up required**. It works effortlessly, even for forked repositories. + - ➕ **The token can only access the repository containing the workflow file**. This token cannot be used to access other private repositories. + - ➖ **The token can only access a repository containing the workflow file**. If you need to access other private repositories or require write access to other public repositories this token will not be sufficient. + - ➖ **The token cannot trigger other workflows**. If you have a workflow that creates a release and another workflow that runs when someone creates a release, the first workflow will not trigger the second workflow if it utilises this token based mechanism for authentication. + +2. 
[GitHub PAT](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) (fine-grained Personal Access Token) + + - ➕ **Simple to set up**. You can create a [fine-grained personal access token](https://github.com/settings/tokens?type=beta) with a repository scope. Classic personal access tokens should never be used. + - ➕ **GitHub PAT provides a more fine-grained permission model** than the built-in `GITHUB_TOKEN` + - ➕ **The token can trigger other workflows**. + - ➖ **It is bound to a person**. The owner of the token leaving the organisation can cause your workflow to break. + +3. [GitHub App](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps) + + - ➕ **You can control which repositories your token has access to** by installing the GitHub App to selected repositories. + - ➕ **An organisation can own multiple GitHub Apps** and they do not consume a team seat. + - ➕ **GitHub App provides a more fine-grained permission model** than the built-in `GITHUB_TOKEN` + - ➕ **The token can trigger other workflows**. + - ➖ **Not very well documented**. Despite the extensive content of the GitHub documentation, it does not effectively communicate the pros & cons, use-cases and comparison of each authentication method. This was one of the reasons we created this ADR. + - ➖ **The setup is a bit more complicated**. + +### Outcome + +#### Built-in authentication using `GITHUB_TOKEN` secret + +A `GITHUB_TOKEN` is automatically generated and used within GitHub Action and Workflow for tasks related to the current repository such as creating or updating issues, pushing commits, etc. + +- **Scope**: The `GITHUB_TOKEN` is automatically created by GitHub in each run of a GitHub Action and Workflow, with its scope restricted to the repository initiating the workflow.
The permissions of the `GITHUB_TOKEN` are limited to read and write access to the repository files, with an exception of write access to the `.github/workflows` directory. +- **Life Span**: The `GITHUB_TOKEN` has a temporary lifespan automatically expiring after the completion of the job that initiated its creation. + +This method enables basic operations expected from the repository pipeline, like accessing GitHub secret variables. + +#### GitHub PAT (fine-grained Personal Access Token) + +Use personal access token when: + +- **Scripted access**: When you are writing scripts that automate tasks related to your repositories PATs can be a good choice. These tokens can authenticate your script with GitHub allowing it to perform various operations like cloning repositories, creating issues, or fetching data from the API. Since PATs can act with nearly all the same scopes as a user, they can be a versatile tool for script-based interactions with your repositories. + +- **Command-line access**: If you are directly using the GitHub API from the command-line (e.g. with `curl`), PATs provide a convenient way to authenticate. They allow you to perform a wide range of actions, including getting the number of stars on a repository, posting a comment on an issue or triggering a new build or deployment. In this use case a common task that a contributor has to perform daily can be automated using a PAT generated with a scope specifically for it. + +- **Two-Factor Authentication (2FA)**: If you have enabled 2FA for added account security, performing `https` Git operations like clone, fetch, pull or push will require a PAT instead of a password. This helps ensure that operations remain secure even from the command-line. + +Do not use it when: + +- **Sharing your account**: PATs should never be used to provide access to your GitHub account to others. 
Instead, use GitHub's built-in features for collaboration and access management, such as adding collaborators to repositories or using organisations and teams. + +- **Public repositories or code**: PATs provide broad access to your account, so you should never embed them in your code, especially if that code is public. This could allow someone to take control of your account, modify your repositories or steal your data. The [scan secrets functionality](../../docs/user-guides/Scan_secrets.md) that is part of this repository template should prevent you from doing so anyway. + +- **Broad permissions**: While PATs can have broad permissions, you should aim to restrict each token's scope to what is necessary for its purpose. For instance, a token used only for reading repository metadata does not need write or admin access. + +- **Long-term usage without rotation**: To limit potential exposure of your PAT, it is recommended to periodically change or "rotate" your tokens. This is a common security best practice for all kinds of secret keys or tokens. + +This method of authentication and authorisation using the fine-grained PAT for the purpose of automation should mostly be used by the GitHub organisation owners, administrators and maintainers. + +#### GitHub App + +Use app when: + +- **Acting on behalf of a user or an organisation**: GitHub Apps can be installed directly onto an organisation or a user account and can access specific repositories. They act as separate entities and do not need a specific user to authenticate actions, thus separating the app's actions from individual users and preventing user-related issues (like a user leaving the organisation) from disrupting the app's operation. In this model, a GitHub App can act on behalf of a user to perform actions that the user has permissions for. For example, if a GitHub App is used to manage issues in a repository, it can act on behalf of a user to open, close, or comment on issues. 
The actions the app can perform are determined by the user's permissions and the permissions granted to the app during its installation. + +- **When you need fine-grained permissions**: GitHub Apps provide more detailed control over permissions than the classic PAT, which should no longer be used. You can set access permissions on a per-resource basis (issues, pull requests, repositories, etc.). This allows you to follow the principle of least privilege, granting your app only the permissions it absolutely needs. + +- **Webhook events**: GitHub Apps can be configured to receive a variety of webhook events. Unlike personal tokens, apps can receive granular event data and respond accordingly. For instance, an app can listen for `push` events to trigger a CI/CD pipeline or `issue_comment` events to moderate comments. + +- **Server-to-server communication**: Unlike users, GitHub Apps have their own identities and can perform actions directly on a repository without a user action triggering them. They are associated with the GitHub account (individual or organisation) that owns the app, not necessarily the account that installed the app. In this model the GitHub App can perform actions based on the permissions it was given during setup. These permissions are separate from any user permissions and allow the app to interact with the GitHub API directly. For example, an app might be set up to automatically run a test suite whenever code is pushed to a repository. This action would happen regardless of which user pushed the code. + +This method of authentication and authorisation is intended for the engineering teams to implement and support automated processes. Setting up the [GitHub OAuth App](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps) access is outside the scope of this document as this mechanism should not be employed in the context of development process automation. 
+ +### Rationale + +This guide describes the essence of the fundamental aspects of GitHub authentication and authorisation mechanisms along with the common use cases identified by the GitHub organisation administrators of the NHS England. + +## Notes + +### GitHub App setup + +To be executed by a GitHub organisation administrator: + +- Identify the GitHub repository name for which the team has requested a GitHub App integration +- Create a shared email address [england.[repository-name]-app@nhs.net](england.[repository-name]-app@nhs.net) by filling in the `New shared mailbox request` form using the Internal Portal (ServiceNow) + - Delegate access to this mailbox for the GitHub organisation owners, administrators and the engineering team +- Create a GitHub bot account named `[repository-name]-app` using the email address mentioned above. The bot account should not be added to the organisation; therefore, **no GitHub seat will be taken**. It serves as an identity, but authentication and authorisation are handled via the GitHub App. This avoids granting the bot admin permissions to the repository, enabling commits to be signed by that bot account. Access is controlled solely through the GitHub App. 
+ - Use the `nhs.net` email address as the default and only email + - Set the email address as private + - Make profile private and hide any activity + - Block command line pushes that expose email + - Set up commit signing + - Flag unsigned commits as unverified +- [Register new GitHub App](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app) under the `[repository-name]-app` bot account with a name following a pattern that could include information like `[Team] [Repository Name] [Purpose]`, which would help to search for and identify owners of the app once it is installed within the GitHub organisation + - Make note of the `App ID` + - Generate and store securely a `Private key` for the app + - Provide a `Homepage URL` to the repository this app will operate on + - Make this app `public` + - Set the relevant `Repository permissions` based on the team's requirements. There should be no organisation or account permissions set at all + +To be executed by a GitHub organisation owner: + +- Install the `[Team] [Repository Name] [Purpose]` app on the GitHub organisation and set repository access to the `[repository-name]` only + +#### Recommendation for GitHub Admins + +It is advisable to create a separate bot account for each service or programme. This approach fosters responsible ownership practices. It also allows the team to use the bot's identity for signing commits and integrating their service with other SaaS products, such as SonarCloud, without relying on individual team member accounts. Exceptions can be made on a case-by-case basis, allowing for the use of a central organisation account instead. + +### Diagram + +#### Context diagram showing the GitHub App setup + +```mermaid +C4Context + Enterprise_Boundary(b0, "Internal Developer Platform, part of the NHS England CoE") { + + Boundary(b1, "Service", "boundary") { + System(repo, "Repository", "Repository
[repository-name]") + System(github_app_runner, "GitHub App (runner)", "Bot app runner
for the repository") + } + Rel(repo, github_app_runner, "Is managed by") + + Boundary(b2, "Bot", "boundary") { + System(email_account, "NHSmail shared account", "Bot email
england.[repository-name]-app@nhs.net") + System(github_account, "GitHub account", "Bot user
[repository-name]-app
(not assigned to any org)") + System(github_app_registration, "GitHub App (registration)", "Bot app registration
'[Team] [Repository Name] [Purpose]'") + } + Rel(github_account, email_account, "Belongs to") + Rel(github_app_registration, github_account, "Is registered by") + + Boundary(b3, "GitHub Admins", "boundary") { + System(github_org, "GitHub organisation", "Org") + System(github_app_installation, "GitHub App (installation)", "Bot app installation
for the repository") + } + Rel(github_app_installation, github_org, "Is installed within") + + Rel(repo, github_org, "Belongs to") + Rel(repo, github_account, "Can accept contributions from") + Rel(github_app_runner, github_app_installation, "Authenticates via") + Rel(github_app_installation, github_app_registration, "Is an app installation of") + + UpdateElementStyle(repo, $bgColor="grey") + UpdateElementStyle(github_app_runner, $bgColor="grey") + } +``` + +Please see the above being implemented for the _update from template_ capability: + +- [Repository and GitHub App (runner)](https://github.com/nhs-england-tools/update-from-template-app) for the "Update from Template" app. The runner is built on a GitHub Action but it can be a serverless workload or self-hosted compute +- [GitHub account (bot)](https://github.com/update-from-template-app) linked to an `nhs.net` email address, but not part of any GitHub organisation +- [GitHub App (registration)](https://github.com/apps/nhs-england-update-from-template) to be installed within the GitHub organisations in use, e.g. `nhs-england-tools` + +#### Authentication flow diagram + +The diagram below represents all the steps needed for an app implementation (aka app runner) to be authenticated and authorised to perform operations defined by the GitHub App registration and installation. 
+ +```mermaid +graph LR + A[Initialisation] -- App ID, App PK --> B[Generate JWT] + B -- JWT, Org name --> C[Get installation ID] + C -- JWT, Installation ID --> D[Generate Access Token] + D -- GITHUB_TOKEN --> E[Perform actions] +``` + +### Limitations + +- Only 100 app registrations are allowed per user or organisation, but there is [no limit on the number of installed apps](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app#about-registering-github-apps) +- [Access rate limits apply](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/rate-limits-for-github-apps) depending on the number of repositories or users within organisation +- The app name cannot exceed 34 characters + +### Examples of acquiring access token + +- [Bash](./assets/ADR-003/examples/bash/README.md) +- [Golang](./assets/ADR-003/examples/golang/README.md) +- [Node.js TypeScript (Octokit)](./assets/ADR-003/examples/nodejs/README.md) - This is our preferred method for implementing GitHub Apps. It is supported by the Octokit library, which is an official client for the GitHub API. +- [Python](./assets/ADR-003/examples/python/README.md) + +## Actions + +- [ ] Provide an example of commit signing by bot in the unattended mode, i.e. include a link on how this is implemented in the [Update from Template](https://github.com/nhs-england-tools/update-from-template-app/blob/c1b87f3aaa568caf4a8bfdd5b07d0c4ef88a2e4a/entrypoint.sh#L81) app. 
+ +## Tags + +`#maintainability, #security` + +## Footnotes + +[^1]: [About creating GitHub Apps](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps) +[^2]: [Managing your personal access tokens](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) +[^3]: [Publishing and installing a package with GitHub Actions](https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions) diff --git a/docs/adr/ADR-XXX_Agree_CICD_pipeline_structure.md b/docs/adr/ADR-XXX_Agree_CICD_pipeline_structure.md new file mode 100644 index 000000000..979f879a1 --- /dev/null +++ b/docs/adr/ADR-XXX_Agree_CICD_pipeline_structure.md @@ -0,0 +1,101 @@ +# ADR-XXX: Agree CI/CD pipeline structure + +>| | | +>| ------------ | --- | +>| Date | `dd/mm/YYYY` _when the decision was last updated_ | +>| Status | `RFC by dd/mm/YYYY, Proposed, In Discussion, Pending Approval, Withdrawn, Rejected, Accepted, Deprecated, ..., Superseded by ADR-XXX or Supersedes ADR-XXX` | +>| Deciders | `Tech Radar, Engineering, Architecture, Solution Assurance, Clinical Assurance, Technical Review and Governance, Information Governance, Cyber Security, Live Services Board,` ... | +>| Significance | `Structure, Nonfunctional characteristics, Dependencies, Interfaces, Construction techniques,` ... | +>| Owners | | + +--- + +- [ADR-XXX: Agree CI/CD pipeline structure](#adr-xxx-agree-cicd-pipeline-structure) + - [Context](#context) + - [Decision](#decision) + - [Assumptions](#assumptions) + - [Drivers](#drivers) + - [Options](#options) + - [Outcome](#outcome) + - [Rationale](#rationale) + - [Consequences](#consequences) + - [Compliance](#compliance) + - [Notes](#notes) + - [Actions](#actions) + - [Tags](#tags) + +## Context + +Describe the context and the problem statement. 
Is there a relationship to other decisions previously made? Are there any dependencies and/or constraints within which the decision will be made? Do these need to be reviewed or validated? Please note that environmental limitations or restrictions such as accepted technology standards, commonly recognised and used patterns, engineering and architecture principles, organisation policies, governance and so on, may as an effect narrow down the choices. This should also be explicitly documented, as this is a point-in-time decision with the intention of being able to articulate it clearly and justify it later. + +Requirements: + +- Implement the exemplar CI/CD pipeline using GitHub workflows and actions +- Incorporate the four main CI/CD stages, which are as follows: + 1. Commit, max. execution time 2 mins + 2. Test, max. execution time 5 mins + 3. Build, max. execution time 3 mins + 4. Acceptance, max. execution time 10 mins +- Provide `publish` and `deploy` workflows as the complementary processes +- Maintain simplicity in the pipeline but ensure it is scalable and extensible for larger projects +- Enable parallel execution of jobs to speed up the overall process +- Prevent the workflow from being triggered twice, i.e. when pushing to a branch with an existing pull request +- Implement good CI/CD practices, such as: + - Setting the build time variables at the start of the process + - Storing the tooling versions like Terraform, Python and Node.js in the `./.tools-version` file + - Storing the software/project version in the `./VERSION` file + - Keeping the main workflow modular + - Ensuring a timeout is set for each job + - Listing environment variables + - Making actions portable, e.g. allowing them to be run on a workstation or on Azure DevOps using external scripts + +![CD Pipeline Structure](../diagrams/CD%20Pipeline%20Structure.png) + +## Decision + +### Assumptions + +Summarise the underlying assumptions in the environment in which you make the decision. 
This could be related to technology changes, forecast of the monetary and non-monetary costs, further delivery commitments, impact from external drivers etc., and any known unknowns that translate to risks. + +### Drivers + +List the decision drivers that motivate this change or course of action. This may include any identified risks and residual risks after applying the decision. + +### Options + +Consider a comprehensive set of alternative options; provide weighting if applicable. + +### Outcome + +State the decision outcome as a result of taking into account all of the above. Is it a reversible or irreversible decision? + +### Rationale + +Provide a rationale for the decision that is based on weighing the options to ensure that the same questions are not going to be asked again and again unless the decision needs to be superseded. + +For non-trivial decisions a comparison table can be useful for the reviewer. Decision criteria down one side, options across the top. You'll likely find decision criteria come from the Drivers section above. Effort can be an important driving factor. You may have an intuitive feel for this, but reviewers will not. T-shirt sizing the effort for each option may help communicate. + +## Consequences + +Describe the resulting context, after applying the decision. All the identified consequences should be listed here, not just the positive ones. Any decision comes with many implications. For example, it may introduce a need to make other decisions as an effect of cross-cutting concerns; it may impact structural or operational characteristics of the software, and influence non-functional requirements; as a result, some things may become easier or more difficult to do because of this change. What are the trade-offs? + +What are the conditions under which this decision no longer applies or becomes irrelevant? + +## Compliance + +Establish how the success is going to be measured. 
Once implemented, the effect might lend itself to be measured, therefore if appropriate a set of criteria for success could be established. Compliance checks of the decision can be manual or automated using a fitness function. If it is the latter this section can then specify how that fitness function would be implemented and whether there are any other changes to the codebase needed to measure this decision for compliance. + +## Notes + +Include any links to existing epics, decisions, dependencies, risks, and policies related to this decision record. This section could also include any further links to configuration items within the project or the codebase, signposting to the areas of change. + +It is important that if the decision is sub-optimal or the choice is tactical or misaligned with the strategic directions the risk related to it is identified and clearly articulated. As a result of that, the expectation is that a [Tech Debt](./tech-debt.md) record is going to be created on the backlog. + +## Actions + +- [x] name, date by, action +- [ ] name, date by, action + +## Tags + +`#availability|#scalability|#elasticity|#performance|#reliability|#resilience|#maintainability|#testability|#deployability|#modularity|#simplicity|#security|#data|#cost|#usability|#accessibility|…` these tags are intended to be operational, structural or cross-cutting architecture characteristics to link to related decisions. 
diff --git a/docs/adr/ADR-nnn_Any_Decision_Record_Template.md b/docs/adr/ADR-nnn_Any_Decision_Record_Template.md new file mode 100644 index 000000000..dcca708b6 --- /dev/null +++ b/docs/adr/ADR-nnn_Any_Decision_Record_Template.md @@ -0,0 +1,78 @@ +# ADR-nnn: Any Decision Record Template + +>| | | +>| ------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +>| Date | `dd/mm/YYYY` _when the decision was last updated_ | +>| Status | `RFC by dd/mm/YYYY, Proposed, In Discussion, Pending Approval, Withdrawn, Rejected, Accepted, Deprecated, ..., Superseded by ADR-XXX or Supersedes ADR-XXX` | +>| Deciders | `Tech Radar, Engineering, Architecture, Solution Assurance, Clinical Assurance, Technical Review and Governance, Information Governance, Cyber Security, Live Services Board,` ... | +>| Significance | `Structure, Nonfunctional characteristics, Dependencies, Interfaces, Construction techniques,` ... | +>| Owners | | + +--- + +- [ADR-nnn: Any Decision Record Template](#adr-nnn-any-decision-record-template) + - [Context](#context) + - [Decision](#decision) + - [Assumptions](#assumptions) + - [Drivers](#drivers) + - [Options](#options) + - [Outcome](#outcome) + - [Rationale](#rationale) + - [Consequences](#consequences) + - [Compliance](#compliance) + - [Notes](#notes) + - [Actions](#actions) + - [Tags](#tags) + +## Context + +Describe the context and the problem statement. Is there a relationship to other decisions previously made? Are there any dependencies and/or constraints within which the decision will be made? Do these need to be reviewed or validated? Please note that environmental limitations or restrictions such as accepted technology standards, commonly recognised and used patterns, engineering and architecture principles, organisation policies, governance and so on, may as an effect narrow down the choices. 
This should also be explicitly documented, as this is a point-in-time decision with the intention of being able to articulate it clearly and justify it later. + +## Decision + +### Assumptions + +Summarise the underlying assumptions in the environment in which you make the decision. This could be related to technology changes, forecast of the monetary and non-monetary costs, further delivery commitments, impact from external drivers etc., and any known unknowns that translate to risks. + +### Drivers + +List the decision drivers that motivate this change or course of action. This may include any identified risks and residual risks after applying the decision. + +### Options + +Consider a comprehensive set of alternative options; provide weighting if applicable. + +### Outcome + +State the decision outcome as a result of taking into account all of the above. Is it a reversible or irreversible decision? + +### Rationale + +Provide a rationale for the decision that is based on weighing the options to ensure that the same questions are not going to be asked again and again unless the decision needs to be superseded. + +For non-trivial decisions a comparison table can be useful for the reviewer. Decision criteria down one side, options across the top. You'll likely find decision criteria come from the Drivers section above. Effort can be an important driving factor. You may have an intuitive feel for this, but reviewers will not. T-shirt sizing the effort for each option may help communicate. + +## Consequences + +Describe the resulting context, after applying the decision. All the identified consequences should be listed here, not just the positive ones. Any decision comes with many implications. 
For example, it may introduce a need to make other decisions as an effect of cross-cutting concerns; it may impact structural or operational characteristics of the software, and influence non-functional requirements; as a result, some things may become easier or more difficult to do because of this change. What are the trade-offs? + +What are the conditions under which this decision no longer applies or becomes irrelevant? + +## Compliance + +Establish how the success is going to be measured. Once implemented, the effect might lend itself to be measured, therefore if appropriate a set of criteria for success could be established. Compliance checks of the decision can be manual or automated using a fitness function. If it is the latter this section can then specify how that fitness function would be implemented and whether there are any other changes to the codebase needed to measure this decision for compliance. + +## Notes + +Include any links to existing epics, decisions, dependencies, risks, and policies related to this decision record. This section could also include any further links to configuration items within the project or the codebase, signposting to the areas of change. + +It is important that if the decision is sub-optimal or the choice is tactical or misaligned with the strategic directions the risk related to it is identified and clearly articulated. As a result of that, the expectation is that a [Tech Debt](./tech-debt.md) record is going to be created on the backlog. + +## Actions + +- [x] name, date by, action +- [ ] name, date by, action + +## Tags + +`#availability|#scalability|#elasticity|#performance|#reliability|#resilience|#maintainability|#testability|#deployability|#modularity|#simplicity|#security|#data|#cost|#usability|#accessibility|…` these tags are intended to be operational, structural or cross-cutting architecture characteristics to link to related decisions. 
diff --git a/docs/adr/assets/ADR-003/examples/bash/README.md b/docs/adr/assets/ADR-003/examples/bash/README.md new file mode 100644 index 000000000..90e56cf5f --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/bash/README.md @@ -0,0 +1,32 @@ +# Example: Get GitHub App access token in Bash + +Dependencies are `openssl`, `curl`, `jq` and `gh`. + +Prepare environment: + +```bash +export GITHUB_APP_ID=... +export GITHUB_APP_PK_FILE=... +export GITHUB_ORG="nhs-england-tools" +``` + +Run script: + +```bash +$ cd docs/adr/assets/ADR-003/examples/bash +$ ./script.sh +GITHUB_TOKEN=ghs_... +``` + +Check the token: + +```bash +$ GITHUB_TOKEN=ghs_...; echo "$GITHUB_TOKEN" | gh auth login --with-token +$ gh auth status +github.com + ✓ Logged in to github.com as nhs-england-update-from-template[bot] (keyring) + ✓ Git operations for github.com configured to use https protocol. + ✓ Token: ghs_************************************ +``` + +See the [example (script.sh)](./script.sh) implementation. This script has been written to illustrate the concept in a clear and simple way. It is not a production ready code. diff --git a/docs/adr/assets/ADR-003/examples/bash/script.sh b/docs/adr/assets/ADR-003/examples/bash/script.sh new file mode 100755 index 000000000..7dd83464c --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/bash/script.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +function main() { + + if [[ -z "$GITHUB_APP_ID" || -z "$GITHUB_APP_PK_FILE" || -z "$GITHUB_ORG" ]]; then + echo "Environment variables GITHUB_APP_ID, GITHUB_APP_PK_FILE and GITHUB_ORG must be passed to this program." 
+ exit 1 + fi + + jwt_token=$(get-jwt-token) + installation_id=$(get-installation-id) + access_token=$(get-access-token) + + echo "GITHUB_TOKEN=$access_token" +} + +function get-jwt-token() { + + header=$(echo -n '{"alg":"RS256","typ":"JWT"}' | base64 | tr -d '=' | tr -d '\n=' | tr -- '+/' '-_') + payload=$(echo -n '{"iat":'"$(date +%s)"',"exp":'$(($(date +%s)+600))',"iss":"'"$GITHUB_APP_ID"'"}' | base64 | tr -d '\n=' | tr -- '+/' '-_') + signature=$(echo -n "$header.$payload" | openssl dgst -binary -sha256 -sign "$GITHUB_APP_PK_FILE" | openssl base64 | tr -d '\n=' | tr -- '+/' '-_') + + echo "$header.$payload.$signature" +} + +function get-installation-id() { + + installations_response=$(curl -sX GET \ + -H "Authorization: Bearer $jwt_token" \ + -H "Accept: application/vnd.github.v3+json" \ + https://api.github.com/app/installations) + + echo "$installations_response" | jq '.[] | select(.account.login == "'"$GITHUB_ORG"'") .id' +} + +function get-access-token() { + + token_response=$(curl -sX POST \ + -H "Authorization: Bearer $jwt_token" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/app/installations/$installation_id/access_tokens") + + echo "$token_response" | jq .token -r +} + +main diff --git a/docs/adr/assets/ADR-003/examples/golang/README.md b/docs/adr/assets/ADR-003/examples/golang/README.md new file mode 100644 index 000000000..87071afcc --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/golang/README.md @@ -0,0 +1,32 @@ +# Example: Get GitHub App access token in Golang + +Dependencies are listed in the `go.mod` file. + +Prepare environment: + +```bash +export GITHUB_APP_ID=... +export GITHUB_APP_PK_FILE=... +export GITHUB_ORG="nhs-england-tools" +``` + +Run script: + +```bash +$ cd docs/adr/assets/ADR-003/examples/golang +$ go run main.go +GITHUB_TOKEN=ghs_... 
+``` + +Check the token: + +```bash +$ GITHUB_TOKEN=ghs_...; echo "$GITHUB_TOKEN" | gh auth login --with-token +$ gh auth status +github.com + ✓ Logged in to github.com as nhs-england-update-from-template[bot] (keyring) + ✓ Git operations for github.com configured to use https protocol. + ✓ Token: ghs_************************************ +``` + +See the [example (main.go)](./main.go) implementation. This script has been written to illustrate the concept in a clear and simple way. It is not a production ready code. diff --git a/docs/adr/assets/ADR-003/examples/golang/go.mod b/docs/adr/assets/ADR-003/examples/golang/go.mod new file mode 100644 index 000000000..870c97cbf --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/golang/go.mod @@ -0,0 +1,10 @@ +module github-app-get-tokent + +go 1.21.0 + +require ( + github.com/go-resty/resty/v2 v2.7.0 + github.com/golang-jwt/jwt v3.2.2+incompatible +) + +require golang.org/x/net v0.17.0 // indirect diff --git a/docs/adr/assets/ADR-003/examples/golang/go.sum b/docs/adr/assets/ADR-003/examples/golang/go.sum new file mode 100644 index 000000000..ab2f1b8ff --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/golang/go.sum @@ -0,0 +1,12 @@ +github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= +github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/docs/adr/assets/ADR-003/examples/golang/main.go b/docs/adr/assets/ADR-003/examples/golang/main.go new file mode 100644 index 000000000..42553cf74 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/golang/main.go @@ -0,0 +1,88 @@ +package main + +import ( + "crypto/x509" + "encoding/json" + "encoding/pem" + "fmt" + "io/ioutil" + "log" + "os" + "time" + + "github.com/go-resty/resty/v2" + "github.com/golang-jwt/jwt" +) + +type Installation struct { + ID int `json:"id"` + Account struct { + Login string `json:"login"` + } `json:"account"` +} + +func main() { + + ghAppId := os.Getenv("GITHUB_APP_ID") + ghAppPkFile := os.Getenv("GITHUB_APP_PK_FILE") + ghOrg := os.Getenv("GITHUB_ORG") + + if ghAppId == "" || ghAppPkFile == "" || ghOrg == "" { + log.Fatalf("Environment variables GITHUB_APP_ID, GITHUB_APP_PK_FILE and GITHUB_ORG must be passed to this program.") + } + + jwtToken := getJwtToken(ghAppId, ghAppPkFile) + installationId := getInstallationId(jwtToken, ghOrg) + accessToken := getAccessToken(jwtToken, installationId) + + fmt.Printf("GITHUB_TOKEN=%s\n", accessToken) +} + +func getJwtToken(ghAppId string, ghAppPkFile string) string { + + pemContent, _ := ioutil.ReadFile(ghAppPkFile) + block, _ := pem.Decode(pemContent) + privateKey, _ := x509.ParsePKCS1PrivateKey(block.Bytes) + token := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.MapClaims{ + "iat": time.Now().Unix(), + "exp": time.Now().Add(10 * time.Minute).Unix(), + "iss": ghAppId, + }) + jwtToken, _ := token.SignedString(privateKey) + + return jwtToken +} + +func getInstallationId(jwtToken string, ghOrg string) int { + + client := resty.New() + resp, _ := 
client.R(). + SetHeader("Authorization", "Bearer "+jwtToken). + SetHeader("Accept", "application/vnd.github.v3+json"). + Get("https://api.github.com/app/installations") + + var installations []Installation + json.Unmarshal(resp.Body(), &installations) + installationId := 0 + for _, installation := range installations { + if installation.Account.Login == ghOrg { + installationId = installation.ID + } + } + + return installationId +} + +func getAccessToken(jwtToken string, installationId int) string { + + client := resty.New() + resp, _ := client.R(). + SetHeader("Authorization", "Bearer "+jwtToken). + SetHeader("Accept", "application/vnd.github.v3+json"). + Post(fmt.Sprintf("https://api.github.com/app/installations/%d/access_tokens", installationId)) + + var result map[string]interface{} + json.Unmarshal(resp.Body(), &result) + + return result["token"].(string) +} diff --git a/docs/adr/assets/ADR-003/examples/nodejs/.gitignore b/docs/adr/assets/ADR-003/examples/nodejs/.gitignore new file mode 100644 index 000000000..3db80f746 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/nodejs/.gitignore @@ -0,0 +1,134 @@ +yarn.lock + +SEE: https://github.com/github/gitignore/blob/main/Node.gitignore + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ 
+jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* diff --git a/docs/adr/assets/ADR-003/examples/nodejs/README.md b/docs/adr/assets/ADR-003/examples/nodejs/README.md new file mode 100644 index 000000000..914f710a4 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/nodejs/README.md @@ -0,0 +1,29 @@ +# Example: Get GitHub App access token in Node.js TypeScript (using Octokit) + +Dependencies are listed in the `package.json` file. + +Prepare environment: + +```bash +export GITHUB_APP_ID=... +export GITHUB_APP_PK_FILE=... 
+export GITHUB_ORG="nhs-england-tools" +``` + +Run script: + +```bash +$ cd docs/adr/assets/ADR-003/examples/nodejs +$ yarn install +$ yarn start +[ + { + name: 'repository-template', + full_name: 'nhs-england-tools/repository-template', + private: false, + owner: { + login: 'nhs-england-tools', + ... +``` + +See the [example (`main.ts`)](./main.ts) implementation. This script has been written to illustrate the concept in a clear and simple way. It is not a production ready code. diff --git a/docs/adr/assets/ADR-003/examples/nodejs/main.ts b/docs/adr/assets/ADR-003/examples/nodejs/main.ts new file mode 100644 index 000000000..e3a72ff69 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/nodejs/main.ts @@ -0,0 +1,57 @@ +import { Octokit } from "octokit"; +import { createAppAuth } from "@octokit/auth-app"; +import * as fs from "fs"; + +export const getOctokit = async ( + appId: string, + privateKey: string, + orgName: string +): Promise => { + const appOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId, + privateKey, + }, + }); + const installations = await appOctokit.request("GET /app/installations"); + for (const d of installations.data) { + //@ts-ignore + if (d.account.login === orgName) { + const installationId = d.id; + const installationOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId, + privateKey, + installationId, + }, + }); + return installationOctokit; + } + } + + throw new Error(`No installation found for organization ${orgName}`); +}; + +const ghAppId = process.env.GITHUB_APP_ID; +const ghAppPkFile = process.env.GITHUB_APP_PK_FILE; +const ghOrg = process.env.GITHUB_ORG; + +(async () => { + if (!ghAppId || !ghAppPkFile || !ghOrg) { + throw new Error( + "Environment variables GITHUB_APP_ID, GITHUB_APP_PK_FILE, and GITHUB_ORG must be passed to this program." 
+ ); + } + const octokit = await getOctokit( + ghAppId, + fs.readFileSync(ghAppPkFile, "utf8"), + ghOrg + ); + const repos = await octokit.request("GET /orgs/{org}/repos", { + org: ghOrg, + }); + + console.log(repos.data); +})(); diff --git a/docs/adr/assets/ADR-003/examples/nodejs/package.json b/docs/adr/assets/ADR-003/examples/nodejs/package.json new file mode 100644 index 000000000..0a867c4ab --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/nodejs/package.json @@ -0,0 +1,14 @@ +{ + "main": "main.ts", + "scripts": { + "start": "ts-node main.ts" + }, + "dependencies": { + "@octokit/auth-app": "^6.0.0", + "octokit": "^3.1.0" + }, + "devDependencies": { + "ts-node": "^10.9.1", + "typescript": "^5.2.2" + } +} diff --git a/docs/adr/assets/ADR-003/examples/nodejs/tsconfig.json b/docs/adr/assets/ADR-003/examples/nodejs/tsconfig.json new file mode 100644 index 000000000..251052903 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/nodejs/tsconfig.json @@ -0,0 +1,10 @@ +{ + "compilerOptions": { + "target": "ES6", + "module": "commonjs", + "strict": true, + "esModuleInterop": true + }, + "include": ["*.ts"], + "exclude": ["node_modules"] +} diff --git a/docs/adr/assets/ADR-003/examples/python/README.md b/docs/adr/assets/ADR-003/examples/python/README.md new file mode 100644 index 000000000..36d0e1888 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/python/README.md @@ -0,0 +1,33 @@ +# Example: Get GitHub App access token in Python + +Dependencies are listed in the `requirements.txt` file. + +Prepare environment: + +```bash +export GITHUB_APP_ID=... +export GITHUB_APP_PK_FILE=... +export GITHUB_ORG="nhs-england-tools" +``` + +Run script: + +```bash +$ cd docs/adr/assets/ADR-003/examples/python +$ pip install -r requirements.txt +$ python main.py +GITHUB_TOKEN=ghs_... 
+``` + +Check the token: + +```bash +$ GITHUB_TOKEN=ghs_...; echo "$GITHUB_TOKEN" | gh auth login --with-token +$ gh auth status +github.com + ✓ Logged in to github.com as nhs-england-update-from-template[bot] (keyring) + ✓ Git operations for github.com configured to use https protocol. + ✓ Token: ghs_************************************ +``` + +See the [example (main.py)](./main.py) implementation. This script has been written to illustrate the concept in a clear and simple way. It is not a production ready code. diff --git a/docs/adr/assets/ADR-003/examples/python/main.py b/docs/adr/assets/ADR-003/examples/python/main.py new file mode 100644 index 000000000..4968d3851 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/python/main.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 + +import jwt +import os +import requests +import time + + +def main(): + + gh_app_id = os.environ.get("GITHUB_APP_ID") + gh_app_pk_file = os.environ.get("GITHUB_APP_PK_FILE") + gh_org = os.environ.get("GITHUB_ORG") + + if not gh_app_id or not gh_app_pk_file or not gh_org: + raise ValueError("Environment variables GITHUB_APP_ID, GITHUB_APP_PK_FILE and GITHUB_ORG must be passed to this program.") + + jwt_token = get_jwt_token(gh_app_id, gh_app_pk_file) + installation_id = get_installation_id(jwt_token, gh_org) + access_token = get_access_token(jwt_token, installation_id) + + print(f"GITHUB_TOKEN={access_token}") + + +def get_jwt_token(gh_app_id, gh_app_pk_file): + + with open(gh_app_pk_file, "rb") as file: + private_key = file.read() + payload = {"iat": int(time.time()), "exp": int(time.time()) + 600, "iss": gh_app_id} + jwt_token = jwt.encode(payload, private_key, algorithm="RS256") + + return jwt_token + + +def get_installation_id(jwt_token, gh_org): + + headers = { + "Authorization": f"Bearer {jwt_token}", + "Accept": "application/vnd.github.v3+json", + } + url = "https://api.github.com/app/installations" + response = requests.get(url, headers=headers) + + installation_id = None + for 
installation in response.json(): + if installation["account"]["login"] == gh_org: + installation_id = installation["id"] + break + + return installation_id + + +def get_access_token(jwt_token, installation_id): + + headers = { + "Authorization": f"Bearer {jwt_token}", + "Accept": "application/vnd.github.v3+json", + } + url = f"https://api.github.com/app/installations/{installation_id}/access_tokens" + response = requests.post(url, headers=headers) + + return response.json().get("token") + + +if __name__ == "__main__": + main() diff --git a/docs/adr/assets/ADR-003/examples/python/requirements.txt b/docs/adr/assets/ADR-003/examples/python/requirements.txt new file mode 100644 index 000000000..0e4c971b1 --- /dev/null +++ b/docs/adr/assets/ADR-003/examples/python/requirements.txt @@ -0,0 +1,2 @@ +PyJWT==2.8.0 +requests==2.31.0 diff --git a/docs/developer-guides/Bash_and_Make.md b/docs/developer-guides/Bash_and_Make.md new file mode 100644 index 000000000..d43799100 --- /dev/null +++ b/docs/developer-guides/Bash_and_Make.md @@ -0,0 +1,153 @@ +# Developer Guide: Bash and Make + +- [Developer Guide: Bash and Make](#developer-guide-bash-and-make) + - [Using Make](#using-make) + - [Using Bash](#using-bash) + - [Make and Bash working together](#make-and-bash-working-together) + - [Conventions](#conventions) + - [Debugging](#debugging) + - [Scripts](#scripts) + +## Using Make + +Sample make target signature definition: + +```makefile +some-target: # Target description - mandatory: foo=[description]; optional: baz=[description, default is 'qux'] @Category + # Recipe implementation... +``` + +- `some-target`: This is the name of the target you would specify when you want to run this particular target. Use the kebab-case naming convention and prefix with an underscore `_` to mark it as a "private" target. The first part of the name is used for grouping, e.g. `docker-*` or `terraform-*`. 
+- `Target Description`: Provided directly after the target name as a single line, so be concise. +- `mandatory` parameters: Parameters that must be provided when invoking the target. Each parameter has its own description. Please follow the specified format as it is used by `make help`. +- `optional` parameters: Parameters that are not required when invoking the target. They may have a default value. Each parameter has its own description. +- `@Category` label: Used for grouping by the `make help` command. +- `Recipe implementation`: This section defines the actual commands or steps the target will execute. **Do not exceed 5 lines of effective code**. For more complex operations, use a shell script. Refer to the `docker-build` implementation in the [docker.mk](../../scripts/docker/docker.mk) file. More complex operations are implemented in the [docker.sh](../../scripts/docker/docker.lib.sh) script for readability and simplicity. + +Run make target from a terminal: + +```shell +foo=bar make some-target # Environment variable is passed to the make target execution process +make some-target foo=bar # Make argument is passed to the make target execution process +``` + +By convention we use uppercase variables for global settings that you would ordinarily associate with environment variables. We use lower-case variables as arguments to call functions or make targets, in this case. + +All make targets should be added to the `${VERBOSE}.SILENT:` section of the `make` file, which prevents `make` from printing commands before executing them. The `${VERBOSE}` prefix on the `.SILENT:` special target allows toggling it if needed. If you explicitly want output from a certain line, use `echo`. + +It is worth noting that by default, `make` creates a new system process to execute each line of a recipe. This is not the desired behaviour for us and the entire content of a make recipe (a target) should be run in a single shell invocation. 
This has been configured in this repository by setting the [`.ONESHELL:`](https://www.gnu.org/software/make/manual/html_node/One-Shell.html) special target in the `scripts/init.mk` file. + +To see all available make targets, run `make help`. + +![make help](./assets/make_help.png) + +## Using Bash + +When working in the command-line ensure the environment variables are reset to their initial state. This can be done by reloading shell using the `env -i $SHELL` command. + +Sample Bash function definition: + +```shell +# Short function description +# Arguments (provided as environment variables): +# foo=[description] +# baz=[description, default is 'qux'] +function some-shell-function() { + # Function implementation... +``` + +Run Bash function from a terminal: + +```shell +source scripts/a-suite-of-shell-functions +foo=bar some-shell-function # Environment variable is accessible by the function executed in the same operating system process +``` + +```shell +source scripts/a-suite-of-shell-functions +foo=bar +some-shell-function # Environment variable is still accessible by the function +``` + +Run Bash script from a terminal, bear in mind that executing a script creates a child operating system process: + +```shell +# Environment variable has to be exported to be passed to a child process, DO NOT use this pattern +export foo=bar +scripts/a-shell-script +``` + +```shell +# or to be set in the same line before creating a new process, prefer this pattern over the previous one +foo=bar scripts/a-shell-script + +# or when multiple variables are required +foo=bar \ +baz=qux \ + scripts/a-shell-script +``` + +By convention we use uppercase variables for global settings that you would ordinarily associate with environment variables. We use lower-case variables as arguments to be passed into specific functions we call, usually on the same line, right before the function name. 
+ +The command `set -euo pipefail` is commonly used in the Bash scripts, to configure the behaviour of the script in a way that makes it more robust and easier to debug. Here is a breakdown of each option switch: + +- `-e`: Ensures that the script exits immediately if any command returns a non-zero exit status. +- `-u`: Makes the script exit if there is an attempt to use an uninitialised variable. +- `-o pipefail`: Ensures that if any command in a pipeline fails (i.e., returns a non-zero exit status), then the entire pipeline will return that non-zero status. By default, a pipeline returns the exit status of the last command. + +## Make and Bash working together + +Sample make target calling a Bash function. Notice that `baz` is going to be accessible to the function as it is executed in the same operating system process: + +```makefile +some-target: # Run shell function - mandatory: foo=[description] + source scripts/a-suite-of-shell-functions + baz=qux + some-shell-function # 'foo' and 'baz' are accessible by the function +``` + +Sample make target calling another make target. In this case a parameter `baz` has to be passed as a variable to the make target, which is executed in a child process: + +```makefile +some-target: # Call another target - mandatory: foo=[description] + baz=qux \ + make another-target # 'foo' and 'baz' are passed to the make target +``` + +Run it from a terminal: + +```shell +foo=bar make some-target +``` + +## Conventions + +### Debugging + +To assist in investigating scripting issues, the `VERBOSE` variable is available for both Make and Bash scripts. If it is set to `true` or `1`, it prints all the commands that the script executes to the standard output. 
Here is how to use it: + +for Make targets + +```shell +VERBOSE=1 make docker-example-build +``` + +for Bash scripts + +```shell +VERBOSE=1 scripts/shellscript-linter.sh +``` + +### Scripts + +Most scripts provided with this repository template can use tools installed on your `PATH` if they are available or run them from within a Docker container. To force a script to use Docker, the `FORCE_USE_DOCKER` variable is provided. This feature allows you to use custom tooling if it is present on the command-line path. Here is how to use it: + +```shell +FORCE_USE_DOCKER=1 scripts/shellscript-linter.sh +``` + +You can combine it with the `VERBOSE` flag to see the details of the execution flow: + +```shell +VERBOSE=1 FORCE_USE_DOCKER=1 scripts/shellscript-linter.sh +``` diff --git a/docs/developer-guides/Scripting_Docker.md b/docs/developer-guides/Scripting_Docker.md new file mode 100644 index 000000000..fa12093c1 --- /dev/null +++ b/docs/developer-guides/Scripting_Docker.md @@ -0,0 +1,223 @@ +# Developer Guide: Scripting Docker + +- [Developer Guide: Scripting Docker](#developer-guide-scripting-docker) + - [Overview](#overview) + - [Features](#features) + - [Key files](#key-files) + - [Usage](#usage) + - [Quick start](#quick-start) + - [Your image implementation](#your-image-implementation) + - [Conventions](#conventions) + - [Versioning](#versioning) + - [Variables](#variables) + - [Platform architecture](#platform-architecture) + - [FAQ](#faq) + +## Overview + +Docker is a tool for developing, shipping and running applications inside containers for Serverless and Kubernetes-based workloads. It has grown in popularity due to its ability to address several challenges faced by engineers, like: + +- **Consistency across environments**: One of the common challenges in software development is the "it works on my machine" problem. 
Docker containers ensure that applications run the same regardless of where the container is run, be it a developer's local machine, a test environment or a production server. +- **Isolation**: Docker containers are isolated from each other and from the host system. This means that you can run multiple versions of the same software (like databases or libraries) on the same machine without them interfering with each other. +- **Rapid development and deployment**: With Docker, setting up a new instance or environment is just a matter of spinning up a new container, which can be done in seconds. This is especially useful for scaling applications or rapidly deploying fixes. +- **Version control for environments**: Docker images can be versioned, allowing developers to keep track of application environments in the same way they version code. This makes it easy to roll back to a previous version if needed. +- **Resource efficiency**: Containers are lightweight compared to virtual machines (VMs) because they share the same OS kernel and do not require a full OS stack to run. This means you can run many more containers than VMs on a host machine. +- **Microservices architecture**: Docker is particularly well-suited for microservices architectures, where an application is split into smaller, independent services that run in their own containers. This allows for easier scaling, maintenance and updates of individual services. +- **Integration with development tools**: There is a rich ecosystem of tools and platforms that integrate with Docker, including CI/CD tools (like GitHub and Azure DevOps), orchestration platforms (like Kubernetes) and cloud providers (like AWS and Azure). +- **Developer productivity**: With Docker, developers can easily share their environment with teammates. If a new developer joins the team, they can get up and running quickly by simply pulling the necessary Docker images. 
+- **Easy maintenance and update**: With containers, it is easy to update a base image or a software component and then propagate those changes to all instances of the application. +- **Cross-platform compatibility**: Docker containers can be run on any platform that supports Docker, be it Linux, Windows or macOS. This ensures compatibility across different development and production environments. +- **Security**: Docker provides features like secure namespaces and cgroups which isolate applications. Additionally, you can define fine-grained access controls and policies for your containers. +- **Reusable components**: Docker images can be used as base images for other projects, allowing for reusable components. For example, if you have a base image with a configured web server, other teams or projects can use that image as a starting point. + +## Features + +Here are some key features built into this repository's Docker module: + +- Implements the most common Docker routines for efficient container management, e.g. 
build, test and push +- Utilises `sha256` digests for robust image versioning and to enhance security posture +- Enables pull-image-once retrieval based on its digest to optimise performance (Docker does not store `sha256` digests locally) +- Consolidates image versions in a unified `.tool-versions` file for easier dependency management +- Optimises the build process specifically for the `amd64` architecture for consistency +- Applies automatic image versioning according to a predefined pattern for artefact publishing and deployment +- Incorporates metadata through `Dockerfile` labels for enhanced documentation and to conform to standards +- Integrates a linting routine to ensure `Dockerfile` code quality +- Includes an automated test suite to validate Docker scripts +- Provides a ready-to-run example to demonstrate the module's functionality +- Incorporates a best practice guide + +## Key files + +- Scripts + - [`docker.lib.sh`](../../scripts/docker/docker.lib.sh): A library code loaded by custom make targets and CLI scripts + - [`docker.mk`](../../scripts/docker/docker.mk): Customised implementation of the Docker routines loaded by the `scripts/init.mk` file + - [`dgoss.sh`](../../scripts/docker/dgoss.sh): Docker image spec test framework + - [`dockerfile-linter.sh`](../../scripts/docker/dockerfile-linter.sh): `Dockerfile` linter +- Configuration + - [`.tool-versions`](../../.tool-versions): Stores Docker image versions + - [`hadolint.yaml`](../../scripts/config/hadolint.yaml): `Dockerfile` linter configuration file + - [`Dockerfile.metadata`](../../scripts/docker/Dockerfile.metadata): Labels added to image definition as specified by the spec +- Test suite + - [`docker.test.sh`](../../scripts/docker/tests/docker.test.sh): Main file containing all the tests + - [`Dockerfile`](../../scripts/docker/tests/Dockerfile): Image definition for the test suite + - [`VERSION`](../../scripts/docker/tests/VERSION): Version patterns for the test suite +- Usage example + - 
Python-based example [`hello_world`](../../scripts/docker/examples/python) app showing a multi-staged build + - A set of [make targets](https://github.com/nhs-england-tools/repository-template/blob/main/scripts/docker/docker.mk#L18) to run the example + +## Usage + +### Quick start + +Run the test suite: + +```shell +$ make docker-test-suite-run + +test-docker-build PASS +test-docker-test PASS +test-docker-run PASS +test-docker-clean PASS +``` + +Run the example: + +```shell +$ make docker-example-build + +#0 building with "desktop-linux" instance using docker driver +... +#12 DONE 0.0s + +$ make docker-example-run + + * Serving Flask app 'app' + * Debug mode: off +WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. + * Running on all addresses (0.0.0.0) + * Running on http://127.0.0.1:8000 + * Running on http://172.17.0.2:8000 +Press CTRL+C to quit +``` + +### Your image implementation + +Always follow [Docker best practices](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/) while developing images. Start with creating your container definition for the service and store it in the `infrastructure/images` directory. + +Here is a step-by-step guide: + +1. Create `infrastructure/images/cypress/Dockerfile` + + ```Dockerfile + # hadolint ignore=DL3007 + FROM cypress/browsers:latest + ``` + +2. Add the following entry to the `.tool-versions` file. This will be used to replace the `latest` version placeholder in the `Dockerfile`. + + ```text + # docker/cypress/browsers node-20.5.0-chrome-114.0.5735.133-1-ff-114.0.2-edge-114.0.1823.51-1@sha256:8b899d0292e700c80629d13a98ae309295e719f5b4f9aa50a98c6cdd2b6c5215 + ``` + +3. Create `infrastructure/images/cypress/VERSION` + + ```text + ${yyyy}${mm}${dd}-${hash} + ``` + +4. 
Add make target to the `Makefile` + + ```text + build-cypress-image: # Build Cypress Docker image + docker_image=ghcr.io/nhs-england-tools/cypress \ + docker_title="Browser testing" \ + make docker-build dir=infrastructure/images/cypress + + ${VERBOSE}.SILENT: \ + build-cypress-image \ + ``` + +5. Run the build + + ```text + $ make build-cypress-image + #0 building with "desktop-linux" instance using docker driver + ... + #5 exporting to image + #5 exporting layers done + #5 writing image sha256:7440a1a25110cf51f9a1c8a2e0b446e9770ac0db027e55a7d31f2e217f2ff0c7 done + #5 naming to ghcr.io/nhs-england-tools/cypress:20230828-eade960 done + #5 DONE 0.0s + + $ docker images + REPOSITORY TAG IMAGE ID CREATED SIZE + ghcr.io/nhs-england-tools/cypress 20230828-eade960 7440a1a25110 2 weeks ago 608MB + ghcr.io/nhs-england-tools/cypress latest 7440a1a25110 2 weeks ago 608MB + ``` + +6. Commit all changes to these files + +- `infrastructure/images/cypress/Dockerfile` +- `infrastructure/images/cypress/Dockerfile.effective` +- `infrastructure/images/cypress/VERSION` +- `.tool-versions` + +## Conventions + +### Versioning + +You can specify the version tags that the automated build process applies to your images with a `VERSION` file. This file must be located adjacent to the `Dockerfile` where each image is defined. + +It may be a "_statically defined_" version, such as `1.2.3`, `20230601`, etc., or a "_dynamic pattern_" based on the current time and commit hash, e.g. `${yyyy}${mm}${dd}${HH}${MM}${SS}-${hash}`. This pattern will be substituted during the build process to create a `.version` file in the same directory, containing effective content like `20230601153000-123abcd`. See [this function](https://github.com/nhs-england-tools/repository-template/blob/main/scripts/docker/docker.lib.sh#L118) for what template substitutions are available. 
+ +This file is then used by functions defined in [docker.lib.sh](../../scripts/docker/docker.lib.sh) but is ignored by Git, and is not checked in with other files. + +Support for multiple version entries is provided. For instance, if the `VERSION` file contains: + +```text +${yyyy}${mm}${dd} +${yyyy}${mm}${dd}${HH}${MM} +${yyyy}${mm}${dd}-${hash} +squirrel +``` + +The corresponding `.version` file generated by the `docker-build` function may appear as: + +```text +20230601 +20230601-123abcd +squirrel +``` + +In this case, the image is automatically tagged as `20230601`, `20230601-123abcd`, `squirrel` and `latest`, which can be then pushed to a registry by running the `docker-push` function. This versioning approach is particularly useful for projects with multiple deployments per day. + +> [!NOTE]
+> The preferred pattern for versioning is `${yyyy}${mm}${dd}${HH}${MM}` and/or `${yyyy}${mm}${dd}-${hash}` for projects with a cadence of multiple deployments per day. This is compatible with the [Calendar Versioning / CalVer](https://calver.org/) convention. + +Base image versions are maintained in the [.tool-versions](../../.tool-versions) file located in the project's top-level directory. The format is as follows: + +```text +# docker/image/name 1.0.0@sha256:1234567890...abcdef +``` + +This method facilitates dependency management through a single file. The `docker-build` function will replace any instance of `FROM image/name:latest` with `FROM image/name:1.0.0@sha256:1234567890...abcdef`. Additionally, the [Dockerfile.metadata](../../scripts/docker/Dockerfile.metadata) file will be appended to the end of the `Dockerfile.effective` created by the process. + +The reason we do this is so that the deployment version is source-controlled, but the tooling does not interfere with using a more recent Docker image during local development before the new version can be added to the `.tool-versions` file. It also serves as a clean way of templating Docker image definition. + +### Variables + +Set the `docker_image` or `DOCKER_IMAGE` variable for your image. Alternatively, you can use their shorthand versions, `image` or `IMAGE`. To emphasize that it is a global variable, using the uppercase version is recommended, depending on your implementation. + +### Platform architecture + +For cross-platform image support, the `--platform linux/amd64` flag is used to build Docker images, enabling containers to run without any changes on both `amd64` and `arm64` architectures (via emulation). + +## FAQ + +1. _We built our serverless workloads based on AWS Lambda and package them as `.zip` archives. Why do we need Docker?_ + + The primary use case for Docker, and the thing it was invented for, is as a tool for aligning development environments. 
If you have no need for containers as a deployment artefact it is still worth using Docker as a development tool to ensure that everyone working on the project has the same versions of all dependencies, no matter what is installed on your individual machine. + +2. _Should we use custom images for AWS Lambdas?_ + + There should be few cases where this is necessary. Using the AWS-provided images should be the first preference, to minimise the amount of code and infrastructure effort we need to exert. However, there will be cases where the provided images do not work for you. If you think this applies - for instance, if you have inherited a deployable that requires an unsupported runtime - speak to Engineering so that we have awareness of the impact to you and your project and can try to help. See [Working with Lambda container images](https://docs.aws.amazon.com/lambda/latest/dg/images-create.html). diff --git a/docs/developer-guides/Scripting_Terraform.md b/docs/developer-guides/Scripting_Terraform.md new file mode 100644 index 000000000..14b80ca1a --- /dev/null +++ b/docs/developer-guides/Scripting_Terraform.md @@ -0,0 +1,174 @@ +# Developer Guide: Scripting Terraform + +- [Developer Guide: Scripting Terraform](#developer-guide-scripting-terraform) + - [Overview](#overview) + - [Features](#features) + - [Key files](#key-files) + - [Usage](#usage) + - [Quick start](#quick-start) + - [Your stack implementation](#your-stack-implementation) + - [Conventions](#conventions) + - [Secrets](#secrets) + - [Variables](#variables) + - [IaC directory](#iac-directory) + - [FAQ](#faq) + +## Overview + +Terraform is an open-source infrastructure as code (IaC) tool. It allows you to define, provision and manage infrastructure in a declarative way, using a configuration language called HCL. Terraform can manage a wide variety of resources, such as virtual machines, databases, networking components and many more, across multiple cloud providers like AWS and Azure. 
+ +Some advantages of using Terraform are as outlined below: + +- **Declarative configuration**: Terraform enables the precise definition of the desired state of infrastructure, streamlining its creation through a readable and understandable codebase. +- **Version control**: The infrastructure code may be subject to version control, thereby providing an audit trail of environmental changes. +- **Modularisation and reusability**: Terraform facilitates the packaging of infrastructure into modular components, enhancing both reusability and ease of sharing across organisational teams. +- **State management**: Terraform's state management capabilities ensure an accurate representation of real-world resources, enabling features such as resource dependencies and idempotence. +- **Collaboration and workflow**: The platform supports collaboration through features like remote backends and state locking, thereby fostering collective work on infrastructure projects. +- **Community and ecosystem**: A robust community actively contributes to the Terraform ecosystem, providing a wealth of modules and examples that expedite infrastructure development. 
+ +## Features + +Here are some key features built into this repository's Terraform module: + +- Provides Make targets for frequently-used Terraform commands for streamlined execution +- Offers code completion and command signature assistance via Make for enhanced CLI usability +- Supports named arguments with default values for an improved coding experience +- Allows the working directory to be controlled by either arguments or a predefined constant for flexible stack management +- Features a command wrapper to improve the onboarding experience and ensure environmental consistency +- Incorporates both a Git hook and a GitHub action to enforce code quality standards +- Comes with the CI/CD pipeline workflow integration +- Includes a file cleanup routine to efficiently remove temporary resources +- Incorporates a ready-to-run example to demonstrate the module's capabilities +- Integrates a code linting routine to ensure scripts are free from unintended side effects +- Includes a verbose mode for in-depth troubleshooting and debugging +- Incorporates a best practice guide + +## Key files + +- Scripts + - [`terraform.lib.sh`](../../scripts/terraform/terraform.lib.sh) A library code loaded by custom make targets and CLI scripts + - [`terraform.mk`](../../scripts/terraform/terraform.mk): Customised implementation of the Terraform routines loaded by the `scripts/init.mk` file + - [`terraform.sh`](../../scripts/terraform/terraform.sh): Terraform command wrapper +- Configuration + - [`.tool-versions`](../../.tool-versions): Stores Terraform version to be used +- Code quality gates + - [`lint-terraform/action.yaml`](../../.github/actions/lint-terraform/action.yaml): GitHub action + - [`check-terraform-format.sh`](../../scripts/githooks/check-terraform-format.sh): Git hook +- Usage example + - Declarative infrastructure definition example [`terraform-state-aws-s3`](../../scripts/terraform/examples/terraform-state-aws-s3) to store Terraform state + - A set of [make 
targets](https://github.com/nhs-england-tools/repository-template/blob/main/scripts/terraform/terraform.mk#L44) to run the example + +## Usage + +### Quick start + +Run the example: + +```shell +# AWS console access setup +export AWS_ACCESS_KEY_ID="..." +export AWS_SECRET_ACCESS_KEY="..." +export AWS_SESSION_TOKEN="..." +``` + +```shell +$ make terraform-example-provision-aws-infrastructure + +Initializing the backend.. +... +Plan: 5 to add, 0 to change, 0 to destroy. +Saved the plan to: terraform.tfplan +To perform exactly these actions, run the following command to apply: + terraform apply "terraform.tfplan" +... +Apply complete! Resources: 5 added, 0 changed, 0 destroyed. + +$ make terraform-example-destroy-aws-infrastructure + +... +Plan: 0 to add, 0 to change, 5 to destroy. +... +Apply complete! Resources: 0 added, 0 changed, 5 destroyed. +``` + +### Your stack implementation + +Always follow [best practices for using Terraform](https://cloud.google.com/docs/terraform/best-practices-for-terraform) while providing infrastructure as code (IaC) for your service. + +Directory structure: + +```shell +service-repository/ +├─ ... +└─ infrastructure/ + ├─ modules/ + │ ├─ service-module-name/ + │ │ ├─ main.tf + │ │ ├─ outputs.tf + │ │ ├─ variables.tf + │ │ ├─ versions.tf + │ │ └─ README.md + │ ... + ├─ environments/ + │ ├─ dev/ # This is where your ephemeral environments live + │ │ ├─ backend.tf + │ │ ├─ main.tf + │ │ ├─ provider.tf + │ │ └─ terraform.tfvars + │ ├─ nonprod/ + | │ ├─ ... + │ └─ prod/ + | ├─ ... + └─ .gitignore +``` + +At its core, the structure of the Terraform setup consists of two main parts. The `modules` section is designed to house the shared or common configurations for a service. Meanwhile, the individual folders for each environment, like `dev` (ephemeral environments), `nonprod`, `prod` and so on, invoke these shared modules while also defining their unique variables and parameters. 
By arranging resources in distinct Terraform directories for every component, we ensure clarity and promote cohesion. Each environment directory not only references shared code from the `modules` section but also represents a default Terraform workspace as a deployment of the service to the designated environment. + +Stack management: + +```shell +export STACK=infrastructure/environments/dev # or use 'dir' argument on each command +make terraform-init +make terraform-plan opts="-out=terraform.tfplan" +make terraform-apply opts="-auto-approve terraform.tfplan" +make terraform-destroy opts="-auto-approve" +``` + +Plugging it in to the CI/CD pipeline lifecycle: + +```shell +deploy: # Deploy the project artefact to the target environment + # The value assigned to this variable should be driven by the GitHub environments setup + STACK=infrastructure/environments/security-test \ + make environment-set-up + # Prepare datastore + # Deploy artefact + +environment-set-up: # Use for all environment types - STACK=[path to your stack] + make terraform-init + make terraform-plan opts="-out=terraform.tfplan" + make terraform-apply opts="-auto-approve terraform.tfplan" + +environment-tear-down: # Use only for ephemeral environments, e.g. dev and test automation - STACK=[path to your stack] + make terraform-destroy opts="-auto-approve" +``` + +## Conventions + +### Secrets + +GitHub secrets for Terraform must be granular to avoid appearing in logs. For example, use `arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ASSUME_ROLE_NAME }}`. It has been proven that if a role ARN is defined as `AWS_ROLE_ARN`, details such as the account number are not redacted from the output and are visible in plain text. While this information may not be considered sensitive on its own, it could contribute to an attack vector and therefore be used to exploit the service. 
+ +### Variables + +To specify the location of your Terraform [root module](https://developer.hashicorp.com/terraform/language/modules#the-root-module) set the `terraform_stack` or `TERRAFORM_STACK` variable. Alternatively, you can use their shorthand versions, `stack` or `STACK`. To emphasize that it is a global variable, using the uppercase version is recommended, depending on your implementation. All environment stacks must be root modules and should be located in the `infrastructure/environments` directory. + +### IaC directory + +The `infrastructure` directory is used to store IaC, as it is the most descriptive and portable name. This approach enables the use of supporting technologies, CDKs and solutions specific to the cloud providers like AWS and Azure. + +## FAQ + +1. _What are the advantages of using this module over directly invoking Terraform commands?_ + + The primary purpose of this module is to integrate best practices for CI/CD pipeline workflows with infrastructure as code (IaC), offering a well-defined structural framework that encourages modularisation and reusability of components. Additionally, it enhances the onboarding experience and increases the portability of the provisioning process. 
diff --git a/docs/developer-guides/assets/make_help.png b/docs/developer-guides/assets/make_help.png new file mode 100644 index 000000000..de5ce7638 Binary files /dev/null and b/docs/developer-guides/assets/make_help.png differ diff --git a/docs/diagrams/.gitignore b/docs/diagrams/.gitignore new file mode 100644 index 000000000..57b2098f8 --- /dev/null +++ b/docs/diagrams/.gitignore @@ -0,0 +1,2 @@ +*.bkp +*.dtmp diff --git a/docs/diagrams/CD_Pipeline_Structure.drawio b/docs/diagrams/CD_Pipeline_Structure.drawio new file mode 100644 index 000000000..e8da50877 --- /dev/null +++ b/docs/diagrams/CD_Pipeline_Structure.drawio @@ -0,0 +1,205 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/diagrams/CD_Pipeline_Structure.png b/docs/diagrams/CD_Pipeline_Structure.png new file mode 100644 index 000000000..810249de4 Binary files /dev/null and b/docs/diagrams/CD_Pipeline_Structure.png differ diff --git a/docs/diagrams/Repository_Template.drawio b/docs/diagrams/Repository_Template.drawio new file mode 100644 index 000000000..bbe2cbf83 --- /dev/null +++ b/docs/diagrams/Repository_Template.drawio @@ -0,0 +1,127 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/diagrams/Repository_Template_GitHub_Generic.png b/docs/diagrams/Repository_Template_GitHub_Generic.png new file mode 100644 index 000000000..c2170fb2a Binary files /dev/null and 
b/docs/diagrams/Repository_Template_GitHub_Generic.png differ diff --git a/docs/user-guides/Perform_static_analysis.md b/docs/user-guides/Perform_static_analysis.md new file mode 100644 index 000000000..a1f701183 --- /dev/null +++ b/docs/user-guides/Perform_static_analysis.md @@ -0,0 +1,83 @@ +# Guide: Perform static analysis + +- [Guide: Perform static analysis](#guide-perform-static-analysis) + - [Overview](#overview) + - [Key files](#key-files) + - [Setup](#setup) + - [Testing](#testing) + - [Configuration checklist](#configuration-checklist) + +## Overview + +Static code analysis is an essential part of modern software development. It provides automatic checks on your codebase to identify potential bugs, code smells, security vulnerabilities, and maintainability issues. + +[SonarCloud](https://sonarcloud.io), an online service for continuous code quality inspection and static analysis, can be easily integrated with a GitHub repository. This repository template includes all the necessary setup for minimal configuration on your part, facilitating smooth integration with this SaaS offering. + +## Key files + +- [perform-static-analysis.sh](../../scripts/reports/perform-static-analysis.sh): A shell script that performs analysis +- [sonar-scanner.properties](../../scripts/config/sonar-scanner.properties): A configuration file that includes the project details +- [perform-static-analysis/action.yaml](../../.github/actions/perform-static-analysis/action.yaml): GitHub action to run the script as part of the CI/CD pipeline +- [.gitignore](../../.gitignore): Excludes the `.scannerwork` temporary directory created during the process + +## Setup + +Contact the GitHub Admins via their mailbox to have your [SonarCloud](https://sonarcloud.io) access set up. 
+ +## Testing + +You can run and test static analysis locally on a developer's workstation using the following command + +```shell +export SONAR_ORGANISATION_KEY=nhs-england-tools # Replace with your organisation key +export SONAR_PROJECT_KEY=repository-template # Replace with your project key +export SONAR_TOKEN=[replace-with-your-sonar-token] +./scripts/reports/perform-static-analysis.sh +``` + +## Configuration checklist + +> [!WARNING]
+> This section is to be used by the GitHub Admins. + +The list demonstrates the manual way of configuring a project; however, our aim is to automate all the activities below. + +- Create a Sonar project within the organisation space: + - Navigate to `+ > Analyze new project > create a project manually` + - Choose the appropriate organisation + - Set "Display name" + - Set "Project key" (it should be populated automatically) + - Set project visibility to "Public" + - After clicking the 'Next' button, set "The new code for this project will be based on" to "Previous version" + - Click "Create project" +- Add two new groups under `Administration > Groups`: + - `[Programme Name]`, all members of the project + - `[Programme Name] Admins`, who will manage the project's quality gates and quality profiles +- Assign members to the above groups accordingly +- Set group permissions under `Administration > Permissions`: + - For the `[Programme Name] Admins` group, assign: + - "Quality Gates" + - "Quality Profiles" +- Manage project permissions, navigate to `Administration > Projects Management` and select the project you created + - Click on `Edit Permissions` + - Search for `[Programme Name] Admins` group and assign the following: + - "Administer Issues" + - "Administer Security Hotspots" + - "Administer" + - Ensure that other groups do not have unnecessary permissions to administer this project +- Navigate to project `Administration > Analysis Method > Manually` and select `Other (for JS, TS, Go, Python, PHP, ...)` +- In the [sonar-scanner.properties](../../scripts/config/sonar-scanner.properties) file in your repository, set the following properties according to the information provided above + - Set `sonar.[language].[coverage-tool].reportPaths` to ensure the unit test coverage is reported back to Sonar + - Do not set the `sonar.organization` and `sonar.projectKey` properties in this file; do the next step instead + +- Use the Sonar token owned by the "SonarCloud Token GitHub 
Admins" service user. There is an existing token named "Scan all" + +> [!NOTE]
+> For an advanced configuration, create a bot account for your service. For more details, please see this [note](../../docs/adr/ADR-003_Acceptable_use_of_GitHub_PAT_and_Apps_for_authN_and_authZ.md#recommendation-for-github-admins). This account should be given access to your project and must own the `SONAR_TOKEN` for security reasons. + +- Follow the documentation on [creating encrypted secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets) to add the `SONAR_TOKEN` secret to your repository. The GitHub action is already configured to fetch that secret and pass it as a variable. In addition to that: + - Add `SONAR_ORGANISATION_KEY` variable (not a secret) + - Add `SONAR_PROJECT_KEY` variable (not a secret) +- Navigate to project `Administration > Analysis Method` and turn off the `Automatic Analysis` option +- Please refrain from adding your repository to the GitHub SonarCloud App, as this app should not be used. Doing so will duplicate reports and initiate them outside the primary pipeline workflow +- Confirm that the _"Perform static analysis"_ GitHub action is part of your GitHub CI/CD workflow and enforces the _"Sonar Way"_ quality gates. You can find more information about this in the [NHSE Software Engineering Quality Framework](https://github.com/NHSDigital/software-engineering-quality-framework/blob/main/tools/sonarqube.md) diff --git a/docs/user-guides/Run_Git_hooks_on_commit.md b/docs/user-guides/Run_Git_hooks_on_commit.md new file mode 100644 index 000000000..5dd191f5e --- /dev/null +++ b/docs/user-guides/Run_Git_hooks_on_commit.md @@ -0,0 +1,32 @@ +# Guide: Run Git hooks on commit + +- [Guide: Run Git hooks on commit](#guide-run-git-hooks-on-commit) + - [Overview](#overview) + - [Key files](#key-files) + - [Testing](#testing) + +## Overview + +Git hooks are scripts that are located in the [`./scripts/githooks`](../../scripts/githooks) directory. 
They are executed automatically on each commit, provided that the `make config` command has been run locally to set up the project. These same scripts are also part of the CI/CD pipeline execution. This setup serves as a safety net and helps to ensure consistency. + +The [pre-commit](https://pre-commit.com/) framework is a powerful tool for managing Git hooks, providing automated hook installation and management capabilities. + +## Key files + +- Scripts + - [`check-file-format.sh`](../../scripts/githooks/check-file-format.sh) + - [`check-markdown-format.sh`](../../scripts/githooks/check-markdown-format.sh) + - [`check-terraform-format.sh`](../../scripts/githooks/check-terraform-format.sh) + - [`scan-secrets.sh`](../../scripts/githooks/scan-secrets.sh) +- Configuration + - [`pre-commit.yaml`](../../scripts/config/pre-commit.yaml) + - [`init.mk`](../../scripts/init.mk): make targets + +## Testing + +You can run and test the process by executing the following commands from your terminal. These commands should be run from the top-level directory of the repository: + +```shell +make githooks-config +make githooks-run +``` diff --git a/docs/user-guides/Scan_dependencies.md b/docs/user-guides/Scan_dependencies.md new file mode 100644 index 000000000..4145897e5 --- /dev/null +++ b/docs/user-guides/Scan_dependencies.md @@ -0,0 +1,73 @@ +# Guide: Scan dependencies + +- [Guide: Scan dependencies](#guide-scan-dependencies) + - [Overview](#overview) + - [Key files](#key-files) + - [Configuration checklist](#configuration-checklist) + - [Testing](#testing) + - [FAQ](#faq) + +## Overview + +In modern software development, leveraging third-party dependencies is a common practice to reduce redundancy and improve efficiency. However, this introduces potential security risks and compliance issues into our codebase, making dependency scanning crucial. 
This process helps identify known vulnerabilities, or Common Vulnerabilities and Exposures (CVEs), in third-party libraries, allowing us to mitigate security threats proactively. Regular CVE scanning strengthens our codebase's security, ensuring adherence to top-tier security standards. In addition, generating a Software Bill of Materials (SBOM) - a comprehensive inventory of software components, libraries, and modules - is a valuable practice. SBOMs enhance transparency and traceability, giving an overview of all software elements, their versions, and associated licenses. This facilitates effective dependency management, compliance assurance, and timely response to version-specific vulnerabilities. + +[Syft](https://github.com/anchore/syft) and [Grype](https://github.com/anchore/grype) are valuable tools that can bolster this process. Syft generates a detailed SBOM, ensuring full visibility and traceability of all incorporated software components. This facilitates precise tracking, management, and potential updating of dependencies. On the other hand, Grype, as a vulnerability scanner, meticulously examines dependencies for known CVEs, providing an extra layer of security and allowing us to rectify vulnerabilities promptly. By incorporating Syft and Grype into our CI/CD pipeline, we can ensure continuous scanning of dependencies and generate an up-to-date SBOM. This approach enables real-time detection and resolution of vulnerabilities, thereby fortifying our software development lifecycle against security risks and ensuring adherence to compliance requirements. 
+ +## Key files + +- [`create-sbom-report.sh`](../../scripts/reports/create-sbom-report.sh): A shell script that generates SBOM (Software Bill of Materials) +- [`syft.yaml`](../../scripts/config/syft.yaml): A configuration file for the SBOM generator +- [`scan-vulnerabilities.sh`](../../scripts/reports/scan-vulnerabilities.sh): A shell script that performs CVE analysis +- [`grype.yaml`](../../scripts/config/grype.yaml): A configuration file for the CVE scanner +- [`scan-dependencies/action.yaml`](../../.github/actions/scan-dependencies/action.yaml): GitHub action to run the scripts as part of the CI/CD pipeline +- [`.gitignore`](../../.gitignore): Excludes the `*sbom*report.json` and `*vulnerabilities*report.json` report files created during the process + +## Configuration checklist + +- [Adjust the configuration settings](../../scripts/config/grype.yaml) to align with your project's specific requirements +- [Create a dependency baseline](https://github.com/anchore/grype#specifying-matches-to-ignore) for your repository excluding false-positives from the scanning process +- Make sure the GitHub action, which incorporates Syft and Grype, is part of your GitHub CI/CD workflow. More details on this can be found in the [NHSE Software Engineering Quality Framework](https://github.com/NHSDigital/software-engineering-quality-framework/blob/main/tools/dependency-scan/README.md) +- It is crucial to ensure that both, the SBOM and the vulnerabilities reports are uploaded to the central repository or a designated location for streamlined reporting and easy access. 
Here are the secret variables that has to be set for this functionality to work: + - `IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID` - the central AWS account ID where the reports will be uploaded; this will be set by a GitHub organization owner + - `IDP_AWS_REPORT_UPLOAD_REGION` - the region of the AWS account; this will be set by a GitHub organization owner + - `IDP_AWS_REPORT_UPLOAD_ROLE_NAME` - a dedicated role name for this repository that can authenticate to the central location for the purpose of uploading the reports + - `IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT` - this is a dedicated S3 bucket endpoint for uploading the reports and should be in the following format `s3://bucket-name/repository-identifier`, without the trailing `/` +- If you have any queries about how to set this up, please contact either [@stefaniuk](https://github.com/stefaniuk) or [@andyblundell](https://github.com/andyblundell) + +## Testing + +You can run and test the process locally on a developer's workstation using the following commands + +SBOM generator + +```shell +./scripts/reports/create-sbom-report.sh +cat sbom-repository-report.json | jq +``` + +CVE scanner + +```shell +./scripts/reports/scan-vulnerabilities.sh +cat vulnerabilities-repository-reportc.json | jq +``` + +## FAQ + +1. _Why do we need to use all three tools: Syft, Grype, and Dependabot?_ + + Syft, Grype, and Dependabot each serve unique functions in our CI/CD pipeline. Syft is used to generate a detailed Software Bill of Materials (SBOM), providing full visibility and traceability of all incorporated software components. Grype performs detailed scans of dependencies against the Common Vulnerabilities and Exposures (CVEs) list, adding an extra layer of security by introducing a quality gate in the delivery pipeline. Dependabot helps to keep your dependencies up-to-date and can also alert you to known vulnerabilities affecting your dependencies, showing the best path to resolution. 
By using all three, we ensure comprehensive dependency management is in place, from tracking and updating dependencies to identifying and rectifying found vulnerabilities. + +2. _Why don't we use a GitHub Action already available on the Marketplace or the built-in security features of GitHub?_ + + While we indeed leverage GitHub Actions within our CI/CD pipeline, they don't serve as a comprehensive solution for dependency management. Additionally, the built-in security features of GitHub aren't advanced enough to meet our specific requirements. Syft, Grype, and Dependabot provide specialised functionality that we integrate into our pipeline via GitHub Actions. By managing these tools as separate components, we gain greater flexibility in our configuration and can make finer adjustments as required, such as data enrichment. + +3. _Is it feasible to consolidate this functionality into a custom GitHub Action?_ + + Although consolidating this functionality into a custom GitHub Action seems like an optimal approach, this functionality also needs to run as a Git hook. Hence, shell scripting is a more suitable method as it makes less assumptions about local environment configuration or rely on third-party runners, providing quicker feedback. Additionally, incorporating this functionality directly into the repository has several advantages, including: + + - Improved transparency and visibility of the implementation + - Easier investigation of CVEs found in the repository, eliminating dependence on a third party like GitHub + - Enhanced portability and flexibility, allowing the scans to run in diverse environments + + However, this approach should be periodically reviewed as there is an emerging practice to use projects like [act](https://github.com/nektos/act) ~~to make GitHub Actions portable~~. Update: Please see the [Test GitHub Actions locally](../user-guides/Test_GitHub_Actions_locally.md) user guide. 
diff --git a/docs/user-guides/Scan_secrets.md b/docs/user-guides/Scan_secrets.md new file mode 100644 index 000000000..1e3e1e10f --- /dev/null +++ b/docs/user-guides/Scan_secrets.md @@ -0,0 +1,46 @@ +# Guide: Scan secrets + +- [Guide: Scan secrets](#guide-scan-secrets) + - [Overview](#overview) + - [Key files](#key-files) + - [Configuration checklist](#configuration-checklist) + - [Testing](#testing) + - [Removing sensitive data](#removing-sensitive-data) + +## Overview + +Scanning a repository for hard-coded secrets is a crucial security practice. "Hard-coded secrets" pertain to sensitive data such as passwords, API keys and encryption keys that are embedded directly into the code. This practice is strongly discouraged as it may lead to security incidents. + +[Gitleaks](https://github.com/gitleaks/gitleaks) is a powerful open-source tool designed to identify hard-coded secrets and other sensitive information in Git repositories. It works by scanning the commit history and the working directory for sensitive data that should not be there. 
+ +## Key files + +- [`scan-secrets.sh`](../../scripts/githooks/scan-secrets.sh): A shell script that scans the codebase for hard-coded secrets +- [`gitleaks.toml`](../../scripts/config/gitleaks.toml): A configuration file for the secret scanner +- [`.gitleaksignore`](../../.gitleaksignore): A list of fingerprints to ignore by the secret scanner +- [`scan-secrets/action.yaml`](../../.github/actions/scan-secrets/action.yaml): GitHub action to run the scripts as part of the CI/CD pipeline +- [`pre-commit.yaml`](../../scripts/config/pre-commit.yaml): Run the secret scanner as a pre-commit git hook + +## Configuration checklist + +- [Add custom secret patterns](../../scripts/config/gitleaks.toml) to the configuration file to align with your project's specific requirements +- [Create a secret scan baseline](https://github.com/gitleaks/gitleaks/blob/master/README.md#gitleaksignore) for your repository by adding false-positive fingerprints to the ignore list +- Ensure that the GitHub action, which incorporates Gitleaks, forms part of your GitHub CI/CD workflow. 
It is designed to run a full scan as a part of the pipeline, providing additional protection against hard-coded secrets that might have been included prior to the rule additions or by bypassing the scanner +- Further details on this topic can be found in the [decision record](https://github.com/nhs-england-tools/repository-template/blob/main/docs/adr/ADR-002_Scan_repository_for_hardcoded_secrets.md) as well as in the [NHSE Software Engineering Quality Framework](https://github.com/NHSDigital/software-engineering-quality-framework/tree/main/tools/nhsd-git-secrets) where a usage of an alternative tool is shown + +## Testing + +You can execute and test the secret scanning across all commits locally on a developer's workstation using the following command + +```shell +ALL_FILES=true ./scripts/githooks/scan-secrets.sh +``` + +## Removing sensitive data + +Here are some tools that can help in removing sensitive data, such as passwords or API keys, from the Git history + +- [`rtyley/bfg-repo-cleaner`](https://github.com/rtyley/bfg-repo-cleaner) +- [`newren/git-filter-repo`](https://github.com/newren/git-filter-repo) + +For additional guidance, please refer also to the official [GitHub documentation](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository). 
diff --git a/docs/user-guides/Sign_Git_commits.md b/docs/user-guides/Sign_Git_commits.md new file mode 100644 index 000000000..0ad07281c --- /dev/null +++ b/docs/user-guides/Sign_Git_commits.md @@ -0,0 +1,208 @@ +# Guide: Sign Git commits + +- [Guide: Sign Git commits](#guide-sign-git-commits) + - [Overview](#overview) + - [Signing commits using GPG](#signing-commits-using-gpg) + - [Generate GPG key](#generate-gpg-key) + - [Configure Git](#configure-git) + - [Configure GitHub](#configure-github) + - [Troubleshooting](#troubleshooting) + - [Additional settings](#additional-settings) + - [Signing commits using SSH](#signing-commits-using-ssh) + - [Generate SSH key](#generate-ssh-key) + - [Configure Git](#configure-git-1) + - [Configure GitHub](#configure-github-1) + - [Testing](#testing) + +## Overview + +Signing Git commits is a good practice and ensures the correct web of trust has been established for the distributed version control management, e.g. [Bitwarden](https://bitwarden.com/). + +There are two ways to sign commits in GitHub, using a GPG or an SSH signature. Detailed information about this can be found in the following [documentation](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification). It is recommended to use the GPG method for signing commits as GPG keys can be set to expire or be revoked if needed. Below is a step-by-step guide on how to set it up. + +## Signing commits using GPG + +### Generate GPG key + + +If you do not have it already generate a new pair of GPG keys. Please change the passphrase (pleaseChooseYourKeyPassphrase) below and save it in your password manager. + +```shell +USER_NAME="Your Name" +USER_EMAIL="your.name@email" +file=$(echo $USER_EMAIL | sed "s/[^[:alpha:]]/-/g") + +mkdir -p "$HOME/.gnupg" +chmod 0700 "$HOME/.gnupg" +cd "$HOME/.gnupg" +cat > "$file.gpg-key.script" < +ssb nistp256/BBBBBBBBBBBBBBBB 2023-01-01 [E] +``` + +Export your keys. 
+ +```shell +ID=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX +gpg --armor --export $ID > $file.gpg-key.pub +gpg --armor --export-secret-keys $ID > $file.gpg-key +``` + +Import already existing private key. GPG keys are stored in the `~/.gnupg` directory. + +```shell +gpg --import $file.gpg-key +``` + +Remove keys from the GPG agent if no longer needed. + +```shell +gpg --delete-secret-keys $ID +gpg --delete-keys $ID +``` + +### Configure Git + +Use the [following commands](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key#telling-git-about-your-gpg-key) to set your default signing key in Git to the ID of the GPG key you generated. Replace `$ID` with your actual GPG key ID from the script above. + + ```shell + git config --global user.signingkey $ID + ``` + +Then enable automatic signing of Git commits by running: + +```shell +git config --global commit.gpgsign true +``` + +### Configure GitHub + +To [add your GPG public key to your GitHub account](https://docs.github.com/en/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account) follow these steps: + +1. Navigate to your GitHub account settings. +2. From the sidebar, click on "**SSH and GPG keys**". +3. Click on the "**New GPG key**" button. +4. In the "**Title**" field, enter a descriptive name for the key, like "My GitHub signing key". +5. Copy the contents of your public key file and paste it into the "**Key**" field. + + ```shell + cat $file.gpg-key.pub + ``` + +6. Click "**Add GPG key**" to save. + +After completing these steps, your new signing key will be listed in the "**SSH and GPG keys**" section of your GitHub profile. + +### Troubleshooting + +If you receive the error message `error: gpg failed to sign the data`, make sure you added `export GPG_TTY=$(tty)` to your `~/.zshrc` or `~/.bashrc`, and restarted your terminal. 
+ +```shell +sed -i '/^export GPG_TTY/d' ~/.exports +echo "export GPG_TTY=\$TTY" >> ~/.exports +``` + +### Additional settings + +Configure caching git commit signature passphrase for 3 hours + +```shell +source ~/.zshrc # or ~/.bashrc +mkdir -p ~/.gnupg +sed -i '/^pinentry-program/d' ~/.gnupg/gpg-agent.conf 2>/dev/null ||: +echo "pinentry-program $(whereis -q pinentry)" >> ~/.gnupg/gpg-agent.conf +sed -i '/^default-cache-ttl/d' ~/.gnupg/gpg-agent.conf +echo "default-cache-ttl 10800" >> ~/.gnupg/gpg-agent.conf +sed -i '/^max-cache-ttl/d' ~/.gnupg/gpg-agent.conf +echo "max-cache-ttl 10800" >> ~/.gnupg/gpg-agent.conf +gpgconf --kill gpg-agent +git config --global credential.helper cache +#git config --global --unset credential.helper +``` + +## Signing commits using SSH + +### Generate SSH key + +You should not do this if you already have GPG signing set up. One or the other is fine, but not both. + +If you do not already have SSH key access set up on your GitHub account, first [generate a new SSH key](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent). To create a new SSH key, you need to run the following command. This will generate a new SSH key of the type `ed25519` and associate it with your email address. Please replace your.name@email with your actual email address. + +```shell +ssh-keygen -t ed25519 -C "your.name@email" -f "~/.ssh/github-signing-key" +``` + +When you run this command, it will ask you to enter a passphrase. Choose a strong passphrase and make sure to remember it, as you will need to provide it when your key is loaded by the SSH agent. + +### Configure Git + +If you are signing commits locally using an SSH key, you need to [configure Git](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key#telling-git-about-your-ssh-key) accordingly since it is not the default method. 
+ +Run the following command to instruct Git to use the SSH signing key format, instead of the default GPG: + +```shell +git config --global gpg.format ssh +``` + +Next, specify the private key for Git to use: + +```shell +git config --global user.signingkey ~/.ssh/github-signing-key +``` + +Lastly, instruct Git to sign all of your commits: + +```shell +git config --global commit.gpgsign true +``` + +### Configure GitHub + +To [add your SSH public key to your GitHub account](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account) follow these steps: + +1. Navigate to your GitHub account settings. +2. From the sidebar, click on "**SSH and GPG keys**". +3. Click on the "**New SSH key**" button. +4. In the "**Title**" field, enter a descriptive name for the key, like "My GitHub signing key". +5. Copy the contents of your public key file and paste it into the "**Key**" field. + + ```shell + cat ~/.ssh/github-signing-key.pub + ``` + +6. Ensure to select "**Signing Key**" from the "**Key type**" dropdown. +7. Click "**Add SSH key**" to save. + +After completing these steps, your new signing key will be listed in the "**SSH and GPG keys**" section of your GitHub profile. + +## Testing + +To ensure your configuration works as expected, make a commit to a branch locally and push it to GitHub. When you view the commit history of the branch on GitHub, [your latest commit](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification#about-commit-signature-verification) should now display a `Verified` tag, which indicates successful signing with your GPG or SSH key. 
diff --git a/docs/user-guides/Test_GitHub_Actions_locally.md b/docs/user-guides/Test_GitHub_Actions_locally.md new file mode 100644 index 000000000..2da643b2e --- /dev/null +++ b/docs/user-guides/Test_GitHub_Actions_locally.md @@ -0,0 +1,64 @@ +# Guide: Test GitHub Actions locally + +- [Guide: Test GitHub Actions locally](#guide-test-github-actions-locally) + - [Overview](#overview) + - [Key files](#key-files) + - [Prerequisites](#prerequisites) + - [Testing](#testing) + - [FAQ](#faq) + +## Overview + +A GitHub workflow job can be run locally for the purpose of testing. The [nektos/act](https://github.com/nektos/act) project is an open-source tool that allows you to do so. The project aims to make it easier for developers to test and debug their GitHub Actions workflows before pushing changes to their repositories. By using act, you can avoid the potential delays and resource limitations associated with running workflows directly on GitHub. The tool provides a command-line interface and uses Docker containers to emulate the GitHub Actions runner environment. This enables you to execute the entire workflow or individual jobs and steps just as they would run on GitHub. 
+ +## Key files + +- [init.mk](../../scripts/init.mk): Provides the `runner-act` make target +- [.tool-versions](../../.tool-versions): Defines the version of the `actions/actions-runner` Docker image + +## Prerequisites + +The following command-line tools are expected to be installed: + +- [act](https://github.com/nektos/act#installation) +- [docker](https://docs.docker.com/engine/install/) + +## Testing + +Here is an example on how to run a GitHub workflow job: + +```shell +$ make runner-act workflow="stage-1-commit" job="create-lines-of-code-report" + +[Commit stage/Count lines of code] 🚀 Start image=ghcr.io/nhs-england-tools/github-runner-image:20230101-abcdef0-rt +[Commit stage/Count lines of code] 🐳 docker pull image=ghcr.io/nhs-england-tools/github-runner-image:20230101-abcdef0-rt platform=linux/amd64 username= forcePull=false +[Commit stage/Count lines of code] 🐳 docker create image=ghcr.io/nhs-england-tools/github-runner-image:20230101-abcdef0-rt platform=linux/amd64 entrypoint=["tail" "-f" "/dev/null"] cmd=[] +[Commit stage/Count lines of code] 🐳 docker run image=ghcr.io/nhs-england-tools/github-runner-image:20230101-abcdef0-rt platform=linux/amd64 entrypoint=["tail" "-f" "/dev/null"] cmd=[] +[Commit stage/Count lines of code] ⭐ Run Main Checkout code +[Commit stage/Count lines of code] ✅ Success - Main Checkout code +[Commit stage/Count lines of code] ⭐ Run Main Count lines of code +[Commit stage/Count lines of code] ⭐ Run Main Create CLOC report +[Commit stage/Count lines of code] 🐳 docker exec cmd=[bash --noprofile --norc -e -o pipefail /var/run/act/workflow/1-composite-0.sh] user= workdir= +[Commit stage/Count lines of code] ✅ Success - Main Create CLOC report +[Commit stage/Count lines of code] ⭐ Run Main Compress CLOC report +[Commit stage/Count lines of code] 🐳 docker exec cmd=[bash --noprofile --norc -e -o pipefail /var/run/act/workflow/1-composite-1.sh] user= workdir= +| updating: lines-of-code-report.json (deflated 68%) +[Commit stage/Count 
lines of code] ✅ Success - Main Compress CLOC report +[Commit stage/Count lines of code] ☁ git clone 'https://github.com/actions/upload-artifact' # ref=v3 +[Commit stage/Count lines of code] ⭐ Run Main Check prerequisites for sending the report +[Commit stage/Count lines of code] 🐳 docker exec cmd=[bash --noprofile --norc -e -o pipefail /var/run/act/workflow/1-composite-check.sh] user= workdir= +[Commit stage/Count lines of code] ✅ Success - Main Check prerequisites for sending the report +[Commit stage/Count lines of code] ⚙ ::set-output:: secrets_exist=false +[Commit stage/Count lines of code] ☁ git clone 'https://github.com/aws-actions/configure-aws-credentials' # ref=v2 +[Commit stage/Count lines of code] ✅ Success - Main Count lines of code +[Commit stage/Count lines of code] ⚙ ::set-output:: secrets_exist=false +[Commit stage/Count lines of code] ⭐ Run Post Count lines of code +[Commit stage/Count lines of code] ✅ Success - Post Count lines of code +[Commit stage/Count lines of code] 🏁 Job succeeded +``` + +## FAQ + +1. _Can `act` be used to run Git hooks?_ + + The `act` project is a powerful tool that can run a 3rd-party GitHub Actions. You might think about using it to perform the same tasks you have set up in your CI/CD pipeline. However, it is not designed to run or replace Git hooks, like the ones managed by the `pre-commit` framework. What `act` does is mimic the actions that happen on GitHub after you push a commit or make some other change that kicks off a GitHub Actions workflow. This usually involves more rigorous tasks like building your software, running a set of tests or even deploying your code. Utilising it for any other purpose could introduce unnecessary complexity and reduce the reliability of both the development process and the software itself. It is best used only for testing locally jobs and workflows. 
diff --git a/infrastructure/.gitignore b/infrastructure/.gitignore new file mode 100644 index 000000000..22ebdac35 --- /dev/null +++ b/infrastructure/.gitignore @@ -0,0 +1,37 @@ +# SEE: https://github.com/github/gitignore/blob/main/Terraform.gitignore + +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log +crash.*.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. +*.tfvars +*.tfvars.json + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* +*tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc diff --git a/infrastructure/environments/.gitkeep b/infrastructure/environments/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/infrastructure/images/.gitkeep b/infrastructure/images/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/infrastructure/modules/.gitkeep b/infrastructure/modules/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/project.code-workspace b/project.code-workspace new file mode 100644 index 000000000..fbcf948a5 --- /dev/null +++ b/project.code-workspace @@ -0,0 +1,8 @@ +{ + "folders": [ + { + "name": "Repository Template", + "path": "." 
+ } + ] +} diff --git a/scripts/config/gitleaks.toml b/scripts/config/gitleaks.toml new file mode 100644 index 000000000..af5f0bb71 --- /dev/null +++ b/scripts/config/gitleaks.toml @@ -0,0 +1,19 @@ +# SEE: https://github.com/gitleaks/gitleaks/#configuration + +[extend] +useDefault = true # SEE: https://github.com/gitleaks/gitleaks/blob/master/config/gitleaks.toml + +[[rules]] +description = "IPv4" +id = "ipv4" +regex = '''[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}''' + +[rules.allowlist] +regexTarget = "match" +regexes = [ + # Exclude the private network IPv4 addresses as well as the DNS servers for Google and OpenDNS + '''(127\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|10\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3}|192\.168\.[0-9]{1,3}\.[0-9]{1,3}|0\.0\.0\.0|255\.255\.255\.255|8\.8\.8\.8|8\.8\.4\.4|208\.67\.222\.222|208\.67\.220\.220)''', +] + +[allowlist] +paths = ['''.terraform.lock.hcl''', '''poetry.lock''', '''yarn.lock'''] diff --git a/scripts/config/grype.yaml b/scripts/config/grype.yaml new file mode 100644 index 000000000..80c752e22 --- /dev/null +++ b/scripts/config/grype.yaml @@ -0,0 +1,19 @@ +# If using SBOM input, automatically generate CPEs when packages have none +add-cpes-if-none: true + +# ignore: +# # This is the full set of supported rule fields: +# - vulnerability: CVE-2008-4318 +# fix-state: unknown +# package: +# name: libcurl +# version: 1.5.1 +# type: npm +# location: "/usr/local/lib/node_modules/**" + +# # We can make rules to match just by vulnerability ID: +# - vulnerability: CVE-2014-54321 + +# # ...or just by a single package field: +# - package: +# type: gem diff --git a/scripts/config/hadolint.yaml b/scripts/config/hadolint.yaml new file mode 100644 index 000000000..d01a9cebb --- /dev/null +++ b/scripts/config/hadolint.yaml @@ -0,0 +1,7 @@ +# SEE: https://github.com/hadolint/hadolint#configure + +trustedRegistries: + - docker.io + - "*.gcr.io" + - "*.dkr.ecr.*.amazonaws.com" + - "*.azurecr.io" 
diff --git a/scripts/config/markdownlint.yaml b/scripts/config/markdownlint.yaml new file mode 100644 index 000000000..554ab554b --- /dev/null +++ b/scripts/config/markdownlint.yaml @@ -0,0 +1,11 @@ +# SEE: https://github.com/DavidAnson/markdownlint/blob/main/schema/.markdownlint.yaml + +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md013.md +MD013: false + +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md024.md +MD024: + siblings_only: true + +# https://github.com/DavidAnson/markdownlint/blob/main/doc/md033.md +MD033: false diff --git a/scripts/config/pre-commit.yaml b/scripts/config/pre-commit.yaml new file mode 100644 index 000000000..37ca63750 --- /dev/null +++ b/scripts/config/pre-commit.yaml @@ -0,0 +1,40 @@ +repos: +- repo: local + hooks: + - id: scan-secrets + name: Scan secrets + entry: ./scripts/githooks/scan-secrets.sh + args: ["check=staged-changes"] + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-file-format + name: Check file format + entry: ./scripts/githooks/check-file-format.sh + args: ["check=staged-changes"] + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-markdown-format + name: Check Markdown format + entry: ./scripts/githooks/check-markdown-format.sh + args: ["check=staged-changes"] + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-english-usage + name: Check English usage + entry: ./scripts/githooks/check-english-usage.sh + args: ["check=staged-changes"] + language: script + pass_filenames: false +- repo: local + hooks: + - id: lint-terraform + name: Lint Terraform + entry: ./scripts/githooks/check-terraform-format.sh + language: script + pass_filenames: false diff --git a/scripts/config/repository-template.yaml b/scripts/config/repository-template.yaml new file mode 100644 index 000000000..eb37ceece --- /dev/null +++ b/scripts/config/repository-template.yaml @@ -0,0 +1 @@ +update-from-template: diff --git 
a/scripts/config/sonar-scanner.properties b/scripts/config/sonar-scanner.properties new file mode 100644 index 000000000..147891dc5 --- /dev/null +++ b/scripts/config/sonar-scanner.properties @@ -0,0 +1,9 @@ +# Please DO NOT set the following properties `sonar.organization` and `sonar.projectKey` in this file. They must be stored as `SONAR_ORGANISATION_KEY` and `SONAR_PROJECT_KEY` GitHub secrets. + +sonar.host.url=https://sonarcloud.io +sonar.qualitygate.wait=true +sonar.sourceEncoding=UTF-8 +sonar.sources=. + +#sonar.python.coverage.reportPaths=.coverage/coverage.xml +#sonar.[javascript|typescript].lcov.reportPaths=.coverage/lcov.info diff --git a/scripts/config/syft.yaml b/scripts/config/syft.yaml new file mode 100644 index 000000000..e9f5f580b --- /dev/null +++ b/scripts/config/syft.yaml @@ -0,0 +1,83 @@ +# a list of globs to exclude from scanning. same as --exclude ; for example: +# exclude: +# - "/etc/**" +# - "./out/**/*.json" +exclude: + - ./.git/** + +# maximum number of workers used to process the list of package catalogers in parallel +parallelism: 3 + +# cataloging packages is exposed through the packages and power-user subcommands +package: + # search within archives that do contain a file index to search against (zip) + # note: for now this only applies to the java package cataloger + # SYFT_PACKAGE_SEARCH_INDEXED_ARCHIVES env var + search-indexed-archives: true + # search within archives that do not contain a file index to search against (tar, tar.gz, tar.bz2, etc) + # note: enabling this may result in a performance impact since all discovered compressed tars will be decompressed + # note: for now this only applies to the java package cataloger + # SYFT_PACKAGE_SEARCH_UNINDEXED_ARCHIVES env var + search-unindexed-archives: true + cataloger: + # enable/disable cataloging of packages + # SYFT_PACKAGE_CATALOGER_ENABLED env var + enabled: true + # the search space to look for packages (options: all-layers, squashed) + # same as -s ; 
SYFT_PACKAGE_CATALOGER_SCOPE env var + scope: "squashed" + +# cataloging file contents is exposed through the power-user subcommand +file-contents: + cataloger: + # enable/disable cataloging of secrets + # SYFT_FILE_CONTENTS_CATALOGER_ENABLED env var + enabled: true + # the search space to look for secrets (options: all-layers, squashed) + # SYFT_FILE_CONTENTS_CATALOGER_SCOPE env var + scope: "squashed" + # skip searching a file entirely if it is above the given size (default = 1MB; unit = bytes) + # SYFT_FILE_CONTENTS_SKIP_FILES_ABOVE_SIZE env var + skip-files-above-size: 1048576 + # file globs for the cataloger to match on + # SYFT_FILE_CONTENTS_GLOBS env var + globs: [] + +# cataloging file metadata is exposed through the power-user subcommand +file-metadata: + cataloger: + # enable/disable cataloging of file metadata + # SYFT_FILE_METADATA_CATALOGER_ENABLED env var + enabled: true + # the search space to look for file metadata (options: all-layers, squashed) + # SYFT_FILE_METADATA_CATALOGER_SCOPE env var + scope: "squashed" + # the file digest algorithms to use when cataloging files (options: "sha256", "md5", "sha1") + # SYFT_FILE_METADATA_DIGESTS env var + digests: ["sha256"] + +# cataloging secrets is exposed through the power-user subcommand +secrets: + cataloger: + # enable/disable cataloging of secrets + # SYFT_SECRETS_CATALOGER_ENABLED env var + enabled: true + # the search space to look for secrets (options: all-layers, squashed) + # SYFT_SECRETS_CATALOGER_SCOPE env var + scope: "all-layers" + # show extracted secret values in the final JSON report + # SYFT_SECRETS_REVEAL_VALUES env var + reveal-values: false + # skip searching a file entirely if it is above the given size (default = 1MB; unit = bytes) + # SYFT_SECRETS_SKIP_FILES_ABOVE_SIZE env var + skip-files-above-size: 1048576 + # name-regex pairs to consider when searching files for secrets. Note: the regex must match single line patterns + # but may also have OPTIONAL multiline capture groups. 
Regexes with a named capture group of "value" will + # use the entire regex to match, but the secret value will be assumed to be entirely contained within the + # "value" named capture group. + additional-patterns: {} + # names to exclude from the secrets search, valid values are: "aws-access-key", "aws-secret-key", "pem-private-key", + # "docker-config-auth", and "generic-api-key". Note: this does not consider any names introduced in the + # "secrets.additional-patterns" config option. + # SYFT_SECRETS_EXCLUDE_PATTERN_NAMES env var + exclude-pattern-names: [] diff --git a/scripts/config/vale/styles/Vocab/words/accept.txt b/scripts/config/vale/styles/Vocab/words/accept.txt new file mode 100644 index 000000000..eb9cd04eb --- /dev/null +++ b/scripts/config/vale/styles/Vocab/words/accept.txt @@ -0,0 +1,17 @@ +Bitwarden +Cyber +Dependabot +Gitleaks +Grype +OAuth +Octokit +Podman +Python +Syft +Terraform +Trufflehog +bot +idempotence +onboarding +toolchain +[A-Z]+s diff --git a/scripts/config/vale/styles/Vocab/words/reject.txt b/scripts/config/vale/styles/Vocab/words/reject.txt new file mode 100644 index 000000000..fdc793e78 --- /dev/null +++ b/scripts/config/vale/styles/Vocab/words/reject.txt @@ -0,0 +1 @@ +python diff --git a/scripts/config/vale/vale.ini b/scripts/config/vale/vale.ini new file mode 100644 index 000000000..171494e5f --- /dev/null +++ b/scripts/config/vale/vale.ini @@ -0,0 +1,8 @@ +StylesPath = styles + +MinAlertLevel = suggestion + +Vocab = words + +[*.md] +BasedOnStyles = Vale diff --git a/scripts/docker/Dockerfile.metadata b/scripts/docker/Dockerfile.metadata new file mode 100644 index 000000000..f54092e88 --- /dev/null +++ b/scripts/docker/Dockerfile.metadata @@ -0,0 +1,22 @@ + +# === Metadata ================================================================= + +ARG IMAGE +ARG TITLE +ARG DESCRIPTION +ARG LICENCE +ARG GIT_URL +ARG GIT_BRANCH +ARG GIT_COMMIT_HASH +ARG BUILD_DATE +ARG BUILD_VERSION +LABEL \ + org.opencontainers.image.base.name=$IMAGE \ 
+ org.opencontainers.image.title="$TITLE" \ + org.opencontainers.image.description="$DESCRIPTION" \ + org.opencontainers.image.licenses="$LICENCE" \ + org.opencontainers.image.url=$GIT_URL \ + org.opencontainers.image.ref.name=$GIT_BRANCH \ + org.opencontainers.image.revision=$GIT_COMMIT_HASH \ + org.opencontainers.image.created=$BUILD_DATE \ + org.opencontainers.image.version=$BUILD_VERSION diff --git a/scripts/docker/dgoss.sh b/scripts/docker/dgoss.sh new file mode 100644 index 000000000..e573a48ba --- /dev/null +++ b/scripts/docker/dgoss.sh @@ -0,0 +1,139 @@ +#!/bin/bash +# shellcheck disable=SC2016,SC2154,SC2166 + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +# SEE: https://github.com/goss-org/goss/blob/master/extras/dgoss/dgoss + +set -e + +USAGE="USAGE: $(basename "$0") [run|edit] " +GOSS_FILES_PATH="${GOSS_FILES_PATH:-.}" + +# Container runtime +CONTAINER_RUNTIME="${CONTAINER_RUNTIME:-docker}" + +info() { + echo -e "INFO: $*" >&2; +} +error() { + echo -e "ERROR: $*" >&2; + exit 1; +} + +cleanup() { + set +e + { kill "$log_pid" && wait "$log_pid"; } 2> /dev/null + if [ -n "$CONTAINER_LOG_OUTPUT" ]; then + cp "$tmp_dir/docker_output.log" "$CONTAINER_LOG_OUTPUT" + fi + rm -rf "$tmp_dir" + if [[ $id ]];then + info "Deleting container" + $CONTAINER_RUNTIME rm -vf "$id" > /dev/null + fi +} + +run(){ + # Copy in goss + cp "${GOSS_PATH}" "$tmp_dir/goss" + chmod 755 "$tmp_dir/goss" + [[ -e "${GOSS_FILES_PATH}/${GOSS_FILE:-goss.yaml}" ]] && cp "${GOSS_FILES_PATH}/${GOSS_FILE:-goss.yaml}" "$tmp_dir/goss.yaml" && chmod 644 "$tmp_dir/goss.yaml" + [[ -e "${GOSS_FILES_PATH}/goss_wait.yaml" ]] && cp "${GOSS_FILES_PATH}/goss_wait.yaml" "$tmp_dir" && chmod 644 "$tmp_dir/goss_wait.yaml" + [[ -n "${GOSS_VARS}" ]] && [[ -e "${GOSS_FILES_PATH}/${GOSS_VARS}" ]] && cp "${GOSS_FILES_PATH}/${GOSS_VARS}" "$tmp_dir" && chmod 644 "$tmp_dir/${GOSS_VARS}" + + # Switch 
between mount or cp files strategy + GOSS_FILES_STRATEGY=${GOSS_FILES_STRATEGY:="mount"} + case "$GOSS_FILES_STRATEGY" in + mount) + info "Starting $CONTAINER_RUNTIME container" + if [ "$CONTAINER_RUNTIME" == "podman" -a $# == 2 ]; then + id=$($CONTAINER_RUNTIME run -d -v "$tmp_dir:/goss:z" "${@:2}" sleep infinity) + else + id=$($CONTAINER_RUNTIME run -d -v "$tmp_dir:/goss:z" "${@:2}") + fi + ;; + cp) + info "Creating $CONTAINER_RUNTIME container" + id=$($CONTAINER_RUNTIME create "${@:2}") + info "Copy goss files into container" + $CONTAINER_RUNTIME cp "$tmp_dir/." "$id:/goss" + info "Starting $CONTAINER_RUNTIME container" + $CONTAINER_RUNTIME start "$id" > /dev/null + ;; + *) error "Wrong goss files strategy used! Correct options are \"mount\" or \"cp\"." + esac + + $CONTAINER_RUNTIME logs -f "$id" > "$tmp_dir/docker_output.log" 2>&1 & + log_pid=$! + info "Container ID: ${id:0:8}" +} + +get_docker_file() { + local cid=$1 # Docker container ID + local src=$2 # Source file path (in the container) + local dst=$3 # Destination file path + + if $CONTAINER_RUNTIME exec "${cid}" sh -c "test -e ${src}" > /dev/null; then + mkdir -p "${GOSS_FILES_PATH}" + $CONTAINER_RUNTIME cp "${cid}:${src}" "${dst}" + info "Copied '${src}' from container to '${dst}'" + fi +} + +# Main +tmp_dir=$(mktemp -d /tmp/tmp.XXXXXXXXXX) +chmod 777 "$tmp_dir" +trap 'ret=$?;cleanup;exit $ret' EXIT + +GOSS_PATH="${GOSS_PATH:-$(which goss 2> /dev/null || true)}" +[[ $GOSS_PATH ]] || { error "Couldn't find goss installation, please set GOSS_PATH to it"; } +[[ ${GOSS_OPTS+x} ]] || GOSS_OPTS="--color --format documentation" +[[ ${GOSS_WAIT_OPTS+x} ]] || GOSS_WAIT_OPTS="-r 30s -s 1s > /dev/null" +GOSS_SLEEP=${GOSS_SLEEP:-0.2} + +[[ $CONTAINER_RUNTIME =~ ^(docker|podman)$ ]] || { error "Runtime must be one of docker or podman"; } + +case "$1" in + run) + run "$@" + if [[ -e "${GOSS_FILES_PATH}/goss_wait.yaml" ]]; then + info "Found goss_wait.yaml, waiting for it to pass before running tests" + if [[ -z 
"${GOSS_VARS}" ]]; then + if ! $CONTAINER_RUNTIME exec "$id" sh -c "/goss/goss -g /goss/goss_wait.yaml validate $GOSS_WAIT_OPTS"; then + $CONTAINER_RUNTIME logs "$id" >&2 + error "goss_wait.yaml never passed" + fi + else + if ! $CONTAINER_RUNTIME exec "$id" sh -c "/goss/goss -g /goss/goss_wait.yaml --vars='/goss/${GOSS_VARS}' validate $GOSS_WAIT_OPTS"; then + $CONTAINER_RUNTIME logs "$id" >&2 + error "goss_wait.yaml never passed" + fi + fi + fi + [[ $GOSS_SLEEP ]] && { info "Sleeping for $GOSS_SLEEP"; sleep "$GOSS_SLEEP"; } + info "Container health" + if [ "true" != "$($CONTAINER_RUNTIME inspect -f '{{.State.Running}}' "$id")" ]; then + $CONTAINER_RUNTIME logs "$id" >&2 + error "the container failed to start" + fi + info "Running Tests" + if [[ -z "${GOSS_VARS}" ]]; then + $CONTAINER_RUNTIME exec "$id" sh -c "/goss/goss -g /goss/goss.yaml validate $GOSS_OPTS" + else + $CONTAINER_RUNTIME exec "$id" sh -c "/goss/goss -g /goss/goss.yaml --vars='/goss/${GOSS_VARS}' validate $GOSS_OPTS" + fi + ;; + edit) + run "$@" + info "Run goss add/autoadd to add resources" + $CONTAINER_RUNTIME exec -it "$id" sh -c 'cd /goss; PATH="/goss:$PATH" exec sh' + get_docker_file "$id" "/goss/goss.yaml" "${GOSS_FILES_PATH}/${GOSS_FILE:-goss.yaml}" + get_docker_file "$id" "/goss/goss_wait.yaml" "${GOSS_FILES_PATH}/goss_wait.yaml" + if [[ -n "${GOSS_VARS}" ]]; then + get_docker_file "$id" "/goss/${GOSS_VARS}" "${GOSS_FILES_PATH}/${GOSS_VARS}" + fi + ;; + *) + error "$USAGE" +esac diff --git a/scripts/docker/docker.lib.sh b/scripts/docker/docker.lib.sh new file mode 100644 index 000000000..187871050 --- /dev/null +++ b/scripts/docker/docker.lib.sh @@ -0,0 +1,303 @@ +#!/bin/bash +# shellcheck disable=SC2155 + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# A set of Docker functions written in Bash. 
+# +# Usage: +# $ source ./docker.lib.sh +# +# Arguments (provided as environment variables): +# DOCKER_IMAGE=ghcr.io/org/repo # Docker image name +# DOCKER_TITLE="My Docker image" # Docker image title +# TOOL_VERSIONS=$project_dir/.tool-versions # Path to the tool versions file + +# ============================================================================== +# Functions to be used with custom images. + +# Build Docker image. +# Arguments (provided as environment variables): +# dir=[path to the Dockerfile to use, default is '.'] +function docker-build() { + + local dir=${dir:-$PWD} + + version-create-effective-file + _create-effective-dockerfile + # The current directory must be changed for the image build script to access + # assets that need to be copied + current_dir=$(pwd) + cd "$dir" + docker build \ + --progress=plain \ + --platform linux/amd64 \ + --build-arg IMAGE="${DOCKER_IMAGE}" \ + --build-arg TITLE="${DOCKER_TITLE}" \ + --build-arg DESCRIPTION="${DOCKER_TITLE}" \ + --build-arg LICENCE=MIT \ + --build-arg GIT_URL="$(git config --get remote.origin.url)" \ + --build-arg GIT_BRANCH="$(_get-git-branch-name)" \ + --build-arg GIT_COMMIT_HASH="$(git rev-parse --short HEAD)" \ + --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%S%z")" \ + --build-arg BUILD_VERSION="$(_get-effective-version)" \ + --tag "${DOCKER_IMAGE}:$(_get-effective-version)" \ + --rm \ + --file "${dir}/Dockerfile.effective" \ + . + cd "$current_dir" + # Tag the image with all the stated versions, see the documentation for more details + for version in $(_get-all-effective-versions) latest; do + docker tag "${DOCKER_IMAGE}:$(_get-effective-version)" "${DOCKER_IMAGE}:${version}" + done + docker rmi --force "$(docker images | grep "" | awk '{print $3}')" 2> /dev/null ||: +} + +# Check test Docker image. 
+# Arguments (provided as environment variables): +# args=[arguments to pass to Docker to run the container, default is none/empty] +# cmd=[command to pass to the container for execution, default is none/empty] +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +# check=[output string to search for] +function docker-check-test() { + + local dir=${dir:-$PWD} + + # shellcheck disable=SC2086,SC2154 + docker run --rm --platform linux/amd64 \ + ${args:-} \ + "${DOCKER_IMAGE}:$(_get-effective-version)" 2>/dev/null \ + ${cmd:-} \ + | grep -q "${check}" && echo PASS || echo FAIL +} + +# Run Docker image. +# Arguments (provided as environment variables): +# args=[arguments to pass to Docker to run the container, default is none/empty] +# cmd=[command to pass to the container for execution, default is none/empty] +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function docker-run() { + + local dir=${dir:-$PWD} + + # shellcheck disable=SC2086 + docker run --rm --platform linux/amd64 \ + ${args:-} \ + "${DOCKER_IMAGE}:$(dir="$dir" _get-effective-version)" \ + ${cmd:-} +} + +# Push Docker image. +# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function docker-push() { + + local dir=${dir:-$PWD} + + # Push all the image tags based on the stated versions, see the documentation for more details + for version in $(dir="$dir" _get-all-effective-versions) latest; do + docker push "${DOCKER_IMAGE}:${version}" + done +} + +# Remove Docker resources. 
+# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function docker-clean() { + + local dir=${dir:-$PWD} + + for version in $(dir="$dir" _get-all-effective-versions) latest; do + docker rmi "${DOCKER_IMAGE}:${version}" > /dev/null 2>&1 ||: + done + rm -f \ + .version \ + Dockerfile.effective +} + +# Create effective version from the VERSION file. +# Arguments (provided as environment variables): +# dir=[path to the VERSION file to use, default is '.'] +# BUILD_DATETIME=[build date and time in the '%Y-%m-%dT%H:%M:%S%z' format generated by the CI/CD pipeline, default is current date and time] +function version-create-effective-file() { + + local dir=${dir:-$PWD} + local version_file="$dir/VERSION" + local build_datetime=${BUILD_DATETIME:-$(date -u +'%Y-%m-%dT%H:%M:%S%z')} + + if [ -f "$version_file" ]; then + # shellcheck disable=SC2002 + cat "$version_file" | \ + sed "s/\(\${yyyy}\|\$yyyy\)/$(date --date="${build_datetime}" -u +"%Y")/g" | \ + sed "s/\(\${mm}\|\$mm\)/$(date --date="${build_datetime}" -u +"%m")/g" | \ + sed "s/\(\${dd}\|\$dd\)/$(date --date="${build_datetime}" -u +"%d")/g" | \ + sed "s/\(\${HH}\|\$HH\)/$(date --date="${build_datetime}" -u +"%H")/g" | \ + sed "s/\(\${MM}\|\$MM\)/$(date --date="${build_datetime}" -u +"%M")/g" | \ + sed "s/\(\${SS}\|\$SS\)/$(date --date="${build_datetime}" -u +"%S")/g" | \ + sed "s/\(\${hash}\|\$hash\)/$(git rev-parse --short HEAD)/g" \ + > "$dir/.version" + fi +} + +# ============================================================================== +# Functions to be used with external images. + +# Retrieve the Docker image version from the '.tool-versions' file and pull the +# image if required. This function is to be used in conjunction with the +# external images and it prevents Docker from downloading an image each time it +# is used, since the digest is not stored locally for compressed images. 
To +# optimise, the solution is to pull the image using its digest and then tag it, +# checking this tag for existence for any subsequent use. +# Arguments (provided as environment variables): +# name=[full name of the Docker image] +# match_version=[regexp to match the version, for example if the same image is used with multiple tags, default is '.*'] +# shellcheck disable=SC2001 +function docker-get-image-version-and-pull() { + + # E.g. for the given entry "# docker/ghcr.io/org/image 1.2.3@sha256:hash" in + # the '.tool-versions' file, the following variables will be set to: + # name="ghcr.io/org/image" + # version="1.2.3@sha256:hash" + # tag="1.2.3" + # digest="sha256:hash" + + # Get the image full version from the '.tool-versions' file, + # match it by name and version regex, if given. + local versions_file="${TOOL_VERSIONS:=$(git rev-parse --show-toplevel)/.tool-versions}" + local version="latest" + if [ -f "$versions_file" ]; then + line=$(grep "docker/${name} " "$versions_file" | sed "s/^#\s*//; s/\s*#.*$//" | grep "${match_version:-".*"}") + [ -n "$line" ] && version=$(echo "$line" | awk '{print $2}') + fi + + # Split the image version into two, tag name and digest sha256. + local tag="$(echo "$version" | sed 's/@.*$//')" + local digest="$(echo "$version" | sed 's/^.*@//')" + + # Check if the image exists locally already + if ! docker images | awk '{ print $1 ":" $2 }' | grep -q "^${name}:${tag}$"; then + if [ "$digest" != "latest" ]; then + # Pull image by the digest sha256 and tag it + docker pull \ + --platform linux/amd64 \ + "${name}@${digest}" \ + > /dev/null 2>&1 || true + docker tag "${name}@${digest}" "${name}:${tag}" + else + # Pull the latest image + docker pull \ + --platform linux/amd64 \ + "${name}:latest" \ + > /dev/null 2>&1 || true + fi + fi + + echo "${name}:${version}" +} + +# ============================================================================== +# "Private" functions. + +# Create effective Dockerfile. 
+# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function _create-effective-dockerfile() { + + local dir=${dir:-$PWD} + + cp "${dir}/Dockerfile" "${dir}/Dockerfile.effective" + _replace-image-latest-by-specific-version + _append-metadata +} + +# Replace image:latest by a specific version. +# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function _replace-image-latest-by-specific-version() { + + local dir=${dir:-$PWD} + local versions_file="${TOOL_VERSIONS:=$(git rev-parse --show-toplevel)/.tool-versions}" + local dockerfile="${dir}/Dockerfile.effective" + local build_datetime=${BUILD_DATETIME:-$(date -u +'%Y-%m-%dT%H:%M:%S%z')} + + if [ -f "$versions_file" ]; then + # First, list the entries specific for Docker to take precedence, then the rest but exclude comments + content=$(grep " docker/" "$versions_file"; grep -v " docker/" "$versions_file" ||: | grep -v "^#") + echo "$content" | while IFS= read -r line; do + [ -z "$line" ] && continue + line=$(echo "$line" | sed "s/^#\s*//; s/\s*#.*$//" | sed "s;docker/;;") + name=$(echo "$line" | awk '{print $1}') + version=$(echo "$line" | awk '{print $2}') + sed -i "s;\(FROM .*\)${name}:latest;\1${name}:${version};g" "$dockerfile" + done + fi + + if [ -f "$dockerfile" ]; then + # shellcheck disable=SC2002 + cat "$dockerfile" | \ + sed "s/\(\${yyyy}\|\$yyyy\)/$(date --date="${build_datetime}" -u +"%Y")/g" | \ + sed "s/\(\${mm}\|\$mm\)/$(date --date="${build_datetime}" -u +"%m")/g" | \ + sed "s/\(\${dd}\|\$dd\)/$(date --date="${build_datetime}" -u +"%d")/g" | \ + sed "s/\(\${HH}\|\$HH\)/$(date --date="${build_datetime}" -u +"%H")/g" | \ + sed "s/\(\${MM}\|\$MM\)/$(date --date="${build_datetime}" -u +"%M")/g" | \ + sed "s/\(\${SS}\|\$SS\)/$(date --date="${build_datetime}" -u +"%S")/g" | \ + sed "s/\(\${hash}\|\$hash\)/$(git rev-parse --short HEAD)/g" \ + 
> "$dockerfile.tmp" + mv "$dockerfile.tmp" "$dockerfile" + fi + + # Do not ignore the issue if 'latest' is used in the effective image + sed -Ei "/# hadolint ignore=DL3007$/d" "${dir}/Dockerfile.effective" +} + +# Append metadata to the end of Dockerfile. +# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function _append-metadata() { + + local dir=${dir:-$PWD} + + cat \ + "$dir/Dockerfile.effective" \ + "$(git rev-parse --show-toplevel)/scripts/docker/Dockerfile.metadata" \ + > "$dir/Dockerfile.effective.tmp" + mv "$dir/Dockerfile.effective.tmp" "$dir/Dockerfile.effective" +} + +# Print top Docker image version. +# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function _get-effective-version() { + + local dir=${dir:-$PWD} + + head -n 1 "${dir}/.version" 2> /dev/null ||: +} + +# Print all Docker image versions. +# Arguments (provided as environment variables): +# dir=[path to the image directory where the Dockerfile is located, default is '.'] +function _get-all-effective-versions() { + + local dir=${dir:-$PWD} + + cat "${dir}/.version" 2> /dev/null ||: +} + +# Print Git branch name. Check the GitHub variables first and then the local Git +# repo. +function _get-git-branch-name() { + + local branch_name=$(git rev-parse --abbrev-ref HEAD) + + if [ -n "${GITHUB_HEAD_REF:-}" ]; then + branch_name=$GITHUB_HEAD_REF + elif [ -n "${GITHUB_REF:-}" ]; then + # shellcheck disable=SC2001 + branch_name=$(echo "$GITHUB_REF" | sed "s#refs/heads/##") + fi + + echo "$branch_name" +} diff --git a/scripts/docker/docker.mk b/scripts/docker/docker.mk new file mode 100644 index 000000000..a31ad9dba --- /dev/null +++ b/scripts/docker/docker.mk @@ -0,0 +1,83 @@ +# This file is for you! Edit it to implement your own Docker make targets. 
+ +# ============================================================================== +# Custom implementation - implementation of a make target should not exceed 5 lines of effective code. +# In most cases there should be no need to modify the existing make targets. + +docker-build: # Build Docker image - optional: docker_dir|dir=[path to the Dockerfile to use, default is '.'] @Development + make _docker cmd="build" \ + dir=$(or ${docker_dir}, ${dir}) + file=$(or ${docker_dir}, ${dir})/Dockerfile.effective + scripts/docker/dockerfile-linter.sh + +docker-push: # Push Docker image - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Development + make _docker cmd="push" \ + dir=$(or ${docker_dir}, ${dir}) + +clean:: # Remove Docker resources (docker) - optional: docker_dir|dir=[path to the image directory where the Dockerfile is located, default is '.'] @Operations + make _docker cmd="clean" \ + dir=$(or ${docker_dir}, ${dir}) + +_docker: # Docker command wrapper - mandatory: cmd=[command to execute]; optional: dir=[path to the image directory where the Dockerfile is located, relative to the project's top-level directory, default is '.'] + # 'DOCKER_IMAGE' and 'DOCKER_TITLE' are passed to the functions as environment variables + DOCKER_IMAGE=$(or ${DOCKER_IMAGE}, $(or ${docker_image}, $(or ${IMAGE}, $(or ${image}, ghcr.io/org/repo)))) + DOCKER_TITLE=$(or "${DOCKER_TITLE}", $(or "${docker_title}", $(or "${TITLE}", $(or "${title}", "Service Docker image")))) + source scripts/docker/docker.lib.sh + dir=$(realpath ${dir}) + docker-${cmd} # 'dir' is accessible by the function as environment variable + +# ============================================================================== +# Quality checks - please DO NOT edit this section! 
+ +docker-shellscript-lint: # Lint all Docker module shell scripts @Quality + for file in $$(find scripts/docker -type f -name "*.sh"); do + file=$${file} scripts/shellscript-linter.sh + done + +# ============================================================================== +# Module tests and examples - please DO NOT edit this section! + +docker-test-suite-run: # Run Docker test suite @ExamplesAndTests + scripts/docker/tests/docker.test.sh + +docker-example-build: # Build Docker example @ExamplesAndTests + source scripts/docker/docker.lib.sh + cd scripts/docker/examples/python + DOCKER_IMAGE=repository-template/docker-example-python + DOCKER_TITLE="Repository Template Docker Python Example" + TOOL_VERSIONS="$(shell git rev-parse --show-toplevel)/scripts/docker/examples/python/.tool-versions.example" + docker-build + +docker-example-lint: # Lint Docker example @ExamplesAndTests + dockerfile=scripts/docker/examples/python/Dockerfile + file=$${dockerfile} scripts/docker/dockerfile-linter.sh + +docker-example-run: # Run Docker example @ExamplesAndTests + source scripts/docker/docker.lib.sh + cd scripts/docker/examples/python + DOCKER_IMAGE=repository-template/docker-example-python + args=" \ + -it \ + --publish 8000:8000 \ + " + docker-run + +docker-example-clean: # Remove Docker example resources @ExamplesAndTests + source scripts/docker/docker.lib.sh + cd scripts/docker/examples/python + DOCKER_IMAGE=repository-template/docker-example-python + docker-clean + +# ============================================================================== + +${VERBOSE}.SILENT: \ + _docker \ + clean \ + docker-build \ + docker-example-build \ + docker-example-clean \ + docker-example-lint \ + docker-example-run \ + docker-push \ + docker-shellscript-lint \ + docker-test-suite-run \ diff --git a/scripts/docker/dockerfile-linter.sh b/scripts/docker/dockerfile-linter.sh new file mode 100755 index 000000000..7e8c75f05 --- /dev/null +++ b/scripts/docker/dockerfile-linter.sh @@ -0,0 +1,78 
@@ +#!/bin/bash + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Hadolint command wrapper. It will run hadolint natively if it is installed, +# otherwise it will run it in a Docker container. +# +# Usage: +# $ [options] ./dockerfile-linter.sh +# +# Arguments (provided as environment variables): +# file=Dockerfile # Path to the Dockerfile to lint, relative to the project's top-level directory, default is './Dockerfile.effective' +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + local file=${file:-./Dockerfile.effective} + if command -v hadolint > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + file="$file" run-hadolint-natively + else + file="$file" run-hadolint-in-docker + fi +} + +# Run hadolint natively. +# Arguments (provided as environment variables): +# file=[path to the Dockerfile to lint, relative to the project's top-level directory] +function run-hadolint-natively() { + + # shellcheck disable=SC2001 + hadolint "$(echo "$file" | sed "s#$PWD#.#")" +} + +# Run hadolint in a Docker container. 
+# Arguments (provided as environment variables): +# file=[path to the Dockerfile to lint, relative to the project's top-level directory] +function run-hadolint-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=hadolint/hadolint docker-get-image-version-and-pull) + # shellcheck disable=SC2001 + docker run --rm --platform linux/amd64 \ + --volume "$PWD:/workdir" \ + --workdir /workdir \ + "$image" \ + hadolint \ + --config /workdir/scripts/config/hadolint.yaml \ + "/workdir/$(echo "$file" | sed "s#$PWD#.#")" +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/docker/examples/python/.tool-versions.example b/scripts/docker/examples/python/.tool-versions.example new file mode 100644 index 000000000..920931162 --- /dev/null +++ b/scripts/docker/examples/python/.tool-versions.example @@ -0,0 +1,2 @@ +# python, SEE: https://hub.docker.com/_/python/tags +# docker/python 3.11.4-alpine3.18@sha256:0135ae6442d1269379860b361760ad2cf6ab7c403d21935a8015b48d5bf78a86 diff --git a/scripts/docker/examples/python/Dockerfile b/scripts/docker/examples/python/Dockerfile new file mode 100644 index 000000000..d0780aa41 --- /dev/null +++ b/scripts/docker/examples/python/Dockerfile @@ -0,0 +1,33 @@ +# `*:latest` will be replaced with a corresponding version stored in the '.tool-versions' file +# hadolint ignore=DL3007 +FROM python:latest as base + +# === Builder ================================================================== + +FROM base AS builder +COPY ./assets/hello_world/requirements.txt /requirements.txt +WORKDIR /packages +RUN set -eux; \ + \ + # Install dependencies 
+ pip install \ + --requirement /requirements.txt \ + --prefix=/packages \ + --no-warn-script-location \ + --no-cache-dir + +# === Runtime ================================================================== + +FROM base +ENV \ + LANG="C.UTF-8" \ + LC_ALL="C.UTF-8" \ + PYTHONDONTWRITEBYTECODE="1" \ + PYTHONUNBUFFERED="1" \ + TZ="UTC" +COPY --from=builder /packages /usr/local +COPY ./assets/hello_world /hello_world +WORKDIR /hello_world +USER nobody +CMD [ "python", "app.py" ] +EXPOSE 8000 diff --git a/scripts/docker/examples/python/Dockerfile.effective b/scripts/docker/examples/python/Dockerfile.effective new file mode 100644 index 000000000..3f1ea6b07 --- /dev/null +++ b/scripts/docker/examples/python/Dockerfile.effective @@ -0,0 +1,54 @@ +# `*:latest` will be replaced with a corresponding version stored in the '.tool-versions' file +FROM python:3.11.4-alpine3.18@sha256:0135ae6442d1269379860b361760ad2cf6ab7c403d21935a8015b48d5bf78a86 as base + +# === Builder ================================================================== + +FROM base AS builder +COPY ./assets/hello_world/requirements.txt /requirements.txt +WORKDIR /packages +RUN set -eux; \ + \ + # Install dependencies + pip install \ + --requirement /requirements.txt \ + --prefix=/packages \ + --no-warn-script-location \ + --no-cache-dir + +# === Runtime ================================================================== + +FROM base +ENV \ + LANG="C.UTF-8" \ + LC_ALL="C.UTF-8" \ + PYTHONDONTWRITEBYTECODE="1" \ + PYTHONUNBUFFERED="1" \ + TZ="UTC" +COPY --from=builder /packages /usr/local +COPY ./assets/hello_world /hello_world +WORKDIR /hello_world +USER nobody +CMD [ "python", "app.py" ] +EXPOSE 8000 + +# === Metadata ================================================================= + +ARG IMAGE +ARG TITLE +ARG DESCRIPTION +ARG LICENCE +ARG GIT_URL +ARG GIT_BRANCH +ARG GIT_COMMIT_HASH +ARG BUILD_DATE +ARG BUILD_VERSION +LABEL \ + org.opencontainers.image.base.name=$IMAGE \ + 
org.opencontainers.image.title="$TITLE" \ + org.opencontainers.image.description="$DESCRIPTION" \ + org.opencontainers.image.licenses="$LICENCE" \ + org.opencontainers.image.url=$GIT_URL \ + org.opencontainers.image.ref.name=$GIT_BRANCH \ + org.opencontainers.image.revision=$GIT_COMMIT_HASH \ + org.opencontainers.image.created=$BUILD_DATE \ + org.opencontainers.image.version=$BUILD_VERSION diff --git a/scripts/docker/examples/python/VERSION b/scripts/docker/examples/python/VERSION new file mode 100644 index 000000000..8acdd82b7 --- /dev/null +++ b/scripts/docker/examples/python/VERSION @@ -0,0 +1 @@ +0.0.1 diff --git a/scripts/docker/examples/python/assets/hello_world/app.py b/scripts/docker/examples/python/assets/hello_world/app.py new file mode 100644 index 000000000..4844e89cb --- /dev/null +++ b/scripts/docker/examples/python/assets/hello_world/app.py @@ -0,0 +1,12 @@ +from flask import Flask +from flask_wtf.csrf import CSRFProtect + +app = Flask(__name__) +csrf = CSRFProtect() +csrf.init_app(app) + +@app.route("/") +def index(): + return "Hello World!" 
+ +app.run(host='0.0.0.0', port=8000) diff --git a/scripts/docker/examples/python/assets/hello_world/requirements.txt b/scripts/docker/examples/python/assets/hello_world/requirements.txt new file mode 100644 index 000000000..a38fca783 --- /dev/null +++ b/scripts/docker/examples/python/assets/hello_world/requirements.txt @@ -0,0 +1,12 @@ +blinker==1.6.2 +click==8.1.7 +Flask-WTF==1.2.0 +Flask==2.3.3 +itsdangerous==2.1.2 +Jinja2==3.1.3 +MarkupSafe==2.1.3 +pip==23.3 +setuptools==65.5.1 +Werkzeug==3.0.1 +wheel==0.41.1 +WTForms==3.0.1 diff --git a/scripts/docker/examples/python/tests/goss.yaml b/scripts/docker/examples/python/tests/goss.yaml new file mode 100644 index 000000000..589db37bf --- /dev/null +++ b/scripts/docker/examples/python/tests/goss.yaml @@ -0,0 +1,8 @@ +package: + python: + installed: true + +command: + pip list | grep -i flask: + exit-status: 0 + timeout: 60000 diff --git a/scripts/docker/tests/.gitignore b/scripts/docker/tests/.gitignore new file mode 100644 index 000000000..c50e8c0a8 --- /dev/null +++ b/scripts/docker/tests/.gitignore @@ -0,0 +1 @@ +Dockerfile.effective diff --git a/scripts/docker/tests/.tool-versions.test b/scripts/docker/tests/.tool-versions.test new file mode 100644 index 000000000..920931162 --- /dev/null +++ b/scripts/docker/tests/.tool-versions.test @@ -0,0 +1,2 @@ +# python, SEE: https://hub.docker.com/_/python/tags +# docker/python 3.11.4-alpine3.18@sha256:0135ae6442d1269379860b361760ad2cf6ab7c403d21935a8015b48d5bf78a86 diff --git a/scripts/docker/tests/Dockerfile b/scripts/docker/tests/Dockerfile new file mode 100644 index 000000000..b5ea56060 --- /dev/null +++ b/scripts/docker/tests/Dockerfile @@ -0,0 +1,3 @@ +# `*:latest` will be replaced with a corresponding version stored in the '.tool-versions' file +# hadolint ignore=DL3007 +FROM python:latest diff --git a/scripts/docker/tests/VERSION b/scripts/docker/tests/VERSION new file mode 100644 index 000000000..fb366351e --- /dev/null +++ b/scripts/docker/tests/VERSION @@ -0,0 
+1,3 @@ +${yyyy}${mm}${dd}-${hash} +$yyyy.$mm.$dd-$hash +somme-name-yyyyeah diff --git a/scripts/docker/tests/docker.test.sh b/scripts/docker/tests/docker.test.sh new file mode 100755 index 000000000..8f487b8f9 --- /dev/null +++ b/scripts/docker/tests/docker.test.sh @@ -0,0 +1,162 @@ +#!/bin/bash +# shellcheck disable=SC1091,SC2034,SC2317 + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Test suite for Docker functions. +# +# Usage: +# $ ./docker.test.sh +# +# Arguments (provided as environment variables): +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + source ./scripts/docker/docker.lib.sh + cd ./scripts/docker/tests + + DOCKER_IMAGE=repository-template/docker-test + DOCKER_TITLE="Repository Template Docker Test" + + test-docker-suite-setup + tests=( \ + test-docker-build \ + test-docker-image-from-signature \ + test-docker-version-file \ + test-docker-test \ + test-docker-run \ + test-docker-clean \ + test-docker-get-image-version-and-pull \ + ) + local status=0 + for test in "${tests[@]}"; do + { + echo -n "$test" + # shellcheck disable=SC2015 + $test && echo " PASS" || { echo " FAIL"; ((status++)); } + } + done + echo "Total: ${#tests[@]}, Passed: $(( ${#tests[@]} - status )), Failed: $status" + test-docker-suite-teardown + [ $status -gt 0 ] && return 1 || return 0 +} + +# ============================================================================== + +function test-docker-suite-setup() { + + : +} + +function test-docker-suite-teardown() { + + : +} + +# ============================================================================== + +function test-docker-build() { + + # Arrange + export BUILD_DATETIME="2023-09-04T15:46:34+0000" + # Act + docker-build > 
/dev/null 2>&1 + # Assert + docker image inspect "${DOCKER_IMAGE}:$(_get-effective-version)" > /dev/null 2>&1 && return 0 || return 1 +} + +function test-docker-image-from-signature() { + + # Arrange + TOOL_VERSIONS="$(git rev-parse --show-toplevel)/scripts/docker/tests/.tool-versions.test" + cp Dockerfile Dockerfile.effective + # Act + _replace-image-latest-by-specific-version + # Assert + grep -q "FROM python:.*-alpine.*@sha256:.*" Dockerfile.effective && return 0 || return 1 +} + +function test-docker-version-file() { + + # Arrange + export BUILD_DATETIME="2023-09-04T15:46:34+0000" + # Act + version-create-effective-file + # Assert + # shellcheck disable=SC2002 + ( + cat .version | grep -q "20230904-" && + cat .version | grep -q "2023.09.04-" && + cat .version | grep -q "somme-name-yyyyeah" + ) && return 0 || return 1 +} + +function test-docker-test() { + + # Arrange + cmd="python --version" + check="Python" + # Act + output=$(docker-check-test) + # Assert + echo "$output" | grep -q "PASS" +} + +function test-docker-run() { + + # Arrange + cmd="python --version" + # Act + output=$(docker-run) + # Assert + echo "$output" | grep -Eq "Python [0-9]+\.[0-9]+\.[0-9]+" +} + +function test-docker-clean() { + + # Arrange + version="$(_get-effective-version)" + # Act + docker-clean + # Assert + docker image inspect "${DOCKER_IMAGE}:${version}" > /dev/null 2>&1 && return 1 || return 0 +} + +function test-docker-get-image-version-and-pull() { + + # Arrange + name="ghcr.io/nhs-england-tools/github-runner-image" + match_version=".*-rt.*" + # Act + docker-get-image-version-and-pull > /dev/null 2>&1 + # Assert + docker images \ + --filter=reference="$name" \ + --format "{{.Tag}}" \ + | grep -vq "" +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# 
============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/githooks/check-english-usage.sh b/scripts/githooks/check-english-usage.sh new file mode 100755 index 000000000..b3942deb6 --- /dev/null +++ b/scripts/githooks/check-english-usage.sh @@ -0,0 +1,108 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Git hook to check prose style +# +# Usage: +# $ check={all,staged-changes,working-tree-changes,branch} ./check-english-usage.sh +# +# Exit codes: +# 0 - All files are formatted correctly +# 1 - Files are not formatted correctly +# +# The `check` parameter controls which files are checked, so you can +# limit the scope of the check according to what is appropriate at the +# point the check is being applied. +# +# check=all: check all files in the repository +# check=staged-changes: check only files staged for commit. +# check=working-tree-changes: check modified, unstaged files. This is the default. +# check=branch: check for all changes since branching from $BRANCH_NAME + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + check=${check:-working-tree-changes} + case $check in + "all") + filter="git ls-files" + ;; + "staged-changes") + filter="git diff --diff-filter=ACMRT --name-only --cached" + ;; + "working-tree-changes") + filter="git diff --diff-filter=ACMRT --name-only" + ;; + "branch") + filter="git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main}" + ;; + *) + echo "Unrecognised check mode: $check" >&2 && exit 1 + ;; + esac + + if command -v vale > /dev/null 2>&1 && ! 
is-arg-true "${FORCE_USE_DOCKER:-false}"; then + filter="$filter" run-vale-natively + else + filter="$filter" run-vale-in-docker + fi +} + +# Run Vale natively. +# Arguments (provided as environment variables): +# filter=[git command to filter the files to check] +function run-vale-natively() { + + # shellcheck disable=SC2046 + vale \ + --config "$PWD/scripts/config/vale/vale.ini" \ + $($filter) +} + +# Run Vale in a Docker container. +# Arguments (provided as environment variables): +# filter=[git command to filter the files to check] +function run-vale-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=jdkato/vale docker-get-image-version-and-pull) + # We use /dev/null here to stop `vale` from complaining that it's + # not been called correctly if the $filter happens to return an + # empty list. As long as there's a filename, even if it's one that + # will be ignored, `vale` is happy. + # shellcheck disable=SC2046,SC2086 + docker run --rm --platform linux/amd64 \ + --volume "$PWD:/workdir" \ + --workdir /workdir \ + "$image" \ + --config /workdir/scripts/config/vale/vale.ini \ + $($filter) /dev/null +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/githooks/check-file-format.sh b/scripts/githooks/check-file-format.sh new file mode 100755 index 000000000..d7c94747d --- /dev/null +++ b/scripts/githooks/check-file-format.sh @@ -0,0 +1,124 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
+ +set -euo pipefail + +# Pre-commit git hook to check the EditorConfig rules compliance over changed +# files. It ensures all non-binary files across the codebase are formatted +# according to the style defined in the `.editorconfig` file. This is a +# editorconfig command wrapper. It will run editorconfig natively if it is +# installed, otherwise it will run it in a Docker container. +# +# Usage: +# $ [options] ./check-file-format.sh +# +# Options: +# check={all,staged-changes,working-tree-changes,branch} # Check mode, default is 'working-tree-changes' +# dry_run=true # Do not check, run dry run only, default is 'false' +# BRANCH_NAME=other-branch-than-main # Branch to compare with, default is `origin/main` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` +# +# Exit codes: +# 0 - All files are formatted correctly +# 1 - Files are not formatted correctly +# +# The `check` parameter controls which files are checked, so you can +# limit the scope of the check according to what is appropriate at the +# point the check is being applied. +# +# check=all: check all files in the repository +# check=staged-changes: check only files staged for commit. +# check=working-tree-changes: check modified, unstaged files. This is the default. +# check=branch: check for all changes since branching from $BRANCH_NAME +# +# Notes: +# Please make sure to enable EditorConfig linting in your IDE. For the +# Visual Studio Code editor it is `editorconfig.editorconfig` that is already +# specified in the `./.vscode/extensions.json` file. 
+ +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + # shellcheck disable=SC2154 + is-arg-true "${dry_run:-false}" && dry_run_opt="--dry-run" + + check=${check:-working-tree-changes} + case $check in + "all") + filter="git ls-files" + ;; + "staged-changes") + filter="git diff --diff-filter=ACMRT --name-only --cached" + ;; + "working-tree-changes") + filter="git diff --diff-filter=ACMRT --name-only" + ;; + "branch") + filter="git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main}" + ;; + *) + echo "Unrecognised check mode: $check" >&2 && exit 1 + ;; + esac + + if command -v editorconfig > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + filter="$filter" dry_run_opt="${dry_run_opt:-}" run-editorconfig-natively + else + filter="$filter" dry_run_opt="${dry_run_opt:-}" run-editorconfig-in-docker + fi +} + +# Run editorconfig natively. +# Arguments (provided as environment variables): +# dry_run_opt=[dry run option] +# filter=[git command to filter the files to check] +function run-editorconfig-natively() { + + # shellcheck disable=SC2046,SC2086 + editorconfig \ + --exclude '.git/' $dry_run_opt $($filter) +} + +# Run editorconfig in a Docker container. +# Arguments (provided as environment variables): +# dry_run_opt=[dry run option] +# filter=[git command to filter the files to check] +function run-editorconfig-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=mstruebing/editorconfig-checker docker-get-image-version-and-pull) + # We use /dev/null here as a backstop in case there are no files in the state + # we choose. If the filter comes back empty, adding `/dev/null` onto it has + # the effect of preventing `ec` from treating "no files" as "all the files". 
+ docker run --rm --platform linux/amd64 \ + --volume "$PWD":/check \ + "$image" \ + sh -c "ec --exclude '.git/' $dry_run_opt \$($filter) /dev/null" +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/githooks/check-markdown-format.sh b/scripts/githooks/check-markdown-format.sh new file mode 100755 index 000000000..698df4a57 --- /dev/null +++ b/scripts/githooks/check-markdown-format.sh @@ -0,0 +1,109 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Pre-commit git hook to check the Markdown file formatting rules compliance +# over changed files. This is a markdownlint command wrapper. It will run +# markdownlint natively if it is installed, otherwise it will run it in a Docker +# container. +# +# Usage: +# $ [options] ./check-markdown-format.sh +# +# Options: +# check={all,staged-changes,working-tree-changes,branch} # Check mode, default is 'working-tree-changes' +# BRANCH_NAME=other-branch-than-main # Branch to compare with, default is `origin/main` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` +# +# Exit codes: +# 0 - All files are formatted correctly +# 1 - Files are not formatted correctly +# +# Notes: +# 1) Please make sure to enable Markdown linting in your IDE. For the Visual +# Studio Code editor it is `davidanson.vscode-markdownlint` that is already +# specified in the `./.vscode/extensions.json` file. 
+# 2) To see the full list of the rules, please visit +# https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + check=${check:-working-tree-changes} + case $check in + "all") + files="$(find ./ -type f -name "*.md")" + ;; + "staged-changes") + files="$(git diff --diff-filter=ACMRT --name-only --cached "*.md")" + ;; + "working-tree-changes") + files="$(git diff --diff-filter=ACMRT --name-only "*.md")" + ;; + "branch") + files="$( (git diff --diff-filter=ACMRT --name-only "${BRANCH_NAME:-origin/main}" "*.md"; git diff --name-only "*.md") | sort | uniq )" + ;; + esac + + if [ -n "$files" ]; then + if command -v markdownlint > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + files="$files" run-markdownlint-natively + else + files="$files" run-markdownlint-in-docker + fi + fi +} + +# Run markdownlint natively. +# Arguments (provided as environment variables): +# files=[files to check] +function run-markdownlint-natively() { + + # shellcheck disable=SC2086 + markdownlint \ + $files \ + --config "$PWD/scripts/config/markdownlint.yaml" +} + +# Run markdownlint in a Docker container. 
+# Arguments (provided as environment variables): +# files=[files to check] +function run-markdownlint-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=ghcr.io/igorshubovych/markdownlint-cli docker-get-image-version-and-pull) + # shellcheck disable=SC2086 + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/workdir \ + "$image" \ + $files \ + --config /workdir/scripts/config/markdownlint.yaml +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/githooks/check-terraform-format.sh b/scripts/githooks/check-terraform-format.sh new file mode 100755 index 000000000..7255e5126 --- /dev/null +++ b/scripts/githooks/check-terraform-format.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Pre-commit git hook to check format Terraform code. +# +# Usage: +# $ [options] ./check-terraform-format.sh +# +# Options: +# check_only=true # Do not format, run check only, default is 'false' +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + local check_only=${check_only:-false} + check_only=$check_only terraform-fmt +} + +# Format Terraform files. 
+# Arguments (provided as environment variables): +# check_only=[do not format, run check only] +function terraform-fmt() { + + local opts= + if is-arg-true "$check_only"; then + opts="-check" + fi + opts=$opts make terraform-fmt +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/githooks/scan-secrets.sh b/scripts/githooks/scan-secrets.sh new file mode 100755 index 000000000..06155b8ab --- /dev/null +++ b/scripts/githooks/scan-secrets.sh @@ -0,0 +1,111 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Pre-commit git hook to scan for secrets hard-coded in the codebase. This is a +# gitleaks command wrapper. It will run gitleaks natively if it is installed, +# otherwise it will run it in a Docker container. +# +# Usage: +# $ [options] ./scan-secrets.sh +# +# Options: +# check={whole-history,last-commit,staged-changes} # Type of the check to run, default is 'staged-changes' +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' +# +# Exit codes: +# 0 - No leaks present +# 1 - Leaks or error encountered +# 126 - Unknown flag + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + if command -v gitleaks > /dev/null 2>&1 && ! 
is-arg-true "${FORCE_USE_DOCKER:-false}"; then + dir="$PWD" + cmd="$(get-cmd-to-run)" run-gitleaks-natively + else + dir="/workdir" + cmd="$(get-cmd-to-run)" run-gitleaks-in-docker + fi +} + +# Get Gitleaks command to execute and configuration. +# Arguments (provided as environment variables): +# dir=[project's top-level directory] +function get-cmd-to-run() { + + check=${check:-staged-changes} + case $check in + "whole-history") + cmd="detect --source $dir --verbose --redact" + ;; + "last-commit") + cmd="detect --source $dir --verbose --redact --log-opts -1" + ;; + "staged-changes") + cmd="protect --source $dir --verbose --staged" + ;; + esac + # Include base line file if it exists + if [ -f "$dir/scripts/config/.gitleaks-baseline.json" ]; then + cmd="$cmd --baseline-path $dir/scripts/config/.gitleaks-baseline.json" + fi + # Include the config file + cmd="$cmd --config $dir/scripts/config/gitleaks.toml" + + echo "$cmd" +} + +# Run Gitleaks natively. +# Arguments (provided as environment variables): +# cmd=[command to run] +function run-gitleaks-natively() { + + # shellcheck disable=SC2086 + gitleaks $cmd +} + +# Run Gitleaks in a Docker container. 
+# Arguments (provided as environment variables): +# cmd=[command to run] +# dir=[directory to mount as a volume] +function run-gitleaks-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=ghcr.io/gitleaks/gitleaks docker-get-image-version-and-pull) + # shellcheck disable=SC2086 + docker run --rm --platform linux/amd64 \ + --volume "$PWD:$dir" \ + --workdir $dir \ + "$image" \ + $cmd +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/init.mk b/scripts/init.mk new file mode 100644 index 000000000..373f8a4f4 --- /dev/null +++ b/scripts/init.mk @@ -0,0 +1,157 @@ +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
+ +include scripts/docker/docker.mk +include scripts/tests/test.mk +-include scripts/terraform/terraform.mk + +# ============================================================================== + +runner-act: # Run GitHub Actions locally - mandatory: workflow=[workflow file name], job=[job name] @Development + source ./scripts/docker/docker.lib.sh + act $(shell [[ "${VERBOSE}" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo --verbose) \ + --container-architecture linux/amd64 \ + --platform ubuntu-latest=$$(name="ghcr.io/nhs-england-tools/github-runner-image" docker-get-image-version-and-pull) \ + --container-options "--privileged" \ + --bind \ + --pull=false \ + --reuse \ + --rm \ + --defaultbranch main \ + --workflows .github/workflows/${workflow}.yaml \ + --job ${job} + +version-create-effective-file: # Create effective version file - optional: dir=[path to the VERSION file to use, default is '.'], BUILD_DATETIME=[build date and time in the '%Y-%m-%dT%H:%M:%S%z' format generated by the CI/CD pipeline, default is current date and time] @Development + source scripts/docker/docker.lib.sh + version-create-effective-file + +shellscript-lint-all: # Lint all shell scripts in this project, do not fail on error, just print the error messages @Quality + for file in $$(find . 
-type f -name "*.sh"); do + file=$${file} scripts/shellscript-linter.sh ||: + done + +githooks-config: # Trigger Git hooks on commit that are defined in this repository @Configuration + make _install-dependency name="pre-commit" + pre-commit install \ + --config scripts/config/pre-commit.yaml \ + --install-hooks + +githooks-run: # Run git hooks configured in this repository @Operations + pre-commit run \ + --config scripts/config/pre-commit.yaml \ + --all-files + +_install-dependency: # Install asdf dependency - mandatory: name=[listed in the '.tool-versions' file]; optional: version=[if not listed] + echo ${name} + asdf plugin add ${name} ||: + asdf install ${name} $(or ${version},) + +_install-dependencies: # Install all the dependencies listed in .tool-versions + for plugin in $$(grep ^[a-z] .tool-versions | sed 's/[[:space:]].*//'); do + make _install-dependency name="$${plugin}" + done + +clean:: # Remove all generated and temporary files (common) @Operations + rm -rf \ + .scannerwork \ + *report*.json \ + *report*json.zip \ + docs/diagrams/.*.bkp \ + docs/diagrams/.*.dtmp \ + .version + +config:: # Configure development environment (common) @Configuration + make \ + githooks-config + +help: # Print help @Others + printf "\nUsage: \033[3m\033[93m[arg1=val1] [arg2=val2] \033[0m\033[0m\033[32mmake\033[0m\033[34m \033[0m\n\n" + perl -e '$(HELP_SCRIPT)' $(MAKEFILE_LIST) + +list-variables: # List all the variables available to make @Others + $(foreach v, $(sort $(.VARIABLES)), + $(if $(filter-out default automatic, $(origin $v)), + $(if $(and $(patsubst %_PASSWORD,,$v), $(patsubst %_PASS,,$v), $(patsubst %_KEY,,$v), $(patsubst %_SECRET,,$v)), + $(info $v=$($v) ($(value $v)) [$(flavor $v),$(origin $v)]), + $(info $v=****** (******) [$(flavor $v),$(origin $v)]) + ) + ) + ) + +# ============================================================================== + +.DEFAULT_GOAL := help +.EXPORT_ALL_VARIABLES: +.NOTPARALLEL: +.ONESHELL: +.PHONY: * # Please do not change 
this line! The alternative usage of it introduces unnecessary complexity and is considered an anti-pattern. +MAKEFLAGS := --no-print-directory +SHELL := /bin/bash +ifeq (true, $(shell [[ "${VERBOSE}" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo true)) + .SHELLFLAGS := -cex +else + .SHELLFLAGS := -ce +endif + +# This script parses all the make target descriptions and renders the help output. +HELP_SCRIPT = \ + \ + use Text::Wrap; \ + %help_info; \ + my $$max_command_length = 0; \ + my $$terminal_width = `tput cols` || 120; chomp($$terminal_width); \ + \ + while(<>){ \ + next if /^_/; \ + \ + if (/^([\w-_]+)\s*:.*\#(.*?)(@(\w+))?\s*$$/) { \ + my $$command = $$1; \ + my $$description = $$2; \ + $$description =~ s/@\w+//; \ + my $$category_key = $$4 // 'Others'; \ + (my $$category_name = $$category_key) =~ s/(?<=[a-z])([A-Z])/\ $$1/g; \ + $$category_name = lc($$category_name); \ + $$category_name =~ s/^(.)/\U$$1/; \ + \ + push @{$$help_info{$$category_name}}, [$$command, $$description]; \ + $$max_command_length = (length($$command) > 37) ? 40 : $$max_command_length; \ + } \ + } \ + \ + my $$description_width = $$terminal_width - $$max_command_length - 4; \ + $$Text::Wrap::columns = $$description_width; \ + \ + for my $$category (sort { $$a eq 'Others' ? 1 : $$b eq 'Others' ? -1 : $$a cmp $$b } keys %help_info) { \ + print "\033[1m$$category\033[0m:\n\n"; \ + for my $$item (sort { $$a->[0] cmp $$b->[0] } @{$$help_info{$$category}}) { \ + my $$description = $$item->[1]; \ + my @desc_lines = split("\n", wrap("", "", $$description)); \ + my $$first_line_description = shift @desc_lines; \ + \ + $$first_line_description =~ s/(\w+)(\|\w+)?=/\033[3m\033[93m$$1$$2\033[0m=/g; \ + \ + my $$formatted_command = $$item->[0]; \ + $$formatted_command = substr($$formatted_command, 0, 37) . "..." 
if length($$formatted_command) > 37; \ + \ + print sprintf(" \033[0m\033[34m%-$${max_command_length}s\033[0m%s %s\n", $$formatted_command, $$first_line_description); \ + for my $$line (@desc_lines) { \ + $$line =~ s/(\w+)(\|\w+)?=/\033[3m\033[93m$$1$$2\033[0m=/g; \ + print sprintf(" %-$${max_command_length}s %s\n", " ", $$line); \ + } \ + print "\n"; \ + } \ + } + +# ============================================================================== + +${VERBOSE}.SILENT: \ + _install-dependencies \ + _install-dependency \ + clean \ + config \ + githooks-config \ + githooks-run \ + help \ + list-variables \ + runner-act \ + shellscript-lint-all \ + version-create-effective-file \ diff --git a/scripts/reports/create-lines-of-code-report.sh b/scripts/reports/create-lines-of-code-report.sh new file mode 100755 index 000000000..01645c7d0 --- /dev/null +++ b/scripts/reports/create-lines-of-code-report.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Count lines of code of this repository. This is a gocloc command wrapper. It +# will run gocloc natively if it is installed, otherwise it will run it in a +# Docker container. +# +# Usage: +# $ [options] ./create-lines-of-code-report.sh +# +# Options: +# BUILD_DATETIME=%Y-%m-%dT%H:%M:%S%z # Build datetime, default is `date -u +'%Y-%m-%dT%H:%M:%S%z'` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + create-report + enrich-report +} + +function create-report() { + + if command -v gocloc > /dev/null 2>&1 && ! 
is-arg-true "${FORCE_USE_DOCKER:-false}"; then + run-gocloc-natively + else + run-gocloc-in-docker + fi + # shellcheck disable=SC2002 + cat lines-of-code-report.tmp.json \ + | jq -r '["Language","files","blank","comment","code"],["--------"],(.languages[]|[.name,.files,.blank,.comment,.code]),["-----"],(.total|["TOTAL",.files,.blank,.comment,.code])|@tsv' \ + | sed 's/Plain Text/Plaintext/g' \ + | column -t +} + +function run-gocloc-natively() { + + gocloc --output-type=json . > lines-of-code-report.tmp.json +} + +function run-gocloc-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=ghcr.io/make-ops-tools/gocloc docker-get-image-version-and-pull) + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/workdir \ + "$image" \ + --output-type=json \ + . \ + > lines-of-code-report.tmp.json +} + +function enrich-report() { + + build_datetime=${BUILD_DATETIME:-$(date -u +'%Y-%m-%dT%H:%M:%S%z')} + git_url=$(git config --get remote.origin.url) + git_branch=$(git rev-parse --abbrev-ref HEAD) + git_commit_hash=$(git rev-parse HEAD) + git_tags=$(echo \""$(git tag | tr '\n' ',' | sed 's/,$//' | sed 's/,/","/g')"\" | sed 's/""//g') + pipeline_run_id=${GITHUB_RUN_ID:-0} + pipeline_run_number=${GITHUB_RUN_NUMBER:-0} + pipeline_run_attempt=${GITHUB_RUN_ATTEMPT:-0} + + # shellcheck disable=SC2086 + jq \ + '.creationInfo |= . 
+ {"created":"'${build_datetime}'","repository":{"url":"'${git_url}'","branch":"'${git_branch}'","tags":['${git_tags}'],"commitHash":"'${git_commit_hash}'"},"pipeline":{"id":'${pipeline_run_id}',"number":'${pipeline_run_number}',"attempt":'${pipeline_run_attempt}'}}' \ + lines-of-code-report.tmp.json \ + > lines-of-code-report.json + rm -f lines-of-code-report.tmp.json +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/reports/create-sbom-report.sh b/scripts/reports/create-sbom-report.sh new file mode 100755 index 000000000..1ed735a74 --- /dev/null +++ b/scripts/reports/create-sbom-report.sh @@ -0,0 +1,97 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Script to generate SBOM (Software Bill of Materials) for the repository +# content and any artefact created by the CI/CD pipeline. This is a syft command +# wrapper. It will run syft natively if it is installed, otherwise it will run +# it in a Docker container. 
+# +# Usage: +# $ [options] ./create-sbom-report.sh +# +# Options: +# BUILD_DATETIME=%Y-%m-%dT%H:%M:%S%z # Build datetime, default is `date -u +'%Y-%m-%dT%H:%M:%S%z'` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + create-report + enrich-report +} + +function create-report() { + + if command -v syft > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + run-syft-natively + else + run-syft-in-docker + fi +} + +function run-syft-natively() { + + syft packages dir:"$PWD" \ + --config "$PWD/scripts/config/syft.yaml" \ + --output spdx-json="$PWD/sbom-repository-report.tmp.json" +} + +function run-syft-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=ghcr.io/anchore/syft docker-get-image-version-and-pull) + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/workdir \ + "$image" \ + packages dir:/workdir \ + --config /workdir/scripts/config/syft.yaml \ + --output spdx-json=/workdir/sbom-repository-report.tmp.json +} + +function enrich-report() { + + build_datetime=${BUILD_DATETIME:-$(date -u +'%Y-%m-%dT%H:%M:%S%z')} + git_url=$(git config --get remote.origin.url) + git_branch=$(git rev-parse --abbrev-ref HEAD) + git_commit_hash=$(git rev-parse HEAD) + git_tags=$(echo \""$(git tag | tr '\n' ',' | sed 's/,$//' | sed 's/,/","/g')"\" | sed 's/""//g') + pipeline_run_id=${GITHUB_RUN_ID:-0} + pipeline_run_number=${GITHUB_RUN_NUMBER:-0} + pipeline_run_attempt=${GITHUB_RUN_ATTEMPT:-0} + + # shellcheck disable=SC2086 + jq \ + '.creationInfo |= . 
+ {"created":"'${build_datetime}'","repository":{"url":"'${git_url}'","branch":"'${git_branch}'","tags":['${git_tags}'],"commitHash":"'${git_commit_hash}'"},"pipeline":{"id":'${pipeline_run_id}',"number":'${pipeline_run_number}',"attempt":'${pipeline_run_attempt}'}}' \ + sbom-repository-report.tmp.json \ + > sbom-repository-report.json + rm -f sbom-repository-report.tmp.json +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/reports/perform-static-analysis.sh b/scripts/reports/perform-static-analysis.sh new file mode 100755 index 000000000..2426e6d06 --- /dev/null +++ b/scripts/reports/perform-static-analysis.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Script to perform static analysis of the repository content and upload the +# report to SonarCloud. +# +# Usage: +# $ [options] ./perform-static-analysis.sh +# +# Expects: +# BRANCH_NAME=branch-name # Branch to report on +# SONAR_ORGANISATION_KEY=org-key # SonarCloud organisation key +# SONAR_PROJECT_KEY=project-key # SonarCloud project key +# SONAR_TOKEN=token # SonarCloud token +# +# Options: +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + if command -v sonar-scanner > /dev/null 2>&1 && ! 
is-arg-true "${FORCE_USE_DOCKER:-false}"; then + run-sonar-scanner-natively + else + run-sonar-scanner-in-docker + fi +} + +function run-sonar-scanner-natively() { + + sonar-scanner \ + -Dproject.settings="$PWD/scripts/config/sonar-scanner.properties" \ + -Dsonar.branch.name="${BRANCH_NAME:-$(git rev-parse --abbrev-ref HEAD)}" \ + -Dsonar.organization="$SONAR_ORGANISATION_KEY" \ + -Dsonar.projectKey="$SONAR_PROJECT_KEY" \ + -Dsonar.token="$SONAR_TOKEN" +} + +function run-sonar-scanner-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=sonarsource/sonar-scanner-cli docker-get-image-version-and-pull) + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/usr/src \ + "$image" \ + -Dproject.settings=/usr/src/scripts/config/sonar-scanner.properties \ + -Dsonar.branch.name="${BRANCH_NAME:-$(git rev-parse --abbrev-ref HEAD)}" \ + -Dsonar.organization="$SONAR_ORGANISATION_KEY" \ + -Dsonar.projectKey="$SONAR_PROJECT_KEY" \ + -Dsonar.token="$SONAR_TOKEN" +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/reports/scan-vulnerabilities.sh b/scripts/reports/scan-vulnerabilities.sh new file mode 100755 index 000000000..eb68d4b52 --- /dev/null +++ b/scripts/reports/scan-vulnerabilities.sh @@ -0,0 +1,103 @@ +#!/bin/bash + +# WARNING: Please, DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Script to scan an SBOM file for CVEs (Common Vulnerabilities and Exposures). +# This is a grype command wrapper. 
It will run grype natively if it is +# installed, otherwise it will run it in a Docker container. +# +# Usage: +# $ [options] ./scan-vulnerabilities.sh +# +# Options: +# BUILD_DATETIME=%Y-%m-%dT%H:%M:%S%z # Build datetime, default is `date -u +'%Y-%m-%dT%H:%M:%S%z'` +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is `false` +# +# Depends on: +# $ ./create-sbom-report.sh + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + create-report + enrich-report +} + +function create-report() { + + if command -v grype > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + run-grype-natively + else + run-grype-in-docker + fi +} + +function run-grype-natively() { + + grype \ + sbom:"$PWD/sbom-repository-report.json" \ + --config "$PWD/scripts/config/grype.yaml" \ + --output json \ + --file "$PWD/vulnerabilities-repository-report.tmp.json" +} + +function run-grype-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=ghcr.io/anchore/grype docker-get-image-version-and-pull) + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/workdir \ + --volume /tmp/grype/db:/.cache/grype/db \ + "$image" \ + sbom:/workdir/sbom-repository-report.json \ + --config /workdir/scripts/config/grype.yaml \ + --output json \ + --file /workdir/vulnerabilities-repository-report.tmp.json +} + +function enrich-report() { + + build_datetime=${BUILD_DATETIME:-$(date -u +'%Y-%m-%dT%H:%M:%S%z')} + git_url=$(git config --get remote.origin.url) + git_branch=$(git rev-parse --abbrev-ref HEAD) + git_commit_hash=$(git rev-parse HEAD) + git_tags=$(echo \""$(git tag | tr '\n' ',' | sed 's/,$//' | sed 's/,/","/g')"\" | sed 's/""//g') + pipeline_run_id=${GITHUB_RUN_ID:-0} + 
pipeline_run_number=${GITHUB_RUN_NUMBER:-0} + pipeline_run_attempt=${GITHUB_RUN_ATTEMPT:-0} + + # shellcheck disable=SC2086 + jq \ + '.creationInfo |= . + {"created":"'${build_datetime}'","repository":{"url":"'${git_url}'","branch":"'${git_branch}'","tags":['${git_tags}'],"commitHash":"'${git_commit_hash}'"},"pipeline":{"id":'${pipeline_run_id}',"number":'${pipeline_run_number}',"attempt":'${pipeline_run_attempt}'}}' \ + vulnerabilities-repository-report.tmp.json \ + > vulnerabilities-repository-report.json + rm -f vulnerabilities-repository-report.tmp.json +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/shellscript-linter.sh b/scripts/shellscript-linter.sh new file mode 100755 index 000000000..8b3fe09c0 --- /dev/null +++ b/scripts/shellscript-linter.sh @@ -0,0 +1,77 @@ +#!/bin/bash + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# ShellCheck command wrapper. It will run ShellCheck natively if it is +# installed, otherwise it will run it in a Docker container. 
+# +# Usage: +# $ [options] ./shellscript-linter.sh +# +# Arguments (provided as environment variables): +# file=shellscript # Path to the shell script to lint, relative to the project's top-level directory, default is itself +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + [ -z "${file:-}" ] && echo "WARNING: 'file' variable not set, defaulting to itself" + local file=${file:-scripts/shellscript-linter.sh} + if command -v shellcheck > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + file="$file" run-shellcheck-natively + else + file="$file" run-shellcheck-in-docker + fi +} + +# Run ShellCheck natively. +# Arguments (provided as environment variables): +# file=[path to the shell script to lint, relative to the project's top-level directory] +function run-shellcheck-natively() { + + # shellcheck disable=SC2001 + shellcheck "$(echo "$file" | sed "s#$PWD#.#")" +} + +# Run ShellCheck in a Docker container. 
+# Arguments (provided as environment variables): +# file=[path to the shell script to lint, relative to the project's top-level directory] +function run-shellcheck-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=koalaman/shellcheck docker-get-image-version-and-pull) + # shellcheck disable=SC2001 + docker run --rm --platform linux/amd64 \ + --volume "$PWD:/workdir" \ + --workdir /workdir \ + "$image" \ + "/workdir/$(echo "$file" | sed "s#$PWD#.#")" +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/terraform/examples/terraform-state-aws-s3/.gitignore b/scripts/terraform/examples/terraform-state-aws-s3/.gitignore new file mode 100644 index 000000000..c83114071 --- /dev/null +++ b/scripts/terraform/examples/terraform-state-aws-s3/.gitignore @@ -0,0 +1,41 @@ +# Ignore the lock file as this is just an example +.terraform.lock.hcl +# Ignore Terraform plan +*tfplan* + +# SEE: https://github.com/github/gitignore/blob/main/Terraform.gitignore + +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log +crash.*.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. 
+*.tfvars +*.tfvars.json + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc diff --git a/scripts/terraform/examples/terraform-state-aws-s3/main.tf b/scripts/terraform/examples/terraform-state-aws-s3/main.tf new file mode 100644 index 000000000..a4ca5b0e5 --- /dev/null +++ b/scripts/terraform/examples/terraform-state-aws-s3/main.tf @@ -0,0 +1,46 @@ +resource "aws_s3_bucket" "terraform_state_store" { + bucket = var.terraform_state_bucket_name + lifecycle { + prevent_destroy = false // FIXME: Normally, this should be 'true' - this is just an example + } +} + +resource "aws_s3_bucket_versioning" "enabled" { + bucket = aws_s3_bucket.terraform_state_store.id + versioning_configuration { + status = "Enabled" + } +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "default" { + bucket = aws_s3_bucket.terraform_state_store.id + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "AES256" + } + } +} + +resource "aws_s3_bucket_public_access_block" "public_access" { + bucket = aws_s3_bucket.terraform_state_store.id + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + restrict_public_buckets = true +} + +resource "aws_dynamodb_table" "dynamodb_terraform_state_lock" { + name = var.terraform_state_table_name + billing_mode = "PAY_PER_REQUEST" + hash_key = "LockID" + attribute { + name = "LockID" + type = "S" + } + server_side_encryption { + enabled = true + } + point_in_time_recovery { + enabled = true + } +} diff --git a/scripts/terraform/examples/terraform-state-aws-s3/provider.tf 
b/scripts/terraform/examples/terraform-state-aws-s3/provider.tf new file mode 100644 index 000000000..b64be2afe --- /dev/null +++ b/scripts/terraform/examples/terraform-state-aws-s3/provider.tf @@ -0,0 +1,3 @@ +provider "aws" { + region = "eu-west-2" +} diff --git a/scripts/terraform/examples/terraform-state-aws-s3/variables.tf b/scripts/terraform/examples/terraform-state-aws-s3/variables.tf new file mode 100644 index 000000000..07f60cb18 --- /dev/null +++ b/scripts/terraform/examples/terraform-state-aws-s3/variables.tf @@ -0,0 +1,9 @@ +variable "terraform_state_bucket_name" { + description = "The S3 bucket name to store Terraform state" + default = "repository-template-example-terraform-state-store" +} + +variable "terraform_state_table_name" { + description = "The DynamoDB table name to acquire Terraform lock" + default = "repository-template-example-terraform-state-lock" +} diff --git a/scripts/terraform/examples/terraform-state-aws-s3/versions.tf b/scripts/terraform/examples/terraform-state-aws-s3/versions.tf new file mode 100644 index 000000000..18fd04af8 --- /dev/null +++ b/scripts/terraform/examples/terraform-state-aws-s3/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_version = ">= 1.5.0" + required_providers { + aws = { + version = ">= 5.14.0" + } + } +} diff --git a/scripts/terraform/terraform.lib.sh b/scripts/terraform/terraform.lib.sh new file mode 100644 index 000000000..7793b9b02 --- /dev/null +++ b/scripts/terraform/terraform.lib.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# A set of Terraform functions written in Bash. +# +# Usage: +# $ source ./terraform.lib.sh + +# ============================================================================== +# Common Terraform functions. + +# Initialise Terraform. 
+# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform init command, default is none/empty] +function terraform-init() { + + _terraform init # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# Plan Terraform changes. +# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform plan command, default is none/empty] +function terraform-plan() { + + _terraform plan # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# Apply Terraform changes. +# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform apply command, default is none/empty] +function terraform-apply() { + + _terraform apply # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# Destroy Terraform resources. +# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform destroy command, default is none/empty] +function terraform-destroy() { + + _terraform apply -destroy # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# Format Terraform code. 
+# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform fmt command, default is '-recursive'] +function terraform-fmt() { + + _terraform fmt -recursive # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# Validate Terraform code. +# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +# opts=[options to pass to the Terraform validate command, default is none/empty] +function terraform-validate() { + + _terraform validate # 'dir' and 'opts' are passed to the function as environment variables, if set +} + +# shellcheck disable=SC2001,SC2155 +function _terraform() { + + local dir="$(echo "${dir:-$PWD}" | sed "s#$PWD#.#")" + local cmd="-chdir=$dir $* ${opts:-}" + local project_dir="$(git rev-parse --show-toplevel)" + + cmd="$cmd" "$project_dir/scripts/terraform/terraform.sh" +} + +# Remove Terraform files. +# Arguments (provided as environment variables): +# dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is '.'] +function terraform-clean() { + + ( + cd "${dir:-$PWD}" + rm -rf \ + .terraform \ + terraform.log \ + terraform.tfplan \ + terraform.tfstate \ + terraform.tfstate.backup + ) +} diff --git a/scripts/terraform/terraform.mk b/scripts/terraform/terraform.mk new file mode 100644 index 000000000..120a0591e --- /dev/null +++ b/scripts/terraform/terraform.mk @@ -0,0 +1,96 @@ +# This file is for you! Edit it to implement your own Terraform make targets. + +# ============================================================================== +# Custom implementation - implementation of a make target should not exceed 5 lines of effective code. 
+# In most cases there should be no need to modify the existing make targets. + +terraform-init: # Initialise Terraform - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform init command, default is none/empty] @Development + make _terraform cmd="init" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +terraform-plan: # Plan Terraform changes - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform plan command, default is none/empty] @Development + make _terraform cmd="plan" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +terraform-apply: # Apply Terraform changes - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform apply command, default is none/empty] @Development + make _terraform cmd="apply" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +terraform-destroy: # Destroy Terraform resources - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform destroy command, default is none/empty] @Development + make _terraform \ + cmd="destroy" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +terraform-fmt: # 
Format Terraform files - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform fmt command, default is '-recursive'] @Quality + make _terraform cmd="fmt" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +terraform-validate: # Validate Terraform configuration - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], terraform_opts|opts=[options to pass to the Terraform validate command, default is none/empty] @Quality + make _terraform cmd="validate" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +clean:: # Remove Terraform files (terraform) - optional: terraform_dir|dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set] @Operations + make _terraform cmd="clean" \ + dir=$(or ${terraform_dir}, ${dir}) \ + opts=$(or ${terraform_opts}, ${opts}) + +_terraform: # Terraform command wrapper - mandatory: cmd=[command to execute]; optional: dir=[path to a directory where the command will be executed, relative to the project's top-level directory, default is one of the module variables or the example directory, if not set], opts=[options to pass to the Terraform command, default is none/empty] + # 'TERRAFORM_STACK' is passed to the functions as environment variable + TERRAFORM_STACK=$(or ${TERRAFORM_STACK}, $(or ${terraform_stack}, $(or ${STACK}, $(or ${stack}, scripts/terraform/examples/terraform-state-aws-s3)))) + dir=$(or ${dir}, ${TERRAFORM_STACK}) + source scripts/terraform/terraform.lib.sh + terraform-${cmd} # 'dir' 
and 'opts' are accessible by the function as environment variables, if set + +# ============================================================================== +# Quality checks - please DO NOT edit this section! + +terraform-shellscript-lint: # Lint all Terraform module shell scripts @Quality + for file in $$(find scripts/terraform -type f -name "*.sh"); do + file=$${file} scripts/shellscript-linter.sh + done + +# ============================================================================== +# Module tests and examples - please DO NOT edit this section! + +terraform-example-provision-aws-infrastructure: # Provision example of AWS infrastructure @ExamplesAndTests + make terraform-init + make terraform-plan opts="-out=terraform.tfplan" + make terraform-apply opts="-auto-approve terraform.tfplan" + +terraform-example-destroy-aws-infrastructure: # Destroy example of AWS infrastructure @ExamplesAndTests + make terraform-destroy opts="-auto-approve" + +terraform-example-clean: # Remove Terraform example files @ExamplesAndTests + dir=$(or ${dir}, ${TERRAFORM_STACK}) + source scripts/terraform/terraform.lib.sh + terraform-clean + rm -f ${TERRAFORM_STACK}/.terraform.lock.hcl + +# ============================================================================== +# Configuration - please DO NOT edit this section! 
+ +terraform-install: # Install Terraform @Installation + make _install-dependency name="terraform" + +# ============================================================================== + +${VERBOSE}.SILENT: \ + _terraform \ + clean \ + terraform-apply \ + terraform-destroy \ + terraform-example-clean \ + terraform-example-destroy-aws-infrastructure \ + terraform-example-provision-aws-infrastructure \ + terraform-fmt \ + terraform-init \ + terraform-install \ + terraform-plan \ + terraform-shellscript-lint \ + terraform-validate \ diff --git a/scripts/terraform/terraform.sh b/scripts/terraform/terraform.sh new file mode 100755 index 000000000..73f37c1af --- /dev/null +++ b/scripts/terraform/terraform.sh @@ -0,0 +1,76 @@ +#!/bin/bash + +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. + +set -euo pipefail + +# Terraform command wrapper. It will run the command natively if Terraform is +# installed, otherwise it will run it in a Docker container. +# +# Usage: +# $ [options] ./terraform.sh +# +# Options: +# cmd=command # Terraform command to execute +# FORCE_USE_DOCKER=true # If set to true the command is run in a Docker container, default is 'false' +# VERBOSE=true # Show all the executed commands, default is 'false' + +# ============================================================================== + +function main() { + + cd "$(git rev-parse --show-toplevel)" + + if command -v terraform > /dev/null 2>&1 && ! is-arg-true "${FORCE_USE_DOCKER:-false}"; then + # shellcheck disable=SC2154 + cmd=$cmd run-terraform-natively + else + cmd=$cmd run-terraform-in-docker + fi +} + +# Run Terraform natively. +# Arguments (provided as environment variables): +# cmd=[Terraform command to execute] +function run-terraform-natively() { + + # shellcheck disable=SC2086 + terraform $cmd +} + +# Run Terraform in a Docker container. 
+# Arguments (provided as environment variables): +# cmd=[Terraform command to execute] +function run-terraform-in-docker() { + + # shellcheck disable=SC1091 + source ./scripts/docker/docker.lib.sh + + # shellcheck disable=SC2155 + local image=$(name=hashicorp/terraform docker-get-image-version-and-pull) + # shellcheck disable=SC2086 + docker run --rm --platform linux/amd64 \ + --volume "$PWD":/workdir \ + --workdir /workdir \ + "$image" \ + $cmd +} + +# ============================================================================== + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "${VERBOSE:-false}" && set -x + +main "$@" + +exit 0 diff --git a/scripts/tests/style.sh b/scripts/tests/style.sh new file mode 100755 index 000000000..da042fadb --- /dev/null +++ b/scripts/tests/style.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +set -euo pipefail + +cd "$(git rev-parse --show-toplevel)" + + +# This file is for you! Edit it to call your prose style checker. +# It's preconfigured to use `vale`, the same as the github action, +# except that here it only checks unstaged files first, and only if +# those files are OK does it then go on to check staged files. This +# is to give you fast feedback on the changes you've most recently +# made. + +check=working-tree-changes ./scripts/githooks/check-english-usage.sh && \ + check=staged-changes ./scripts/githooks/check-english-usage.sh diff --git a/scripts/tests/test.mk b/scripts/tests/test.mk new file mode 100644 index 000000000..aab47c625 --- /dev/null +++ b/scripts/tests/test.mk @@ -0,0 +1,91 @@ +# WARNING: Please DO NOT edit this file! It is maintained in the Repository Template (https://github.com/nhs-england-tools/repository-template). Raise a PR instead. 
+ +# The test types listed here are both those which might run both locally and in CI, or +# in one but not the other. All of the test types listed at +# https://github.com/NHSDigital/software-engineering-quality-framework/blob/main/quality-checks.md +# should be represented here with the exception of: +# - dependency scanning, which we expect to be applied at the repository level +# - secret scanning, which we expect to be a pre-commit hook +# - code review, which is outside the scope of automated testing for the moment + +test-unit: # Run your unit tests from scripts/test/unit @Testing + make _test name="unit" + +test-lint: # Lint your code from scripts/test/lint @Testing + make _test name="lint" + +test-coverage: # Evaluate code coverage from scripts/test/coverage @Testing + make _test name="coverage" + +test-accessibility: # Run your accessibility tests from scripts/test/accessibility @Testing + make _test name="accessibility" + +test-contract: # Run your contract tests from scripts/test/contract @Testing + make _test name="contract" + +test-integration: # Run your integration tests from scripts/test/integration @Testing + make _test name="integration" + +test-load: # Run all your load tests @Testing + make \ + test-capacity \ + test-soak \ + test-response-time + # You may wish to add more here, depending on your app + +test-capacity: # Test what load level your app fails at from scripts/test/capacity @Testing + make _test name="capacity" + +test-soak: # Test that resources don't get exhausted over time from scripts/test/soak @Testing + make _test name="soak" + +test-response-time: # Test your API response times from scripts/test/response-time @Testing + make _test name="response-time" + +test-security: # Run your security tests from scripts/test/security @Testing + make _test name="security" + +test-ui: # Run your UI tests from scripts/test/ui @Testing + make _test name="ui" + +test-ui-performance: # Run UI render tests from scripts/test/ui-performance 
@Testing + make _test name="ui-performance" + +test: # Run all the test tasks @Testing + make \ + test-unit \ + test-lint \ + test-coverage \ + test-contract \ + test-security \ + test-ui \ + test-ui-performance \ + test-integration \ + test-accessibility \ + test-load + +_test: + set -e + script="./scripts/tests/${name}.sh" + if [ -e "$${script}" ]; then + exec $${script} + else + echo "make test-${name} not implemented: $${script} not found" >&2 + fi + +${VERBOSE}.SILENT: \ + _test \ + test \ + test-accessibility \ + test-capacity \ + test-contract \ + test-coverage \ + test-soak \ + test-integration \ + test-lint \ + test-load \ + test-response-time \ + test-security \ + test-ui \ + test-ui-performance \ + test-unit \ diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh new file mode 100755 index 000000000..c589be5b9 --- /dev/null +++ b/scripts/tests/unit.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +set -euo pipefail + +cd "$(git rev-parse --show-toplevel)" + +# This file is for you! Edit it to call your unit test suite. Note that the same +# file will be called if you run it locally as if you run it on CI. + +# Replace the following line with something like: +# +# rails test:unit +# python manage.py test +# npm run test +# +# or whatever is appropriate to your project. You should *only* run your fast +# tests from here. If you want to run other test suites, see the predefined +# tasks in scripts/test.mk. + +echo "Unit tests are not yet implemented. See scripts/tests/unit.sh for more." diff --git a/tests/.gitkeep b/tests/.gitkeep new file mode 100644 index 000000000..e69de29bb