diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index f10886e..0000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,78 +0,0 @@ -version: 2.1 - -references: - platform-docker: - docker: - - image: objectrocket/platform-cicd:latest - auth: - username: ${DOCKER_USERNAME} - password: ${DOCKER_PASSWORD} - - context-to-use: &context-to-use - context: objectrocket-shared - objectrocket-docker-auth: &objectrocket-docker-auth - auth: - username: ${DOCKER_USERNAME} - password: ${DOCKER_PASSWORD} -jobs: - lint_and_test: - docker: - - image: circleci/python:3.8 - auth: - username: ${DOCKER_USERNAME} - password: ${DOCKER_PASSWORD} - - steps: - - checkout - - restore_cache: - key: deps-{{ checksum "poetry.lock" }} - - run: - name: Install dependencies - command: | - poetry install - - run: - name: Lint - command: | - make lint - - run: - name: Test - command: | - make test - - save_cache: - key: deps-{{ checksum "poetry.lock" }} - paths: - - /home/circleci/.cache/pypoetry/cache - - /home/circleci/.cache/pypoetry/virtualenvs - - build_deploy: - docker: - - <<: *objectrocket-docker-auth - image: circleci/python:3.8 - steps: - - checkout - - setup_remote_docker - - run: - name: docker login - command: | - docker login -u ${DOCKER_USERNAME} -p ${DOCKER_PASSWORD} - docker info - - run: - name: docker build and push - command: | - make OR_PYPI_PROD_PASSWORD="${OBJECTROCKET_PYPI_PASSWORD}" docker-build - make docker-deploy - -workflows: - version: 2 - # runs on all untagged commits - lint_and_test: - jobs: - - lint_and_test: - <<: *context-to-use - filters: - tags: - ignore: - - /^[0-9]+.[0-9]+.[0-9]+$/ - - /^[0-9]+.[0-9]+.[0-9]+rc[0-9]+$/ - branches: - ignore: develop diff --git a/.gitignore b/.gitignore index 449be05..aacd9a5 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,6 @@ __pycache__ htmlcov .pytest_cache registry.terraform.io/ +.scannerwork/ +coverage.xml +reports/* diff --git a/.isort.cfg b/.isort.cfg index 2fbb95e..11ea026 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -1,3 +1,4 @@ [settings] -known_third_party = atlassian,boto3,botocore,click,deepdiff,google,hcl2,jinja2,lark,mergedeep,moto,pydantic,pytest,yaml +known_third_party = atlassian,boto3,botocore,click,google,hcl2,jinja2,lark,moto,packaging,pydantic,pydantic_core,pydantic_settings,pytest,yaml profile = black +skip = ["*/__init__.py"] diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..78ad28c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,18 @@ +# Changes 0.12.0 -> 0.13.0 (2024-06...) + +## Notes +This version reconsiders many of the core elements of the worker application. + +The application now relies on pydantic to define the models for what is consumed from the configuration file and the command line. Previously there was a heinous mix of classes which just had attributes and methods seemingly hung on +them at random. This was a nightmare to maintain and understand. 
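+As a rough illustration (hypothetical names, not the actual tfworker classes), the new approach centers on pydantic models along these lines:
+
+```python
+from pydantic import BaseModel, Field, field_validator
+
+
+class ProviderConfig(BaseModel):
+    """Illustrative shape of one provider entry from the worker config file."""
+
+    name: str
+    version: str = Field(..., description="version constraint, e.g. '~> 5.0'")
+    config_blocks: dict = Field(default_factory=dict)
+
+    @field_validator("name")
+    @classmethod
+    def _name_not_blank(cls, value: str) -> str:
+        if not value.strip():
+            raise ValueError("provider name may not be blank")
+        return value
+
+
+# Bad input now fails loudly at parse time rather than deep inside the worker:
+# ProviderConfig(name="", version="~> 5.0")  # raises pydantic.ValidationError
+```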
+ +- the CopierFactory was reorganized to be more modular +- providers became a package, with a model used to validate all provider configurations +- all of the configuration file logic was moved into the `commands` module; the logic for managing the config is contained there instead of being spread out across many places +- added tfworker.util.log to handle interaction with the user, via the CLI or via a logger in the future +- made significant strides towards consolidating all exceptions / error handling +- made significant strides toward having only the primary cli.py and commands/ actually cause the program to terminate +- validation of options and inputs is now handled centrally in the pydantic models instead of being spread out *everywhere* + + +- ... @TODO find time to update this :) diff --git a/Makefile b/Makefile index 4a47a98..12ddd81 100644 --- a/Makefile +++ b/Makefile @@ -1,21 +1,28 @@ init: poetry install +init-dev: + poetry install --with dev + default: lint test -lint: init +lint: init-dev poetry run flake8 --ignore E501,W503 tfworker tests -format: init +format: init-dev poetry run black tfworker tests - poetry run seed-isort-config || echo "known_third_party setting changed. Please commit pyproject.toml" + @poetry run seed-isort-config || echo "known_third_party setting changed. Please commit pyproject.toml" poetry run isort tfworker tests -test: init +test: init-dev poetry run pytest -p no:warnings --disable-socket poetry run coverage report --fail-under=60 -m --skip-empty -dep-test: init +ci-test: init-dev + poetry run pytest --disable-socket --junitxml=reports/junit.xml + poetry run coverage xml -o reports/coverage.xml + +dep-test: init-dev poetry run pytest --disable-socket poetry run coverage report --fail-under=60 -m --skip-empty diff --git a/poetry.lock b/poetry.lock index 634a011..0855745 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "alabaster" @@ -155,17 +155,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.127" +version = "1.34.139" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.127-py3-none-any.whl", hash = "sha256:d370befe4fb7aea5bc383057d7dad18dda5d0cf3cd3295915bcc8c8c4191905c"}, - {file = "boto3-1.34.127.tar.gz", hash = "sha256:58ccdeae3a96811ecc9d5d866d8226faadbd0ee1891756e4a04d5186e9a57a64"}, + {file = "boto3-1.34.139-py3-none-any.whl", hash = "sha256:98b2a12bcb30e679fa9f60fc74145a39db5ec2ca7b7c763f42896e3bd9b3a38d"}, + {file = "boto3-1.34.139.tar.gz", hash = "sha256:32b99f0d76ec81fdca287ace2c9744a2eb8b92cb62bf4d26d52a4f516b63a6bf"}, ] [package.dependencies] -botocore = ">=1.34.127,<1.35.0" +botocore = ">=1.34.139,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.127" +version = "1.34.139" description = "Low-level, data-driven core of boto 3."
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.127-py3-none-any.whl", hash = "sha256:e14fa28c8bb141de965e700f88b196d17c67a703c7f0f5c7e14f7dd1cf636011"}, - {file = "botocore-1.34.127.tar.gz", hash = "sha256:a377871742c40603d559103f19acb7bc93cfaf285e68f21b81637ec396099877"}, + {file = "botocore-1.34.139-py3-none-any.whl", hash = "sha256:dd1e085d4caa2a4c1b7d83e3bc51416111c8238a35d498e9d3b04f3b63b086ba"}, + {file = "botocore-1.34.139.tar.gz", hash = "sha256:df023d8cf8999d574214dad4645cb90f9d2ccd1494f6ee2b57b1ab7522f6be77"}, ] [package.dependencies] @@ -204,13 +204,13 @@ files = [ [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -403,63 +403,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.3" +version = "7.5.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = 
"coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, + {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, + {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, + {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, + {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, + {file = 
"coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, + {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, + {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, + {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, + {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, + {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, + {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, + {file = 
"coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, + {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, ] [package.extras] @@ -644,20 +644,20 @@ rewrite = ["tokenize-rt (>=3)"] [[package]] name = "google-api-core" -version = "2.19.0" +version = "2.19.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.0.tar.gz", hash = "sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10"}, - {file = "google_api_core-2.19.0-py3-none-any.whl", hash = "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] @@ -667,13 +667,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.30.0" +version = "2.31.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, - {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, + {file = "google-auth-2.31.0.tar.gz", hash = "sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871"}, + {file = "google_auth-2.31.0-py2.py3-none-any.whl", hash = "sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23"}, ] [package.dependencies] @@ -828,17 +828,17 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.63.1" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.1.tar.gz", hash = "sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a"}, - {file = "googleapis_common_protos-1.63.1-py2.py3-none-any.whl", hash = "sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio 
(>=1.44.0,<2.0.0.dev0)"] @@ -878,13 +878,13 @@ files = [ [[package]] name = "ipython" -version = "8.25.0" +version = "8.26.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.25.0-py3-none-any.whl", hash = "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab"}, - {file = "ipython-8.25.0.tar.gz", hash = "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716"}, + {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, + {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, ] [package.dependencies] @@ -910,7 +910,7 @@ nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] @@ -1098,13 +1098,13 @@ files = [ [[package]] name = "moto" -version = "5.0.9" +version = "5.0.10" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.9-py2.py3-none-any.whl", hash = "sha256:21a13e02f83d6a18cfcd99949c96abb2e889f4bd51c4c6a3ecc8b78765cb854e"}, - {file = "moto-5.0.9.tar.gz", hash = "sha256:eb71f1cba01c70fff1f16086acb24d6d9aeb32830d646d8989f98a29aeae24ba"}, + {file = "moto-5.0.10-py2.py3-none-any.whl", hash = "sha256:9ffae2f64cc8fe95b9a12d63ae7268a7d6bea9993b922905b5abd8197d852cd0"}, + {file = "moto-5.0.10.tar.gz", hash = "sha256:eff37363221c93ea44f95721ae0ddb56f977fe70437a041b6cc641ee90266279"}, ] [package.dependencies] @@ -1300,39 +1300,39 @@ wcwidth = "*" [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "5.27.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, + {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, + {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, + {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, + {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, + {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, + {file = 
"protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, + {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, + {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] [[package]] @@ -1423,114 +1423,146 @@ files = [ [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = 
"sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = 
"pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = 
"pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pydantic-settings" +version = "2.3.4" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"}, + {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pyflakes" version = "3.2.0" @@ -1611,6 +1643,23 @@ future-fstrings = "*" networkx = "*" pytest = ">=3" +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "pytest-socket" version = "0.7.0" @@ -1653,6 +1702,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "python-hcl2" version = "4.3.4" @@ -1702,6 +1765,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = 
"PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1709,8 +1773,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1727,6 +1799,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1734,6 +1807,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1813,13 +1887,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.10.1" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, - {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -1844,18 +1918,18 @@ files = [ [[package]] name = "setuptools" -version = "70.0.0" +version = "70.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", 
"pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2215,4 +2289,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "1b93c05b0096aa4fc3f8ad0e4b09d47664c652fccb41d58349dc2c401a829a4c" +content-hash = "abdaa04cd2215b9c2969684444987d76d90af7e7a01489acdbf0e43ea4196666" diff --git a/pyproject.toml b/pyproject.toml index b95831f..26340a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,16 +28,18 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.11" +atlassian-python-api = "^3.41" boto3 = "^1.34" click = "^8.1" -jinja2 = "^3.1" google-cloud-storage = "^2.17" +jinja2 = "^3.1" +mergedeep = "^1.3" +pydantic = "^2.7" python-hcl2 = "^4.3" pyyaml = "^6.0" -mergedeep = "^1.3" setuptools = "^70.0" -atlassian-python-api = "^3.41" -pydantic = "^2.7" +pydantic-settings = "^2.3.4" +packaging = "^24.1" [tool.poetry.scripts] worker = 'tfworker.cli:cli' @@ -46,24 +48,29 @@ worker = 'tfworker.cli:cli' optional = true [tool.poetry.group.dev.dependencies] -pytest-timeout = "2.3.1" -ipython = "^8.24" -pytest = "^8.2" black = "^24.4" -isort = "^5.13" -seed-isort-config = "^2.2" +coverage = "^7.5" +deepdiff = "^7.0" flake8 = "^7.0" -wheel = "^0.43" +ipython = "^8.24" +isort = "^5.13" +pytest = "^8.2" +pytest-cov = "^5.0" pytest-depends = "^1.0" +pytest-mock = "^3.14" pytest-socket = "^0.7" -coverage = "^7.5" -pytest-cov = "^5.0" -moto = {extras = ["sts","dynamodb", "s3"], version = "^5.0"} -deepdiff = "^7.0" +pytest-timeout = "2.3.1" +seed-isort-config = "^2.2" Sphinx = "^7.3" +wheel = "^0.43" +moto = {version = "^5.0.10", extras = ["sts", "dynamodb", "s3"]} + [tool.pytest.ini_options] -addopts = "--capture=sys --cov=tfworker --cov-report=" +addopts = "--capture=sys --cov=tfworker --cov-report= -m 'not performance'" +markers = [ + "performance: mark a test as a performance test" +] [build-system] requires = ["poetry>=0.12"] diff --git a/tests/__init__.py b/tests/__init__.py index bf6e46e..792d600 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,13 +1 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/authenticators/test_auth_aws.py b/tests/authenticators/test_auth_aws.py new file mode 100644 index 0000000..ea5449e --- /dev/null +++ b/tests/authenticators/test_auth_aws.py @@ -0,0 +1,357 @@ +from unittest.mock import MagicMock, patch + +import boto3.session +import pytest +from botocore.exceptions import NoCredentialsError +from moto import mock_aws +from pydantic import ValidationError + +from tfworker.authenticators import AWSAuthenticator, AWSAuthenticatorConfig +from tfworker.authenticators.aws import ( + _assume_role_session, + _get_backend_session, + _get_init_session_args, + _need_backend_session, +) +from tfworker.constants import DEFAULT_AWS_REGION +from tfworker.exceptions import TFWorkerException + +# Mock AWS credentials +MOCK_AWS_CREDS = { + "aws_region": "us-east-1", + "aws_access_key_id": "AKIAEXAMPLE", + "aws_secret_access_key": "SECRETEXAMPLE", +} + + +@pytest.fixture +def aws_auth_config(): + return AWSAuthenticatorConfig( + aws_role_arn="arn:aws:iam::123456789012:role/testRole", + aws_access_key_id="AKIAEXAMPLE", + aws_secret_access_key="SECRETEXAMPLE", + aws_region="us-east-1", + backend_region="us-west-2", + backend_role_arn="arn:aws:iam::210987654321:role/backendTestRole", + aws_external_id="123456789012", + ) + + +@pytest.fixture +def boto_session(): + return boto3.Session(region_name="us-east-1") + + +class TestAWSAuthenticatorConfig: + def test_valid_config(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + assert config.aws_region == MOCK_AWS_CREDS["aws_region"] + assert config.aws_access_key_id == MOCK_AWS_CREDS["aws_access_key_id"] + assert config.aws_secret_access_key == MOCK_AWS_CREDS["aws_secret_access_key"] + + def test_invalid_config_missing_region(self): + creds_copy = MOCK_AWS_CREDS.copy() + del creds_copy["aws_region"] + with pytest.raises(ValidationError) as e: + AWSAuthenticatorConfig(**creds_copy) + + assert "Field required" in str(e.value) + assert "aws_region" in str(e.value) + + def test_invalid_config_missing_field(self): + with pytest.raises(ValidationError): + AWSAuthenticatorConfig( + aws_access_key_id="valid", aws_secret_access_key="valid" + ) + + def test_valid_config_with_profile(self): + config = AWSAuthenticatorConfig(aws_region="us-east-1", aws_profile="default") + assert config.aws_profile == "default" + + def test_invalid_config_no_creds_or_profile(self): + with pytest.raises(ValidationError): + AWSAuthenticatorConfig(aws_region="us-east-1") + + +class TestAWSAuthenticator: + @mock_aws + def test_authenticate_success(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + assert authenticator.session is not None + assert authenticator.session == authenticator.backend_session + + @patch("boto3.Session", side_effect=NoCredentialsError) + def test_authenticate_failure_invalid_credentials(self, mock_session): + mock_call_args = MOCK_AWS_CREDS.copy() + del mock_call_args["aws_region"] + mock_call_args["region_name"] = DEFAULT_AWS_REGION + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + with pytest.raises(TFWorkerException): + AWSAuthenticator(config) + 
+ mock_session.assert_called_once_with(**mock_call_args) + + @mock_aws + def test_env_success(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + env_vars = authenticator.env() + assert env_vars["AWS_DEFAULT_REGION"] == "us-east-1" + assert env_vars["AWS_ACCESS_KEY_ID"] == "AKIAEXAMPLE" + assert env_vars["AWS_SECRET_ACCESS_KEY"] == "SECRETEXAMPLE" + + @mock_aws + def test_env_backend(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + env_vars = authenticator.env(backend=True) + assert env_vars["AWS_DEFAULT_REGION"] == "us-east-1" + assert env_vars["AWS_ACCESS_KEY_ID"] == "AKIAEXAMPLE" + assert env_vars["AWS_SECRET_ACCESS_KEY"] == "SECRETEXAMPLE" + + @mock_aws + def test_env_with_assumed_role(self): + role_creds = MOCK_AWS_CREDS.copy() + role_creds.update({"aws_role_arn": "arn:aws:iam::123456789012:role/TestRole"}) + config = AWSAuthenticatorConfig(**role_creds) + authenticator = AWSAuthenticator(config) + env_vars = authenticator.env() + assert len(env_vars) == 4 + assert env_vars["AWS_DEFAULT_REGION"] == "us-east-1" + assert env_vars.get("AWS_ACCESS_KEY_ID") is not None + assert env_vars.get("AWS_SECRET_ACCESS_KEY") is not None + assert env_vars.get("AWS_SESSION_TOKEN") is not None + + @mock_aws + def test_session_and_credentials(self): + from botocore.credentials import Credentials + + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + assert authenticator.session is not None + creds = authenticator.session_credentials + assert type(creds) is Credentials + + @mock_aws + def test_backend_session_and_credentials(self): + from botocore.credentials import Credentials + + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + assert authenticator.backend_session is not None + creds = authenticator.backend_session_credentials + assert type(creds) is Credentials + + @mock_aws + def test_authenticate_with_role_arn(self): + role_creds = MOCK_AWS_CREDS.copy() + role_creds.update({"aws_role_arn": "arn:aws:iam::123456789012:role/TestRole"}) + config = AWSAuthenticatorConfig(**role_creds) + authenticator = AWSAuthenticator(config) + assert authenticator.session is not None + assert authenticator.backend_session is not None + + @mock_aws + def test_authenticate_backend_different_region(self): + role_creds = MOCK_AWS_CREDS.copy() + role_creds.update({"backend_region": "us-west-2"}) + config = AWSAuthenticatorConfig(**role_creds) + assert config.backend_region == "us-west-2" + authenticator = AWSAuthenticator(config) + assert authenticator.session is not None + assert authenticator.backend_session is not None + assert authenticator.session != authenticator.backend_session + assert authenticator.backend_session.region_name == "us-west-2" + + @mock_aws + def test_authenticate_backend_different_role(self): + role_creds = MOCK_AWS_CREDS.copy() + role_creds.update( + {"backend_role_arn": "arn:aws:iam::123456789012:role/TestRole"} + ) + config = AWSAuthenticatorConfig(**role_creds) + authenticator = AWSAuthenticator(config) + assert authenticator.session is not None + assert authenticator.backend_session is not None + assert authenticator.session != authenticator.backend_session + + @mock_aws + def test_backend_region_property(self): + alt_creds = MOCK_AWS_CREDS.copy() + alt_creds.update({"backend_region": "us-west-2"}) + config = AWSAuthenticatorConfig(**alt_creds) + authenticator = AWSAuthenticator(config) + assert 
authenticator.backend_region == "us-west-2" + + @mock_aws + def test_backend_region_property_default(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + assert authenticator.backend_region == "us-east-1" + + @mock_aws + def test_region_property(self): + alt_creds = MOCK_AWS_CREDS.copy() + alt_creds.update({"aws_region": "us-west-2"}) + config = AWSAuthenticatorConfig(**alt_creds) + authenticator = AWSAuthenticator(config) + assert authenticator.region == "us-west-2" + + @mock_aws + def test_region_property_default(self): + config = AWSAuthenticatorConfig(**MOCK_AWS_CREDS) + authenticator = AWSAuthenticator(config) + assert authenticator.region == "us-east-1" + + +@mock_aws +class TestAssumeRoleSession: + def test_assume_role_for_backend(self, boto_session, aws_auth_config): + """Test assuming a role for the backend.""" + new_session = _assume_role_session(boto_session, aws_auth_config, backend=True) + assert new_session.region_name == aws_auth_config.backend_region + + def test_assume_role_not_for_backend(self, boto_session, aws_auth_config): + """Test assuming a role not for the backend.""" + new_session = _assume_role_session(boto_session, aws_auth_config, backend=False) + assert new_session.region_name == aws_auth_config.aws_region + + def test_assume_role_with_external_id(self, boto_session, aws_auth_config): + """Test assuming a role with an external ID.""" + new_session = _assume_role_session(boto_session, aws_auth_config, backend=False) + assert new_session is not None + + @patch("boto3.Session", side_effect=NoCredentialsError) + def test_assume_role_failure_raises_exception(self, boto_session, aws_auth_config): + """Test that an exception is raised if assuming the role fails.""" + with pytest.raises(TFWorkerException): + _assume_role_session(boto_session, aws_auth_config, backend=False) + + +class TestGetBackendSession: + @patch("tfworker.authenticators.aws._assume_role_session") + @patch("boto3.Session") + def test_get_backend_session_with_role_arn( + self, mock_boto3_session, mock_assume_role_session + ): + """Test getting backend session with backend_role_arn provided.""" + auth_config = AWSAuthenticatorConfig( + **MOCK_AWS_CREDS, + backend_role_arn="arn:aws:iam::123456789012:role/backendRole", + backend_region="us-west-2" + ) + init_session = MagicMock() + mock_assume_role_session.return_value = MagicMock() + + _get_backend_session(auth_config, init_session) + + mock_assume_role_session.assert_called_once_with( + init_session, auth_config, backend=True + ) + + @patch("tfworker.authenticators.aws._assume_role_session") + @patch("boto3.Session") + def test_get_backend_session_without_role_arn( + self, mock_boto3_session, mock_assume_role_session + ): + """Test getting backend session without backend_role_arn provided.""" + auth_config = AWSAuthenticatorConfig( + **MOCK_AWS_CREDS, backend_role_arn=None, backend_region="us-west-2" + ) + init_session = MagicMock() + mock_boto3_session.return_value = MagicMock() + + _get_backend_session(auth_config, init_session) + + mock_boto3_session.assert_called_once_with( + region_name="us-west-2", + aws_access_key_id=MOCK_AWS_CREDS["aws_access_key_id"], + aws_secret_access_key=MOCK_AWS_CREDS["aws_secret_access_key"], + ) + mock_assume_role_session.assert_not_called() + + @patch("tfworker.authenticators.aws._assume_role_session") + @patch("boto3.Session") + def test_get_backend_session_raises_exception( + self, mock_boto3_session, mock_assume_role_session + ): + """Test that TFWorkerException 
is raised when there's an error getting the backend session.""" + auth_config = AWSAuthenticatorConfig( + **MOCK_AWS_CREDS, backend_role_arn="invalid", backend_region="us-west-2" + ) + init_session = MagicMock() + mock_assume_role_session.side_effect = Exception("Test error") + + with pytest.raises(TFWorkerException): + _get_backend_session(auth_config, init_session) + + mock_boto3_session.assert_not_called() + + +class TestGetInitSessionArgs: + def test_with_aws_profile(self): + """Test with only aws_profile provided.""" + auth_config = AWSAuthenticatorConfig( + aws_profile="test_profile", aws_region="us-east-1" + ) + expected = {"profile_name": "test_profile"} + assert _get_init_session_args(auth_config) == expected + + def test_with_access_key_and_secret_key(self): + """Test with aws_access_key_id and aws_secret_access_key provided.""" + auth_config = AWSAuthenticatorConfig( + aws_access_key_id="test_id", + aws_secret_access_key="test_secret", + aws_region="us-east-1", + ) + expected = { + "aws_access_key_id": "test_id", + "aws_secret_access_key": "test_secret", + } + assert _get_init_session_args(auth_config) == expected + + def test_with_all_parameters(self): + """Test with all parameters provided.""" + auth_config = AWSAuthenticatorConfig( + aws_profile="test_profile", + aws_access_key_id="test_id", + aws_secret_access_key="test_secret", + aws_session_token="test_token", + aws_region="us-east-1", + ) + expected = { + "profile_name": "test_profile", + "aws_access_key_id": "test_id", + "aws_secret_access_key": "test_secret", + "aws_session_token": "test_token", + } + assert _get_init_session_args(auth_config) == expected + + +class TestNeedBackendSession: + def test_backend_session_not_needed(self): + """Test that a backend session is not needed when regions are the same and no backend_role_arn.""" + auth_config = AWSAuthenticatorConfig( + aws_region="us-east-1", aws_profile="test_profile" + ) + assert not _need_backend_session(auth_config) + + def test_backend_session_needed_different_regions(self): + """Test that a backend session is needed when aws_region and backend_region are different.""" + auth_config = AWSAuthenticatorConfig( + aws_region="us-east-1", + aws_profile="test_profile", + backend_region="us-west-2", + ) + assert _need_backend_session(auth_config) + + def test_backend_session_needed_with_backend_role_arn(self): + """Test that a backend session is needed when backend_role_arn is provided.""" + auth_config = AWSAuthenticatorConfig( + aws_region="us-east-1", + aws_profile="test_profile", + backend_region="us-east-1", + backend_role_arn="arn:aws:iam::123456789012:role/backendRole", + ) + assert _need_backend_session(auth_config) diff --git a/tests/authenticators/test_auth_google.py b/tests/authenticators/test_auth_google.py new file mode 100644 index 0000000..2780c6f --- /dev/null +++ b/tests/authenticators/test_auth_google.py @@ -0,0 +1,46 @@ +from tfworker.authenticators import ( + GoogleAuthenticator, + GoogleAuthenticatorConfig, + GoogleBetaAuthenticator, +) + + +class TestGoogleAuthenticator: + def test_initialization(self): + """Test that GoogleAuthenticator initializes correctly with config.""" + config = GoogleAuthenticatorConfig( + gcp_creds_path="/path/to/creds.json", + gcp_region="us-west-1", + project="test-project", + ) + authenticator = GoogleAuthenticator(auth_config=config) + + assert authenticator.creds_path == "/path/to/creds.json" + assert authenticator.project == "test-project" + assert authenticator.region == "us-west-1" + + def 
test_env_with_creds_path(self): + """Test the env method returns correct environment variable.""" + config = GoogleAuthenticatorConfig( + gcp_creds_path="/path/to/creds.json", + gcp_region="us-west-1", + project="test-project", + ) + authenticator = GoogleAuthenticator(auth_config=config) + + env_vars = authenticator.env() + assert env_vars["GOOGLE_APPLICATION_CREDENTIALS"] == "/path/to/creds.json" + + +class TestGoogleBetaAuthenticator: + def test_tag_difference(self): + """Test that GoogleBetaAuthenticator has a different tag than GoogleAuthenticator.""" + config = GoogleAuthenticatorConfig( + gcp_creds_path="/path/to/creds.json", + gcp_region="us-west-1", + project="test-project", + ) + beta_authenticator = GoogleBetaAuthenticator(auth_config=config) + + assert beta_authenticator.tag == "google-beta" + assert GoogleAuthenticator.tag != GoogleBetaAuthenticator.tag diff --git a/tests/authenticators/test_authenticators_collection.py b/tests/authenticators/test_authenticators_collection.py new file mode 100644 index 0000000..2de07f3 --- /dev/null +++ b/tests/authenticators/test_authenticators_collection.py @@ -0,0 +1,118 @@ +from unittest.mock import patch + +import pytest +from pydantic import ValidationError +from pydantic_core import InitErrorDetails + +from tfworker.authenticators.base import BaseAuthenticator, BaseAuthenticatorConfig +from tfworker.authenticators.collection import AuthenticatorsCollection +from tfworker.exceptions import FrozenInstanceError, UnknownAuthenticator + + +class MockAuthenticatorConfig(BaseAuthenticatorConfig): + pass + + +class MockAuthenticator(BaseAuthenticator): + config_model = MockAuthenticatorConfig + + def __init__(self, auth_config: BaseAuthenticatorConfig): + self.auth_config = auth_config + + def env(self): + pass + + +MockAuthenticator.tag = "mock" + + +@pytest.fixture +def authenticators_collection(mock_cli_options_root): + # Create a fresh instance of ALL for each test + all_authenticators = [MockAuthenticator] + with patch("tfworker.authenticators.collection.ALL", all_authenticators): + AuthenticatorsCollection._instance = None + return AuthenticatorsCollection(mock_cli_options_root) + + +class TestAuthenticatorsCollection: + + def test_singleton_behavior(self, mock_cli_options_root): + instance1 = AuthenticatorsCollection(mock_cli_options_root) + instance2 = AuthenticatorsCollection(mock_cli_options_root) + assert instance1 is instance2, "AuthenticatorsCollection should be a singleton" + + def test_init_successful_authenticator_creation(self, authenticators_collection): + assert ( + "mock" in authenticators_collection._authenticators + ), "Authenticator should be created and added to the collection" + + def test_init_unsuccessful_authenticator_creation(self, mock_cli_options_root): + # Mocking the ValidationError to simulate a configuration failure + errors = [ + InitErrorDetails( + **{ + "loc": ("mock_field", "mock_field"), + "input": "mock_input", + "ctx": {"error": "error message"}, + "type": "value_error", + } + ) + ] + validation_error = ValidationError.from_exception_data("invalid config", errors) + + with patch.object( + MockAuthenticator.config_model, "__call__", side_effect=validation_error + ): + AuthenticatorsCollection._instance = None + authenticators_collection = AuthenticatorsCollection(mock_cli_options_root) + assert ( + "mock" not in authenticators_collection._authenticators + ), "Authenticator with invalid configuration should not be added to the collection" + + def test_len(self, authenticators_collection): + assert 
len(authenticators_collection) == len( + authenticators_collection._authenticators + ), "__len__ method should return the number of authenticators" + + def test_getitem_by_key(self, authenticators_collection): + assert ( + authenticators_collection["mock"] is not None + ), "__getitem__ should return the authenticator for the given key" + + def test_getitem_by_index(self, authenticators_collection): + assert ( + authenticators_collection[0] is not None + ), "__getitem__ should return the authenticator for the given index" + + def test_getitem_key_error(self, authenticators_collection): + with pytest.raises(UnknownAuthenticator): + authenticators_collection["invalid"] + + def test_get_method(self, authenticators_collection): + assert ( + authenticators_collection.get("mock") is not None + ), "get method should return the authenticator for the given key" + + def test_get_method_key_error(self, authenticators_collection): + with pytest.raises(UnknownAuthenticator): + authenticators_collection.get("invalid") + + def test_iter(self, authenticators_collection): + for authenticator in authenticators_collection: + assert ( + authenticator.tag == "mock" + ), "__iter__ should return the authenticators in the collection" + + def test_set_item(self, authenticators_collection): + authenticators_collection["new"] = MockAuthenticator(MockAuthenticatorConfig()) + assert ( + authenticators_collection["new"] is not None + ), "__setitem__ should add a new authenticator" + + def test_set_item_frozen(self, authenticators_collection): + authenticators_collection.freeze() + with pytest.raises(FrozenInstanceError): + authenticators_collection["new"] = MockAuthenticator( + MockAuthenticatorConfig() + ) diff --git a/tests/authenticators/test_authenticatorscollection.py b/tests/authenticators/test_authenticatorscollection.py deleted file mode 100644 index feb9a64..0000000 --- a/tests/authenticators/test_authenticatorscollection.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pytest - -import tfworker.authenticators -from tfworker.commands.root import RootCommand - - -@pytest.fixture -def cli_args(aws_access_key_id, aws_secret_access_key): - return { - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_default_region": "us-east-1", - } - - -@pytest.fixture -def state_args(cli_args): - result = RootCommand.StateArgs() - for k, v in cli_args.items(): - setattr(result, k, v) - setattr(result, "backend_bucket", "alphabet") - return result - - -class TestAuthenticatorsCollection: - def test_collection(self, state_args): - ac = tfworker.authenticators.AuthenticatorsCollection(state_args=state_args) - assert len(ac) == len(tfworker.authenticators.ALL) - - a0 = ac.get(0) - assert a0.tag == ac.get(a0.tag).tag - - def test_unknown_authenticator(self, state_args): - ac = tfworker.authenticators.AuthenticatorsCollection(state_args=state_args) - assert ac.get("aws") is not None - with pytest.raises(tfworker.authenticators.UnknownAuthenticator): - ac.get("unknown") diff --git a/tests/authenticators/test_aws_auth.py b/tests/authenticators/test_aws_auth.py deleted file mode 100644 index 04c5ccd..0000000 --- a/tests/authenticators/test_aws_auth.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest.mock import patch - -import pytest -from botocore.credentials import Credentials -from moto import mock_aws - -from tfworker.authenticators.aws import AWSAuthenticator, MissingArgumentException -from tfworker.commands.root import RootCommand -from tfworker.constants import DEFAULT_BACKEND_PREFIX - - -@pytest.fixture -def cli_args(aws_access_key_id, aws_secret_access_key): - return { - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_default_region": "us-east-1", - "backend_prefix": DEFAULT_BACKEND_PREFIX, - } - - -@pytest.fixture -def state_args(cli_args): - result = RootCommand.StateArgs() - for k, v in cli_args.items(): - setattr(result, k, v) - setattr(result, "backend_bucket", "alphabet") - return result - - -@pytest.fixture -def state_args_with_profile_only(): - result = RootCommand.StateArgs() - setattr(result, "aws_profile", "testing") - setattr(result, "backend_bucket", "alphabet") - return result - - -@pytest.fixture -def state_args_with_role_arn(state_args, aws_role_arn): - setattr(state_args, "aws_role_arn", aws_role_arn) - setattr(state_args, "backend_bucket", "alphabet") - return state_args - - -@pytest.fixture -def aws_credentials_instance(state_args): - return Credentials(state_args.aws_access_key_id, state_args.aws_secret_access_key) - - -class TestAWSAuthenticator: - def test_with_no_backend_bucket(self): - with pytest.raises(MissingArgumentException) as e: - AWSAuthenticator(state_args={}, deployment="deployfu") - assert "backend_bucket" in str(e.value) - - @mock_aws - def test_with_access_key_pair_creds( - self, sts_client, state_args, aws_access_key_id, aws_secret_access_key - ): - auth = AWSAuthenticator(state_args, deployment="deployfu") - assert auth.access_key_id == aws_access_key_id - assert auth.secret_access_key == aws_secret_access_key - assert auth.session_token is None - - @mock_aws - def test_with_access_key_pair_creds_and_role_arn( - self, sts_client, state_args_with_role_arn, aws_secret_access_key - ): - auth = AWSAuthenticator(state_args_with_role_arn, deployment="deployfu") - # The access_key_id we retrieve should NOT be the one from the fixture, - # but rather one that moto generates - assert auth.access_key_id.startswith("ASIA") - assert auth.secret_access_key != aws_secret_access_key - # Taking as a cue: https://github.com/spulec/moto/blob/master/tests/test_sts/test_sts.py#L636 - assert auth.session_token.startswith("FQoGZXIvYXdzE") - - @patch("botocore.session.Session.get_scoped_config") - @patch("botocore.session.Session.get_credentials") - def test_with_profile( - self, - mocked_credentials, - mocked_config, - state_args_with_profile_only, - aws_access_key_id, - aws_secret_access_key, - aws_credentials_instance, - cli_args, - ): - mocked_credentials.return_value = aws_credentials_instance - mocked_config.return_value = cli_args - auth = AWSAuthenticator(state_args_with_profile_only, deployment="deployfu") - assert auth.profile == "testing" - assert auth.access_key_id == aws_credentials_instance.access_key - assert auth.secret_access_key == aws_credentials_instance.secret_key - assert auth.session_token is None - - @mock_aws - def test_with_prefix(self, state_args): - auth = AWSAuthenticator(state_args, deployment="deployfu") - assert auth.prefix == DEFAULT_BACKEND_PREFIX.format(deployment="deployfu") - - state_args.backend_prefix = "my-prefix" - auth = AWSAuthenticator(state_args, deployment="deployfu") - assert auth.prefix == "my-prefix" diff --git 
a/tests/authenticators/test_base_auth.py b/tests/authenticators/test_base_auth.py deleted file mode 100644 index 525bb66..0000000 --- a/tests/authenticators/test_base_auth.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from tfworker.authenticators.base import BaseAuthenticator - - -class TestBaseAuthenticator: - def test_base_authenticator(self): - ba = BaseAuthenticator(state_args=None) - assert ba.env() == {} diff --git a/tests/backends/test_backends.py b/tests/backends/test_backends.py new file mode 100644 index 0000000..c8a1725 --- /dev/null +++ b/tests/backends/test_backends.py @@ -0,0 +1,7 @@ +from tfworker.backends.backends import Backends + + +class TestBackendNames: + def test_names(self): + items = Backends.__members__.items() + assert Backends.names() == [item[0] for item in items] diff --git a/tests/backends/test_backends_base.py b/tests/backends/test_backends_base.py new file mode 100644 index 0000000..fa908fd --- /dev/null +++ b/tests/backends/test_backends_base.py @@ -0,0 +1,25 @@ +import json + +import pytest + +from tfworker.backends.base import validate_backend_empty +from tfworker.exceptions import BackendError + + +class TestValidateBackendEmpty: + + def test_validate_backend_empty(self, empty_state): + assert validate_backend_empty(json.loads(empty_state)) is True + + def test_validate_backend_empty_false(self, occupied_state): + assert validate_backend_empty(json.loads(occupied_state)) is False + + def test_validate_backend_missing_key(self): + state = json.loads("{}") + with pytest.raises(BackendError, match="key does not exist"): + validate_backend_empty(state) + + def test_validate_backend_invalid_type(self): + state = "bad state" + with pytest.raises(BackendError, match="not valid JSON"): + validate_backend_empty(state) diff --git a/tests/backends/test_backends_s3.py b/tests/backends/test_backends_s3.py new file mode 100644 index 0000000..80cf257 --- /dev/null +++ b/tests/backends/test_backends_s3.py @@ -0,0 +1,564 @@ +from unittest.mock import patch + +import boto3 +import botocore +import pytest +from moto import mock_aws + +import tfworker.util.log as log +from tfworker.backends.s3 import S3Backend +from tfworker.exceptions import BackendError + + +class TestS3BackendInit: + @mock_aws + def test_init_success(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + assert backend._deployment == "test-deployment" + assert backend._s3_client is not None + assert backend._ddb_client is not None + assert backend._bucket_files is not None + + @mock_aws + def test_init_success_alt_region( + self, mock_authenticators_backend_west, mocker, mock_click_context_backend_west + ): + mocker.patch( + "click.get_current_context", return_value=mock_click_context_backend_west + ) + backend = S3Backend(mock_authenticators_backend_west, "test-deployment") + assert backend._deployment == "test-deployment" + assert backend._s3_client is not None + assert 
backend._ddb_client is not None + assert backend._bucket_files is not None + + @mock_aws + def test_init_success_undefined_deployment( + self, mock_authenticators, mock_click_context + ): + backend = S3Backend(mock_authenticators) + assert backend._deployment == "undefined" + assert not hasattr(backend, "_s3_client") + assert not hasattr(backend, "_ddb_client") + assert not hasattr(backend, "_bucket_files") + + @mock_aws + def test_init_no_aws_session(self, mock_authenticators): + mock_authenticators["aws"].session = None + with pytest.raises(BackendError, match="AWS session not available"): + S3Backend(mock_authenticators, "test-deployment") + + @mock_aws + def test_init_no_backend_session(self, mock_authenticators): + mock_authenticators["aws"].backend_session = None + with pytest.raises(BackendError, match="AWS backend session not available"): + S3Backend(mock_authenticators, "test-deployment") + + @mock_aws + def test_init_no_create_bucket( + self, mock_authenticators, mocker, mock_click_context + ): + mock_click_context.obj.root_options.create_backend_bucket = False + mocker.patch("click.get_current_context", return_value=mock_click_context) + with pytest.raises( + BackendError, + match="Backend bucket not found and --no-create-backend-bucket specified", + ): + S3Backend(mock_authenticators, "test-deployment") + + +class TestS3BackendCheckBucketExists: + + @mock_aws + def test_check_bucket_exists(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + s3 = boto3.client("s3") + s3.create_bucket(Bucket="test-bucket") + assert backend._check_bucket_exists("test-bucket") is True + + @mock_aws + def test_check_bucket_does_not_exist(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + assert backend._check_bucket_exists("non-existent-bucket") is False + + @mock_aws + def test_check_bucket_error(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend._s3_client, + "head_bucket", + side_effect=botocore.exceptions.ClientError( + {"Error": {"Code": "403"}}, "HeadBucket" + ), + ): + with pytest.raises(SystemExit): + backend._check_bucket_exists("test-bucket") + + +class TestS3BackendEnsureLockingTable: + + @mock_aws + def setup_method(self, _): + self.dynamodb = boto3.client("dynamodb") + self.cleanup_tables() + + @mock_aws + def teardown_method(self, _): + self.cleanup_tables() + + def cleanup_tables(self): + tables = self.dynamodb.list_tables()["TableNames"] + for table in tables: + self.dynamodb.delete_table(TableName=table) + self.dynamodb.get_waiter("table_not_exists").wait(TableName=table) + + @mock_aws + def test_ensure_locking_table_exists(self, mock_authenticators): + self.dynamodb.create_table( + TableName="terraform-test-deployment-exists", + KeySchema=[{"AttributeName": "LockID", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "LockID", "AttributeType": "S"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + tables_len = len(self.dynamodb.list_tables()["TableNames"]) + backend = S3Backend(mock_authenticators, "test-deployment-exists") + backend._ddb_client = self.dynamodb + tables_len_after = len(self.dynamodb.list_tables()["TableNames"]) + assert tables_len == tables_len_after + + @mock_aws + def test_ensure_locking_table_create(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment-new") + backend._ensure_locking_table() + tables = 
self.dynamodb.list_tables()["TableNames"] + assert "terraform-test-deployment-new" in tables + + +class TestS3BackendCheckTableExists: + + @mock_aws + def test_check_table_exists(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + assert backend._check_table_exists("terraform-test-deployment") is True + + @mock_aws + def test_check_table_does_not_exist(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + assert backend._check_table_exists("non-existent-table") is False + + @mock_aws + def test_check_table_error(self, mock_authenticators, mock_click_context): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend._ddb_client, + "list_tables", + side_effect=botocore.exceptions.ClientError( + {"Error": {"Code": "ResourceNotFoundException"}}, "DescribeTable" + ), + ): + with pytest.raises(SystemExit): + backend._check_table_exists("terraform-test-deployment") + mock_click_context.exit.assert_called_once_with(1) + + +class TestS3BackendListBucketFiles: + + @mock_aws + def test_list_bucket_files(self, mock_authenticators): + s3 = boto3.client("s3") + s3.create_bucket(Bucket="test-bucket") + s3.put_object( + Bucket="test-bucket", Key="prefix/test-deployment/def1/file1", Body=b"test" + ) + s3.put_object( + Bucket="test-bucket", Key="prefix/test-deployment/def2/file2", Body=b"test" + ) + backend = S3Backend(mock_authenticators, "test-deployment") + bucket_files = backend._list_bucket_definitions() + assert "def1" in bucket_files + assert "def2" in bucket_files + assert sorted(backend.remotes) == sorted(["def1", "def2"]) + + +class TestS3BackendClean: + + @mock_aws + def setup_method(self, _): + self.s3 = boto3.client("s3") + self.dynamodb = boto3.client("dynamodb") + self.s3.create_bucket(Bucket="test-bucket") + self.dynamodb.create_table( + TableName="terraform-test-deployment", + KeySchema=[{"AttributeName": "LockID", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "LockID", "AttributeType": "S"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + self.dynamodb.get_waiter("table_exists").wait( + TableName="terraform-test-deployment" + ) + + @mock_aws + def teardown_method(self, _): + tables = self.dynamodb.list_tables()["TableNames"] + for table in tables: + self.dynamodb.delete_table(TableName=table) + self.dynamodb.get_waiter("table_not_exists").wait(TableName=table) + + try: + bucket_objects = self.s3.list_objects_v2(Bucket="test-bucket").get( + "Contents", [] + ) + for obj in bucket_objects: + self.s3.delete_object(Bucket="test-bucket", Key=obj["Key"]) + self.s3.delete_bucket(Bucket="test-bucket") + except self.s3.exceptions.NoSuchBucket: + pass + + @mock_aws + def test_clean_with_limit(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + self.s3.put_object(Bucket="test-bucket", Key="prefix/resource1") + with patch.object( + backend, "_clean_bucket_state" + ) as mock_clean_bucket_state, patch.object( + backend, "_clean_locking_state" + ) as mock_clean_locking_state: + backend.clean("test-deployment", limit=("resource1",)) + mock_clean_bucket_state.assert_called_once_with(definition="resource1") + mock_clean_locking_state.assert_called_once_with( + "test-deployment", definition="resource1" + ) + + @mock_aws + def test_clean_without_limit(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + self.s3.put_object(Bucket="test-bucket", 
Key="prefix/test-deployment/file1") + with patch.object( + backend, "_clean_bucket_state" + ) as mock_clean_bucket_state, patch.object( + backend, "_clean_locking_state" + ) as mock_clean_locking_state: + backend.clean("test-deployment") + mock_clean_bucket_state.assert_called_once_with() + mock_clean_locking_state.assert_called_once_with("test-deployment") + + @mock_aws + def test_clean_raises_backend_error(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend, + "_clean_bucket_state", + side_effect=BackendError("error deleting state"), + ): + with pytest.raises(BackendError, match="error deleting state"): + backend.clean("test-deployment") + + @mock_aws + def test_clean_with_limit_clean_bucket_state_error(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + self.s3.put_object(Bucket="test-bucket", Key="prefix/resource1") + with patch.object( + backend, + "_clean_bucket_state", + side_effect=BackendError("error deleting state"), + ): + with pytest.raises(BackendError, match="error deleting state"): + backend.clean("test-deployment", limit=("resource1",)) + + @mock_aws + def test_clean_with_limit_clean_locking_state_error(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + self.s3.put_object(Bucket="test-bucket", Key="prefix/resource1") + with patch.object(backend, "_clean_bucket_state", return_value=None): + with patch.object( + backend, + "_clean_locking_state", + side_effect=BackendError("error deleting state"), + ): + with pytest.raises(BackendError, match="error deleting state"): + backend.clean("test-deployment", limit=("resource1",)) + + +class TestS3BackendDataHcl: + @mock_aws + def test_data_hcl_success(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + remotes = ["remote1", "remote2"] + result = backend.data_hcl(remotes) + assert 'data "terraform_remote_state" "remote1"' in result + assert 'data "terraform_remote_state" "remote2"' in result + assert 'backend = "s3"' in result + assert 'region = "us-east-1"' in result + assert 'bucket = "test-bucket"' in result + assert 'key = "prefix/remote1/terraform.tfstate"' in result + assert 'key = "prefix/remote2/terraform.tfstate"' in result + + @mock_aws + def test_data_hcl_invalid_remotes(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with pytest.raises(ValueError, match="remotes must be a list"): + backend.data_hcl("invalid_remote") + + +class TestS3BackendHcl: + @mock_aws + def test_hcl_success(self, mock_authenticators, mock_click_context): + backend = S3Backend(mock_authenticators, "test-deployment") + result = backend.hcl("test-deployment") + assert 'backend "s3" {' in result + assert 'region = "us-east-1"' in result + assert 'bucket = "test-bucket"' in result + assert 'key = "prefix/test-deployment/terraform.tfstate"' in result + assert 'dynamodb_table = "terraform-test-deployment"' in result + assert 'encrypt = "true"' in result + + +class TestS3BackendFilterKeys: + + @mock_aws + def setup_s3(self): + self.s3 = boto3.client("s3", region_name="us-east-1") + self.s3.create_bucket(Bucket="test-bucket") + # Add objects to the bucket + self.s3.put_object(Bucket="test-bucket", Key="prefix/file1", Body="data") + self.s3.put_object(Bucket="test-bucket", Key="prefix/file2", Body="data") + self.s3.put_object(Bucket="test-bucket", Key="prefix/dir/file3", Body="data") + self.s3.put_object(Bucket="test-bucket", 
Key="other-prefix/file4", Body="data") + + @mock_aws + def test_filter_keys_no_prefix(self): + self.setup_s3() + paginator = self.s3.get_paginator("list_objects_v2") + keys = list(S3Backend.filter_keys(paginator, "test-bucket")) + assert "prefix/file1" in keys + assert "prefix/file2" in keys + assert "prefix/dir/file3" in keys + assert "other-prefix/file4" in keys + + @mock_aws + def test_filter_keys_with_prefix(self): + self.setup_s3() + paginator = self.s3.get_paginator("list_objects_v2") + keys = list(S3Backend.filter_keys(paginator, "test-bucket", prefix="prefix/")) + assert "prefix/file1" in keys + assert "prefix/file2" in keys + assert "prefix/dir/file3" in keys + assert "other-prefix/file4" not in keys + + @mock_aws + def test_filter_keys_with_delimiter(self): + self.setup_s3() + paginator = self.s3.get_paginator("list_objects_v2") + keys = list( + S3Backend.filter_keys( + paginator, "test-bucket", prefix="prefix/", delimiter="/" + ) + ) + assert "prefix/file1" in keys + assert "prefix/file2" in keys + assert "prefix/dir/file3" in keys + assert "other-prefix/file4" not in keys + + # @TODO: Fix the test, or code, start_after is not working as expected here, but is not used in the code + # @mock_aws + # def test_filter_keys_with_start_after(self): + # self.setup_s3() + # paginator = self.s3.get_paginator("list_objects_v2") + # keys = list(S3Backend.filter_keys(paginator, "test-bucket", prefix="prefix", start_after="prefix/file1")) + # log.trace(keys) + # assert "prefix/file1" not in keys + # assert "prefix/file2" in keys + # assert "prefix/dir/file3" in keys + # assert "other-prefix/file4" not in keys + + +class TestS3BackendCleanBucketState: + @mock_aws + def setup_s3(self, empty_state, occupied_state, all_empty=False): + if all_empty: + occupied_state = empty_state + self.s3 = boto3.client("s3", region_name="us-east-1") + self.s3.create_bucket(Bucket="test-bucket") + # Add objects to the bucket + self.s3.put_object( + Bucket="test-bucket", Key="prefix/def1/terraform.tfstate", Body=empty_state + ) + self.s3.put_object( + Bucket="test-bucket", Key="prefix/def2/terraform.tfstate", Body=empty_state + ) + self.s3.put_object( + Bucket="test-bucket", + Key="prefix/def3/terraform.tfstate", + Body=occupied_state, + ) + self.s3.put_object( + Bucket="test-bucket", + Key="prefix/def4/terraform.tfstate", + Body=occupied_state, + ) + + @mock_aws + def test_clean_bucket_state(self, mock_authenticators, empty_state, occupied_state): + self.setup_s3(empty_state, occupied_state, all_empty=True) + backend = S3Backend(mock_authenticators, "test-deployment") + backend._clean_bucket_state() + keys = list( + S3Backend.filter_keys( + self.s3.get_paginator("list_objects_v2"), "test-bucket" + ) + ) + assert len(keys) == 0 + + @mock_aws + def test_clean_bucket_state_with_definition( + self, mock_authenticators, empty_state, occupied_state + ): + self.setup_s3(empty_state, occupied_state) + backend = S3Backend(mock_authenticators, "test-deployment") + backend._clean_bucket_state(definition="def1") + keys = list( + S3Backend.filter_keys( + self.s3.get_paginator("list_objects_v2"), "test-bucket" + ) + ) + assert len(keys) == 3 + assert "prefix/def1/terraform.tfstate" not in keys + assert "prefix/def2/terraform.tfstate" in keys + assert "prefix/def3/terraform.tfstate" in keys + assert "prefix/def4/terraform.tfstate" in keys + + @mock_aws + def test_clean_bucket_state_raises_backend_error( + self, mock_authenticators, empty_state, occupied_state + ): + self.setup_s3(empty_state, occupied_state) + backend = 
S3Backend(mock_authenticators, "test-deployment") + with pytest.raises(BackendError, match="not empty"): + backend._clean_bucket_state(definition="def3") + keys = list( + S3Backend.filter_keys( + self.s3.get_paginator("list_objects_v2"), "test-bucket" + ) + ) + assert len(keys) == 4 + assert "prefix/def1/terraform.tfstate" in keys + assert "prefix/def2/terraform.tfstate" in keys + assert "prefix/def3/terraform.tfstate" in keys + assert "prefix/def4/terraform.tfstate" in keys + + +class TestS3BackendCleanLockingState: + @mock_aws + def setup_ddb(self, deployment, lock_id): + self.dynamodb = boto3.client("dynamodb") + self.dynamodb.create_table( + TableName=f"terraform-{deployment}", + KeySchema=[{"AttributeName": "LockID", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "LockID", "AttributeType": "S"}], + ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1}, + ) + self.dynamodb.put_item( + TableName=f"terraform-{deployment}", + Item={"LockID": {"S": lock_id}}, + ) + + @mock_aws + def test_clean_locking_state(self, mock_authenticators): + self.setup_ddb( + "test-deployment", "test-bucket/prefix/lock1/terraform.tfstate-md5" + ) + backend = S3Backend(mock_authenticators, "test-deployment") + backend._clean_locking_state("test-deployment") + tables = self.dynamodb.list_tables()["TableNames"] + assert "terraform-test-deployment" not in tables + + @mock_aws + def test_clean_locking_state_with_definition(self, mock_authenticators): + self.setup_ddb( + "test-deployment", "test-bucket/prefix/lock1/terraform.tfstate-md5" + ) + backend = S3Backend(mock_authenticators, "test-deployment") + backend._clean_locking_state("test-deployment", definition="lock1") + tables = self.dynamodb.list_tables()["TableNames"] + items = self.dynamodb.scan(TableName="terraform-test-deployment")["Items"] + log.trace(items) + assert len(items) == 0 + assert "terraform-test-deployment" in tables + + @mock_aws + def test_clean_locking_state_with_bad_key(self, mock_authenticators): + self.setup_ddb( + "test-deployment", "test-bucket/prefix/lock1/terraform.tfstate-md5" + ) + backend = S3Backend(mock_authenticators, "test-deployment") + backend._clean_locking_state("test-deployment", definition="lock2") + tables = self.dynamodb.list_tables()["TableNames"] + items = self.dynamodb.scan(TableName="terraform-test-deployment")["Items"] + assert len(items) == 1 + assert "terraform-test-deployment" in tables + + +class TestS3BackendCreateBucket: + @mock_aws + def test_create_bucket_success(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + backend._create_bucket("test-bucket") + s3 = boto3.client("s3") + response = s3.list_buckets() + buckets = [bucket["Name"] for bucket in response["Buckets"]] + assert "test-bucket" in buckets + + @mock_aws + def test_create_bucket_invalid_location_constraint(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend._s3_client, + "create_bucket", + side_effect=botocore.exceptions.ClientError( + {"Error": {"Code": "InvalidLocationConstraint"}}, "CreateBucket" + ), + ): + with pytest.raises(SystemExit): + backend._create_bucket("test-bucket") + + @mock_aws + def test_create_bucket_already_exists(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend._s3_client, + "create_bucket", + side_effect=botocore.exceptions.ClientError( + {"Error": {"Code": "BucketAlreadyExists"}}, "CreateBucket" + ), + ): + with 
pytest.raises(SystemExit): + backend._create_bucket("test-bucket") + + @mock_aws + def test_create_bucket_already_exists_alt_region( + self, mock_authenticators_backend_west, mocker, mock_click_context_backend_west + ): + mocker.patch( + "click.get_current_context", return_value=mock_click_context_backend_west + ) + backend = S3Backend(mock_authenticators_backend_west, "test-deployment") + s3 = boto3.client("s3", region_name="us-west-2") + s3.create_bucket( + Bucket="already-exists-test-bucket", + CreateBucketConfiguration={"LocationConstraint": "us-west-2"}, + ) + with pytest.raises(SystemExit): + backend._create_bucket("already-exists-test-bucket") + + @mock_aws + def test_create_bucket_unknown_error(self, mock_authenticators): + backend = S3Backend(mock_authenticators, "test-deployment") + with patch.object( + backend._s3_client, + "create_bucket", + side_effect=botocore.exceptions.ClientError( + {"Error": {"Code": "UnknownError"}}, "CreateBucket" + ), + ): + with pytest.raises(SystemExit): + backend._create_bucket("test-bucket") diff --git a/tests/backends/test_base.py b/tests/backends/test_base.py deleted file mode 100644 index 33aa0bf..0000000 --- a/tests/backends/test_base.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2021-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -from unittest.mock import patch - -import pytest - -import tfworker.backends.base as base - - -@patch.multiple(base.BaseBackend, __abstractmethods__=set()) -def test_base_backend(): - b = base.BaseBackend() - assert b.tag == "base" - assert b.clean("", "") is None - assert b.data_hcl([]) is None - assert b.hcl("") is None - - -def test_validate_backend_empty(request): - with pytest.raises(base.BackendError): - base.validate_backend_empty({}) - - with open(f"{request.config.rootdir}/tests/fixtures/states/empty.tfstate") as f: - state = json.load(f) - assert base.validate_backend_empty(state) is True - - with open(f"{request.config.rootdir}/tests/fixtures/states/occupied.tfstate") as f: - state = json.load(f) - assert base.validate_backend_empty(state) is False diff --git a/tests/backends/test_gcs.py b/tests/backends/test_gcs.py deleted file mode 100644 index 1aa5df1..0000000 --- a/tests/backends/test_gcs.py +++ /dev/null @@ -1,200 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest.mock import patch - -import pytest - -from tfworker.backends.base import BackendError - - -class MockGCSClient: - def __init__(self): - self._connection = MockGCSConnection() - - @property - def get_bucket(self): - return MockGCSBucket - - -class MockGCSConnection: - @property - def api_request(self): - return None - - -class MockGCSBucket: - def __init__(self, bucket): - self._blobs = [ - MockGCSBlob("terraform/test/foo/default.tfstate", "", b'{"resources":[]}'), - MockGCSBlob("terraform/test/bar/default.tfstate", "", b'{"resources":[]}'), - MockGCSBlob( - "terraform/fail/tflock/default.tflock", "", b'{"resources":[]}' - ), - MockGCSBlob("terraform/fail/other/other.file", "", b'{"resources":[]}'), - ] - - with open(f"{bucket}/tests/fixtures/states/occupied.tfstate", "rb") as f: - self._blobs.append( - MockGCSBlob("terraform/fail/occupied/default.tfstate", "", f.read()) - ) - - def list_blobs(self, prefix): - blobs = list(filter(lambda x: x.name.startswith(prefix), self._blobs)) - assert len(blobs) > 0 - return blobs - - -class MockGCSBlob: - def __init__(self, name, path, content): - self.name = name - self.path = path - self.content = content - - def download_as_string(self): - return self.content - - def delete(self): - pass - - -class TestClean: - def test_clean_prefix_check(self, gbasec): - gbasec.backend._gcs_prefix = None - gbasec.backend._gcs_bucket = None - with pytest.raises(BackendError): - gbasec.backend.clean("test") - - gbasec.backend._gcs_prefix = "valid" - gbasec.backend._gcs_bucket = None - with pytest.raises(BackendError): - gbasec.backend.clean("test") - - gbasec.backend._gcs_prefix = None - gbasec.backend._gcs_bucket = "valid" - with pytest.raises(BackendError): - gbasec.backend.clean("test") - - @patch("tfworker.backends.GCSBackend._clean_deployment_limit") - def test_clean_limit_check(self, mock_clean, gbasec): - mock_clean.return_value = None - gbasec.backend._storage_client = MockGCSClient() - assert gbasec.backend.clean("test", limit=("test-limit",)) is None - mock_clean.assert_called_once_with(("test-limit",)) - - @patch("tfworker.backends.GCSBackend._clean_prefix") - def test_clean_no_limit_check(self, mock_clean, gbasec): - mock_clean.return_value = None - gbasec.backend._storage_client = MockGCSClient() - gbasec.backend._gcs_prefix = "test-prefix" - assert gbasec.backend.clean("test") is None - mock_clean.assert_called_once_with("test-prefix") - - @patch("google.api_core.page_iterator.HTTPIterator") - @patch("tfworker.backends.GCSBackend._clean_prefix") - def test_clean_deployment_limit(self, mock_clean, mock_iter, gbasec): - mock_iter.return_value = ["terraform/test/foo/", "terraform/test/bar/"] - mock_clean.return_value = None - gbasec.backend._storage_client = MockGCSClient() - gbasec.backend._gcs_prefix = "terraform/test" - - with pytest.raises(BackendError): - gbasec.backend._clean_deployment_limit(("zed",)) - - gbasec.backend._clean_deployment_limit( - ( - "foo", - "bar", - ) - ) - assert mock_clean.call_count == 2 - - @patch("google.api_core.page_iterator.HTTPIterator") - def test_clean_prefix(self, mock_iter, gbasec, request): - mock_iter.return_value = ["terraform/test/foo/", "terraform/test/bar/"] - gbasec.backend._storage_client = MockGCSClient() - gbasec.backend._gcs_bucket = request.config.rootdir - - assert gbasec.backend._clean_prefix("terraform/test") is None - - with pytest.raises(BackendError): - gbasec.backend._clean_prefix("terraform/fail/tflock") - - with pytest.raises(BackendError): - 
gbasec.backend._clean_prefix("terraform/fail/other") - - with pytest.raises(BackendError): - gbasec.backend._clean_prefix("terraform/fail/occupied") - - @patch("google.api_core.page_iterator.HTTPIterator") - def test_get_state_list(self, mock_iter, gbasec): - mock_iter.return_value = ["foo/", "bar/"] - gbasec.backend._storage_client = MockGCSClient() - gbasec.backend._gcs_prefix = "" - items = gbasec.backend._get_state_list() - assert items == ["foo", "bar"] - - mock_iter.return_value = ["terraform/test/foo/", "terraform/test/bar/"] - gbasec.backend._gcs_prefix = "terraform/test/" - items = gbasec.backend._get_state_list() - assert items == ["foo", "bar"] - - def test_item_to_value(self, gbasec): - assert gbasec.backend._item_to_value("", "foo") == "foo" - - @pytest.mark.parametrize( - "prefix, inval, outval, expected_raise", - [ - ("terraform", "terraform/foo/", "foo", None), - ("terraform/a/b/c", "terraform/a/b/c/foo/", "foo", None), - ("terraform/a/b/c", "terraform/a/b/c/foo", "", BackendError), - ("terraform", "junk", "", BackendError), - ], - ) - def test_parse_gcs_items(self, gbasec, prefix, inval, outval, expected_raise): - gbasec.backend._gcs_prefix = prefix - if expected_raise: - with pytest.raises(expected_raise): - gbasec.backend._parse_gcs_items(inval) - else: - assert gbasec.backend._parse_gcs_items(inval) == outval - - -def test_google_hcl(gbasec, gcp_creds_file): - render = gbasec.backend.hcl("test") - expected_render = f""" backend "gcs" {{ - bucket = "test_gcp_bucket" - prefix = "terraform/test-0002/test" - credentials = "{gcp_creds_file}" - }}""" - assert render == expected_render - - -def test_google_data_hcl(gbasec, gcp_creds_file): - expected_render = f"""data "terraform_remote_state" "test" {{ - backend = "gcs" - config = {{ - bucket = "test_gcp_bucket" - prefix = "terraform/test-0002/test" - credentials = "{gcp_creds_file}" - }} -}}""" - render = [] - render.append(gbasec.backend.data_hcl(["test", "test"])) - render.append(gbasec.backend.data_hcl(["test"])) - for i in render: - assert i == expected_render - - with pytest.raises(ValueError): - render.append(gbasec.backend.data_hcl("test")) diff --git a/tests/backends/test_s3.py b/tests/backends/test_s3.py deleted file mode 100644 index 3c31bdf..0000000 --- a/tests/backends/test_s3.py +++ /dev/null @@ -1,404 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import random -import string -from unittest.mock import MagicMock, patch - -import pytest -from botocore.exceptions import ClientError -from moto import mock_aws - -from tests.conftest import MockAWSAuth -from tfworker.backends import S3Backend -from tfworker.backends.base import BackendError -from tfworker.handlers import HandlerError - -STATE_BUCKET = "test_bucket" -STATE_PREFIX = "terraform" -STATE_REGION = "us-west-2" -STATE_DEPLOYMENT = "test-0001" -EMPTY_STATE = f"{STATE_PREFIX}/{STATE_DEPLOYMENT}/empty/terraform.tfstate" -OCCUPIED_STATE = f"{STATE_PREFIX}/{STATE_DEPLOYMENT}/occupied/terraform.tfstate" -LOCK_DIGEST = "1234123412341234" -NO_SUCH_BUCKET = "no_such_bucket" - - -@pytest.fixture(scope="class") -def state_setup(request, s3_client, dynamodb_client): - # location constraint is required due to how we create the client with a specific region - location = {"LocationConstraint": STATE_REGION} - # if the bucket already exists, and is owned by us, continue. - try: - s3_client.create_bucket(Bucket=STATE_BUCKET, CreateBucketConfiguration=location) - except s3_client.exceptions.BucketAlreadyOwnedByYou: - # this is ok and expected - pass - - with open( - f"{request.config.rootdir}/tests/fixtures/states/empty.tfstate", "rb" - ) as f: - s3_client.put_object(Bucket=STATE_BUCKET, Key=EMPTY_STATE, Body=f) - - with open( - f"{request.config.rootdir}/tests/fixtures/states/occupied.tfstate", "rb" - ) as f: - s3_client.put_object(Bucket=STATE_BUCKET, Key=OCCUPIED_STATE, Body=f) - - # depending on how basec was called/used this may already be created, so don't fail - # if it already exists - try: - dynamodb_client.create_table( - TableName=f"terraform-{STATE_DEPLOYMENT}", - KeySchema=[{"AttributeName": "LockID", "KeyType": "HASH"}], - AttributeDefinitions=[ - {"AttributeName": "LockID", "AttributeType": "S"}, - ], - ProvisionedThroughput={ - "ReadCapacityUnits": 1, - "WriteCapacityUnits": 1, - }, - ) - except dynamodb_client.exceptions.ResourceInUseException: - pass - - dynamodb_client.put_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Item={ - "LockID": {"S": f"{STATE_BUCKET}/{EMPTY_STATE}-md5"}, - "Digest": {"S": f"{LOCK_DIGEST}"}, - }, - ) - dynamodb_client.put_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Item={ - "LockID": {"S": f"{STATE_BUCKET}/{OCCUPIED_STATE}-md5"}, - "Digest": {"S": f"{LOCK_DIGEST}"}, - }, - ) - - -class TestS3BackendLimit: - local_test_name = "".join(random.choices(string.ascii_letters, k=10)) - - def test_table_creation(self, basec): - # table should not exist - assert basec.backend._check_table_exists(self.local_test_name) is False - # so create it - assert basec.backend._create_table(self.local_test_name) is None - # and now it should - assert basec.backend._check_table_exists(self.local_test_name) is True - - def test_clean_bucket_state(self, basec, state_setup, s3_client): - # occupied is not empty, so it should raise an error - with pytest.raises(BackendError): - basec.backend._clean_bucket_state(definition="occupied") - # ensure it was not removed - assert s3_client.get_object(Bucket=STATE_BUCKET, Key=OCCUPIED_STATE) - - # ensure the empty state is present - assert s3_client.get_object(Bucket=STATE_BUCKET, Key=EMPTY_STATE) - # this returns nothing - assert basec.backend._clean_bucket_state(definition="empty") is None - # but now this should fail - with pytest.raises(s3_client.exceptions.NoSuchKey): - s3_client.get_object(Bucket=STATE_BUCKET, Key=EMPTY_STATE) - - def test_clean_locking_state(self, basec, state_setup, dynamodb_client): - # validate 
items exist before function call - resp = dynamodb_client.get_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Key={"LockID": {"S": f"{STATE_BUCKET}/{EMPTY_STATE}-md5"}}, - ) - assert resp["Item"]["Digest"] == {"S": LOCK_DIGEST} - resp = dynamodb_client.get_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Key={"LockID": {"S": f"{STATE_BUCKET}/{OCCUPIED_STATE}-md5"}}, - ) - assert resp["Item"]["Digest"] == {"S": LOCK_DIGEST} - - # this should remove just empty - assert basec.backend._clean_locking_state(STATE_DEPLOYMENT, "empty") is None - - # validate empty is gone, and occupied is not - resp = dynamodb_client.get_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Key={"LockID": {"S": f"{STATE_BUCKET}/{EMPTY_STATE}-md5"}}, - ) - with pytest.raises(KeyError): - assert resp["Item"] - resp = dynamodb_client.get_item( - TableName=f"terraform-{STATE_DEPLOYMENT}", - Key={"LockID": {"S": f"{STATE_BUCKET}/{OCCUPIED_STATE}-md5"}}, - ) - assert resp["Item"]["Digest"] == {"S": LOCK_DIGEST} - - -class TestS3BackendAll: - def test_clean_bucket_state(self, request, basec, state_setup, s3_client): - # need to patch the occupied object with the empty one - with open( - f"{request.config.rootdir}/tests/fixtures/states/empty.tfstate", "rb" - ) as f: - s3_client.put_object(Bucket=STATE_BUCKET, Key=OCCUPIED_STATE, Body=f) - assert basec.backend._clean_bucket_state() is None - - def test_clean_locking_state(self, basec, state_setup, dynamodb_client): - # ensure the table exists before test, then remove it, and make sure it's gone - assert ( - f"terraform-{STATE_DEPLOYMENT}" - in dynamodb_client.list_tables()["TableNames"] - ) - assert basec.backend._clean_locking_state(STATE_DEPLOYMENT) is None - assert ( - f"terraform-{STATE_DEPLOYMENT}" - not in dynamodb_client.list_tables()["TableNames"] - ) - - -class TestS3BackendInit: - def setup_method(self, method): - self.authenticators = {"aws": MockAWSAuth()} - self.definitions = {} - - def test_no_session(self): - self.authenticators["aws"]._session = None - with pytest.raises(BackendError): - S3Backend(self.authenticators, self.definitions) - - def test_no_backend_session(self): - self.authenticators["aws"]._backend_session = None - with pytest.raises(BackendError): - S3Backend(self.authenticators, self.definitions) - - @patch("tfworker.backends.S3Backend._ensure_locking_table", return_value=None) - @patch("tfworker.backends.S3Backend._ensure_backend_bucket", return_value=None) - @patch("tfworker.backends.S3Backend._get_bucket_files", return_value={}) - def test_deployment_undefined( - self, - mock_get_bucket_files, - mock_ensure_backend_bucket, - mock_ensure_locking_table, - ): - # arrange - result = S3Backend(self.authenticators, self.definitions) - assert result._deployment == "undefined" - assert mock_get_bucket_files.called - assert mock_ensure_backend_bucket.called - assert mock_ensure_locking_table.called - - @patch("tfworker.backends.S3Backend._ensure_locking_table", return_value=None) - @patch("tfworker.backends.S3Backend._ensure_backend_bucket", return_value=None) - @patch("tfworker.backends.S3Backend._get_bucket_files", return_value={}) - @patch("tfworker.backends.s3.S3Handler", side_effect=HandlerError("message")) - def test_handler_error( - self, - mock_get_bucket_files, - mock_ensure_backend_bucket, - mock_ensure_locking_table, - mock_handler, - ): - with pytest.raises(SystemExit): - S3Backend(self.authenticators, self.definitions) - - -class TestS3BackendEnsureBackendBucket: - from botocore.exceptions import ClientError - - 
@pytest.fixture(autouse=True) - def setup_class(self, state_setup): - pass - - @patch("tfworker.backends.S3Backend._ensure_locking_table", return_value=None) - @patch("tfworker.backends.S3Backend._ensure_backend_bucket", return_value=None) - @patch("tfworker.backends.S3Backend._get_bucket_files", return_value={}) - def setup_method( - self, - method, - mock_get_bucket_files, - mock_ensure_backend_bucket, - mock_ensure_locking_table, - ): - with mock_aws(): - self.authenticators = {"aws": MockAWSAuth()} - self.definitions = {} - self.backend = S3Backend(self.authenticators, self.definitions) - self.backend._authenticator.bucket = STATE_BUCKET - self.backend._authenticator.backend_region = STATE_REGION - - def teardown_method(self, method): - with mock_aws(): - try: - self.backend._s3_client.delete_bucket(Bucket=NO_SUCH_BUCKET) - except Exception: - pass - - @mock_aws - def test_check_bucket_does_not_exist(self): - result = self.backend._check_bucket_exists(NO_SUCH_BUCKET) - assert result is False - - @mock_aws - def test_check_bucket_exists(self): - result = self.backend._check_bucket_exists(STATE_BUCKET) - assert result is True - - @mock_aws - def test_check_bucket_exists_error(self): - self.backend._s3_client = MagicMock() - self.backend._s3_client.head_bucket.side_effect = ClientError( - {"Error": {"Code": "403", "Message": "Unauthorized"}}, "head_bucket" - ) - - with pytest.raises(ClientError): - self.backend._check_bucket_exists(STATE_BUCKET) - assert self.backend._s3_client.head_bucket.called - - @mock_aws - def test_bucket_not_exist_no_create(self, capfd): - self.backend._authenticator.create_backend_bucket = False - self.backend._authenticator.bucket = NO_SUCH_BUCKET - with pytest.raises(BackendError): - self.backend._ensure_backend_bucket() - assert ( - "Backend bucket not found and --no-create-backend-bucket specified." 
- in capfd.readouterr().out - ) - - @mock_aws - def test_create_bucket(self): - self.backend._authenticator.create_backend_bucket = True - self.backend._authenticator.bucket = NO_SUCH_BUCKET - assert NO_SUCH_BUCKET not in [ - x["Name"] for x in self.backend._s3_client.list_buckets()["Buckets"] - ] - result = self.backend._ensure_backend_bucket() - assert result is None - assert NO_SUCH_BUCKET in [ - x["Name"] for x in self.backend._s3_client.list_buckets()["Buckets"] - ] - - @mock_aws - def test_create_bucket_invalid_location_constraint(self, capsys): - self.backend._authenticator.create_backend_bucket = True - self.backend._authenticator.bucket = NO_SUCH_BUCKET - self.backend._authenticator.backend_region = "us-west-1" - # moto doesn't properly raise a location constraint when the session doesn't match the region - # so we'll just do it manually - assert self.backend._authenticator.backend_session.region_name != "us-west-1" - assert self.backend._authenticator.backend_region == "us-west-1" - assert NO_SUCH_BUCKET not in [ - x["Name"] for x in self.backend._s3_client.list_buckets()["Buckets"] - ] - self.backend._s3_client = MagicMock() - self.backend._s3_client.create_bucket.side_effect = ClientError( - { - "Error": { - "Code": "InvalidLocationConstraint", - "Message": "InvalidLocationConstraint", - } - }, - "create_bucket", - ) - - with pytest.raises(SystemExit): - self.backend._create_bucket(NO_SUCH_BUCKET) - assert "InvalidLocationConstraint" in capsys.readouterr().out - - assert NO_SUCH_BUCKET not in [ - x["Name"] for x in self.backend._s3_client.list_buckets()["Buckets"] - ] - - # This test can not be enabled until several other tests are refactored to not create the bucket needlessly - # as the method itself skips this check when being run through a test, the same also applies to "BucketAlreadyOwnedByYou" - # @mock_aws - # def test_create_bucket_already_exists(self, capsys): - # self.backend._authenticator.create_backend_bucket = True - # self.backend._authenticator.bucket = STATE_BUCKET - # assert STATE_BUCKET in [ x['Name'] for x in self.backend._s3_client.list_buckets()['Buckets'] ] - - # with pytest.raises(SystemExit): - # result = self.backend._create_bucket(STATE_BUCKET) - # assert f"Bucket {STATE_BUCKET} already exists" in capsys.readouterr().out - - def test_create_bucket_error(self): - self.backend._authenticator.create_backend_bucket = True - self.backend._authenticator.bucket = NO_SUCH_BUCKET - self.backend._s3_client = MagicMock() - self.backend._s3_client.create_bucket.side_effect = ClientError( - {"Error": {"Code": "403", "Message": "Unauthorized"}}, "create_bucket" - ) - - with pytest.raises(ClientError): - self.backend._create_bucket(NO_SUCH_BUCKET) - assert self.backend._s3_client.create_bucket.called - - -def test_backend_remotes(basec, state_setup): - remotes = basec.backend.remotes() - assert len(remotes) == 2 - assert "empty" in remotes - assert "occupied" in remotes - - -def test_backend_clean_all(basec, request, state_setup, dynamodb_client, s3_client): - # this function should trigger an exit - with pytest.raises(SystemExit): - basec.backend.clean(STATE_DEPLOYMENT) - - # empty the occupied state - with open( - f"{request.config.rootdir}/tests/fixtures/states/empty.tfstate", "rb" - ) as f: - s3_client.put_object(Bucket=STATE_BUCKET, Key=OCCUPIED_STATE, Body=f) - - # now clean should run and return nothing - assert basec.backend.clean(STATE_DEPLOYMENT) is None - - -def test_backend_clean_limit(basec, request, state_setup, dynamodb_client, s3_client): - with 
pytest.raises(SystemExit): - basec.backend.clean(STATE_DEPLOYMENT, limit=["occupied"]) - assert basec.backend.clean(STATE_DEPLOYMENT, limit=["empty"]) is None - - -def test_s3_hcl(basec): - render = basec.backend.hcl("test") - expected_render = """ backend "s3" { - region = "us-west-2" - bucket = "test_bucket" - key = "terraform/test-0001/test/terraform.tfstate" - dynamodb_table = "terraform-test-0001" - encrypt = "true" - }""" - assert render == expected_render - - -def test_s3_data_hcl(basec): - expected_render = """data "terraform_remote_state" "test" { - backend = "s3" - config = { - region = "us-west-2" - bucket = "test_bucket" - key = "terraform/test-0001/test/terraform.tfstate" - } -} -""" - render = [] - render.append(basec.backend.data_hcl(["test", "test"])) - render.append(basec.backend.data_hcl(["test"])) - for i in render: - assert i == expected_render - - with pytest.raises(ValueError): - render.append(basec.backend.data_hcl("test")) diff --git a/tests/commands/test_root.py b/tests/commands/test_root.py deleted file mode 100644 index a9db5a4..0000000 --- a/tests/commands/test_root.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import copy -import os -import platform -from tempfile import TemporaryDirectory -from unittest.mock import patch - -import pytest -from deepdiff import DeepDiff - -import tfworker.commands.root -from tfworker.commands.root import ordered_config_load -from tfworker.constants import DEFAULT_CONFIG -from tfworker.types import CLIOptionsRoot - - -class TestMain: - def test_rc_add_arg(self, rootc): - rc = copy.deepcopy(rootc) - rc.add_arg("a", 1) - assert rc.args.a == 1 - - def test_rc_add_args(self, rootc): - rc = copy.deepcopy(rootc) - rc.add_args({"a": 1, "b": "two"}) - assert rc.args.a == 1 - assert rc.args.b == "two" - - def test_rc_init_clean(self, rootc): - # by default clean should be true - rc = tfworker.commands.root.RootCommand(CLIOptionsRoot()) - assert rc.clean is True - - # if working_dir is passed, clean should be false - random_working_dir = TemporaryDirectory() - rc = tfworker.commands.root.RootCommand( - CLIOptionsRoot(working_dir=random_working_dir.name) - ) - assert rc.clean is False - if platform.system() == "Darwin": - assert str(rc.temp_dir) == f"/private{random_working_dir.name}" - else: - assert str(rc.temp_dir) == random_working_dir.name - - # if clean is passed, it should be set to the value passed - rc = tfworker.commands.root.RootCommand(CLIOptionsRoot(clean=False)) - assert rc.temp_dir is not None - assert rc.clean is False - - # if a working dir is specified, along with clean, the dir itself - # should not be deleted, but contents inside of it should - tmpdir = TemporaryDirectory() - assert os.path.exists(tmpdir.name) is True - rc = tfworker.commands.root.RootCommand( - CLIOptionsRoot(clean=True, working_dir=tmpdir.name) - ) - assert rc.clean is True - if platform.system() == "Darwin": - assert str(rc.temp_dir) == 
f"/private{tmpdir.name}" - else: - assert str(rc.temp_dir) == tmpdir.name - with open(file=os.path.join(tmpdir.name, "test"), mode="w") as f: - f.write("test") - del rc - assert os.path.exists(os.path.join(tmpdir.name, "test")) is False - assert len(os.listdir(tmpdir.name)) == 0 - tmpdir.cleanup() - - def test_config_loader(self, rootc, capfd): - expected_sections = ["providers", "terraform_vars", "definitions"] - expected_tf_vars = { - "vpc_cidr": "10.0.0.0/16", - "region": "us-west-2", - "domain": "test.domain.com", - } - rootc.add_arg("deployment", "root-deployment") - rootc.load_config() - terraform_config = rootc.config.get("terraform") - for section in expected_sections: - assert section in terraform_config.keys() - - for k, v in expected_tf_vars.items(): - assert terraform_config["terraform_vars"][k] == v - - # a root command with no config should attempt to load the default, but fail, and exit - with patch("os.path.exists") as mock_exists: - mock_exists.return_value = False - with pytest.raises(SystemExit) as e: - emptyrc = tfworker.commands.root.RootCommand(CLIOptionsRoot()) - assert emptyrc.load_config() is None - mock_exists.assert_called_with(DEFAULT_CONFIG) - - # an invalid path should raise an error - invalidrc = tfworker.commands.root.RootCommand( - CLIOptionsRoot(config_file="/tmp/invalid") - ) - with pytest.raises(SystemExit) as e: - invalidrc.load_config() - assert e.value.code == 1 - out, err = capfd.readouterr() - assert "configuration file does not exist" in out - - # a j2 template with invalid substitutions should raise an error - invalidrc = tfworker.commands.root.RootCommand( - CLIOptionsRoot( - **{ - "config_file": os.path.join( - os.path.dirname(__file__), - "..", - "fixtures", - "test_config_invalid_j2.yaml", - ) - } - ) - ) - with pytest.raises(SystemExit) as e: - invalidrc.load_config() - assert e.value.code == 1 - out, err = capfd.readouterr() - assert "invalid template" in out - - # def test_pullup_keys_edge(self): - # rc = tfworker.commands.root.RootCommand() - # assert rc.load_config() is None - # assert rc._pullup_keys() is None - # assert rc.providers_odict is None - - def test_get_config_var_dict(self): - config_vars = ["foo=bar", "this=that", "one=two"] - result = tfworker.commands.root.get_config_var_dict(config_vars) - assert len(result) == 3 - assert result["foo"] == "bar" - assert result["this"] == "that" - assert result["one"] == "two" - - def test_stateargs_base(self): - rc = tfworker.commands.root.RootCommand.StateArgs() - setattr(rc, "foo", "bar") - setattr(rc, "this", ["that", "thing"]) - setattr(rc, "one", 2) - assert rc.foo == "bar" - assert len(rc.this) == 2 - assert rc.one == 2 - assert rc["foo"] == "bar" - assert rc["this"] == ["that", "thing"] - assert rc["one"] == 2 - - for k in rc.keys(): - assert k in ["foo", "this", "one"] - - for k in rc.values(): - assert k in ["bar", ["that", "thing"], 2] - - for k in rc: - assert k in ["foo", "this", "one"] - - assert str(rc) == "{'foo': 'bar', 'this': ['that', 'thing'], 'one': 2}" - - for k, v in rc.items(): - assert k in ["foo", "this", "one"] - assert v in ["bar", ["that", "thing"], 2] - - def test_stateargs_template_items(self): - rc = tfworker.commands.root.RootCommand.StateArgs() - # push --config-var param onto rootcommand - setattr(rc, "config_var", ["foo=bar", "this=that", "one=two"]) - - # check templating of config_var, no environment - result = rc.template_items(return_as_dict=True) - assert result["var"] == {"foo": "bar", "this": "that", "one": "two"} - with pytest.raises(KeyError): - 
result["env"] - - # check templating of config_var, with environment - os.environ["FOO"] = "bar" - os.environ["THIS"] = "that" - os.environ["ONE"] = "two" - result = rc.template_items(return_as_dict=True, get_env=True) - assert result["env"]["FOO"] == "bar" - assert result["env"]["THIS"] == "that" - assert result["env"]["ONE"] == "two" - - # check templating when returning as a list - result = rc.template_items(return_as_dict=False, get_env=True) - for k, v in result: - assert k in ["var", "env"] - if k == "var": - assert v == {"foo": "bar", "this": "that", "one": "two"} - if k == "env": - assert v["FOO"] == "bar" - assert v["THIS"] == "that" - assert v["ONE"] == "two" - - def test_stateargs_template_items_invalid(self, capfd): - rc = tfworker.commands.root.RootCommand.StateArgs() - # push --config-var param onto rootcommand - setattr(rc, "config_var", ["junky"]) - - # check templating of config_var, no environment - with pytest.raises(SystemExit) as e: - rc.template_items(return_as_dict=True) - out, err = capfd.readouterr() - assert e.value.code == 1 - assert "Invalid config-var" in out - - def test_config_formats(self, yaml_base_rootc, json_base_rootc, hcl_base_rootc): - yaml_base_rootc.load_config() - json_base_rootc.load_config() - hcl_base_rootc.load_config() - yaml_config = yaml_base_rootc.config - json_config = json_base_rootc.config - hcl_config = hcl_base_rootc.config - diff = DeepDiff(yaml_config, json_config) - assert len(diff) == 0 - diff = DeepDiff(json_config, hcl_config) - assert len(diff) == 0 - - -class TestOrderedConfigLoad: - def test_ordered_config_load(self): - config = """ - key1: value1 - key2: value2 - key3: - - item1 - - item2 - """ - result = ordered_config_load(config) - assert result == { - "key1": "value1", - "key2": "value2", - "key3": ["item1", "item2"], - } - - def test_ordered_config_load_invalid(self, capfd): - config = """ - key1: value1 - key2: value2 - key3: - - item1 - - item2 - key4: - subkey1: value1 - subkey1a: value2 - - ohnoimalistnow - """ - expected_error_out = "" - for i, line in enumerate(config.split("\n")): - expected_error_out += f"{i + 1}: {line}\n" - with pytest.raises(SystemExit) as e: - ordered_config_load(config) - out, err = capfd.readouterr() - assert e.value.code == 1 - assert "error loading yaml/json" in out - assert "the configuration that caused the error was" in out - assert expected_error_out in out diff --git a/tests/commands/test_terraform.py b/tests/commands/test_terraform.py deleted file mode 100644 index 2c24cf0..0000000 --- a/tests/commands/test_terraform.py +++ /dev/null @@ -1,621 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib -from typing import Tuple -from unittest import mock -from unittest.mock import MagicMock, patch - -import pytest -from google.cloud.exceptions import NotFound - -import tfworker -from tfworker.commands.terraform import TerraformCommand, TerraformError -from tfworker.definitions import Definition -from tfworker.handlers import HandlerError - - -def mock_pipe_exec( - args: str, - stdin: str = None, - cwd: str = None, - env: list = None, - stream_output: bool = False, -): - return (0, "".encode(), "".encode()) - - -def mock_tf_version(args: str) -> Tuple[int, str, str]: - return (0, args.encode(), "".encode()) - - -@pytest.fixture(scope="function") -def definition(): - mock_definition = MagicMock(spec=Definition) - mock_definition.tag = "test_tag" - mock_definition.path = "/path/to/definition" - mock_definition.fs_path = pathlib.Path("/path/to/definition") - # mock_definition._plan_file = None - # mock_definition._ready_to_apply = False - return mock_definition - - -@pytest.fixture(scope="function") -def terraform_command(rootc): - return TerraformCommand( - rootc, - plan_file_path="/path/to/plan", - tf_plan=True, - deployment="deployment", - show_output=True, - ) - - -@pytest.fixture(scope="function") -def terraform_destroy_command(rootc): - return TerraformCommand( - rootc, - plan_file_path="/path/to/plan", - tf_plan=True, - deployment="deployment", - show_output=True, - destroy=True, - ) - - -class TestTerraformCommand: - """These are legacy tests, and will be refactored away as work on the TerraformCommand class progresses.""" - - @pytest.mark.parametrize( - "method, tf_cmd, args", - [ - ( - "init", - "tf_12cmd", - ["-input=false", "-no-color", "-plugin-dir"], - ), - ( - "plan", - "tf_12cmd", - ["-input=false", "-detailed-exitcode", "-no-color"], - ), - ( - "apply", - "tf_12cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "destroy", - "tf_12cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "init", - "tf_13cmd", - ["-input=false", "-no-color", "-plugin-dir"], - ), - ( - "plan", - "tf_13cmd", - ["-input=false", "-detailed-exitcode", "-no-color"], - ), - ( - "apply", - "tf_13cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "destroy", - "tf_13cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "init", - "tf_14cmd", - ["-input=false", "-no-color", "-plugin-dir"], - ), - ( - "plan", - "tf_14cmd", - ["-input=false", "-detailed-exitcode", "-no-color"], - ), - ( - "apply", - "tf_14cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "destroy", - "tf_14cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "init", - "tf_15cmd", - ["-input=false", "-no-color", "-plugin-dir"], - ), - ( - "plan", - "tf_15cmd", - ["-input=false", "-detailed-exitcode", "-no-color"], - ), - ( - "apply", - "tf_15cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ( - "destroy", - "tf_15cmd", - ["-input=false", "-no-color", "-auto-approve"], - ), - ], - ) - def test_run(self, tf_cmd: str, method: callable, args: list, request): - tf_cmd = request.getfixturevalue(tf_cmd) - with mock.patch( - "tfworker.commands.terraform.pipe_exec", - side_effect=mock_pipe_exec, - ) as mocked: - tf_cmd._run( - tf_cmd.definitions["test"], - method, - ) - mocked.assert_called_once() - call_as_string = str(mocked.mock_calls.pop()) - assert method in call_as_string - for arg in args: - assert arg in call_as_string - - def test_worker_options(self, tf_13cmd_options): - # Verify that the options from the CLI override the 
options from the config - assert tf_13cmd_options._rootc.worker_options_odict.get("backend") == "s3" - assert tf_13cmd_options.backend.tag == "gcs" - - # Verify that None options are overriden by the config - assert tf_13cmd_options._rootc.worker_options_odict.get("b64_encode") is True - assert tf_13cmd_options._args_dict.get("b64_encode") is False - - # The fixture causes which to return /usr/local/bin/terraform. However, since the - # path is specified in the worker_options, assert the value fromt he config. - assert tf_13cmd_options._terraform_bin == "/home/test/bin/terraform" - - # def test_no_create_backend_bucket_fails_s3(self, rootc_no_create_backend_bucket): - # with pytest.raises(BackendError): - # with mock.patch( - # "tfworker.commands.base.BaseCommand.get_terraform_version", - # side_effect=lambda x: (13, 3), - # ): - # with mock.patch( - # "tfworker.commands.base.which", - # side_effect=lambda x: "/usr/local/bin/terraform", - # ): - # return tfworker.commands.base.BaseCommand( - # rootc_no_create_backend_bucket, "test-0001", tf_version_major=13 - # ) - - def test_no_create_backend_bucket_fails_gcs(self, grootc_no_create_backend_bucket): - with pytest.raises(SystemExit): - with mock.patch( - "tfworker.commands.base.get_terraform_version", - side_effect=lambda x: (13, 3), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - with mock.patch( - "tfworker.backends.gcs.storage.Client.from_service_account_json" - ) as ClientMock: - instance = ClientMock.return_value - instance.get_bucket.side_effect = NotFound("bucket not found") - return tfworker.commands.base.BaseCommand( - grootc_no_create_backend_bucket, - "test-0001", - tf_version_major=13, - ) - - -#### -class TestTerraformCommandInit: - base_kwargs = { - "backend": "s3", - "backend_plans": False, - "b64_encode": False, - "color": True, - "deployment": "test_deployment", - "destroy": False, - "force": False, - "plan_file_path": None, - "provider_cache": "/path/to/cache", - "show_output": True, - "stream_output": False, - "terraform_bin": "/path/to/terraform", - "terraform_modules_dir": "/path/to/modules", - "tf_apply": True, - "tf_plan": True, - "tf_version": (1, 8), - } - - @pytest.fixture - def terraform_command_class(self): - return TerraformCommand - - def test_constructor_with_valid_arguments(self, rootc, terraform_command_class): - kwargs = self.base_kwargs.copy() - - with patch.object( - terraform_command_class, "_resolve_arg", side_effect=lambda arg: kwargs[arg] - ): - command = terraform_command_class(rootc, **kwargs) - assert command._destroy == kwargs["destroy"] - assert command._tf_apply == kwargs["tf_apply"] - assert command._tf_plan == kwargs["tf_plan"] - assert command._plan_file_path == kwargs["plan_file_path"] - assert command._b64_encode == kwargs["b64_encode"] - assert command._deployment == kwargs["deployment"] - assert command._force == kwargs["force"] - assert command._show_output == kwargs["show_output"] - assert command._stream_output == kwargs["stream_output"] - assert command._use_colors is True - assert command._terraform_modules_dir == kwargs["terraform_modules_dir"] - assert command._terraform_output == {} - - def test_constructor_with_apply_and_destroy(self, rootc, terraform_command_class): - kwargs = self.base_kwargs.copy() - kwargs["tf_apply"] = True - kwargs["destroy"] = True - - with patch.object( - terraform_command_class, "_resolve_arg", side_effect=lambda arg: kwargs[arg] - ): - with patch("click.secho") as mock_secho, 
pytest.raises(SystemExit): - terraform_command_class(rootc, **kwargs) - mock_secho.assert_called_with( - "Cannot apply and destroy in the same run", fg="red" - ) - - def test_constructor_with_backend_plans(self, rootc, terraform_command_class): - kwargs = self.base_kwargs.copy() - kwargs["backend_plans"] = True - - with patch.object( - terraform_command_class, "_resolve_arg", side_effect=lambda arg: kwargs[arg] - ): - with patch("pathlib.Path.mkdir") as mock_mkdir: - command = terraform_command_class(rootc, **kwargs) - assert command._plan_file_path == f"{command._temp_dir}/plans" - mock_mkdir.assert_called_once_with(parents=True, exist_ok=True) - - -class TestTerraformCommandProperties: - def test_plan_for_apply(self, terraform_command): - assert terraform_command.plan_for == "apply" - - def test_plan_for_destroy(self, terraform_destroy_command): - assert terraform_destroy_command.plan_for == "destroy" - - def test_tf_version_major(self, terraform_command): - assert terraform_command.tf_version_major == 1 - - -class TestTerraformCommandExec: - - def test_exec_valid_flow(self, terraform_command, definition): - def_iter = [definition] - terraform_command._provider_cache = "/path/to/cache" - - with patch.object( - terraform_command.definitions, "limited", return_value=def_iter - ), patch( - "tfworker.commands.terraform.tf_util.mirror_providers" - ) as mock_mirror_providers, patch( - "tfworker.commands.terraform.tf_util.prep_modules" - ) as mock_prep_modules, patch.object( - terraform_command, "_prep_and_init" - ) as mock_prep_and_init, patch.object( - terraform_command, "_check_plan", return_value=True - ) as mock_check_plan, patch.object( - terraform_command, "_exec_plan", return_value="changes" - ) as mock_exec_plan, patch.object( - terraform_command, "_check_apply_or_destroy", return_value=True - ) as mock_check_apply_or_destroy, patch.object( - terraform_command, "_exec_apply_or_destroy" - ) as mock_exec_apply_or_destroy: - - terraform_command.exec() - - mock_mirror_providers.assert_called_once() - mock_prep_modules.assert_called_once_with( - terraform_command._terraform_modules_dir, - terraform_command._temp_dir, - required=True, - ) - mock_prep_and_init.assert_called_once_with(def_iter) - mock_check_plan.assert_called_once_with(definition) - mock_exec_plan.assert_called_once_with(definition) - mock_check_apply_or_destroy.assert_called_once_with("changes", definition) - mock_exec_apply_or_destroy.assert_called_once_with(definition) - - def test_exec_with_invalid_limit(self, terraform_command): - with patch.object( - terraform_command.definitions, - "limited", - side_effect=ValueError("Invalid limit"), - ), patch("click.secho") as mock_secho: - with pytest.raises(SystemExit): - terraform_command.exec() - mock_secho.assert_called_once_with( - "Error with supplied limit: Invalid limit", fg="red" - ) - - def test_exec_without_plan(self, terraform_command, definition): - def_iter = [definition] - - with patch.object( - terraform_command.definitions, "limited", return_value=def_iter - ), patch( - "tfworker.commands.terraform.tf_util.prep_modules" - ) as mock_prep_modules, patch.object( - terraform_command, "_prep_and_init" - ) as mock_prep_and_init, patch.object( - terraform_command, "_check_plan", return_value=False - ) as mock_check_plan, patch.object( - terraform_command, "_exec_plan" - ) as mock_exec_plan, patch.object( - terraform_command, "_check_apply_or_destroy", return_value=True - ) as mock_check_apply_or_destroy, patch.object( - terraform_command, "_exec_apply_or_destroy" - ) as 
mock_exec_apply_or_destroy: - - terraform_command.exec() - - mock_prep_modules.assert_called_once_with( - terraform_command._terraform_modules_dir, - terraform_command._temp_dir, - required=True, - ) - mock_prep_and_init.assert_called_once_with(def_iter) - mock_check_plan.assert_called_once_with(definition) - mock_exec_plan.assert_not_called() - mock_check_apply_or_destroy.assert_called_once_with(None, definition) - mock_exec_apply_or_destroy.assert_called_once_with(definition) - - def test_exec_with_no_apply_or_destroy(self, terraform_command, definition): - def_iter = [definition] - - with patch.object( - terraform_command.definitions, "limited", return_value=def_iter - ), patch( - "tfworker.commands.terraform.tf_util.prep_modules" - ) as mock_prep_modules, patch.object( - terraform_command, "_prep_and_init" - ) as mock_prep_and_init, patch.object( - terraform_command, "_check_plan", return_value=True - ) as mock_check_plan, patch.object( - terraform_command, "_exec_plan", return_value="changes" - ) as mock_exec_plan, patch.object( - terraform_command, "_check_apply_or_destroy", return_value=False - ) as mock_check_apply_or_destroy, patch.object( - terraform_command, "_exec_apply_or_destroy" - ) as mock_exec_apply_or_destroy: - - terraform_command.exec() - - mock_prep_modules.assert_called_once_with( - terraform_command._terraform_modules_dir, - terraform_command._temp_dir, - required=True, - ) - mock_prep_and_init.assert_called_once_with(def_iter) - mock_check_plan.assert_called_once_with(definition) - mock_exec_plan.assert_called_once_with(definition) - mock_check_apply_or_destroy.assert_called_once_with("changes", definition) - mock_exec_apply_or_destroy.assert_not_called() - - def test_exec_with_required_prep_modules(self, terraform_command, definition): - terraform_command._terraform_modules_dir = "/temp/path" - def_iter = [definition] - - with patch.object( - terraform_command.definitions, "limited", return_value=def_iter - ), patch( - "tfworker.commands.terraform.tf_util.prep_modules" - ) as mock_prep_modules, patch.object( - terraform_command, "_prep_and_init" - ) as mock_prep_and_init, patch.object( - terraform_command, "_check_plan", return_value=True - ) as mock_check_plan, patch.object( - terraform_command, "_exec_plan", return_value="changes" - ) as mock_exec_plan, patch.object( - terraform_command, "_check_apply_or_destroy", return_value=True - ) as mock_check_apply_or_destroy, patch.object( - terraform_command, "_exec_apply_or_destroy" - ) as mock_exec_apply_or_destroy: - - terraform_command.exec() - - mock_prep_modules.assert_called_once_with( - terraform_command._terraform_modules_dir, - terraform_command._temp_dir, - required=True, - ) - mock_prep_and_init.assert_called_once_with(def_iter) - mock_check_plan.assert_called_once_with(definition) - mock_exec_plan.assert_called_once_with(definition) - mock_check_apply_or_destroy.assert_called_once_with("changes", definition) - mock_exec_apply_or_destroy.assert_called_once_with(definition) - - -class TestTerraformCommandPrepAndInit: - - def test_prep_and_init_valid_flow(self, terraform_command, definition): - def_iter = [definition] - - with patch("click.secho") as mock_secho, patch.object( - definition, "prep" - ) as mock_prep, patch.object(terraform_command, "_run") as mock_run: - - terraform_command._prep_and_init(def_iter) - - mock_secho.assert_any_call( - f"preparing definition: {definition.tag}", fg="green" - ) - mock_prep.assert_called_once_with(terraform_command._backend) - mock_run.assert_called_once_with( - 
definition, "init", debug=terraform_command._show_output - ) - - def test_prep_and_init_with_terraform_error(self, terraform_command, definition): - def_iter = [definition] - - with patch("click.secho") as mock_secho, patch.object( - definition, "prep" - ) as mock_prep, patch.object( - terraform_command, "_run", side_effect=TerraformError - ) as mock_run: - - with pytest.raises(SystemExit): - terraform_command._prep_and_init(def_iter) - - mock_secho.assert_any_call( - f"preparing definition: {definition.tag}", fg="green" - ) - mock_prep.assert_called_once_with(terraform_command._backend) - mock_run.assert_called_once_with( - definition, "init", debug=terraform_command._show_output - ) - mock_secho.assert_any_call("error running terraform init", fg="red") - - -class TestTerraformCommandPlanFunctions: - def test_handle_no_plan_path_true(self, terraform_command, definition): - terraform_command._tf_plan = False - assert terraform_command._handle_no_plan_path(definition) is False - assert definition._ready_to_apply is True - - def test_handle_no_plan_path_false(self, terraform_command, definition): - terraform_command._tf_plan = True - assert terraform_command._handle_no_plan_path(definition) is True - assert definition._ready_to_apply is False - - def test_prepare_plan_file(self, terraform_command, definition): - plan_file = terraform_command._prepare_plan_file(definition) - assert definition.plan_file == plan_file - assert plan_file == pathlib.Path("/path/to/plan/deployment_test_tag.tfplan") - - def test_validate_plan_path_valid(self, terraform_command): - with patch("pathlib.Path.exists", return_value=True), patch( - "pathlib.Path.is_dir", return_value=True - ): - terraform_command._validate_plan_path(pathlib.Path("/valid/path")) - - def test_validate_plan_path_invalid(self, terraform_command): - with patch("pathlib.Path.exists", return_value=False), patch( - "pathlib.Path.is_dir", return_value=False - ), pytest.raises(SystemExit): - terraform_command._validate_plan_path(pathlib.Path("/invalid/path")) - - def test_run_handlers(self, terraform_command, definition): - with patch.object( - terraform_command, "_execute_handlers", return_value=None - ) as mock_execute_handlers: - terraform_command._run_handlers( - definition, "plan", "check", pathlib.Path("/path/to/planfile") - ) - mock_execute_handlers.assert_called_once_with( - action="plan", - stage="check", - deployment="deployment", - definition=definition.tag, - definition_path=definition.fs_path, - planfile=pathlib.Path("/path/to/planfile"), - ) - - def test_run_handlers_with_error(self, terraform_command, definition): - error = HandlerError("Handler failed") - error.terminate = False - with patch.object( - terraform_command, "_execute_handlers", side_effect=error - ), patch("click.secho"): - terraform_command._run_handlers( - definition, "plan", "check", pathlib.Path("/path/to/planfile") - ) - - def test_run_handlers_with_fatal_error(self, terraform_command, definition): - error = HandlerError("Fatal handler error") - error.terminate = True - with patch.object( - terraform_command, "_execute_handlers", side_effect=error - ), patch("click.secho"), pytest.raises(SystemExit): - terraform_command._run_handlers( - definition, "plan", "check", pathlib.Path("/path/to/planfile") - ) - - def test_should_plan_no_tf_plan(self, terraform_command, definition): - terraform_command._tf_plan = False - plan_file = pathlib.Path("/path/to/empty.tfplan") - assert terraform_command._should_plan(definition, plan_file) is False - assert 
definition._ready_to_apply is True - - def test_should_plan_empty_plan_file(self, terraform_command, definition): - plan_file = pathlib.Path("/path/to/empty.tfplan") - with patch("pathlib.Path.exists", return_value=True), patch( - "pathlib.Path.stat", return_value=MagicMock(st_size=0) - ): - assert terraform_command._should_plan(definition, plan_file) is True - assert definition._ready_to_apply is False - - def test_should_plan_existing_valid_plan_file(self, terraform_command, definition): - plan_file = pathlib.Path("/path/to/valid.tfplan") - with patch("pathlib.Path.exists", return_value=True), patch( - "pathlib.Path.stat", return_value=MagicMock(st_size=100) - ): - assert terraform_command._should_plan(definition, plan_file) is False - assert definition._ready_to_apply is True - - def test_should_plan_no_existing_plan_file(self, terraform_command, definition): - plan_file = pathlib.Path("/path/to/nonexistent.tfplan") - with patch("pathlib.Path.exists", return_value=False): - assert terraform_command._should_plan(definition, plan_file) is True - assert definition._ready_to_apply is False - - def test_check_plan_no_plan_path(self, terraform_command, definition): - terraform_command._plan_file_path = None - with patch.object( - terraform_command, "_handle_no_plan_path", return_value=False - ) as mock_handle_no_plan_path: - assert terraform_command._check_plan(definition) is False - mock_handle_no_plan_path.assert_called_once_with(definition) - - def test_check_plan_with_plan_path(self, terraform_command, definition): - plan_file = pathlib.Path("/path/to/plan/deployment_test_tag.tfplan") - with patch.object( - terraform_command, "_prepare_plan_file", return_value=plan_file - ) as mock_prepare_plan_file, patch.object( - terraform_command, "_validate_plan_path" - ) as mock_validate_plan_path, patch.object( - terraform_command, "_run_handlers" - ) as mock_run_handlers, patch.object( - terraform_command, "_should_plan", return_value=True - ) as mock_should_plan: - assert terraform_command._check_plan(definition) is True - mock_prepare_plan_file.assert_called_once_with(definition) - mock_validate_plan_path.assert_called_once_with(plan_file.parent) - mock_run_handlers.assert_called_once() - mock_should_plan.assert_called_once_with(definition, plan_file) - - -if __name__ == "__main__": - pytest.main() diff --git a/tests/commands/test_version.py b/tests/commands/test_version.py deleted file mode 100644 index 693f7cd..0000000 --- a/tests/commands/test_version.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2021-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -from tfworker.commands.version import VersionCommand - - -def test_version_command(capsys): - with mock.patch("tfworker.commands.version.get_version") as mock_get_version: - mock_get_version.return_value = "1.2.3" - command = VersionCommand() - command.exec() - text = capsys.readouterr() - assert text.out == "terraform-worker version 1.2.3\n" diff --git a/tests/conftest.py b/tests/conftest.py index d07ec51..1ba222c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,479 +1,193 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import os -import random -import string -from unittest import mock +import sys +from unittest.mock import MagicMock import boto3 +import click import pytest from moto import mock_aws -import tfworker -import tfworker.commands.base -import tfworker.commands.root -import tfworker.providers -import tfworker.types as tf_types - - -@pytest.fixture -def aws_access_key_id(): - suffix = "".join(random.choices("RAX", k=16)) - return f"AKIA{suffix}" - - -@pytest.fixture -def aws_secret_access_key(): - return "".join( - random.choices( - string.ascii_uppercase + string.ascii_lowercase + string.digits, k=40 - ) - ) - - -@pytest.fixture() -def gcp_creds_file(tmp_path): - creds_file = tmp_path / "creds.json" - creds_file.write_text( - """ - { - "type": "service_account", - "project_id": "test_project", - "private_key_id": "test_key_id", - "private_key": "test_key", - "client_email": " - }""" - ) - return creds_file - - -@pytest.fixture -def aws_role_arn(aws_account_id, aws_role_name): - return f"arn:aws:iam:{aws_account_id}:role/{aws_role_name}" - - -@pytest.fixture -def aws_role_name(): - return "".join(random.choices(string.ascii_lowercase, k=8)) - - -@pytest.fixture -def aws_account_id(): - return "".join(random.choices(string.digits, k=12)) +from tfworker.app_state import AppState +from tfworker.authenticators import AuthenticatorsCollection +from tfworker.cli_options import CLIOptionsClean, CLIOptionsRoot, CLIOptionsTerraform +from tfworker.types.config_file import ConfigFile, GlobalVars -@pytest.fixture(scope="class") +@pytest.fixture(scope="function") def aws_credentials(): """Mocked AWS Credentials for moto.""" os.environ["AWS_ACCESS_KEY_ID"] = "testing" os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" os.environ["AWS_SECURITY_TOKEN"] = "testing" os.environ["AWS_SESSION_TOKEN"] = "testing" - - -@pytest.fixture(scope="class") -def s3_client(aws_credentials): - with mock_aws(): - yield boto3.client("s3", region_name="us-west-2") - - -@pytest.fixture(scope="class") -def dynamodb_client(aws_credentials): - with mock_aws(): - yield boto3.client("dynamodb", region_name="us-west-2") - - -@pytest.fixture(scope="class") -def sts_client(aws_credentials): - with mock_aws(): - yield boto3.client("sts", region_name="us-west-2") - - -class MockAWSAuth: - """ - This class is used to replace the AWS authenticator, moto is unable to - provide mock support for the complex authentication 
options we support - (cross account assumed roles, user identity, etc...) - """ - - @mock_aws - def __init__(self): - self._session = boto3.Session() - self._backend_session = self._session - self.bucket = "test_bucket" - self.prefix = "terraform/test-0001" - - @property - def session(self): - return self._session - - @property - def backend_session(self): - return self._backend_session - - -@pytest.fixture() -def grootc(gcp_creds_file): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "backend": "gcs", - "backend_region": "us-central1", - "backend_bucket": "test_gcp_bucket", - "backend_prefix": "terraform/test-0002", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "gcp_test_config.yaml" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - "create_backend_bucket": True, - } - ) - ) - return result - - -@pytest.fixture() -def grootc_no_create_backend_bucket(gcp_creds_file): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "backend": "gcs", - "backend_region": "us-central1", - "backend_bucket": "test_gcp_bucket", - "backend_prefix": "terraform/test-0002", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "gcp_test_config.yaml" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - "create_backend_bucket": False, - } - ) - ) - return result + os.environ["AWS_DEFAULT_REGION"] = "us-east-1" @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def rootc( - s3_client, dynamodb_client, sts_client, gcp_creds_file, create_backend_bucket=True -): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_access_key_id": "1234567890", - "aws_secret_access_key": "1234567890", - "aws_region": "us-west-2", - "backend": "s3", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "test_config.yaml" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - "create_backend_bucket": create_backend_bucket, - } - ) - ) - return result +def empty_state(): + """A Representation of a terraform state file with no resources""" + with open(f"{os.path.dirname(__file__)}/fixtures/states/empty.tfstate", "r") as f: + return f.read() @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def rootc_no_create_backend_bucket( - s3_client, dynamodb_client, gcp_creds_file, sts_client -): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_access_key_id": "1234567890", - "aws_secret_access_key": "1234567890", - "aws_region": "us-west-2", - "backend": "s3", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "test_config.yaml" - ), - "gcp_creds_path": 
str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - "create_backend_bucket": False, - } - ) - ) - return result +def occupied_state(): + """A Representation of a terraform state file with resources""" + with open( + f"{os.path.dirname(__file__)}/fixtures/states/occupied.tfstate", "r" + ) as f: + return f.read() @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def json_base_rootc(s3_client, dynamodb_client, gcp_creds_file, sts_client): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_access_key_id": "1234567890", - "aws_secret_access_key": "1234567890", - "aws_region": "us-west-2", - "backend": "s3", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "base_config_test.json" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - } - ) - ) - return result +def mock_cli_options_root(): + """A mock CLIOptionsRoot object with default values""" + mock_root = MagicMock(spec=CLIOptionsRoot) + mock_root.region = "us-east-1" + mock_root.backend_region = "us-east-1" + mock_root.backend_bucket = "test-bucket" + mock_root.backend_plans = False + mock_root.backend_prefix = "prefix" + mock_root.create_backend_bucket = True + mock_root.config_var = {} + return mock_root @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def yaml_base_rootc(s3_client, dynamodb_client, gcp_creds_file, sts_client): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_access_key_id": "1234567890", - "aws_secret_access_key": "1234567890", - "aws_region": "us-west-2", - "backend": "s3", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), "fixtures", "base_config_test.yaml" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - } - ) - ) - return result +def mock_cli_options_root_backend_west(): + """A mock CLIOptionsRoot object with default values and backend in us-west-2""" + mock_root = MagicMock(spec=CLIOptionsRoot) + mock_root.region = "us-east-1" + mock_root.backend_region = "us-west-2" + mock_root.backend_bucket = "west-test-bucket" + mock_root.backend_plans = False + mock_root.backend_prefix = "prefix" + mock_root.create_backend_bucket = True + return mock_root @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def hcl_base_rootc(s3_client, dynamodb_client, gcp_creds_file, sts_client): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_access_key_id": "1234567890", - "aws_secret_access_key": "1234567890", - "aws_region": "us-west-2", - "backend": "s3", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - 
os.path.dirname(__file__), "fixtures", "base_config_test.hcl" - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - } - ) - ) - return result +def mock_cli_options_terraform(): + """A mock CLIOptionsTerraform object with default values""" + mock_terraform = MagicMock(spec=CLIOptionsTerraform) + mock_terraform.apply = True + mock_terraform.destroy = False + mock_terraform.plan_file_path = None + return mock_terraform @pytest.fixture(scope="function") -@mock.patch("tfworker.authenticators.aws.AWSAuthenticator", new=MockAWSAuth) -def rootc_options(s3_client, dynamodb_client, gcp_creds_file, sts_client): - result = tfworker.commands.root.RootCommand( - tf_types.CLIOptionsRoot( - **{ - "aws_region": "us-east-2", - "backend": "gcs", - "backend_region": "us-west-2", - "backend_bucket": "test_bucket", - "backend_prefix": "terraform/test-0001", - "backend_use_all_remotes": False, - "config_file": os.path.join( - os.path.dirname(__file__), - "fixtures", - "test_config_with_options.yaml", - ), - "gcp_creds_path": str(gcp_creds_file), - "gcp_project": "test_project", - "gcp_region": "us-west-2b", - "repository_path": os.path.join(os.path.dirname(__file__), "fixtures"), - } - ) - ) - return result - - -@pytest.fixture -def basec(rootc, s3_client): - with mock.patch( - "tfworker.commands.base.get_terraform_version", - side_effect=lambda x: (13, 3), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - return tfworker.commands.base.BaseCommand( - rootc, "test-0001", tf_version_major=13 - ) - - -@pytest.fixture -def gbasec(grootc): - with mock.patch( - "tfworker.commands.base.get_terraform_version", - side_effect=lambda x: (13, 3), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - with mock.patch( - "tfworker.backends.gcs.storage.Client.from_service_account_json" - ): - return tfworker.commands.base.BaseCommand( - grootc, "test-0001", tf_version_major=13 - ) +def mock_cli_options_clean(): + """A mock CLIOptionsClean object with default values""" + mock_clean = MagicMock(spec=CLIOptionsClean) + return mock_clean -@pytest.fixture -def tf_Xcmd(rootc): - return tfworker.commands.terraform.TerraformCommand(rootc, deployment="test-0001") - - -@pytest.fixture -def tf_15cmd(rootc): - with mock.patch( - "tfworker.util.terraform.get_terraform_version", - side_effect=lambda x: (15, 0), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - return tfworker.commands.terraform.TerraformCommand( - rootc, deployment="test-0001", tf_version=(15, 0) - ) +@pytest.fixture(scope="function") +@mock_aws +def mock_authenticators(aws_credentials): + """A mock AuthenticatorsCollection object with default values""" + mock_auth = MagicMock(spec=AuthenticatorsCollection) + mock_auth["aws"].session = boto3.Session() + mock_auth["aws"].backend_session = mock_auth["aws"].session + mock_auth["aws"].bucket = "test-bucket" + mock_auth["aws"].prefix = "prefix" + mock_auth["aws"].region = "us-east-1" + mock_auth["aws"].backend_region = "us-east-1" + return mock_auth -@pytest.fixture -def tf_14cmd(rootc): - with mock.patch( - "tfworker.util.terraform.get_terraform_version", - side_effect=lambda x: (14, 5), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - 
return tfworker.commands.terraform.TerraformCommand( - rootc, deployment="test-0001", tf_version=(14, 5) - ) +@pytest.fixture(scope="function") +@mock_aws +def mock_authenticators_backend_west(aws_credentials): + """A mock AuthenticatorsCollection object with default values and backend in us-west-2""" + mock_auth = MagicMock(spec=AuthenticatorsCollection) + mock_auth["aws"].session = boto3.Session() + mock_auth["aws"].backend_session = boto3.Session(region_name="us-west-2") + mock_auth["aws"].bucket = "west-test-bucket" + mock_auth["aws"].prefix = "prefix" + mock_auth["aws"].region = "us-east-1" + mock_auth["aws"].backend_region = "us-west-2" + return mock_auth @pytest.fixture -def tf_13cmd(rootc): - with mock.patch( - "tfworker.util.terraform.get_terraform_version", - side_effect=lambda x: (13, 5), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - return tfworker.commands.terraform.TerraformCommand( - rootc, deployment="test-0001", tf_version=(13, 5) - ) +def mock_loaded_config(): + """A mock ConfigFile object with default values""" + mock_config = MagicMock(spec=ConfigFile) + return mock_config -@pytest.fixture -def tf_12cmd(rootc): - with mock.patch( - "tfworker.util.terraform.get_terraform_version", - side_effect=lambda x: (12, 27), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - return tfworker.commands.terraform.TerraformCommand( - rootc, deployment="test-0001", tf_version=(12, 27) - ) +@pytest.fixture(scope="function") +def mock_app_state( + mock_cli_options_root, + mock_cli_options_clean, + mock_cli_options_terraform, + mock_loaded_config, + mock_authenticators, + tmpdir, +): + """A mock AppState object with default values""" + mock_state = MagicMock(spec=AppState) + mock_state.authenticators = mock_authenticators + mock_state.root_options = mock_cli_options_root + mock_state.deployment = "test-deployment" + mock_state.clean_options = mock_cli_options_clean + mock_state.terraform_options = mock_cli_options_terraform + mock_state.loaded_config = mock_loaded_config + mock_state.loaded_config.providers = {} + mock_state.loaded_config.definitions = {} + mock_state.loaded_config.handlers = {} + mock_state.loaded_config.worker_options = {} + mock_state.loaded_config.global_vars = GlobalVars() + mock_state.working_dir = str(tmpdir) + + return mock_state -@pytest.fixture -def tf_13cmd_options(rootc_options): - with mock.patch( - "tfworker.util.terraform.get_terraform_version", - side_effect=lambda x: (13, 5), - ): - with mock.patch( - "tfworker.commands.base.which", - side_effect=lambda x: "/usr/local/bin/terraform", - ): - with mock.patch( - "tfworker.backends.gcs.storage.Client.from_service_account_json" - ): - return tfworker.commands.terraform.TerraformCommand( - rootc_options, - deployment="test-0001-options", - tf_version=(13, 5), - b64_encode=False, - ) +@pytest.fixture(scope="function") +def mock_app_state_backend_west( + mock_cli_options_root_backend_west, + mock_cli_options_terraform, + mock_cli_options_clean, + mock_loaded_config, + mock_authenticators, + tmpdir, +): + """A mock AppState object with default values and backend in us-west-2""" + mock_state = MagicMock(spec=AppState) + mock_state.authenticators = mock_authenticators + mock_state.root_options = mock_cli_options_root_backend_west + mock_state.clean_options = mock_cli_options_clean + mock_state.terraform_options = mock_cli_options_terraform + mock_state.deployment = "test-deployment" 
+ mock_state.loaded_config = mock_loaded_config + mock_state.loaded_config.providers = {} + mock_state.loaded_config.definitions = {} + mock_state.loaded_config.handlers = {} + mock_state.working_dir = str(tmpdir) + return mock_state -@pytest.fixture -def definition_odict(): - one_def = { - "test": dict( - { - "path": "/test", - "remote_vars": {"a": 1, "b": "two"}, - "terraform_vars": {"c": 3, "d": "four"}, - "template_vars": {"e": 5, "f": "six"}, - } - ) - } - return dict(one_def) +@pytest.fixture(scope="function") +def mock_click_context(mock_app_state): + """A mock click context object with default values""" + ctx = MagicMock(spec=click.Context) + ctx.obj = mock_app_state + ctx.exit = MagicMock(side_effect=sys.exit) + return ctx -@pytest.fixture -def test_config_file(): - return os.path.join(os.path.dirname(__file__), "fixtures", "test_config.yaml") +@pytest.fixture(scope="function") +def mock_click_context_backend_west(mock_app_state_backend_west): + """A mock click context object with default values and backend in us-west-2""" + ctx = MagicMock(spec=click.Context) + ctx.obj = mock_app_state_backend_west + ctx.exit = MagicMock(side_effect=sys.exit) + return ctx + + +@pytest.fixture(autouse=True) +def setup_method(mocker, mock_click_context): + """A fixture to setup the click context which is used throughout""" + mocker.patch("click.get_current_context", return_value=mock_click_context) diff --git a/tests/copier/test_copier_factory.py b/tests/copier/test_copier_factory.py new file mode 100644 index 0000000..64454aa --- /dev/null +++ b/tests/copier/test_copier_factory.py @@ -0,0 +1,183 @@ +import os +import platform +import tempfile +from unittest.mock import patch + +import pytest + +from tfworker.copier.factory import Copier, CopyFactory + +C_CONFLICTS = ["test.txt", "foo", "test.tf"] +C_SOURCE = "test_source" +if platform.system() == "Darwin": + C_ROOT_PATH = "/private/tmp/test" +else: + C_ROOT_PATH = "/tmp/test/" + + +@pytest.fixture(scope="session") +def register_test_copier(): + @CopyFactory.register("testfixture") + class TestCopierFixture(Copier): + @staticmethod + def type_match(source: str) -> bool: + if source == "test": + return True + else: + return False + + def copy(self) -> bool: + return True + + +@pytest.fixture +@patch.multiple(Copier, __abstractmethods__=set()) +def cwp(tmp_path): + c = Copier( + source=C_SOURCE, + root_path=C_ROOT_PATH, + destination=f"{str(tmp_path)}", + conflicts=C_CONFLICTS, + arbitrary="value", + ) + return c + + +@pytest.fixture +@patch.multiple(Copier, __abstractmethods__=set()) +def copier(): + c = Copier(source=C_SOURCE) + return c + + +@pytest.mark.usefixtures("register_test_copier") +class TestCopierFactory: + """tests for the copier factory""" + + def test_register(self): + """test that copiers can register themselves""" + start_len = len(CopyFactory.registry) + + @CopyFactory.register("test_copier") + class TestCopier(Copier): + pass + + assert len(CopyFactory.registry) == start_len + 1 + assert "test_copier" in CopyFactory.registry.keys() + + with pytest.raises(ValueError): + + @CopyFactory.register("test_copier") + class TestCopier2(Copier): + pass + + def test_get_copier_type(self): + """test that get copier type functions for the test copier""" + assert CopyFactory.get_copier_type("test") == "testfixture" + + with pytest.raises(NotImplementedError): + CopyFactory.get_copier_type("invalid") + + def test_create_copier(self): + """test that the proper object is returned given the test copier source""" + assert 
type(CopyFactory.create("test")).__name__ == "TestCopierFixture" + + +class TestCopier: + """tests for the base Copier class""" + + @patch.multiple(Copier, __abstractmethods__=set()) + def test_constructor(self, tmp_path, copier, cwp): + """test that the copiers have expected properties""" + assert copier._source == C_SOURCE + assert not hasattr(copier, "_root_path") + assert not hasattr(copier, "_destination") + assert not hasattr(copier, "_conflicts") + assert len(copier._kwargs) == 0 + + assert cwp._source == C_SOURCE + assert cwp._root_path == C_ROOT_PATH + assert cwp._destination == str(tmp_path) + assert cwp._conflicts == C_CONFLICTS + assert cwp._kwargs["arbitrary"] == "value" + + with pytest.raises(ValueError): + Copier(source="test_source", conflicts="bad_value") + + def test_source(self, copier, cwp): + """test the source property""" + assert copier.source == C_SOURCE + assert cwp.source == C_SOURCE + + def test_root_path(self, copier, cwp): + """test that root path always returns a string for all copiers""" + assert cwp.root_path == C_ROOT_PATH + assert copier.root_path == "" + + def test_conflicts(self, copier, cwp): + """test to ensure conflicts property always returns a list, with contents depending on copier params""" + assert copier.conflicts == [] + assert cwp.conflicts == C_CONFLICTS + + def test_check_conflicts(self, request, copier, cwp): + """test the behavior of checking conflicts""" + + with pytest.raises(FileExistsError): + cwp.check_conflicts( + f"{request.config.rootdir}/tests/fixtures/definitions/test_a" + ) + + assert ( + cwp.check_conflicts( + f"{request.config.rootdir}/tests/fixtures/definitions/test_c" + ) + is None + ) + assert ( + copier.check_conflicts( + f"{request.config.rootdir}/tests/fixtures/definitions/test_c" + ) + is None + ) + assert ( + copier.check_conflicts( + f"{request.config.rootdir}/tests/fixtures/definitions/test_a" + ) + is None + ) + + def test_get_destination(self, tmp_path, copier): + dpath = f"{str(tmp_path)}/destination_test)" + + # test that get destination raises an error if destination is not set + with pytest.raises(ValueError): + copier.get_destination() + + # test that get destination returns proper directory, and it is not created + setattr(copier, "_destination", dpath) + assert copier.get_destination(make_dir=False) == dpath + assert not os.path.isdir(dpath) + + # test that the destination is created with this optional parameter + assert copier.get_destination(make_dir=True) == dpath + assert os.path.isdir(dpath) + + def test_get_destination_path(self, tmp_path, copier): + """Ensure the destination path is returned properly when destination is set""" + dpath_td = tempfile.TemporaryDirectory() + dpath = dpath_td.name + + # ensure object is in valid state for test + with pytest.raises(AttributeError): + getattr(copier, "_destination") + + assert copier.get_destination(**{"destination": dpath}) == dpath + + # ensure the directory is returned properly when make_dirs is true, and no errors + # are raised when the directory already exists + rpath = copier.get_destination(**{"destination": dpath, "make_dir": True}) + assert rpath == dpath + assert os.path.isdir(rpath) + + # remove the temporary directory + del dpath_td diff --git a/tests/copier/test_copier_fs.py b/tests/copier/test_copier_fs.py new file mode 100644 index 0000000..076f1a2 --- /dev/null +++ b/tests/copier/test_copier_fs.py @@ -0,0 +1,96 @@ +import os +import tempfile + +import pytest + +from tfworker.copier import FileSystemCopier + + +class TestFileSystemCopier: + 
"""Test the FileSystem copier""" + + def test_copy(self, request, tmp_path): + """tests the file system copy method""" + assert not os.path.isfile(f"{str(tmp_path)}/test.tf") + c = FileSystemCopier( + source="/tests/fixtures/definitions/test_a", + root_path=f"{request.config.rootdir}", + destination=f"{str(tmp_path)}", + ) + c.copy() + assert os.path.isfile(f"{str(tmp_path)}/test.tf") + + c = FileSystemCopier( + source="/tests/fixtures/definitions/test_a", + root_path=f"{request.config.rootdir}", + destination=f"{str(tmp_path)}", + ) + + with pytest.raises(FileNotFoundError): + c.copy(sub_path="invalid_path") + + def test_local_path(self): + """tests the local path property""" + + # This is a relative path based on where the worker ran from + source = "tests/fixtures/definitions/test_a" + c = FileSystemCopier(source=source, root_path=os.getcwd()) + assert c.local_path == f"{os.getcwd()}/{source}" + + # This tests resolution of an absolute path + tmpdir = tempfile.TemporaryDirectory() + c = FileSystemCopier(source=tmpdir.name) + assert c.local_path == tmpdir.name + del tmpdir + + # This tests the resolution of an absolute path with a root path + tmpdir = tempfile.TemporaryDirectory() + c = FileSystemCopier(source=tmpdir.name, root_path=os.getcwd()) + assert c.local_path == tmpdir.name + del tmpdir + + # Ensure file not found error is raised on invalid relative path + with pytest.raises(FileNotFoundError): + FileSystemCopier( + source="some/invalid/path", root_path=os.getcwd() + ).local_path + + # Ensure file not found error is raised on invalid absolute path + with pytest.raises(FileNotFoundError): + FileSystemCopier(source="/some/invalid/path").local_path + + def test_type_match(self, request): + source = FileSystemCopier.make_local_path( + source="/tests/fixtures/definitions/test_a", + root_path=f"{request.config.rootdir}", + ) + + # this should return true because the source is a valid directory + assert FileSystemCopier.type_match(source) is True + # this should return false because the full path to source does not exist inside of root_path + assert FileSystemCopier.type_match("/some/invalid/path") is False + # this should return true because the full path to source exists inside of root_path + assert ( + FileSystemCopier.type_match( + "/tests/fixtures/definitions/test_a", + **{"root_path": f"{request.config.rootdir}"}, + ) + is True + ) + # this should return false because the source is not a valid directory + assert FileSystemCopier.type_match("/some/invalid/path") is False + + @pytest.mark.parametrize( + "source, root_path, expected", + [ + ("bar", "/tmp/foo", "/tmp/foo/bar"), + ("/tmp", "", "/tmp"), + ("/bar//", "/tmp/", "/tmp/bar/"), + ("//tmp//", "", "/tmp/"), + ], + ) + def test_make_local_path(self, source, root_path, expected): + assert ( + FileSystemCopier.make_local_path(source=source, root_path=root_path) + == expected + ) diff --git a/tests/copier/test_copier_git.py b/tests/copier/test_copier_git.py new file mode 100644 index 0000000..54d5406 --- /dev/null +++ b/tests/copier/test_copier_git.py @@ -0,0 +1,120 @@ +import os +import platform +import re +import shutil +from typing import Tuple +from unittest import mock + +import pytest + +from tfworker.copier import GitCopier + +C_CONFLICTS = ["test.txt", "foo", "test.tf"] +C_SOURCE = "test_source" +if platform.system() == "Darwin": + C_ROOT_PATH = "/private/tmp/test" +else: + C_ROOT_PATH = "/tmp/test/" + + +def mock_pipe_exec_type_match(cmd: str) -> Tuple[int, str, str]: + """a mock function to return specific results based 
on supplied command""" + tokens = " ".join(cmd.split()).split(" ") + if tokens[1] == "ls-remote": + if tokens[2] == "permissionerror": + raise PermissionError + if tokens[2] == "filenotfounderror": + raise FileNotFoundError + if tokens[2] == "validremote": + return (0, "", "") + if tokens[0] == "/opt/bin/git": + return (0, "", "") + else: + raise NotImplementedError("bad use of mock") + + +def mock_pipe_exec_clone(cmd: str, cwd: str) -> Tuple[int, str, str]: + """a mock function to copy files and imitate a git clone""" + tokens = re.split(r"\s+", cmd) + assert os.path.isdir(tokens[2]) + shutil.copytree(tokens[2], cwd, dirs_exist_ok=True) + return (0, "", "") + + +class TestGitCopier: + """test the GitCopier copier""" + + def test_copy(self, request, tmp_path): + with mock.patch( + "tfworker.copier.git_copier.pipe_exec", side_effect=mock_pipe_exec_clone + ) as mocked: + """test a failing condition, conflicting files, no branch so check clone called with master""" + dpath = f"{str(tmp_path)}/destination" + spath = f"{request.config.rootdir}/tests/fixtures/definitions/test_a" + c = GitCopier(source=spath, destination=dpath, conflicts=C_CONFLICTS) + with pytest.raises(FileExistsError): + c.copy() + + assert ( + mocked.call_args.args[0] + == f"git clone {spath} --branch master --single-branch ./" + ) + + """ test a succeeding condition, extra options passed """ + spath = f"{request.config.rootdir}/tests/fixtures/definitions" + c = GitCopier(source=spath, destination=dpath, conflicts=[]) + c.copy( + branch="foo", + sub_path="test_a", + git_cmd="git", + git_args="", + reset_repo=True, + ) + assert ( + mocked.call_args.args[0] + == f"git clone {spath} --branch foo --single-branch ./" + ) + assert os.path.isfile(f"{dpath}/test.tf") + + def test_type_match(self): + """tests to ensure the various git cases return properly""" + with mock.patch( + "tfworker.copier.git_copier.pipe_exec", + side_effect=mock_pipe_exec_type_match, + ) as mocked: + result = GitCopier.type_match("permissionerror") + assert result is False + mocked.assert_called_with("git ls-remote permissionerror") + + result = GitCopier.type_match("filenotfounderror") + assert result is False + mocked.assert_called_with("git ls-remote filenotfounderror") + + result = GitCopier.type_match( + "string_inspect", git_cmd="/opt/bin/git", git_args="--bar" + ) + assert result is True + mocked.assert_called_with("/opt/bin/git --bar ls-remote string_inspect") + + def test_make_and_clean_temp(self): + """tests making the temporary directory for git clones""" + c = GitCopier("test_source") + + # ensure that the temp directory is created and attributes are set + c.make_temp() + assert hasattr(c, "_temp_dir") + temp_dir = c._temp_dir + assert os.path.isdir(temp_dir) + assert hasattr(c, "_temp_dir") + + # ensure that the function is idempotent + c.make_temp() + # ensure that the temp directory is the same + assert temp_dir == c._temp_dir + assert os.path.isdir(c._temp_dir) + assert hasattr(c, "_temp_dir") + + # ensure that the temp directory is removed + c.clean_temp() + assert not os.path.isdir(temp_dir) + assert not hasattr(c, "_temp_dir") diff --git a/tests/definitions/test_definitions_collection.py b/tests/definitions/test_definitions_collection.py new file mode 100644 index 0000000..4d5defb --- /dev/null +++ b/tests/definitions/test_definitions_collection.py @@ -0,0 +1,84 @@ +import pytest + +from tfworker.definitions import Definition, DefinitionsCollection +from tfworker.exceptions import FrozenInstanceError + +mock_definitions = { + "def1": 
{"path": "path1"}, + "def2": {"path": "path2"}, +} + + +class TestDefinitionsCollection: + def teardown_method(self): + DefinitionsCollection.reset() + + def test_init(self): + def1 = Definition(name="def1", path="path1") + def2 = Definition(name="def2", path="path2") + definitions_collection = DefinitionsCollection(mock_definitions) + assert definitions_collection._definitions == {"def1": def1, "def2": def2} + + def test_init_with_comma_in_definition_name(self): + with pytest.raises(ValueError): + DefinitionsCollection({"def,1": {"path": "path1"}}) + + def test_init_is_singleton(self): + definitions_collection1 = DefinitionsCollection(mock_definitions) + definitions_collection2 = DefinitionsCollection(mock_definitions) + assert definitions_collection1 is definitions_collection2 + + def test_init_bad_definition(self): + with pytest.raises(SystemExit): + DefinitionsCollection({"def1": {"path": "path1", "bad_key": "bad_value"}}) + + def test_init_with_limiter(self): + definitions_collection = DefinitionsCollection( + mock_definitions, limiter=["def1"] + ) + assert definitions_collection._definitions == { + "def1": Definition(name="def1", path="path1") + } + + def test_init_with_limiter_always_include_definition(self): + definitions_collection = DefinitionsCollection( + { + "def1": {"path": "path1", "always_include": True}, + "def2": { + "path": "path2", + }, + }, + limiter=["def2"], + ) + assert definitions_collection._definitions.keys() == {"def1", "def2"} + + def test_get(self): + definitions_collection = DefinitionsCollection(mock_definitions) + assert definitions_collection.get("def1") == Definition( + name="def1", path="path1" + ) + + def test_get_not_found(self): + definitions_collection = DefinitionsCollection(mock_definitions) + assert definitions_collection.get("def3") is None + + def test_len(self): + definitions_collection = DefinitionsCollection(mock_definitions) + assert len(definitions_collection) == 2 + + def test_iter(self): + definitions_collection = DefinitionsCollection(mock_definitions) + assert list(definitions_collection) == ["def1", "def2"] + + def test_setitem(self): + definitions_collection = DefinitionsCollection(mock_definitions) + definitions_collection["def3"] = Definition(name="def3", path="path3") + assert definitions_collection.get("def3") == Definition( + name="def3", path="path3" + ) + + def test_setitem_frozen(self): + definitions_collection = DefinitionsCollection(mock_definitions) + definitions_collection.freeze() + with pytest.raises(FrozenInstanceError): + definitions_collection["def3"] = Definition(name="def3", path="path3") diff --git a/tests/definitions/test_definitions_model.py b/tests/definitions/test_definitions_model.py new file mode 100644 index 0000000..3aa7ebf --- /dev/null +++ b/tests/definitions/test_definitions_model.py @@ -0,0 +1,207 @@ +import pytest + +from tfworker.definitions.model import Definition, DefinitionRemoteOptions + + +def mock_definition(): + return { + "name": "test", + "path": "test", + } + + +@pytest.fixture +def mock_global_vars(): + return { + "terraform_vars": { + "global_tf_1": "global1", + "global_tf_2": "global2", + }, + "remote_vars": { + "global_remote_1": "global1", + "global_remote_2": "global2", + }, + "template_vars": { + "global_template_1": "global1", + "global_template_2": "global2", + }, + } + + +class TestDefinitionModel: + def test_definition_model(self): + testdef = Definition(**mock_definition()) + assert testdef.name == "test" + assert testdef.path == "test" + assert testdef.ready is False + assert 
testdef.needs_apply is False + assert testdef.plan_file is None + + def test_definition_path(self, tmp_path): + testdef = Definition(**mock_definition()) + assert testdef.get_target_path(tmp_path) == tmp_path / "definitions" / "test" + + def test_definition_template_vars(self): + testdef = Definition(**mock_definition()) + testdef.template_vars = {"test": "test"} + assert testdef.get_template_vars({}) == {"test": "test"} + + def test_definition_template_vars_with_globals(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.template_vars = {"test": "test"} + expected_result = {**mock_global_vars["template_vars"], "test": "test"} + assert ( + testdef.get_template_vars(mock_global_vars["template_vars"]) + == expected_result + ) + + def test_definition_template_vars_ignore_global_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignore_global_vars = True + testdef.template_vars = {"test": "test"} + assert testdef.get_template_vars(mock_global_vars["template_vars"]) == { + "test": "test" + } + + def test_definition_template_vars_ignore_template_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignored_global_template_vars = ["global_template_1"] + testdef.template_vars = {"test": "test"} + assert testdef.get_template_vars(mock_global_vars["template_vars"]) == { + "test": "test", + "global_template_2": "global2", + } + + def test_definition_template_vars_use_global_template_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.use_global_template_vars = ["global_template_1"] + testdef.template_vars = {"test": "test"} + assert testdef.get_template_vars(mock_global_vars["template_vars"]) == { + "test": "test", + "global_template_1": "global1", + } + + def test_definition_template_vars_precedence(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.template_vars = {"global_template_1": "test"} + assert testdef.get_template_vars(mock_global_vars["template_vars"]) == { + "global_template_2": "global2", + "global_template_1": "test", + } + + def test_definition_remote_vars(self): + testdef = Definition(**mock_definition()) + testdef.remote_vars = {"test": "test"} + assert testdef.get_remote_vars({"test": "test"}) == {"test": "test"} + + def test_definition_remote_vars_with_globals(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.remote_vars = {"test": "test"} + expected_result = {**mock_global_vars["remote_vars"], "test": "test"} + assert ( + testdef.get_remote_vars(mock_global_vars["remote_vars"]) == expected_result + ) + + def test_definition_remote_vars_ignore_global_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignore_global_vars = True + testdef.remote_vars = {"test": "test"} + assert testdef.get_remote_vars(mock_global_vars["remote_vars"]) == { + "test": "test" + } + + def test_definition_remote_vars_ignore_remote_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignored_global_remote_vars = ["global_remote_1"] + testdef.remote_vars = {"test": "test"} + assert testdef.get_remote_vars(mock_global_vars["remote_vars"]) == { + "test": "test", + "global_remote_2": "global2", + } + + def test_definition_remote_vars_use_global_remote_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.use_global_remote_vars = ["global_remote_1"] + testdef.remote_vars = {"test": "test"} + assert 
testdef.get_remote_vars(mock_global_vars["remote_vars"]) == { + "test": "test", + "global_remote_1": "global1", + } + + def test_definition_remote_vars_precedence(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.remote_vars = {"global_remote_1": "test"} + assert testdef.get_remote_vars(mock_global_vars["remote_vars"]) == { + "global_remote_2": "global2", + "global_remote_1": "test", + } + + def test_definition_terraform_vars(self): + testdef = Definition(**mock_definition()) + testdef.terraform_vars = {"test": "test"} + assert testdef.get_terraform_vars({}) == {"test": "test"} + + def test_definition_terraform_vars_with_globals(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.terraform_vars = {"test": "test"} + expected_result = {**mock_global_vars["terraform_vars"], "test": "test"} + assert ( + testdef.get_terraform_vars(mock_global_vars["terraform_vars"]) + == expected_result + ) + + def test_definition_terraform_vars_ignore_global_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignore_global_vars = True + testdef.terraform_vars = {"test": "test"} + assert testdef.get_terraform_vars(mock_global_vars["terraform_vars"]) == { + "test": "test" + } + + def test_definition_terraform_vars_ignore_terraform_vars(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.ignored_global_terraform_vars = ["global_tf_1"] + testdef.terraform_vars = {"test": "test"} + assert testdef.get_terraform_vars(mock_global_vars["terraform_vars"]) == { + "test": "test", + "global_tf_2": "global2", + } + + def test_definition_terraform_vars_use_global_terraform_vars( + self, mock_global_vars + ): + testdef = Definition(**mock_definition()) + testdef.use_global_terraform_vars = ["global_tf_1"] + testdef.terraform_vars = {"test": "test"} + assert testdef.get_terraform_vars(mock_global_vars["terraform_vars"]) == { + "test": "test", + "global_tf_1": "global1", + } + + def test_definition_terraform_vars_precedence(self, mock_global_vars): + testdef = Definition(**mock_definition()) + testdef.terraform_vars = {"global_tf_1": "test"} + assert testdef.get_terraform_vars(mock_global_vars["terraform_vars"]) == { + "global_tf_2": "global2", + "global_tf_1": "test", + } + + def test_definition_remote_options(self): + testdef = DefinitionRemoteOptions(branch="test") + assert testdef.branch == "test" + assert testdef.sub_path is None + + def test_get_used_providers(self, mocker): + mocker.patch( + "tfworker.util.terraform.find_required_providers", return_value={"aws": ""} + ) + testdef = Definition(**mock_definition()) + assert testdef.get_used_providers("working_dir") == ["aws"] + + def test_get_used_providers_no_providers(self, mocker): + mocker.patch( + "tfworker.util.terraform.find_required_providers", + side_effect=AttributeError, + ) + testdef = Definition(**mock_definition()) + assert testdef.get_used_providers("working_dir_two") is None diff --git a/tests/definitions/test_definitions_plan.py b/tests/definitions/test_definitions_plan.py new file mode 100644 index 0000000..90a3c5e --- /dev/null +++ b/tests/definitions/test_definitions_plan.py @@ -0,0 +1,77 @@ +from pathlib import Path + +from tfworker.definitions.model import Definition +from tfworker.definitions.plan import DefinitionPlan +from tfworker.types import TerraformAction + +mock_definition = Definition(name="def1", path="./path") + + +class TestDefinitionsPlan: + def test_plan_init(self, mock_click_context, mock_app_state): + dp = 
DefinitionPlan(mock_click_context, mock_app_state) + assert dp._ctx == mock_click_context + assert dp._app_state == mock_app_state + + def test_plan_for_apply(self, mock_app_state): + dp = DefinitionPlan(None, mock_app_state) + assert dp.plan_for == TerraformAction.APPLY + + def test_plan_for_destroy(self, mock_app_state): + mock_app_state.terraform_options.apply = False + mock_app_state.terraform_options.destroy = True + + dp = DefinitionPlan(None, mock_app_state) + assert dp.plan_for == TerraformAction.DESTROY + + def test_set_plan_file(self, mock_click_context, mock_app_state): + dp = DefinitionPlan(mock_click_context, mock_app_state) + dp.set_plan_file(mock_definition) + assert mock_definition.plan_file == Path( + f"{mock_app_state.working_dir}/plans/{mock_definition.name}.tfplan" + ) + + def test_set_plan_file_custom_path( + self, mock_click_context, mock_app_state, tmpdir + ): + mock_app_state.terraform_options.plan_file_path = str(tmpdir) + dp = DefinitionPlan(mock_click_context, mock_app_state) + dp.set_plan_file(mock_definition) + assert mock_definition.plan_file == Path( + f"{str(tmpdir)}/{mock_app_state.deployment}/{mock_definition.name}.tfplan" + ) + assert mock_definition.plan_file.parent.exists() + + def test_needs_plan(self, mock_click_context, mock_app_state): + dp = DefinitionPlan(mock_click_context, mock_app_state) + assert dp.needs_plan(mock_definition)[0] is True + assert dp.needs_plan(mock_definition)[1].startswith("no saved") + + def test_needs_plan_empty_file(self, mock_click_context, mock_app_state): + mock_app_state.root_options.backend_plans = True + dp = DefinitionPlan(mock_click_context, mock_app_state) + dp.set_plan_file(mock_definition) + mock_definition.plan_file.touch() + result = dp.needs_plan(mock_definition) + assert result[0] is True + assert result[1].startswith("empty") + assert not mock_definition.plan_file.exists() + + def test_needs_plan_existing_file(self, mock_click_context, mock_app_state): + mock_app_state.root_options.backend_plans = True + dp = DefinitionPlan(mock_click_context, mock_app_state) + dp.set_plan_file(mock_definition) + mock_definition.plan_file.write_text("test") + result = dp.needs_plan(mock_definition) + assert result[0] is False + assert result[1].startswith("plan file exists") + assert mock_definition.plan_file.exists() + + def test_needs_plan_existing_file_no_file(self, mock_click_context, mock_app_state): + mock_app_state.root_options.backend_plans = True + dp = DefinitionPlan(mock_click_context, mock_app_state) + dp.set_plan_file(mock_definition) + result = dp.needs_plan(mock_definition) + assert result[0] is True + assert result[1].startswith("no plan file") + assert not mock_definition.plan_file.exists() diff --git a/tests/definitions/test_definitions_prepare.py b/tests/definitions/test_definitions_prepare.py new file mode 100644 index 0000000..a5aedb2 --- /dev/null +++ b/tests/definitions/test_definitions_prepare.py @@ -0,0 +1,64 @@ +import pytest + +from tfworker.definitions.prepare import DefinitionPrepare, get_coppier, copy, get_jinja_env, write_template_file, filter_templates, vars_typer +from tfworker.definitions import Definition, DefinitionsCollection +from tfworker.exceptions import TFWorkerException, ReservedFileError + +@pytest.fixture +def mock_definition(): + return Definition(name="def1", path="./path") + +@pytest.fixture +def def_prepare(mock_app_state): + definitions = DefinitionsCollection({'def1': {'path': './path'}}) + mock_app_state.definitions = definitions + 
mock_app_state.root_options.repository_path = "." +    return DefinitionPrepare(mock_app_state) + + +class TestDefinitionPrepareCopyFiles: +    def test_copy_files(self, mocker, def_prepare, mock_definition): +        """make sure copy_files makes the right calls""" +        mock_get_copier = mocker.patch('tfworker.definitions.prepare.get_coppier') +        mock_copy = mocker.patch('tfworker.definitions.prepare.copy') +        def_prepare.copy_files(mock_definition.name) +        mock_get_copier.assert_called_once_with(mock_definition.path, def_prepare._app_state.root_options.repository_path) +        mock_copy.assert_called_once_with( +            copier=mock_get_copier.return_value, +            destination=mock_definition.get_target_path(def_prepare._app_state.working_dir), +            options=mock_definition.remote_path_options.model_dump()) + +    def test_copy_files_no_copier(self, mocker, def_prepare, mock_definition): +        """make sure copy_files raises an exception if no copier is found""" +        mocker.patch('tfworker.definitions.prepare.get_coppier', side_effect=NotImplementedError()) +        with pytest.raises(TFWorkerException): +            def_prepare.copy_files(mock_definition.name) + +    def test_copy_files_reserved_file_error(self, mocker, def_prepare, mock_definition): +        """make sure copy_files raises an exception if a reserved file is found""" +        mocker.patch('tfworker.definitions.prepare.get_coppier') +        mocker.patch('tfworker.definitions.prepare.copy', side_effect=ReservedFileError()) +        with pytest.raises(TFWorkerException): +            def_prepare.copy_files(mock_definition.name) + + +class TestDefinitionPrepareWriteTemplates: +    def test_render_templates(self, mocker, def_prepare, mock_definition): +        """make sure render_templates makes the right calls""" +        mock_get_jinja_env = mocker.patch('tfworker.definitions.prepare.get_jinja_env') +        mock_jinja_env = mocker.MagicMock() +        mock_jinja_env.list_templates.return_value = ['template1.tf'] +        mock_get_jinja_env.return_value = mock_jinja_env +        mock_write_template_file = mocker.patch('tfworker.definitions.prepare.write_template_file') +        mock_get_template_vars = mocker.patch.object(def_prepare, '_get_template_vars', return_value={}) +        target_path = mock_definition.get_target_path(def_prepare._app_state.working_dir) +        def_prepare.render_templates(mock_definition.name) +        mock_get_template_vars.assert_called_once_with(mock_definition.name) +        mock_get_jinja_env.assert_called_once_with(template_path=target_path, jinja_globals=mock_get_template_vars.return_value) +        mock_write_template_file.assert_called_once_with( +            jinja_env=mock_get_jinja_env(), +            template_path=mock_definition.get_target_path(def_prepare._app_state.working_dir), +            template_file="template1.tf",) + + +class TestDefinitionPrepareCreateLocalVars: +    def test_create_local_vars(self, mocker): +        """make sure the local vars file is created with expected content""" diff --git a/tests/handlers/test_exceptions.py b/tests/handlers/test_exceptions.py deleted file mode 100644 index 435e3d3..0000000 --- a/tests/handlers/test_exceptions.py +++ /dev/null @@ -1,32 +0,0 @@ -from tfworker.handlers.exceptions import HandlerError, UnknownHandler - - -def test_handler_error(): -    error_message = "This is a test error message" -    terminate = True - -    error = HandlerError(error_message, terminate) - -    assert error.message == error_message -    assert error.terminate == terminate -    assert str(error) == f"Handler error: {error_message}" - - -def test_handler_error_no_terminate(): -    error_message = "This is a test error message" -    terminate = False - -    error = HandlerError(error_message, terminate) - -    assert 
error.message == error_message - assert error.terminate == terminate - assert str(error) == f"Handler error: {error_message}" - - -def test_unknown_handler(): - provider = "aws" - - error = UnknownHandler(provider) - - assert error.provider == provider - assert str(error) == f"Unknown handler: {provider}" diff --git a/tests/handlers/test_trivy.py b/tests/handlers/test_trivy.py deleted file mode 100644 index ac8a87f..0000000 --- a/tests/handlers/test_trivy.py +++ /dev/null @@ -1,309 +0,0 @@ -import unittest -from pathlib import Path -from unittest.mock import MagicMock, call, patch - -import pytest - -from tfworker.handlers.exceptions import HandlerError -from tfworker.handlers.trivy import TrivyHandler - - -class TestTrivyHandlerTrivyRunnable(unittest.TestCase): - def test_trivy_not_runnable(self): - with self.assertRaises(HandlerError): - TrivyHandler({"path": "/path/to/trivy"}) - - @patch("os.path.exists") - @patch("os.access") - def test_trivy_runnable(self, mock_access, mock_exists): - mock_exists.return_value = True - mock_access.return_value = True - self.assertTrue(TrivyHandler._trivy_runable("/path/to/trivy")) - - @patch("os.path.exists") - def test_trivy_not_runnable_no_exists(self, mock_exists): - mock_exists.return_value = False - self.assertFalse(TrivyHandler._trivy_runable("/path/to/trivy")) - - @patch("os.path.exists") - @patch("os.access") - def test_trivy_not_runnable_no_access(self, mock_access, mock_exists): - mock_exists.return_value = True - mock_access.return_value = False - self.assertFalse(TrivyHandler._trivy_runable("/path/to/trivy")) - - -class TestTrivyHandlerExecute(unittest.TestCase): - @pytest.fixture(autouse=True) - def trivy_runnable_patcher(self): - patcher = patch("tfworker.handlers.trivy.TrivyHandler._trivy_runable") - mock_trivy_runable = patcher.start() - mock_trivy_runable.return_value = True - yield mock_trivy_runable - - def test__raise_if_not_ready(self): - handler = TrivyHandler({}) - handler._ready = False - with self.assertRaises(HandlerError): - handler.execute("plan", "pre") - - def test_execute_pre_plan_without_definition_path(self): - handler = TrivyHandler({}) - with self.assertRaises(HandlerError): - handler.execute("plan", "pre") - - @patch("tfworker.handlers.trivy.click") - def test_execute_pre_plan_skip_definition(self, mock_click): - handler = TrivyHandler({"skip_definition": True}) - handler._trivy_runable = MagicMock(return_value=True) - handler.is_ready = MagicMock(return_value=True) - handler.execute("plan", "pre", definition_path="/path/to/definition") - mock_click.secho.assert_called_with( - "Skipping trivy scan of definition", fg="yellow" - ) - - @patch("tfworker.handlers.trivy.click") - def test_execute_pre_plan_scan_definition(self, mock_click): - handler = TrivyHandler({}) - handler.is_ready = MagicMock(return_value=True) - handler._scan = MagicMock() - handler.execute("plan", "pre", definition_path="/path/to/definition") - mock_click.secho.assert_called_with( - "scanning definition with trivy: /path/to/definition", fg="green" - ) - handler._scan.assert_called_with("/path/to/definition") - - def test_execute_post_plan_without_planfile(self): - handler = TrivyHandler({}) - with self.assertRaises(HandlerError): - handler.execute("plan", "post", changes=True) - - def test_execute_post_plan_without_definition_path(self): - handler = TrivyHandler({}) - with self.assertRaises(HandlerError): - handler.execute("plan", "post", planfile="/path/to/planfile", changes=True) - - @patch("tfworker.handlers.trivy.click") - def 
test_execute_post_plan_skip_planfile(self, mock_click): - handler = TrivyHandler({"skip_planfile": True}) - handler.is_ready = MagicMock(return_value=True) - handler.execute( - "plan", - "post", - planfile="/path/to/planfile", - definition_path="/path/to/definition", - changes=True, - ) - mock_click.secho.assert_called_with( - "Skipping trivy scan of planfile", fg="yellow" - ) - - @patch("tfworker.handlers.trivy.click") - def test_execute_post_plan_scan_planfile(self, mock_click): - handler = TrivyHandler({}) - handler.is_ready = MagicMock(return_value=True) - handler._scan = MagicMock() - handler.execute( - "plan", - "post", - planfile="/path/to/planfile", - definition_path="/path/to/definition", - changes=True, - ) - mock_click.secho.assert_called_with( - "scanning planfile with trivy: /path/to/planfile", fg="green" - ) - handler._scan.assert_called_with("/path/to/definition", "/path/to/planfile") - - -class TestTrivyHandlerScan(unittest.TestCase): - @pytest.fixture(autouse=True) - def trivy_runnable_patcher(self): - patcher = patch("tfworker.handlers.trivy.TrivyHandler._trivy_runable") - mock_trivy_runable = patcher.start() - mock_trivy_runable.return_value = True - yield mock_trivy_runable - - @patch("tfworker.handlers.trivy.pipe_exec") - @patch("tfworker.handlers.trivy.click") - def test__scan_definition_success_with_defaults(self, mock_click, mock_pipe_exec): - mock_pipe_exec.return_value = (0, "stdout", "stderr") - handler = TrivyHandler({}) - handler._trivy_runable = MagicMock(return_value=True) - handler._handle_results = MagicMock() - handler._scan("/path/to/definition") - mock_pipe_exec.assert_called_with( - "/usr/bin/trivy --quiet fs --scanners misconfig,secret --skip-dirs **/examples --cache-dir /tmp/trivy_cache --severity HIGH,CRITICAL --exit-code 1 .", - stream_output=True, - cwd="/path/to/definition", - ) - handler._handle_results.assert_called_with(0, "stdout", "stderr", None) - - @patch("tfworker.handlers.trivy.pipe_exec") - @patch("tfworker.handlers.trivy.click") - def test__scan_plan_success_with_options(self, mock_click, mock_pipe_exec): - config = { - "path": "/path/to/trivy", - "exit_code": "2", - "skip_dirs": [], - "severity": "CRITICAL", - "cache_dir": "/path/to/cache", - "quiet": False, - "debug": True, - "stream_output": False, - "format": "template", - "template": "template", - "args": {"arg1": "value1", "arg2": "value2"}, - } - - mock_pipe_exec.return_value = (0, "stdout", "stderr") - handler = TrivyHandler(config) - handler._trivy_runable = MagicMock(return_value=True) - handler._handle_results = MagicMock() - handler._scan("/path/to/definition") - mock_pipe_exec.assert_called_with( - "/path/to/trivy --debug fs --scanners misconfig,secret --cache-dir /path/to/cache --severity CRITICAL --exit-code 2 --format template --template template --arg1 value1 --arg2 value2 .", - stream_output=False, - cwd="/path/to/definition", - ) - handler._handle_results.assert_called_with(0, "stdout", "stderr", None) - - @patch("tfworker.handlers.trivy.pipe_exec") - @patch("tfworker.handlers.trivy.click") - def test__scan_planfile_success_with_defaults(self, mock_click, mock_pipe_exec): - mock_pipe_exec.return_value = (0, "stdout", "stderr") - handler = TrivyHandler({}) - handler._trivy_runable = MagicMock(return_value=True) - handler._handle_results = MagicMock() - handler._scan("/path/to/definition", Path("/path/to/planfile")) - mock_pipe_exec.assert_called_with( - "/usr/bin/trivy --quiet config --cache-dir /tmp/trivy_cache --severity HIGH,CRITICAL --exit-code 1 
/path/to/planfile", - stream_output=True, - cwd="/path/to/definition", - ) - handler._handle_results.assert_called_with( - 0, "stdout", "stderr", Path("/path/to/planfile") - ) - - @patch("tfworker.handlers.trivy.pipe_exec") - @patch("tfworker.handlers.trivy.click") - def test__scan_planfile_success_with_options(self, mock_click, mock_pipe_exec): - config = { - "path": "/path/to/trivy", - "exit_code": "2", - "skip_dirs": [], - "severity": "CRITICAL", - "cache_dir": "/path/to/cache", - "quiet": False, - "debug": True, - "stream_output": False, - "format": "template", - "template": "template", - "args": {"arg1": "value1", "arg2": "value2"}, - } - - mock_pipe_exec.return_value = (0, "stdout", "stderr") - handler = TrivyHandler(config) - handler._trivy_runable = MagicMock(return_value=True) - handler._handle_results = MagicMock() - handler._scan("/path/to/definition", Path("/path/to/planfile")) - mock_pipe_exec.assert_called_with( - "/path/to/trivy --debug config --cache-dir /path/to/cache --severity CRITICAL --exit-code 2 --format template --template template --arg1 value1 --arg2 value2 /path/to/planfile", - stream_output=False, - cwd="/path/to/definition", - ) - handler._handle_results.assert_called_with( - 0, "stdout", "stderr", Path("/path/to/planfile") - ) - - @patch("tfworker.handlers.trivy.pipe_exec") - @patch("tfworker.handlers.trivy.click") - def test__scan_failure(self, mock_click, mock_pipe_exec): - mock_pipe_exec.side_effect = Exception("error") - handler = TrivyHandler({}) - handler._trivy_runable = MagicMock(return_value=True) - handler._handle_results = MagicMock() - with self.assertRaises(HandlerError): - handler._scan("/path/to/definition") - handler._handle_results.assert_not_called() - - -class TestTrivyHandlerHandleResults(unittest.TestCase): - @pytest.fixture(autouse=True) - def trivy_runnable_patcher(self): - patcher = patch("tfworker.handlers.trivy.TrivyHandler._trivy_runable") - mock_trivy_runable = patcher.start() - mock_trivy_runable.return_value = True - yield mock_trivy_runable - - @patch("tfworker.handlers.trivy.click") - def test__handle_results_success(self, mock_click): - handler = TrivyHandler({}) - handler._handle_results(0, "stdout".encode(), "stderr".encode(), None) - mock_click.secho.assert_not_called() - - @patch("tfworker.handlers.trivy.click") - def test__handle_results_failure(self, mock_click): - handler = TrivyHandler({}) - handler._handle_results(1, "stdout".encode(), "stderr".encode(), None) - mock_click.secho.assert_called_with( - "trivy scan failed with exit code 1", fg="red" - ) - mock_click.secho.assert_called_once() - - @patch("tfworker.handlers.trivy.click") - @patch("tfworker.handlers.trivy.strip_ansi") - def test__handle_results_failure_stream_output(self, mock_strip_ansi, mock_click): - handler = TrivyHandler({"stream_output": False}) - mock_strip_ansi.side_effect = MagicMock( - side_effect=lambda x: x.decode("UTF-8") if isinstance(x, bytes) else x - ) - handler._handle_results(1, "stdout".encode(), "stderr".encode(), None) - calls = [ - call("trivy scan failed with exit code 1", fg="red"), - call("stdout: stdout", fg="red"), - call("stderr: stderr", fg="red"), - ] - mock_click.secho.assert_has_calls(calls) - - @patch("tfworker.handlers.trivy.click") - def test__handle_results_required(self, mock_click): - handler = TrivyHandler({"required": True}) - with self.assertRaises(HandlerError): - handler._handle_results(1, "stdout".encode(), "stderr".encode(), None) - - @patch("tfworker.handlers.trivy.click") - @patch("os.remove") - def 
test__handle_results_remove_planfile(self, mock_remove, mock_click): - handler = TrivyHandler({"required": True}) - with self.assertRaises(HandlerError): - handler._handle_results( - 1, "stdout".encode(), "stderr".encode(), "/path/to/planfile" - ) - mock_remove.assert_called_with("/path/to/planfile") - - -class TestTrivyHandlerRaiseIfNotReady(unittest.TestCase): - def test__raise_if_not_ready_ready(self): - handler = TrivyHandler({}) - handler._ready = True - result = handler._raise_if_not_ready() - self.assertIsNone(result) - - def test__raise_if_not_ready_not_ready(self): - handler = TrivyHandler({"required": False}) - handler._ready = False - with self.assertRaises(HandlerError) as e: - handler._raise_if_not_ready() - self.assertFalse(e.terminate) - - def test__raise_if_not_ready_required(self): - handler = TrivyHandler({"required": True}) - handler._ready = False - with self.assertRaises(HandlerError) as e: - handler._raise_if_not_ready() - self.assertTrue(e.terminate) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/providers/test_google.py b/tests/providers/test_google.py deleted file mode 100644 index 4f4cca0..0000000 --- a/tests/providers/test_google.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -def test_google_hcl(basec, gcp_creds_file): - render = basec.providers["google"].hcl() - expected_render = f"""provider "google" {{ - region = "us-west-2" - credentials = file("{gcp_creds_file}") -}}""" - - assert render == expected_render diff --git a/tests/providers/test_google_beta.py b/tests/providers/test_google_beta.py deleted file mode 100644 index 7f2806f..0000000 --- a/tests/providers/test_google_beta.py +++ /dev/null @@ -1,8 +0,0 @@ -def test_google_hcl(basec, gcp_creds_file): - render = basec.providers["google-beta"].hcl() - expected_render = f"""provider "google-beta" {{ - region = "us-west-2" - credentials = file("{gcp_creds_file}") -}}""" - - assert render == expected_render diff --git a/tests/test_app_state.py b/tests/test_app_state.py new file mode 100644 index 0000000..ce315e9 --- /dev/null +++ b/tests/test_app_state.py @@ -0,0 +1,35 @@ +import pytest + +from tfworker.app_state import AppState +from tfworker.cli_options import CLIOptionsTerraform +from tfworker.exceptions import FrozenInstanceError + + +class TestAppState: + def test_app_state_model(self): + app_state = AppState() + assert app_state.deployment == "undefined" + assert app_state.model_config == { + "extra": "forbid", + "arbitrary_types_allowed": True, + } + assert app_state.authenticators is None + assert app_state.backend is None + assert app_state.clean_options is None + assert app_state.definitions is None + assert app_state.handlers is None + assert app_state.loaded_config == {} + assert app_state.providers is None + + def test_app_state_freeze(self): + app_state = AppState() + app_state.terraform_options = CLIOptionsTerraform() + app_state.freeze() + + # ensure primary model is frozen + with pytest.raises(FrozenInstanceError): + app_state.deployment = "test" + + # ensure nested models are frozen + with pytest.raises(FrozenInstanceError): + app_state.terraform_options.apply = True diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index bb4fba2..0000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright 2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest.mock import patch - -import pytest -from click.testing import CliRunner - -import tfworker.cli - - -class TestCLI: - def test_validate_deployment_valid(self): - """ensure valid names are returned""" - assert tfworker.cli.validate_deployment(None, None, "test") == "test" - - def test_validate_deployment_invalid_spaces(self, capfd): - """ensure deploys with spaces failed""" - with pytest.raises(SystemExit) as e: - tfworker.cli.validate_deployment(None, None, "test test") - out, err = capfd.readouterr() - assert e.type == SystemExit - assert e.value.code == 1 - assert "not contain spaces" in out - - def test_validate_deployment_invalid_length(self, capfd): - """ensure deploy over 16 chars fail""" - with pytest.raises(SystemExit) as e: - tfworker.cli.validate_deployment( - None, None, "testtesttesttesttesttesttesttesttesttest" - ) - out, err = capfd.readouterr() - assert e.type == SystemExit - assert e.value.code == 1 - assert "32 characters" in out - - def test_validate_host(self): - """only linux and darwin are supported, and require 64 bit platforms""" - with patch("tfworker.cli.get_platform", return_value=("linux", "amd64")): - assert tfworker.cli.validate_host() is True - with patch("tfworker.cli.get_platform", return_value=("darwin", "amd64")): - assert tfworker.cli.validate_host() is True - with patch("tfworker.cli.get_platform", return_value=("darwin", "arm64")): - assert tfworker.cli.validate_host() is True - - def test_validate_host_invalid_machine(self, capfd): - """ensure invalid machine types fail""" - with patch("tfworker.cli.get_platform", return_value=("darwin", "i386")): - with pytest.raises(SystemExit) as e: - tfworker.cli.validate_host() - out, err = capfd.readouterr() - assert e.type == SystemExit - assert e.value.code == 1 - assert "not supported" in out - - def test_validate_host_invalid_os(self, capfd): - """ensure invalid os types fail""" - with patch("tfworker.cli.get_platform", return_value=("windows", "amd64")): - with pytest.raises(SystemExit) as e: - tfworker.cli.validate_host() - out, err = capfd.readouterr() - assert e.type == SystemExit - assert e.value.code == 1 - assert "not supported" in out - - def test_cli_no_params(self): - """ensure cli returns usage with no params""" - from tfworker.cli import cli - - runner = CliRunner() - result = runner.invoke(cli) - assert result.exit_code == 0 - assert "Usage: cli [OPTIONS] COMMAND [ARGS]..." in result.output - - def test_cli_missing_command(self): - """ensure cli returns usage with no command""" - from tfworker.cli import cli - - runner = CliRunner() - result = runner.invoke(cli, ["--config-file", "test"]) - assert result.exit_code == 2 - assert "Missing command" in result.output - - def test_cli_invalid_config(self): - """ensure the CLI fails with an invalid config file""" - # @TODO(ephur): this test demonstrates an issue with how rendering exits - # when it encounters errors, this masks the true error of config - # file not being found. This should be fixed in a future PR. 
- from tfworker.cli import cli - - runner = CliRunner() - result = runner.invoke(cli, ["--config-file", "test", "terraform", "foo"]) - assert result.exit_code == 1 - # the expected result is: configuration file {config_file} not found" the - # exception is being handled in the wrong place - assert "configuration file does not exist" in result.output - - @patch("tfworker.cli.CleanCommand", autospec=True) - def test_cli_clean_command(self, mock_request, test_config_file): - """ensure the CLI clean command executes""" - from tfworker.cli import cli - - runner = CliRunner() - runner.invoke(cli, ["--config-file", test_config_file, "clean", "foo"]) - mock_request.assert_called_once() - assert mock_request.method_calls[0][0] == "().exec" - - @patch("tfworker.cli.VersionCommand", autospec=True) - def test_cli_version_command(self, mock_request, test_config_file): - """ensure the CLI version command executes""" - from tfworker.cli import cli - - runner = CliRunner() - runner.invoke(cli, ["version"]) - mock_request.assert_called_once() - assert mock_request.method_calls[0][0] == "().exec" - - @patch("tfworker.cli.TerraformCommand", autospec=True) - def test_cli_terraform_command(self, mock_request, test_config_file): - """ensure the CLI terraform command executes - @TODO(ephur): This test demonstrates why the CLI skel should only - call the exec method of the command, and not the other methods. - """ - from tfworker.cli import cli - - runner = CliRunner() - result = runner.invoke( - cli, ["--config-file", test_config_file, "terraform", "foo"] - ) - assert result.exit_code == 0 - # the three steps the cli should execute - assert mock_request.method_calls[0][0] == "().exec" - - @patch("tfworker.cli.EnvCommand", autospec=True) - def test_cli_env_command(self, mock_request, test_config_file): - """ensure the CLI env command executes""" - from tfworker.cli import cli - - runner = CliRunner() - runner.invoke(cli, ["--config-file", test_config_file, "env"]) - mock_request.assert_called_once() - assert mock_request.method_calls[0][0] == "().exec" diff --git a/tests/test_cli_options.py b/tests/test_cli_options.py new file mode 100644 index 0000000..dded6be --- /dev/null +++ b/tests/test_cli_options.py @@ -0,0 +1,374 @@ +import pytest + +from tfworker import cli_options as c + + +class TestCLIOptionsRoot: + """ + Tests covering base CLIOptionsRoot model and validation of its attributes + """ + + def test_cli_options_root_model(self): + cli_options = c.CLIOptionsRoot() + assert cli_options is not None + + def test_cli_options_with_invalid_backend(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(backend="invalid_backend") + + def test_cli_options_with_valid_backend_str(self): + from tfworker.backends import Backends + + cli_options = c.CLIOptionsRoot(backend="s3") + assert cli_options.backend == Backends.S3 + + def test_cli_options_with_valid_backend_enum(self): + from tfworker.backends import Backends + + cli_options = c.CLIOptionsRoot(backend=Backends.S3) + assert cli_options.backend == Backends.S3 + + def test_cli_options_with_lower_log_level(self): + cli_options = c.CLIOptionsRoot(log_level="debug") + assert cli_options.log_level == "DEBUG" + + def test_cli_options_with_invalid_log_level(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(log_level="invalid_log_level") + + def test_config_exists(self, tmp_path): + config_file = tmp_path / "config.yaml" + config_file.write_text("config") + cli_options = c.CLIOptionsRoot(config_file=str(config_file)) + assert cli_options.config_file == 
str(config_file) + + def test_config_does_not_exist(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(config_file="config_file") + + def test_config_file_is_dir(self, tmp_path): + config_file = tmp_path / "config.yaml" + config_file.mkdir() + with pytest.raises(ValueError): + c.CLIOptionsRoot(config_file=str(config_file)) + + def test_config_file_is_not_readable(self, tmp_path): + config_file = tmp_path / "config.yaml" + config_file.touch() + config_file.chmod(0o000) + with pytest.raises(ValueError): + c.CLIOptionsRoot(config_file=str(config_file)) + config_file.chmod(0o644) + + def test_gcp_creds_path_exists(self, tmp_path): + gcp_creds_path = tmp_path / "gcp_creds.json" + gcp_creds_path.write_text("gcp_creds") + cli_options = c.CLIOptionsRoot(gcp_creds_path=str(gcp_creds_path)) + assert cli_options.gcp_creds_path == str(gcp_creds_path) + + def test_gcp_creds_path_does_not_exist(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(gcp_creds_path="gcp_creds_path") + + def test_gcp_creds_path_is_dir(self, tmp_path): + gcp_creds_path = tmp_path / "gcp_creds.json" + gcp_creds_path.mkdir() + with pytest.raises(ValueError): + c.CLIOptionsRoot(gcp_creds_path=str(gcp_creds_path)) + + def test_gcp_creds_path_is_none(self): + cli_options = c.CLIOptionsRoot(gcp_creds_path=None) + assert cli_options.gcp_creds_path is None + + def test_validate_backend_prefix_leading_slash(self): + cli_options = c.CLIOptionsRoot(backend_prefix="/backend_prefix") + assert cli_options.backend_prefix == "backend_prefix" + + def test_validate_backend_prefix_trailing_slash(self): + cli_options = c.CLIOptionsRoot(backend_prefix="backend_prefix/") + assert cli_options.backend_prefix == "backend_prefix" + + def test_validate_backend_prefix_leading_and_trailing_slash(self): + cli_options = c.CLIOptionsRoot(backend_prefix="/backend_prefix/") + assert cli_options.backend_prefix == "backend_prefix" + + def test_validate_backend_prefix_double_slashes(self): + cli_options = c.CLIOptionsRoot(backend_prefix="backend//prefix") + assert cli_options.backend_prefix == "backend/prefix" + + def test_repository_path_valid(self, tmp_path): + repository_path = tmp_path + cli_options = c.CLIOptionsRoot(repository_path=str(repository_path)) + assert cli_options.repository_path == str(repository_path) + + def test_repository_path_invalid(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(repository_path="nonexistent_dir") + + def test_repository_path_is_file(self, tmp_path): + repository_path = tmp_path / "file" + repository_path.touch() + with pytest.raises(ValueError): + c.CLIOptionsRoot(repository_path=str(repository_path)) + + def test_repository_path_is_none(self): + with pytest.raises(ValueError): + c.CLIOptionsRoot(repository_path=None) + + def test_repository_path_not_writable(self, tmp_path): + repository_path = tmp_path / "dir" + repository_path.mkdir() + repository_path.chmod(0o444) + with pytest.raises(ValueError): + c.CLIOptionsRoot(repository_path=str(repository_path)) + repository_path.chmod(0o755) + + def test_repository_path_not_readable(self, tmp_path): + repository_path = tmp_path / "dir" + repository_path.mkdir() + repository_path.chmod(0o222) + with pytest.raises(ValueError): + c.CLIOptionsRoot(repository_path=str(repository_path)) + repository_path.chmod(0o755) + + def test_working_dir_valid(self, tmp_path): + working_dir = tmp_path + cli_options = c.CLIOptionsRoot(working_dir=str(working_dir)) + assert cli_options.working_dir == str(working_dir) + + def test_working_dir_invalid(self): + with 
pytest.raises(ValueError): + c.CLIOptionsRoot(working_dir="nonexistent_dir") + + def test_working_dir_is_file(self, tmp_path): + working_dir = tmp_path / "file" + working_dir.touch() + with pytest.raises(ValueError): + c.CLIOptionsRoot(working_dir=str(working_dir)) + + def test_working_dir_is_none(self): + cli_options = c.CLIOptionsRoot(working_dir=None) + assert cli_options.working_dir is None + + def test_working_dir_not_writable(self, tmp_path): + working_dir = tmp_path / "dir" + working_dir.mkdir() + working_dir.chmod(0o444) + with pytest.raises(ValueError): + c.CLIOptionsRoot(working_dir=str(working_dir)) + working_dir.chmod(0o755) + + def test_working_dir_not_readable(self, tmp_path): + working_dir = tmp_path / "dir" + working_dir.mkdir() + working_dir.chmod(0o222) + with pytest.raises(ValueError): + c.CLIOptionsRoot(working_dir=str(working_dir)) + working_dir.chmod(0o755) + + def test_working_dir_not_empty(self, tmp_path): + working_dir = tmp_path / "dir" + working_dir.mkdir() + (working_dir / "file").touch() + with pytest.raises(ValueError): + c.CLIOptionsRoot(working_dir=str(working_dir)) + + +class TestCLIOptionsTerraform: + """ + Tests covering base CLIOptionsTerraform model and validation of its attributes + """ + + def test_cli_options_terraform_model(self): + cli_options = c.CLIOptionsTerraform() + assert cli_options is not None + + def test_validate_apply_and_destroy(self): + with pytest.raises(ValueError): + c.CLIOptionsTerraform(apply=True, destroy=True) + + def test_validate_terraform_bin(self, mocker, mock_click_context): + mocker.patch("shutil.which", return_value="./terraform") + mocker.patch("tfworker.cli_options.os.access", return_value=True) + mocker.patch("tfworker.cli_options.os.path.isfile", return_value=True) + mocker.patch("tfworker.cli_options.os.path.isabs", return_value=True) + mocker.patch("tfworker.cli_options.os.path.exists", return_value=True) + mocker.patch( + "tfworker.cli_options.get_terraform_version", return_value="0.12.0" + ) + mocker.patch("click.get_current_context", return_value=mock_click_context) + cli_options = c.CLIOptionsTerraform(terraform_bin=None) + assert cli_options.terraform_bin == "./terraform" + + def test_validate_terraform_bin_not_found(self): + with pytest.raises(ValueError): + c.CLIOptionsTerraform(terraform_bin="nonexistent_bin") + + def test_validate_terraform_bin_is_none(self, mocker): + mocker.patch("shutil.which", return_value=None) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(terraform_bin=None) + + def test_validate_terraform_bin_not_executable(self, tmp_path): + terraform_bin = tmp_path / "terraform" + terraform_bin.touch() + terraform_bin.chmod(0o644) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(terraform_bin=str(terraform_bin)) + terraform_bin.chmod(0o755) + + def test_validate_provider_cache_valid(self, tmp_path): + provider_cache = tmp_path + cli_options = c.CLIOptionsTerraform(provider_cache=str(provider_cache)) + assert cli_options.provider_cache == str(provider_cache) + + def test_validate_provider_cache_invalid(self): + with pytest.raises(ValueError): + c.CLIOptionsTerraform(provider_cache="nonexistent_dir") + + def test_validate_provider_cache_is_file(self, tmp_path): + provider_cache = tmp_path / "file" + provider_cache.touch() + with pytest.raises(ValueError): + c.CLIOptionsTerraform(provider_cache=str(provider_cache)) + + def test_validate_provider_cache_is_none(self): + cli_options = c.CLIOptionsTerraform(provider_cache=None) + assert cli_options.provider_cache is None + + def 
test_validate_provider_cache_not_writable(self, tmp_path): + provider_cache = tmp_path / "dir" + provider_cache.mkdir() + provider_cache.chmod(0o444) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(provider_cache=str(provider_cache)) + provider_cache.chmod(0o755) + + def test_validate_provider_cache_not_readable(self, tmp_path): + provider_cache = tmp_path / "dir" + provider_cache.mkdir() + provider_cache.chmod(0o222) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(provider_cache=str(provider_cache)) + provider_cache.chmod(0o755) + + def test_validate_plan_file_path_valid(self, tmp_path): + plan_file_path = tmp_path + cli_options = c.CLIOptionsTerraform(plan_file_path=str(plan_file_path)) + assert cli_options.plan_file_path == str(plan_file_path) + + def test_validate_plan_file_path_invalid(self): + with pytest.raises(ValueError): + c.CLIOptionsTerraform(plan_file_path="nonexistent_dir") + + def test_validate_plan_file_path_is_file(self, tmp_path): + plan_file_path = tmp_path / "dir" + plan_file_path.touch() + with pytest.raises(ValueError): + c.CLIOptionsTerraform(plan_file_path=str(plan_file_path)) + + def test_validate_plan_file_path_is_none(self): + cli_options = c.CLIOptionsTerraform(plan_file_path=None) + assert cli_options.plan_file_path is None + + def test_validate_plan_file_path_not_writable(self, tmp_path): + plan_file_path = tmp_path / "dir" + plan_file_path.mkdir() + plan_file_path.chmod(0o444) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(plan_file_path=str(plan_file_path)) + plan_file_path.chmod(0o755) + + def test_validate_plan_file_path_not_readable(self, tmp_path): + plan_file_path = tmp_path / "dir" + plan_file_path.mkdir() + plan_file_path.chmod(0o222) + with pytest.raises(ValueError): + c.CLIOptionsTerraform(plan_file_path=str(plan_file_path)) + plan_file_path.chmod(0o755) + + def test_validate_limit_none(self): + cli_options = c.CLIOptionsTerraform(limit=None) + assert cli_options.limit is None + + def test_validate_limit_list(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + } + cli_options = c.CLIOptionsTerraform(limit=["module1", "module2"]) + assert cli_options.limit == ["module1", "module2"] + + def test_validate_limit_csv(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + } + cli_options = c.CLIOptionsTerraform(limit="module1,module2") + assert cli_options.limit == ["module1", "module2"] + + def test_validate_limit_csv_in_list(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + "module3": {}, + } + cli_options = c.CLIOptionsTerraform(limit=["module1,module2", "module3"]) + assert sorted(cli_options.limit) == sorted(["module1", "module2", "module3"]) + + def test_validate_limit_not_in_config(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = {"module1": {}} + with pytest.raises(ValueError): + c.CLIOptionsTerraform(limit=["module1", "module2"]) + + +class TestCLIOptionsClean: + """ + Tests covering base CLIOptionsClean model and validation of its attributes + """ + + def 
test_cli_options_clean_model(self): + cli_options = c.CLIOptionsClean() + assert cli_options is not None + + def test_validate_limit_none(self): + cli_options = c.CLIOptionsClean(limit=None) + assert cli_options.limit is None + + def test_validate_limit_list(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + } + cli_options = c.CLIOptionsClean(limit=["module1", "module2"]) + assert cli_options.limit == ["module1", "module2"] + + def test_validate_limit_csv(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + } + cli_options = c.CLIOptionsClean(limit="module1,module2") + assert cli_options.limit == ["module1", "module2"] + + def test_validate_limit_csv_in_list(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = { + "module1": {}, + "module2": {}, + "module3": {}, + } + cli_options = c.CLIOptionsClean(limit=["module1,module2", "module3"]) + assert sorted(cli_options.limit) == sorted(["module1", "module2", "module3"]) + + def test_validate_limit_not_in_config(self, mocker, mock_click_context): + mocker.patch("click.get_current_context", return_value=mock_click_context) + mock_click_context.obj.loaded_config.definitions = {"module1": {}} + with pytest.raises(ValueError): + c.CLIOptionsClean(limit=["module1", "module2"]) diff --git a/tests/test_definitions.py b/tests/test_definitions.py deleted file mode 100644 index a53ed45..0000000 --- a/tests/test_definitions.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pytest - -from tfworker.definitions import Definition - -EXPECTED_TEST_BLOCK = """resource "null_resource" "test_a" { - -} -""" - -EXPECTED_TF_BLOCK = """terraform { - backend "s3" { - region = "us-west-2" - bucket = "test_bucket" - key = "terraform/test-0001/test/terraform.tfstate" - dynamodb_table = "terraform-test-0001" - encrypt = "true" - } -}""" - - -EXPECTED_VARS_BLOCK = """vpc_cidr = "10.0.0.0/16" -region = "us-west-2" -deprecated_region = "us-west-2" -domain = "test.domain.com" -deployment = "test-0001" -ip_list = ["127.0.0.1/32", "192.168.0.1/32"] -map_list = {"list": ["a", "b", "c"]} -map_map = {"map": {"list": ["x", "y", "z"]}} -""" - - -class TestDefinitions: - @pytest.mark.parametrize( - "base, expected", - [ - ({"terraform_vars": {"c": 1}}, 3), - ({"miss": {"c": "bad_val"}}, 3), - ({}, 3), - ], - ) - def test_make_vars(self, definition_odict, base, expected): - name = "test" - definition = Definition( - name, - definition_odict[name], - "test_deployment", - {}, - {}, - {}, - None, - "", - "", - 12, - ) - test_vars = definition.make_vars( - definition_odict[name].get("terraform_vars", dict()), - base.get("terraform_vars"), - ) - assert test_vars["c"] == expected - - @pytest.mark.parametrize( - "base, expected, inner", - [ - ("a_test_str", '"a_test_str"', False), - ( - {"key1": "val1", "key2": "val2"}, - '{"key1": "val1", "key2": "val2"}', - False, - ), - ({"key1": "val1", "key2": "val2"}, {"key1": "val1", "key2": "val2"}, True), - (["item1", "item2", "item3"], '["item1", "item2", "item3"]', False), - (["item1", "item2", "item3"], ["item1", "item2", "item3"], True), - ( - {"lkey": ["item1", "item2", "item3"]}, - '{"lkey": ["item1", "item2", "item3"]}', - False, - ), - ( - {"lkey": ["item1", "item2", "item3"]}, - {"lkey": ["item1", "item2", "item3"]}, - True, - ), - ], - ) - def test_var_typer(self, base, expected, inner): - assert Definition.vars_typer(base, inner=inner) == expected diff --git a/tests/types/test_cli_options.py b/tests/types/test_cli_options.py deleted file mode 100644 index 2678198..0000000 --- a/tests/types/test_cli_options.py +++ /dev/null @@ -1,70 +0,0 @@ -from pathlib import Path -from tempfile import NamedTemporaryFile, TemporaryDirectory - -import pytest - -from tfworker.types import CLIOptionsRoot - - -def test_working_dir_validator_with_valid_directory(): - with TemporaryDirectory() as temp_dir: - options = CLIOptionsRoot(working_dir=temp_dir) - assert options.working_dir == temp_dir - - -def test_working_dir_validator_with_non_existent_directory(): - with pytest.raises(ValueError, match=r"Working path .* does not exist!"): - CLIOptionsRoot(working_dir="/non/existent/path") - - -def test_working_dir_validator_with_file_instead_of_directory(): - with TemporaryDirectory() as temp_dir: - file_path = Path(temp_dir) / "file.txt" - file_path.touch() - with pytest.raises(ValueError, match=r"Working path .* is not a directory!"): - CLIOptionsRoot(working_dir=str(file_path)) - - -def test_working_dir_validator_with_non_empty_directory(): - with TemporaryDirectory() as temp_dir: - (Path(temp_dir) / "file.txt").touch() - with pytest.raises(ValueError, match=r"Working path .* must be empty!"): - CLIOptionsRoot(working_dir=temp_dir) - - -def test_clean_validator_with_working_dir_set_and_clean_not_set(): - with TemporaryDirectory() as temp_dir: - options = CLIOptionsRoot(working_dir=temp_dir) - assert options.clean is False - - -def test_clean_validator_with_working_dir_not_set_and_clean_not_set(): - options = CLIOptionsRoot() - assert options.clean is True 
- - -def test_clean_validator_with_working_dir_set_and_clean_set_to_true(): - with TemporaryDirectory() as temp_dir: - options = CLIOptionsRoot(working_dir=temp_dir, clean=True) - assert options.clean is True - - -def test_clean_validator_with_working_dir_not_set_and_clean_set_to_false(): - options = CLIOptionsRoot(clean=False) - assert options.clean is False - - -def test_validate_gcp_creds_path(): - # Test with a non-existing file - with pytest.raises(ValueError, match=r"Path .* is not a file!"): - CLIOptionsRoot(gcp_creds_path="non_existing_file.json") - - # Test with a directory - with pytest.raises(ValueError, match=r"Path .* is not a file!"): - CLIOptionsRoot(gcp_creds_path=".") - - # Test with a valid file - # Create a temporary file for the test - with NamedTemporaryFile() as temp_file: - # The validator should not raise any exception for a valid file - CLIOptionsRoot(gcp_creds_path=temp_file.name) diff --git a/tests/util/test_copier.py b/tests/util/test_copier.py deleted file mode 100644 index a4851e3..0000000 --- a/tests/util/test_copier.py +++ /dev/null @@ -1,387 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import platform -import re -import shutil -import tempfile -from typing import Tuple -from unittest import mock -from unittest.mock import patch - -import pytest - -from tfworker.util.copier import Copier, CopyFactory, FileSystemCopier, GitCopier - -C_CONFLICTS = ["test.txt", "foo", "test.tf"] -C_SOURCE = "test_source" -if platform.system() == "Darwin": - C_ROOT_PATH = "/private/tmp/test" -else: - C_ROOT_PATH = "/tmp/test/" - - -@pytest.fixture(scope="session") -def register_test_copier(): - @CopyFactory.register("testfixture") - class TestCopierFixture(Copier): - @staticmethod - def type_match(source: str) -> bool: - if source == "test": - return True - else: - return False - - def copy(self) -> bool: - return True - - -@pytest.fixture -@patch.multiple(Copier, __abstractmethods__=set()) -def cwp(tmp_path): - c = Copier( - source=C_SOURCE, - root_path=C_ROOT_PATH, - destination=f"{str(tmp_path)}", - conflicts=C_CONFLICTS, - arbitrary="value", - ) - return c - - -@pytest.fixture -@patch.multiple(Copier, __abstractmethods__=set()) -def copier(): - c = Copier(source=C_SOURCE) - return c - - -def mock_pipe_exec_type_match(cmd: str) -> Tuple[int, str, str]: - """a mock function to return specific results based on supplied command""" - tokens = " ".join(cmd.split()).split(" ") - if tokens[1] == "ls-remote": - if tokens[2] == "permissionerror": - raise PermissionError - if tokens[2] == "filenotfounderror": - raise FileNotFoundError - if tokens[2] == "validremote": - return (0, "", "") - if tokens[0] == "/opt/bin/git": - return (0, "", "") - else: - raise NotImplementedError("bad use of mock") - - -def mock_pipe_exec_clone(cmd: str, cwd: str) -> Tuple[int, str, str]: - """a mock function to copy files and imitate a git clone""" - tokens = re.split(r"\s+", cmd) - assert os.path.isdir(tokens[2]) - 
shutil.copytree(tokens[2], cwd, dirs_exist_ok=True) - return (0, "", "") - - -@pytest.mark.usefixtures("register_test_copier") -class TestCopierFactory: - """tests for the copier factory""" - - def test_register(self): - """test that copiers can register themselves""" - start_len = len(CopyFactory.registry) - - @CopyFactory.register("test_copier") - class TestCopier(Copier): - pass - - assert len(CopyFactory.registry) == start_len + 1 - assert "test_copier" in CopyFactory.registry.keys() - - with pytest.raises(ValueError): - - @CopyFactory.register("test_copier") - class TestCopier2(Copier): - pass - - def test_get_copier_type(self): - """test that get copier type functions for the test copier""" - assert CopyFactory.get_copier_type("test") == "testfixture" - - with pytest.raises(NotImplementedError): - CopyFactory.get_copier_type("invalid") - - def test_create_copier(self): - """test that the proper object is returned given the test copier source""" - assert type(CopyFactory.create("test")).__name__ == "TestCopierFixture" - - -class TestCopier: - """tests for the base Copier class""" - - @patch.multiple(Copier, __abstractmethods__=set()) - def test_constructor(self, tmp_path, copier, cwp): - """test that the copiers have expected properties""" - assert copier._source == C_SOURCE - assert not hasattr(copier, "_root_path") - assert not hasattr(copier, "_destination") - assert not hasattr(copier, "_conflicts") - assert len(copier._kwargs) == 0 - - assert cwp._source == C_SOURCE - assert cwp._root_path == C_ROOT_PATH - assert cwp._destination == str(tmp_path) - assert cwp._conflicts == C_CONFLICTS - assert cwp._kwargs["arbitrary"] == "value" - - with pytest.raises(ValueError): - Copier(source="test_source", conflicts="bad_value") - - def test_source(self, copier, cwp): - """test the source property""" - assert copier.source == C_SOURCE - assert cwp.source == C_SOURCE - - def test_root_path(self, copier, cwp): - """test that root path always returns a string for all copiers""" - assert cwp.root_path == C_ROOT_PATH - assert copier.root_path == "" - - def test_conflicts(self, copier, cwp): - """test to ensure conflicts property always returns a list, with contents depending on copier params""" - assert copier.conflicts == [] - assert cwp.conflicts == C_CONFLICTS - - def test_check_conflicts(self, request, copier, cwp): - """test the behavior of checking conflicts""" - - with pytest.raises(FileExistsError): - cwp.check_conflicts( - f"{request.config.rootdir}/tests/fixtures/definitions/test_a" - ) - - assert ( - cwp.check_conflicts( - f"{request.config.rootdir}/tests/fixtures/definitions/test_c" - ) - is None - ) - assert ( - copier.check_conflicts( - f"{request.config.rootdir}/tests/fixtures/definitions/test_c" - ) - is None - ) - assert ( - copier.check_conflicts( - f"{request.config.rootdir}/tests/fixtures/definitions/test_a" - ) - is None - ) - - def test_get_destination(self, tmp_path, copier): - dpath = f"{str(tmp_path)}/destination_test)" - - # test that get destination raises an error if destination is not set - with pytest.raises(ValueError): - copier.get_destination() - - # test that get destination returns proper directory, and it is not created - setattr(copier, "_destination", dpath) - assert copier.get_destination(make_dir=False) == dpath - assert not os.path.isdir(dpath) - - # test that the destination is created with this optional parameter - assert copier.get_destination(make_dir=True) == dpath - assert os.path.isdir(dpath) - - def test_get_destination_path(self, tmp_path, 
copier): - """Ensure the destination path is returned properly when destination is set""" - dpath_td = tempfile.TemporaryDirectory() - dpath = dpath_td.name - - # ensure object is in valid state for test - with pytest.raises(AttributeError): - getattr(copier, "_destination") - - assert copier.get_destination(**{"destination": dpath}) == dpath - - # ensure the directory is returned properly when make_dirs is true, and no errors - # are raised when the directory already exists - rpath = copier.get_destination(**{"destination": dpath, "make_dir": True}) - assert rpath == dpath - assert os.path.isdir(rpath) - - # remove the temporary directory - del dpath_td - - -class TestGitCopier: - """test the GitCopier copier""" - - def test_copy(self, request, tmp_path): - with mock.patch( - "tfworker.util.copier.pipe_exec", side_effect=mock_pipe_exec_clone - ) as mocked: - """test a failing condition, conflicting files, no branch so check clone called with master""" - dpath = f"{str(tmp_path)}/destination" - spath = f"{request.config.rootdir}/tests/fixtures/definitions/test_a" - c = GitCopier(source=spath, destination=dpath, conflicts=C_CONFLICTS) - with pytest.raises(FileExistsError): - c.copy() - - assert ( - mocked.call_args.args[0] - == f"git clone {spath} --branch master --single-branch ./" - ) - - """ test a succeeding condition, extra options passed """ - spath = f"{request.config.rootdir}/tests/fixtures/definitions" - c = GitCopier(source=spath, destination=dpath, conflicts=[]) - c.copy( - branch="foo", - sub_path="test_a", - git_cmd="git", - git_args="", - reset_repo=True, - ) - assert ( - mocked.call_args.args[0] - == f"git clone {spath} --branch foo --single-branch ./" - ) - assert os.path.isfile(f"{dpath}/test.tf") - - def test_type_match(self): - """tests to ensure the various git cases return properly""" - with mock.patch( - "tfworker.util.copier.pipe_exec", side_effect=mock_pipe_exec_type_match - ) as mocked: - result = GitCopier.type_match("permissionerror") - assert result is False - mocked.assert_called_with("git ls-remote permissionerror") - - result = GitCopier.type_match("filenotfounderror") - assert result is False - mocked.assert_called_with("git ls-remote filenotfounderror") - - result = GitCopier.type_match( - "string_inspect", git_cmd="/opt/bin/git", git_args="--bar" - ) - assert result is True - mocked.assert_called_with("/opt/bin/git --bar ls-remote string_inspect") - - def test_make_and_clean_temp(self): - """tests making the temporary directory for git clones""" - c = GitCopier("test_source") - - # ensure that the temp directory is created and attributes are set - c.make_temp() - assert hasattr(c, "_temp_dir") - temp_dir = c._temp_dir - assert os.path.isdir(temp_dir) - assert hasattr(c, "_temp_dir") - - # ensure that the function is idempotent - c.make_temp() - # ensure that the temp directory is the same - assert temp_dir == c._temp_dir - assert os.path.isdir(c._temp_dir) - assert hasattr(c, "_temp_dir") - - # ensure that the temp directory is removed - c.clean_temp() - assert not os.path.isdir(temp_dir) - assert not hasattr(c, "_temp_dir") - - -class TestFileSystemCopier: - """Test the FileSystem copier""" - - def test_copy(self, request, tmp_path): - """tests the file system copy method""" - assert not os.path.isfile(f"{str(tmp_path)}/test.tf") - c = FileSystemCopier( - source="/tests/fixtures/definitions/test_a", - root_path=f"{request.config.rootdir}", - destination=f"{str(tmp_path)}", - ) - c.copy() - assert os.path.isfile(f"{str(tmp_path)}/test.tf") - - c = 
FileSystemCopier( - source="/tests/fixtures/definitions/test_a", - root_path=f"{request.config.rootdir}", - destination=f"{str(tmp_path)}", - ) - - with pytest.raises(FileNotFoundError): - c.copy(sub_path="invalid_path") - - def test_local_path(self): - """tests the local path property""" - - # This is a relative path based on where the worker ran from - source = "tests/fixtures/definitions/test_a" - c = FileSystemCopier(source=source, root_path=os.getcwd()) - assert c.local_path == f"{os.getcwd()}/{source}" - - # This tests resolution of an absolute path - tmpdir = tempfile.TemporaryDirectory() - c = FileSystemCopier(source=tmpdir.name) - assert c.local_path == tmpdir.name - del tmpdir - - # Ensure file not found error is raised on invalid relative path - with pytest.raises(FileNotFoundError): - FileSystemCopier( - source="some/invalid/path", root_path=os.getcwd() - ).local_path - - # Ensure file not found error is raised on invalid absolute path - with pytest.raises(FileNotFoundError): - FileSystemCopier(source="/some/invalid/path").local_path - - def test_type_match(self, request): - source = FileSystemCopier.make_local_path( - source="/tests/fixtures/definitions/test_a", - root_path=f"{request.config.rootdir}", - ) - - # this should return true because the source is a valid directory - assert FileSystemCopier.type_match(source) is True - # this should return false because the full path to source does not exist inside of root_path - assert FileSystemCopier.type_match("/some/invalid/path") is False - # this should return true because the full path to source exists inside of root_path - assert ( - FileSystemCopier.type_match( - "/tests/fixtures/definitions/test_a", - **{"root_path": f"{request.config.rootdir}"}, - ) - is True - ) - # this should return false because the source is not a valid directory - assert FileSystemCopier.type_match("/some/invalid/path") is False - - @pytest.mark.parametrize( - "source, root_path, expected", - [ - ("bar", "/tmp/foo", "/tmp/foo/bar"), - ("/tmp", "", "/tmp"), - ("/bar//", "/tmp/", "/tmp/bar/"), - ("//tmp//", "", "/tmp/"), - ], - ) - def test_make_local_path(self, source, root_path, expected): - assert ( - FileSystemCopier.make_local_path(source=source, root_path=root_path) - == expected - ) diff --git a/tests/util/test_util_cli.py b/tests/util/test_util_cli.py index 3979411..5d88c19 100644 --- a/tests/util/test_util_cli.py +++ b/tests/util/test_util_cli.py @@ -1,10 +1,11 @@ from typing import List, Optional +from unittest.mock import patch import click import pytest -from pydantic import BaseModel, Field +from pydantic import BaseModel -from tfworker.util.cli import pydantic_to_click +from tfworker.util.cli import pydantic_to_click, validate_host class ATestModel(BaseModel): @@ -16,7 +17,7 @@ class ATestModel(BaseModel): optional_float_field: Optional[float] = None bool_field: bool optional_bool_field: Optional[bool] = None - list_str_field: List[str] = Field(required=True) + list_str_field: List[str] optional_list_str_field: Optional[List[str]] = [] @@ -77,3 +78,36 @@ def test_unsupported_type(): @pydantic_to_click(UnsupportedModel) def a_command(): pass + + +def test_validate_host(): + """only linux and darwin are supported, and require 64 bit platforms""" + with patch("tfworker.util.system.get_platform", return_value=("linux", "amd64")): + assert validate_host() is None + with patch("tfworker.util.system.get_platform", return_value=("darwin", "amd64")): + assert validate_host() is None + with patch("tfworker.util.system.get_platform", 
return_value=("darwin", "arm64")): + assert validate_host() is None + + +def test_validate_host_invalid_machine(): + """ensure invalid machine types fail""" + with patch("tfworker.util.cli.get_platform", return_value=("darwin", "i386")): + with pytest.raises(NotImplementedError, match="running on i386"): + validate_host() + + +def test_validate_host_invalid_os(): + """ensure invalid os types fail""" + with patch("tfworker.util.cli.get_platform", return_value=("windows", "amd64")): + with pytest.raises(NotImplementedError, match="running on windows"): + validate_host() + + +def test_vlidate_host_invalid_os_machine(): + """ensure invalid os and machine types fail""" + with patch("tfworker.util.cli.get_platform", return_value=("windows", "i386")): + with pytest.raises(NotImplementedError) as e: + validate_host() + assert "running on windows" in str(e.value) + assert "running on i386" in str(e.value) diff --git a/tests/util/test_hooks.py b/tests/util/test_util_hooks.py similarity index 96% rename from tests/util/test_hooks.py rename to tests/util/test_util_hooks.py index 48e8691..8e48fbc 100644 --- a/tests/util/test_hooks.py +++ b/tests/util/test_util_hooks.py @@ -4,12 +4,13 @@ import tfworker.util.hooks as hooks from tfworker.exceptions import HookError -from tfworker.types import TerraformAction, TerraformStage +from tfworker.types.terraform import TerraformAction, TerraformStage # Fixture for a mock Terraform state file @pytest.fixture def mock_terraform_state(): + """A mock Terraform state file with a single remote state resource""" return """ { "version": 4, @@ -45,6 +46,7 @@ def mock_terraform_state(): @pytest.fixture def mock_terraform_locals(): + """A mock Terraform locals file with two variables""" return """locals { local_key = data.terraform_remote_state.example.outputs.key local_another_key = data.terraform_remote_state.example.outputs.another_key @@ -99,7 +101,7 @@ def test_get_state_item_from_output_file_not_found(self, mock_pipe_exec): @mock.patch("tfworker.util.hooks.pipe_exec") def test_get_state_item_from_output_error(self, mock_pipe_exec): - mock_pipe_exec.return_value = (1, "", "error") + mock_pipe_exec.return_value = (1, "", "error".encode()) with pytest.raises(HookError): hooks._get_state_item_from_output( "working_dir", {}, "terraform_bin", "state", "item" @@ -237,6 +239,10 @@ def test_set_hook_env_var(self): @mock.patch("tfworker.util.hooks.pipe_exec") def test_execute_hook_script(self, mock_pipe_exec, capsys): + import tfworker.util.log as log + + old_log_level = log.log_level + log.log_level = log.LogLevel.DEBUG mock_pipe_exec.return_value = (0, b"stdout", b"stderr") hooks._execute_hook_script( "hook_script", @@ -247,10 +253,11 @@ def test_execute_hook_script(self, mock_pipe_exec, capsys): True, ) mock_pipe_exec.assert_called_once_with( - "hook_script pre plan", cwd="working_dir/hooks", env={} + "hook_script pre plan", cwd="working_dir/hooks", env={}, stream_output=False ) captured = capsys.readouterr() captured_lines = captured.out.splitlines() + log.log_level = old_log_level assert len(captured_lines) == 4 assert "Results from hook script: hook_script" in captured_lines assert "exit code: 0" in captured_lines diff --git a/tests/util/test_util_log.py b/tests/util/test_util_log.py new file mode 100644 index 0000000..1b33f75 --- /dev/null +++ b/tests/util/test_util_log.py @@ -0,0 +1,366 @@ +from unittest.mock import patch + +import pytest + +import tfworker.util.log as log + +REDACTED_ITEMS = ["aws_secret_access_key", "aws_session_token", "aws_profile"] + + 
+@pytest.fixture(autouse=True) +def reset_log_level(): + """Reset log level to ERROR after each test""" + log.log_level = log.LogLevel.ERROR + yield + log.log_level = log.LogLevel.ERROR + + +def test_redact_items_re_string(): + sensitive_string = """aws_secret_access_key="my_secret_key" aws_session_token 'my_session_token' aws_profile: 'default' aws_profile=admin_profile""" + expected_result = """aws_secret_access_key="REDACTED" aws_session_token 'REDACTED' aws_profile: 'REDACTED' aws_profile=REDACTED""" + assert ( + log.redact_items_re(sensitive_string, redact=REDACTED_ITEMS) == expected_result + ) + + +def test_redact_items_re_dict(): + sensitive_dict = { + "aws_secret_access_key": "my_secret_key", + "aws_session_token": "my_session_token", + "note": "aws_profile=admin_profile", + } + expected_result = { + "aws_secret_access_key": "REDACTED", + "aws_session_token": "REDACTED", + "note": "aws_profile=REDACTED", + } + assert log.redact_items_re(sensitive_dict, redact=REDACTED_ITEMS) == expected_result + + +def test_redact_items_re_invalid_type(): + with pytest.raises(ValueError, match="Items must be a dictionary or a string"): + log.redact_items_re(12345) + + +def test_redact_items_re_nested_dict(): + sensitive_dict = { + "level1": { + "aws_secret_access_key": "my_secret_key", + "nested": { + "aws_session_token": "my_session_token", + "note": "aws_profile=admin_profile", + }, + } + } + expected_result = { + "level1": { + "aws_secret_access_key": "REDACTED", + "nested": {"aws_session_token": "REDACTED", "note": "aws_profile=REDACTED"}, + } + } + assert log.redact_items_re(sensitive_dict, redact=REDACTED_ITEMS) == expected_result + + +def test_redact_items_re_for_overredaction(): + sensitive_string = """ +terraform: + worker_options: + aws_region: us-east-1 + aws_profile: default + backend: s3 + backend_bucket: test-terraform + backend_region: us-west-2 + provider-cache: ./.cache + + + definitions: + test: + path: ./dns + + argo_test: + path: ./argo_test + + providers: + aws: + requirements: + version: 5.54.1 + config_blocks: + default_tags: + tags: + terraform: "true" + deployment: foo + 'null': + requirements: + version: 3.2.2 +""" + expected_result = """ +terraform: + worker_options: + aws_region: us-east-1 + aws_profile: REDACTED + backend: s3 + backend_bucket: test-terraform + backend_region: us-west-2 + provider-cache: ./.cache + + + definitions: + test: + path: ./dns + + argo_test: + path: ./argo_test + + providers: + aws: + requirements: + version: 5.54.1 + config_blocks: + default_tags: + tags: + terraform: "true" + deployment: foo + 'null': + requirements: + version: 3.2.2 +""" + assert ( + log.redact_items_re(sensitive_string, redact=REDACTED_ITEMS) == expected_result + ) + + +def test_redact_items_token_string(): + sensitive_string = """aws_secret_access_key="my_secret_key" aws_session_token 'my_session_token' aws_profile: 'default' aws_profile=admin_profile""" + expected_result = """aws_secret_access_key="REDACTED" aws_session_token 'REDACTED' aws_profile: 'REDACTED' aws_profile=REDACTED""" + assert ( + log.redact_items_token(sensitive_string, redact=REDACTED_ITEMS) + == expected_result + ) + + +def test_redact_items_token_dict(): + sensitive_dict = { + "aws_secret_access_key": "my_secret_key", + "aws_session_token": "my_session_token", + "note": "aws_profile=admin_profile", + } + expected_result = { + "aws_secret_access_key": "REDACTED", + "aws_session_token": "REDACTED", + "note": "aws_profile=REDACTED", + } + assert ( + log.redact_items_token(sensitive_dict, 
redact=REDACTED_ITEMS) == expected_result + ) + + +def test_redact_items_token_invalid_type(): + with pytest.raises(ValueError, match="Items must be a dictionary or a string"): + log.redact_items_token(12345) + + +def test_redact_items_token_nested_dict(): + sensitive_dict = { + "level1": { + "aws_secret_access_key": "my_secret_key", + "nested": { + "aws_session_token": "my_session_token", + "note": "aws_profile=admin_profile", + }, + } + } + expected_result = { + "level1": { + "aws_secret_access_key": "REDACTED", + "nested": {"aws_session_token": "REDACTED", "note": "aws_profile=REDACTED"}, + } + } + assert ( + log.redact_items_token(sensitive_dict, redact=REDACTED_ITEMS) == expected_result + ) + + +def test_redact_items_token_for_overredaction(): + sensitive_string = """ +terraform: + worker_options: + aws_region: us-east-1 + aws_profile: default + backend: s3 + backend_bucket: test-terraform + backend_region: us-west-2 + provider-cache: ./.cache + + + definitions: + test: + path: ./dns + + argo_test: + path: ./argo_test + + providers: + aws: + requirements: + version: 5.54.1 + config_blocks: + default_tags: + tags: + terraform: "true" + deployment: foo + 'null': + requirements: + version: 3.2.2 +""" + expected_result = """ +terraform: + worker_options: + aws_region: us-east-1 + aws_profile: REDACTED + backend: s3 + backend_bucket: test-terraform + backend_region: us-west-2 + provider-cache: ./.cache + + + definitions: + test: + path: ./dns + + argo_test: + path: ./argo_test + + providers: + aws: + requirements: + version: 5.54.1 + config_blocks: + default_tags: + tags: + terraform: "true" + deployment: foo + 'null': + requirements: + version: 3.2.2 +""" + assert ( + log.redact_items_token(sensitive_string, redact=REDACTED_ITEMS) + == expected_result + ) + + +@patch("tfworker.util.log.secho") +def test_log_no_redaction(mock_secho): + log.log_level = log.LogLevel.INFO + log.log("This is a test message.", log.LogLevel.INFO) + mock_secho.assert_called_once_with("This is a test message.", fg="green") + + +@patch("tfworker.util.log.secho") +def test_log_with_redaction(mock_secho): + log.log_level = log.LogLevel.INFO + sensitive_string = """aws_secret_access_key="my_secret_key" aws_session_token 'my_session_token' aws_session_token:my_session_token""" + expected_result = """aws_secret_access_key="REDACTED" aws_session_token 'REDACTED' aws_session_token:REDACTED""" + log.log(sensitive_string, log.LogLevel.INFO, redact=True) + mock_secho.assert_called_once_with(expected_result, fg="green") + + +@patch("tfworker.util.log.secho") +def test_partial_safe_info(mock_secho): + log.log_level = log.LogLevel.INFO + sensitive_string = ( + """aws_secret_access_key="my_secret_key" aws_session_token my_session_token""" + ) + expected_result = """aws_secret_access_key="REDACTED" aws_session_token REDACTED""" + log.safe_info(sensitive_string) + mock_secho.assert_called_once_with(expected_result, fg="green") + + +@patch("tfworker.util.log.secho") +def test_partial_info_no_redaction(mock_secho): + log.log_level = log.LogLevel.INFO + message = "This is an info message." + log.info(message) + mock_secho.assert_called_once_with(message, fg="green") + + +@patch("tfworker.util.log.secho") +def test_log_levels(mock_secho): + log.log_level = log.LogLevel.DEBUG + + trace_message = "This is a trace message." + debug_message = "This is a debug message." + info_message = "This is an info message." + warn_message = "This is a warn message." + error_message = "This is an error message." 
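+ # With the threshold at DEBUG, trace() should be suppressed while debug/info/warn/error emit in blue/green/yellow/red.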
+ + log.trace(trace_message) + assert not mock_secho.called # TRACE should not appear since log_level is DEBUG + + log.debug(debug_message) + mock_secho.assert_called_with(debug_message, fg="blue") + + log.info(info_message) + mock_secho.assert_called_with(info_message, fg="green") + + log.warn(warn_message) + mock_secho.assert_called_with(warn_message, fg="yellow") + + log.error(error_message) + mock_secho.assert_called_with(error_message, fg="red") + + log.log_level = log.LogLevel.TRACE + + log.trace(trace_message) + mock_secho.assert_called_with( + trace_message, fg="cyan" + ) # TRACE should appear since log_level is TRACE + + +@patch("tfworker.util.log.secho") +def test_log_with_redaction_and_error_level(mock_secho): + log.log_level = log.LogLevel.INFO + sensitive_string = "Error: aws_secret_access_key=my_secret_key" + expected_result = "Error: aws_secret_access_key=REDACTED" + log.log(sensitive_string, log.LogLevel.ERROR, redact=True) + mock_secho.assert_called_once_with(expected_result, fg="red") + + +@patch("tfworker.util.log.secho") +def test_log_trace_level(mock_secho): + log.log_level = log.LogLevel.TRACE + log.log("This is a trace message.", log.LogLevel.TRACE) + mock_secho.assert_called_once_with("This is a trace message.", fg="cyan") + + +# performance testing the two different redact methods +@pytest.mark.performance +def test_redact_items_regex_performance(): + import timeit + + iterations = 200000 + sensitive_string = """aws_secret_access_key="my_secret_key" aws_session_token 'my_session_token' aws_profile: 'default' aws_profile=admin_profile""" + elapsed_time = timeit.timeit( + lambda: log.redact_items_re(sensitive_string), number=iterations + ) + print( + f"Regex implementation took {elapsed_time:.4f} seconds for {iterations} iterations" + ) + + +@pytest.mark.performance +def test_redact_items_tokenize_performance(): + import timeit + + iterations = 200000 + sensitive_string = """aws_secret_access_key="my_secret_key" aws_session_token 'my_session_token' aws_profile: 'default' aws_profile=admin_profile""" + elapsed_time = timeit.timeit( + lambda: log.redact_items_token(sensitive_string), number=iterations + ) + print( + f"Tokenize implementation took {elapsed_time:.4f} seconds for {iterations} iterations" + ) + + +if __name__ == "__main__": + pytest.main() diff --git a/tests/util/test_system.py b/tests/util/test_util_system.py similarity index 65% rename from tests/util/test_system.py rename to tests/util/test_util_system.py index ee76445..faae8d9 100644 --- a/tests/util/test_system.py +++ b/tests/util/test_util_system.py @@ -1,21 +1,8 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
from unittest import mock import pytest -from tfworker.util.system import get_platform, get_version, pipe_exec, strip_ansi, which +from tfworker.util.system import get_platform, pipe_exec, strip_ansi def mock_pipe_exec(args, stdin=None, cwd=None, env=None): @@ -26,12 +13,6 @@ def mock_tf_version(args: str): return (0, args.encode(), "".encode()) -def mock_distribution(*args, **kwargs): - Class = mock.MagicMock() - Class.version = "1.2.3" - return Class - - class TestUtilSystem: @pytest.mark.parametrize( "commands, exit_code, cwd, stdin, stdout, stderr, stream_output", @@ -100,46 +81,6 @@ def test_pipe_exec( assert stdout.encode() in return_stdout.rstrip() assert return_stderr.rstrip() in stderr.encode() - def test_which(self): - with mock.patch( - "os.path.isfile", - side_effect=lambda x: True, - ): - with mock.patch("os.access", side_effect=lambda x, y: True): - assert which("terraform") is not None - - def test_which_full_path(self): - with mock.patch( - "os.path.isfile", - side_effect=lambda x: True, - ): - with mock.patch("os.access", side_effect=lambda x, y: True): - assert which("/full/path/to/file") is not None - - def test_which_not_found(self): - with mock.patch( - "os.path.isfile", - side_effect=lambda x: False, - ): - with mock.patch("os.access", side_effect=lambda x, y: False): - assert which("terraform") is None - - def test_get_version(self): - with mock.patch( - "tfworker.util.system.importlib.metadata.distribution", - side_effect=mock_distribution, - ): - assert get_version() == "1.2.3" - - def test_get_version_unknown(self): - from importlib.metadata import PackageNotFoundError - - with mock.patch( - "tfworker.util.system.importlib.metadata.distribution", - side_effect=PackageNotFoundError, - ): - assert get_version() == "unknown" - def test_strip_ansi(self): assert strip_ansi("\x1B[31mHello\x1B[0m") == "Hello" assert strip_ansi("\x1B[32mWorld\x1B[0m") == "World" diff --git a/tests/util/test_util_terraform.py b/tests/util/test_util_terraform.py deleted file mode 100644 index be627e3..0000000 --- a/tests/util/test_util_terraform.py +++ /dev/null @@ -1,433 +0,0 @@ -import shutil -from contextlib import contextmanager -from unittest.mock import MagicMock, call, patch - -import pytest - -from tfworker.constants import ( - DEFAULT_REPOSITORY_PATH, - TF_PROVIDER_DEFAULT_HOSTNAME, - TF_PROVIDER_DEFAULT_NAMESPACE, -) -from tfworker.providers.providers_collection import ProvidersCollection -from tfworker.types import ProviderGID -from tfworker.util.terraform import ( - find_required_providers, - generate_terraform_lockfile, - get_provider_gid_from_source, - get_terraform_version, - mirror_providers, - prep_modules, -) - - -@contextmanager -def does_not_raise(): - yield - - -@pytest.fixture -def providers_collection(): - providers_odict = { - "provider1": { - "requirements": {"source": "hashicorp/provider1", "version": "1.0.0"} - }, - "provider2": { - "requirements": {"source": "hashicorp/provider2", "version": "2.0.0"} - }, - } - return ProvidersCollection( - providers_odict=providers_odict, - authenticators=MagicMock(), - ) - - -@pytest.fixture -def empty_providers_collection(): - return ProvidersCollection( - providers_odict={}, - authenticators=MagicMock(), - ) - - -def test_prep_modules(tmp_path): - test_file_content = "test" - - module_path = tmp_path / "terraform-modules" - module_path.mkdir() - - target_path = tmp_path / "target" - target_path.mkdir() - - # Create a test module directory with a file - test_module_dir = module_path / "test_module_dir" - 
test_module_dir.mkdir() - test_module_file = test_module_dir / "test_module_file.tf" - with open(test_module_file, "w") as f: - f.write(test_file_content) - test_module_ignored_file = test_module_dir / "test_module_ignored_file.txt" - test_module_ignored_file.touch() - test_module_default_ignored_file = test_module_dir / "terraform.tfstate" - test_module_default_ignored_file.touch() - - prep_modules(str(module_path), str(target_path)) - - final_target_path = target_path / "terraform-modules" / "test_module_dir" - - # check the target path exists - assert final_target_path.exists() - - # check the file is copied to the target directory - assert (final_target_path / "test_module_file.tf").exists() - - # check the file content is the same - with open(final_target_path / "test_module_file.tf") as f: - assert f.read() == test_file_content - - # check that the ignored file is not copied to the target directory - assert not (final_target_path / "terraform.tfstate").exists() - - # remove the contents of the target directory - shutil.rmtree(target_path) - assert not target_path.exists() - - # Use a custom ignore pattern - prep_modules(str(module_path), str(target_path), ignore_patterns=["*.txt"]) - - # ensure the default ignored file is copied - assert (final_target_path / "terraform.tfstate").exists() - - # ensure the custom ignored file is not copied - assert not (final_target_path / "test_module_ignored_file.txt").exists() - - -def test_prep_modules_not_found(tmp_path): - module_path = tmp_path / "terraform-modules" - target_path = tmp_path / "target" - - prep_modules(str(module_path), str(target_path)) - - # check the target path does not exist - assert not target_path.exists() - - -def test_prep_modules_required(tmp_path): - module_path = tmp_path / "terraform-modules" - target_path = tmp_path / "target" - - with pytest.raises(SystemExit): - prep_modules(str(module_path), str(target_path), required=True) - - # check the target path does not exist - assert not target_path.exists() - - -def test_prep_modules_default_path(): - class MockPath: - def __init__(self, exists_return_value): - self.exists_return_value = exists_return_value - - def exists(self): - return self.exists_return_value - - with patch( - "pathlib.Path", return_value=MockPath(exists_return_value=False) - ) as MockPath: - result = prep_modules("", "test_target") - assert result is None - assert MockPath.call_count == 2 - MockPath.assert_has_calls( - [ - call(f"{DEFAULT_REPOSITORY_PATH}/terraform-modules"), - call("test_target/terraform-modules"), - ], - any_order=True, - ) - - -@pytest.mark.parametrize( - "stdout, stderr, return_code, major, minor, expected_exception", - [ - ("Terraform v0.12.29", "", 0, 0, 12, does_not_raise()), - ("Terraform v1.3.5", "", 0, 1, 3, does_not_raise()), - ("TF 14", "", 0, "", "", pytest.raises(SystemExit)), - ("", "error", 1, "", "", pytest.raises(SystemExit)), - ], -) -def test_get_tf_version( - stdout: str, - stderr: str, - return_code: int, - major: int, - minor: int, - expected_exception: callable, -): - with patch( - "tfworker.util.terraform.pipe_exec", - side_effect=[(return_code, stdout.encode(), stderr.encode())], - ) as mocked: - with expected_exception: - (actual_major, actual_minor) = get_terraform_version(stdout) - assert actual_major == major - assert actual_minor == minor - mocked.assert_called_once() - - -@pytest.fixture -def mock_mirror_setup(): - mock_mirror_settings = { - "providers": MagicMock(), - "terraform_bin": "/path/to/terraform", - "working_dir": "/working/dir", - 
"cache_dir": "/cache/dir", - "temp_dir": "/temp/dir", - } - with patch("tfworker.util.terraform.pipe_exec") as mock_pipe_exec, patch( - "tfworker.util.terraform.tfhelpers._write_mirror_configuration" - ) as mock_write_mirror_configuration, patch( - "tfworker.util.terraform.tfhelpers._validate_cache_dir" - ) as mock_validate_cache_dir, patch( - "tfworker.util.terraform.click.secho" - ) as mock_secho: - - yield mock_secho, mock_validate_cache_dir, mock_write_mirror_configuration, mock_pipe_exec, mock_mirror_settings - - -def test_mirror_providers(mock_mirror_setup): - ( - mock_secho, - mock_validate_cache_dir, - mock_write_mirror_configuration, - mock_pipe_exec, - mock_mirror_settings, - ) = mock_mirror_setup - mock_write_mirror_configuration.return_value.__enter__.return_value = ( - mock_mirror_settings["temp_dir"] - ) - mock_pipe_exec.return_value = (0, b"stdout", b"stderr") - - result = mirror_providers( - providers=mock_mirror_settings["providers"], - terraform_bin=mock_mirror_settings["terraform_bin"], - working_dir=mock_mirror_settings["working_dir"], - cache_dir=mock_mirror_settings["cache_dir"], - ) - - mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"]) - mock_write_mirror_configuration.assert_called_once_with( - mock_mirror_settings["providers"], - mock_mirror_settings["working_dir"], - mock_mirror_settings["cache_dir"], - ) - mock_pipe_exec.assert_called_once_with( - f"{mock_mirror_settings['terraform_bin']} providers mirror {mock_mirror_settings['cache_dir']}", - cwd=mock_mirror_settings["temp_dir"], - stream_output=True, - ) - assert result is None - - -def test_mirror_providers_tf_error(mock_mirror_setup): - ( - mock_secho, - mock_validate_cache_dir, - mock_write_mirror_configuration, - mock_pipe_exec, - mock_mirror_settings, - ) = mock_mirror_setup - mock_write_mirror_configuration.return_value.__enter__.return_value = ( - mock_mirror_settings["temp_dir"] - ) - mock_pipe_exec.return_value = (1, b"stdout", b"stderr") - - with pytest.raises(SystemExit): - mirror_providers( - providers=mock_mirror_settings["providers"], - terraform_bin=mock_mirror_settings["terraform_bin"], - working_dir=mock_mirror_settings["working_dir"], - cache_dir=mock_mirror_settings["cache_dir"], - ) - - mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"]) - mock_write_mirror_configuration.assert_called_once_with( - mock_mirror_settings["providers"], - mock_mirror_settings["working_dir"], - mock_mirror_settings["cache_dir"], - ) - mock_pipe_exec.assert_called_once_with( - f"{mock_mirror_settings['terraform_bin']} providers mirror {mock_mirror_settings['cache_dir']}", - cwd=mock_mirror_settings["temp_dir"], - stream_output=True, - ) - - -def test_mirror_providers_all_in_cache(mock_mirror_setup): - ( - mock_secho, - mock_validate_cache_dir, - mock_write_mirror_configuration, - mock_pipe_exec, - mock_mirror_settings, - ) = mock_mirror_setup - mock_write_mirror_configuration.return_value.__enter__.side_effect = IndexError - - mirror_providers( - providers=mock_mirror_settings["providers"], - terraform_bin=mock_mirror_settings["terraform_bin"], - working_dir=mock_mirror_settings["working_dir"], - cache_dir=mock_mirror_settings["cache_dir"], - ) - - mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"]) - mock_write_mirror_configuration.assert_called_once_with( - mock_mirror_settings["providers"], - mock_mirror_settings["working_dir"], - mock_mirror_settings["cache_dir"], - ) - mock_pipe_exec.assert_not_called() - 
mock_secho.assert_called_with("All providers in cache", fg="yellow") - - -@patch("tfworker.util.terraform.click.secho") -@patch("tfworker.util.terraform.tfhelpers._get_cached_hash") -@patch("tfworker.util.terraform.tfhelpers._not_in_cache") -def test_generate_terraform_lockfile( - mock_not_in_cache, mock_get_cached_hash, mock_secho, providers_collection -): - providers = providers_collection - included_providers = ["provider1"] - cache_dir = "/cache/dir" - mock_not_in_cache.return_value = False - mock_get_cached_hash.return_value = ["hash1", "hash2"] - - expected_result = """provider "registry.terraform.io/hashicorp/provider1" { - version = "1.0.0" - constraints = "1.0.0" - hashes = [ - "hash1", - "hash2", - ] -} -""" - - result = generate_terraform_lockfile(providers, included_providers, cache_dir) - mock_not_in_cache.assert_called() - mock_get_cached_hash.assert_called() - assert result == expected_result - - -@patch("tfworker.util.terraform.click.secho") -@patch("tfworker.util.terraform.tfhelpers._get_cached_hash") -@patch("tfworker.util.terraform.tfhelpers._not_in_cache") -def test_generate_terraform_lockfile_no_includes( - mock_not_in_cache, mock_get_cached_hash, mock_secho, providers_collection -): - providers = providers_collection - included_providers = None - cache_dir = "/cache/dir" - mock_not_in_cache.return_value = False - mock_get_cached_hash.return_value = ["hash1", "hash2"] - - expected_result = """provider "registry.terraform.io/hashicorp/provider1" { - version = "1.0.0" - constraints = "1.0.0" - hashes = [ - "hash1", - "hash2", - ] -} - -provider "registry.terraform.io/hashicorp/provider2" { - version = "2.0.0" - constraints = "2.0.0" - hashes = [ - "hash1", - "hash2", - ] -} -""" - - result = generate_terraform_lockfile(providers, included_providers, cache_dir) - mock_not_in_cache.assert_called() - mock_get_cached_hash.assert_called() - assert result == expected_result - - -@patch("tfworker.util.terraform.click.secho") -@patch("tfworker.util.terraform.tfhelpers._get_cached_hash") -@patch("tfworker.util.terraform.tfhelpers._not_in_cache") -def test_generate_terraform_lockfile_not_in_cache( - mock_not_in_cache, mock_get_cached_hash, mock_secho -): - providers = MagicMock() - providers.__iter__.return_value = [MagicMock()] - included_providers = ["provider1", "provider2"] - cache_dir = "/cache/dir" - mock_not_in_cache.return_value = True - - result = generate_terraform_lockfile(providers, included_providers, cache_dir) - - mock_secho.assert_called_once_with( - f"Generating lockfile for providers: {included_providers}", fg="yellow" - ) - mock_not_in_cache.assert_called() - assert result is None - - -def test_get_provider_gid_from_source_full(): - result = get_provider_gid_from_source("example.com/namespace/provider") - assert result == ProviderGID( - hostname="example.com", namespace="namespace", type="provider" - ) - - -def test_get_provider_gid_from_source_long(): - with pytest.raises(ValueError): - get_provider_gid_from_source("example.com/namespace/provider/invalid") - - -def test_get_provider_gid_from_source_short(): - with pytest.raises(ValueError): - get_provider_gid_from_source(None) - - -def test_get_provider_from_source_provider(): - result = get_provider_gid_from_source("provider") - assert result == ProviderGID( - hostname=TF_PROVIDER_DEFAULT_HOSTNAME, - namespace=TF_PROVIDER_DEFAULT_NAMESPACE, - type="provider", - ) - - -def test_get_provider_from_source_namespace(): - result = get_provider_gid_from_source("namespace/provider") - assert result == ProviderGID( - 
hostname=TF_PROVIDER_DEFAULT_HOSTNAME, namespace="namespace", type="provider" - ) - - -@patch("tfworker.util.terraform.tfhelpers._find_required_providers") -def test_find_required_providers(mock_find_required_providers): - search_dir = "/search/dir" - mock_find_required_providers.return_value = { - "provider1": [{"version": "1.0.0", "source": "hashicorp/provider1"}] - } - - result = find_required_providers(search_dir) - - mock_find_required_providers.assert_called_once_with(search_dir) - assert result == { - "provider1": [{"version": "1.0.0", "source": "hashicorp/provider1"}] - } - - -@patch("tfworker.util.terraform.tfhelpers._find_required_providers") -def test_find_required_providers_empty(mock_find_required_providers): - search_dir = "/search/dir/empty" - mock_find_required_providers.return_value = {} - - result = find_required_providers(search_dir) - - mock_find_required_providers.assert_called_once_with(search_dir) - assert result is None diff --git a/tests/util/test_util_terraform_helpers.py b/tests/util/test_util_terraform_helpers.py index 6e2993c..0ede7c6 100644 --- a/tests/util/test_util_terraform_helpers.py +++ b/tests/util/test_util_terraform_helpers.py @@ -4,9 +4,11 @@ from unittest.mock import MagicMock import pytest +from packaging.specifiers import SpecifierSet -from tfworker.providers.providers_collection import ProvidersCollection -from tfworker.types import ProviderGID +from tfworker.exceptions import TFWorkerException +from tfworker.providers import ProviderGID +from tfworker.providers.collection import ProvidersCollection from tfworker.util.system import get_platform from tfworker.util.terraform_helpers import ( _create_mirror_configuration, @@ -15,7 +17,6 @@ _get_provider_cache_dir, _not_in_cache, _parse_required_providers, - _validate_cache_dir, _write_mirror_configuration, ) @@ -35,25 +36,29 @@ def version(): return "1.0.0" -@pytest.fixture +@pytest.fixture(scope="function") def providers_collection(): + """A ProvidersCollection object with one provider""" providers_odict = { "provider1": { "requirements": {"source": "hashicorp/provider1", "version": "1.0.0"} }, } - return ProvidersCollection( + yield ProvidersCollection( providers_odict=providers_odict, authenticators=MagicMock(), ) + ProvidersCollection.delete_instance() -@pytest.fixture +@pytest.fixture(scope="function") def empty_providers_collection(): - return ProvidersCollection( + """An empty ProvidersCollection object""" + yield ProvidersCollection( providers_odict={}, authenticators=MagicMock(), ) + ProvidersCollection.delete_instance() @pytest.fixture @@ -86,159 +91,237 @@ def create_cache_files(cache_dir, provider_gid, version): return cache_dir, version_file, provider_file -def test_not_in_cache_false(provider_gid, version, create_cache_files): - cache_dir, version_file, provider_file = create_cache_files - assert not _not_in_cache(provider_gid, version, str(cache_dir)) - - -def test_not_in_cache_true(provider_gid, version, cache_dir): - assert _not_in_cache(provider_gid, version, str(cache_dir)) - - -def test_not_in_cache_missing_version_file(provider_gid, version, create_cache_files): - cache_dir, version_file, provider_file = create_cache_files - version_file.unlink() # Remove the version file - assert _not_in_cache(provider_gid, version, str(cache_dir)) - - -def test_not_in_cache_missing_provider_file(provider_gid, version, create_cache_files): - cache_dir, version_file, provider_file = create_cache_files - provider_file.unlink() # Remove the provider file - assert _not_in_cache(provider_gid, 
version, str(cache_dir)) - - -def test_get_cached_hash(provider_gid, version, create_cache_files): - cache_dir, _, _ = create_cache_files - cached_hash = _get_cached_hash(provider_gid, version, str(cache_dir)) - assert cached_hash == "dummy_hash" - - -def test_validate_cache_dir(cache_dir): - _validate_cache_dir(str(cache_dir)) - - -def test_validate_cache_dir_nonexistent(): - with pytest.raises(SystemExit): - _validate_cache_dir("nonexistent_dir") - - -def test_validate_cache_dir_not_a_directory(tmp_path): - file_path = tmp_path / "not_a_directory" - file_path.touch() # Create a file instead of a directory - with pytest.raises(SystemExit): - _validate_cache_dir(str(file_path)) - - -def test_validate_cache_dir_not_writable(tmp_path): - cache_dir = tmp_path / "cache" - cache_dir.mkdir() - cache_dir.chmod(0o555) # Read and execute permissions only - with pytest.raises(SystemExit): - _validate_cache_dir(str(cache_dir)) - cache_dir.chmod(0o755) # Restore permissions for cleanup - - -def test_validate_cache_dir_not_readable(tmp_path): - cache_dir = tmp_path / "cache" - cache_dir.mkdir() - cache_dir.chmod(0o333) # Write and execute permissions only - with pytest.raises(SystemExit): - _validate_cache_dir(str(cache_dir)) - cache_dir.chmod(0o755) # Restore permissions for cleanup - - -def test_validate_cache_dir_not_executable(tmp_path): - cache_dir = tmp_path / "cache" - cache_dir.mkdir() - cache_dir.chmod(0o666) # Read and write permissions only - with pytest.raises(SystemExit): - _validate_cache_dir(str(cache_dir)) - cache_dir.chmod(0o755) # Restore permissions for cleanup - - -def test_get_provider_cache_dir(provider_gid, cache_dir): - provider_cache_dir = _get_provider_cache_dir(provider_gid, str(cache_dir)) - expected_dir = ( - pathlib.Path(cache_dir) - / provider_gid.hostname - / provider_gid.namespace - / provider_gid.type - ) - assert provider_cache_dir == expected_dir - - -def test_write_mirror_configuration(providers_collection, cache_dir): - with TemporaryDirectory() as working_dir: - temp_dir = _write_mirror_configuration( - providers_collection, working_dir, str(cache_dir) +class TestTerraformHelpersNotInCache: + def test_not_in_cache_false(self, provider_gid, version, create_cache_files): + cache_dir, _, _ = create_cache_files + assert not _not_in_cache(provider_gid, version, str(cache_dir)) + + def test_not_in_cache_true(self, provider_gid, version, cache_dir): + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + def test_not_in_cache_missing_version_file( + self, provider_gid, version, create_cache_files + ): + cache_dir, version_file, _ = create_cache_files + version_file.unlink() # Remove the version file + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + def test_not_in_cache_missing_provider_file( + self, provider_gid, version, create_cache_files + ): + cache_dir, _, provider_file = create_cache_files + provider_file.unlink() # Remove the provider file + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + +class TestTerraformHelpersGetCachedHash: + def test_get_cached_hash(self, provider_gid, version, create_cache_files): + cache_dir, _, _ = create_cache_files + cached_hash = _get_cached_hash(provider_gid, version, str(cache_dir)) + assert cached_hash == "dummy_hash" + + +class TestTerraformHelpersGetProviderCacheDir: + def test_get_provider_cache_dir(self, provider_gid, cache_dir): + provider_cache_dir = _get_provider_cache_dir(provider_gid, str(cache_dir)) + expected_dir = ( + pathlib.Path(cache_dir) + / provider_gid.hostname + / 
provider_gid.namespace + / provider_gid.type ) - assert temp_dir is not None - assert (pathlib.Path(temp_dir.name) / "terraform.tf").exists() + assert provider_cache_dir == expected_dir -def test_write_mirror_configuration_empty_providers( - empty_providers_collection, cache_dir -): - with TemporaryDirectory() as working_dir: - with pytest.raises(IndexError): - _write_mirror_configuration( - empty_providers_collection, working_dir, str(cache_dir) +class TestTerraformHelpersWriteMirrorConfiguration: + def test_write_mirror_configuration(self, providers_collection, cache_dir): + with TemporaryDirectory() as working_dir: + temp_dir = _write_mirror_configuration( + providers_collection, working_dir, str(cache_dir) ) + del providers_collection # this is a singleton and we need to delete it to test the empty_providers_collection + assert temp_dir is not None + assert (pathlib.Path(temp_dir.name) / "terraform.tf").exists() + + def test_write_mirror_configuration_empty_providers( + self, empty_providers_collection, cache_dir + ): + with TemporaryDirectory() as working_dir: + with pytest.raises(IndexError): + _write_mirror_configuration( + empty_providers_collection, working_dir, str(cache_dir) + ) + + +class TestTerraformHelpersCreateMirrorConfiguration: + def test_create_mirror_configuration(self, providers_collection): + includes = ["provider1", "provider2"] + tf_config = _create_mirror_configuration(providers_collection, includes) + assert "terraform {" in tf_config + + +class TestTerraformHelpersParseRequiredProviders: + def test_parse_required_providers(self): + content = { + "terraform": [ + { + "required_providers": [ + { + "provider1": { + "source": "hashicorp/provider1", + "version": "1.0.0", + } + } + ] + } + ] + } + expected_providers = { + "provider1": {"source": "hashicorp/provider1", "version": "1.0.0"} + } + assert _parse_required_providers(content) == expected_providers + def test_parse_required_providers_no_providers(self): + content = {"terraform": [{"required_providers": []}]} + assert _parse_required_providers(content) == {} -def test_create_mirror_configuration(providers_collection): - includes = ["provider1", "provider2"] - tf_config = _create_mirror_configuration(providers_collection, includes) - assert "terraform {" in tf_config - - -def test_parse_required_providers(): - content = { - "terraform": [ - { - "required_providers": [ - {"provider1": {"source": "hashicorp/provider1", "version": "1.0.0"}} - ] - } - ] - } - expected_providers = { - "provider1": {"source": "hashicorp/provider1", "version": "1.0.0"} - } - assert _parse_required_providers(content) == expected_providers - - -def test_parse_required_providers_no_providers(): - content = {"terraform": [{"required_providers": []}]} - assert _parse_required_providers(content) is None - + def test_parse_required_providers_no_terraform(self): + content = { + "required_providers": [ + {"provider1": {"source": "hashicorp/provider1", "version": "1.0.0"}} + ] + } + assert _parse_required_providers(content) == {} -def test_parse_required_providers_no_terraform(): - content = { - "required_providers": [ - {"provider1": {"source": "hashicorp/provider1", "version": "1.0.0"}} - ] - } - assert _parse_required_providers(content) is None +class TestTerraformHelpersFindRequiredProviders: -def test_find_required_providers(tmp_path): - tf_content = """ - terraform { - required_providers { - provider1 = { - source = "hashicorp/provider1" - version = "1.0.0" + def test_find_required_providers(self, tmp_path): + tf_content = """ + terraform 
{ + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = "1.0.0" + } } - } - } - """ - test_file = tmp_path / "main.tf" - with open(test_file, "w") as f: - f.write(tf_content) - - providers = _find_required_providers(str(tmp_path)) - expected_providers = { - "provider1": {"source": "hashicorp/provider1", "version": "1.0.0"} - } - assert providers == expected_providers + } + """ + test_file = tmp_path / "main.tf" + with open(test_file, "w") as f: + f.write(tf_content) + + providers = _find_required_providers(str(tmp_path)) + expected_providers = { + "provider1": { + "source": "hashicorp/provider1", + "version": SpecifierSet("==1.0.0"), + } + } + assert providers == expected_providers + + def test_find_required_providers_multiple_versions(self, tmp_path): + tf_content_a = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = ">=1.0.0" + } + } + } + """ + tf_content_b = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = "==1.5.0" + } + } + } + """ + test_file_a = tmp_path / "main.tf" + test_file_b = tmp_path / "main2.tf" + with open(test_file_a, "w") as f: + f.write(tf_content_a) + with open(test_file_b, "w") as f: + f.write(tf_content_b) + providers = _find_required_providers(str(tmp_path)) + expected_providers = { + "provider1": { + "source": "hashicorp/provider1", + "version": (SpecifierSet(">=1.0.0") & SpecifierSet("==1.5.0")), + } + } + assert providers == expected_providers + + def test_find_required_providers_invalid_version(self, tmp_path): + tf_content = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = ">=@~~1.0.0" + } + } + } + """ + test_file = tmp_path / "main.tf" + with open(test_file, "w") as f: + f.write(tf_content) + with pytest.raises(TFWorkerException): + _find_required_providers(str(tmp_path)) + + def test_find_required_providers_no_providers(self, tmp_path): + tf_content = """ + terraform { + } + """ + test_file = tmp_path / "main.tf" + with open(test_file, "w") as f: + f.write(tf_content) + + providers = _find_required_providers(str(tmp_path)) + assert providers == {} + + def test_find_required_providers_invalid_hcl(self, tmp_path): + tf_content = """ + tfworker { izgreat! } + """ + test_file = tmp_path / "main.tf" + with open(test_file, "w") as f: + f.write(tf_content) + + providers = _find_required_providers(str(tmp_path)) + assert providers == {} + + def test_find_providers_conflicting_source(self, tmp_path): + tf_content_a = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = ">=1.0.0" + } + } + } + """ + tf_content_b = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider2" + version = "==1.5.0" + } + } + } + """ + test_file_a = tmp_path / "main.tf" + test_file_b = tmp_path / "main2.tf" + with open(test_file_a, "w") as f: + f.write(tf_content_a) + with open(test_file_b, "w") as f: + f.write(tf_content_b) + with pytest.raises(TFWorkerException): + _find_required_providers(str(tmp_path)) diff --git a/tfworker/__init__.py b/tfworker/__init__.py deleted file mode 100644 index c404316..0000000 --- a/tfworker/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
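The tests above rely on `packaging.specifiers.SpecifierSet` to merge version constraints found across multiple `required_providers` blocks. A small standalone illustration of that intersection behaviour (not tfworker code, values are placeholders):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    a = SpecifierSet(">=1.0.0")
    b = SpecifierSet("==1.5.0")
    combined = a & b  # both constraints must hold

    print(Version("1.5.0") in combined)  # True
    print(Version("1.4.0") in combined)  # False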
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import typing - -# https://github.com/python/typing/issues/182 eventually this type should be placed somewhere else -JSONType = typing.Union[ - str, int, float, bool, None, typing.Dict[str, typing.Any], typing.List[typing.Any] -] diff --git a/tfworker/app_state.py b/tfworker/app_state.py new file mode 100644 index 0000000..f2917c7 --- /dev/null +++ b/tfworker/app_state.py @@ -0,0 +1,90 @@ +from pathlib import Path + +from pydantic import ConfigDict, Field + +from tfworker import cli_options +from tfworker.authenticators import AuthenticatorsCollection +from tfworker.backends import BaseBackend +from tfworker.definitions import DefinitionsCollection +from tfworker.handlers.collection import HandlersCollection +from tfworker.providers import ProvidersCollection +from tfworker.types import ConfigFile, FreezableBaseModel + + +class AppState(FreezableBaseModel): + """ + AppState defines the model for the application state. The application state is stored on the + click context as an object that can always be retrieved by any component of the program by + calling click.get_current_context().obj. + + The AppState contains all of the different models for different components of the application + as well as the options loaded from the config file, and supplied via the command line. This + allows for easy access anywhere in the application without having to overload kwargs all over + the place. + """ + + deployment: str = Field("undefined", description="The deployment name.") + model_config = ConfigDict( + { + "extra": "forbid", + "arbitrary_types_allowed": True, + } + ) + + authenticators: AuthenticatorsCollection | None = Field( + None, + description="Authenticators are what are responsible for authentication with the various backends.", + ) + backend: BaseBackend | None = Field( + None, + description="The backend is responsible for interactions with the cloud provider where the remote state is stored.", + ) + clean_options: cli_options.CLIOptionsClean | None = Field( + None, description="These are the options passed to the clean command." 
+ ) + definitions: DefinitionsCollection | None = Field( + None, + description="Definitions are the core of a deployment, they are descriptions of local or remote terraform modules to be deployed, and how to configure them.", + ) + handlers: HandlersCollection | None = Field( + None, + description="Handlers are plugins that can be executed along with terraform at various stages, they allow easily extending the application functionality.", + ) + loaded_config: ConfigFile | None = Field( + {}, + description="This represents the loaded configuration file, merged with various command line options.", + ) + providers: ProvidersCollection | None = Field( + None, + description="Providers are terraform plugins, some provides require special handling, for example when they require authentication information, almost always the generic type is used.", + ) + root_options: cli_options.CLIOptionsRoot | None = Field( + None, + description="These are the options passed to the root of the CLI, these options are focused on backend and authenticator configuration.", + ) + terraform_options: cli_options.CLIOptionsTerraform | None = Field( + None, + description="These options are passed to the terraform command, they control the terraform orchestration.", + ) + working_dir: Path | None = Field( + None, + description="The working directory is the root of where all filesystem actions are handled within the application.", + ) + + def freeze(self): + """ + Freeze the AppState and all nested models. + + This is used to prevent modification of the AppState after it has been initialized. + + the `backend` attribute is not frozen, it has no modification methods. + """ + super().freeze() + self.authenticators.freeze() if self.authenticators else None + self.clean_options.freeze() if self.clean_options else None + self.definitions.freeze() if self.definitions else None + self.handlers.freeze() if self.handlers else None + self.loaded_config.freeze() if self.loaded_config else None + self.providers.freeze() if self.providers else None + self.root_options.freeze() if self.root_options else None + self.terraform_options.freeze() if self.terraform_options else None diff --git a/tfworker/authenticators/__init__.py b/tfworker/authenticators/__init__.py index 071188f..6ec5300 100644 --- a/tfworker/authenticators/__init__.py +++ b/tfworker/authenticators/__init__.py @@ -1,46 +1,8 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
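As the `AppState` docstring notes, the object is stored on the click context, so any component can fetch it without threading it through kwargs; in practice that lookup looks roughly like this (component name is a placeholder):

    import click

    def some_component() -> None:
        app_state = click.get_current_context().obj  # the shared AppState
        deployment = app_state.deployment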
- -import collections - -from .aws import AWSAuthenticator # noqa -from .base import UnknownAuthenticator # noqa -from .google import GoogleAuthenticator, GoogleBetaAuthenticator # noqa - -ALL = [AWSAuthenticator, GoogleAuthenticator, GoogleBetaAuthenticator] - - -class AuthenticatorsCollection(collections.abc.Mapping): - def __init__(self, state_args, **kwargs): - self._authenticators = dict( - [(auth.tag, auth(state_args, **kwargs)) for auth in ALL] - ) - - def __len__(self): - return len(self._authenticators) - - def __getitem__(self, value): - if type(value) is int: - return self._authenticators[list(self._authenticators.keys())[value]] - return self._authenticators[value] - - def __iter__(self): - return iter(self._authenticators.values()) - - def get(self, value): - try: - return self[value] - except Exception: - raise UnknownAuthenticator(provider=value) - return None +from .aws import AWSAuthenticator, AWSAuthenticatorConfig # pragma: no cover # noqa +from .base import BaseAuthenticator, BaseAuthenticatorConfig # pragma: no cover # noqa +from .collection import AuthenticatorsCollection # pragma: no cover # noqa +from .google import ( # pragma: no cover # noqa + GoogleAuthenticator, + GoogleAuthenticatorConfig, + GoogleBetaAuthenticator, +) diff --git a/tfworker/authenticators/aws.py b/tfworker/authenticators/aws.py index 63a991a..3c307a9 100644 --- a/tfworker/authenticators/aws.py +++ b/tfworker/authenticators/aws.py @@ -1,163 +1,326 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import shlex +from typing import Dict import boto3 +from botocore.credentials import Credentials +from pydantic import model_validator -from tfworker import constants as const +import tfworker.util.log as log +from tfworker.exceptions import TFWorkerException -from .base import BaseAuthenticator +from .base import BaseAuthenticator, BaseAuthenticatorConfig -class MissingArgumentException(Exception): - pass +class AWSAuthenticatorConfig(BaseAuthenticatorConfig): + """ + A configuration that describes the configuration required for the AWS Authenticator. + This model is populated by values from the "tfworker.cli_options.CLIOptionsRoot" model. 
-class AWSAuthenticator(BaseAuthenticator): - tag = "aws" - - def __init__(self, state_args, **kwargs): - super(AWSAuthenticator, self).__init__(state_args, **kwargs) - self.bucket = self._resolve_arg("backend_bucket") - if not self.bucket: - raise MissingArgumentException("backend_bucket is a required argument") - - self.access_key_id = self._resolve_arg("aws_access_key_id") - self.backend_region = self._resolve_arg("backend_region") - self.prefix = self._resolve_arg("backend_prefix") - self.profile = self._resolve_arg("aws_profile") - self.region = self._resolve_arg("aws_region") - self.role_arn = self._resolve_arg("aws_role_arn") - self.secret_access_key = self._resolve_arg("aws_secret_access_key") - self.session_token = self._resolve_arg("aws_session_token") - self.external_id = self._resolve_arg("aws_external_id") - - self.deployment = kwargs.get("deployment") - - self._account_id = None - self._backend_session = None - self._session = None - - # If the default value is used, render the deployment name into it - if self.prefix == const.DEFAULT_BACKEND_PREFIX: - self.prefix = const.DEFAULT_BACKEND_PREFIX.format( - deployment=self.deployment - ) + Attributes: + aws_region (str): the AWS region to use. This is required. + aws_access_key_id (str): an aws access key id. Either this or a profile is required. + aws_external_id (str): a unique ID that can be used for cross account assumptions. Defaults to None. + aws_profile (str): an aws profile. Either this or an access key id is required. + aws_role_arn (str): if provided, the role to assume using other creds. Defaults to None. + aws_secret_access_key (str): an aws secret access key. Either this or a profile is required. + aws_session_token (str): an aws session token. Defaults to None. + backend_region (str): the AWS region for the TF backend (s3, dynamodb). Defaults to `aws_region`. + backend_role_arn (str): the role to assume for the backend. Defaults to None. + duration (int): the duration of an assumed role session. Defaults to 3600. + session_name (str): the name of the assumed role session. Defaults to "tfworker". 
+ """ + + aws_region: str + aws_access_key_id: str | None = None + aws_external_id: str | None = None + aws_profile: str | None = None + aws_role_arn: str | None = None + aws_secret_access_key: str | None = None + aws_session_token: str | None = None + backend_region: str | None = None + backend_role_arn: str | None = None + duration: int | None = 3600 + mfa_serial: str | None = None + session_name: str | None = "tfworker" + + @model_validator(mode="before") + @classmethod + def set_backend_region(cls, values: Dict[str, str]) -> Dict[str, str]: + """ + Sets the backend region to the same as the AWS region if not provided + + Args: + values (dict): the values passed to the model + + Returns: + dict: the modified values + """ + if values.get("aws_region") and not values.get("backend_region"): + values["backend_region"] = values["aws_region"] + return values + + @model_validator(mode="before") + @classmethod + def check_valid_aws_auth(cls, values: Dict[str, str]) -> Dict[str, str]: + """ + Validates that an acceptable configuration for AWS authentication is provided - aws_is_active = (self.access_key_id and self.secret_access_key) or self.profile - if aws_is_active: - # Initialize the session objects - self._session = boto3.Session( - region_name=self.region, **self._session_state_args + Args: + values (dict): the values passed to the model + + Returns: + dict: the unmodified values + + Raises: + ValueError: if the configuration is not valid + """ + + if not ( + values.get("aws_access_key_id") and values.get("aws_secret_access_key") + ) and not values.get("aws_profile"): + raise ValueError( + "Either aws_access_key_id and aws_secret_access_key or profile must be provided" ) + return values - if not self.role_arn: - # if a role was not provided, need to ensure credentials are set - # in the config, these will come from the session - creds = self._session.get_credentials() - self.access_key_id = creds.access_key - self.secret_access_key = creds.secret_key - self.session_token = creds.token - - if self.backend_region == self.region: - self._backend_session = self._session - else: - self._backend_session = boto3.Session( - region_name=self.backend_region, **self._session_state_args - ) - else: - (self.__session, creds) = AWSAuthenticator.get_assumed_role_session( - self._session, self.role_arn, external_id=self.external_id - ) - self.access_key_id = creds["AccessKeyId"] - self.secret_access_key = creds["SecretAccessKey"] - self.session_token = creds["SessionToken"] - - if self.backend_region == self.region: - self._backend_session = self._session - else: - # Explicitly do NOT pass the profile here since the assumed role - # has no local profile - self._backend_session = boto3.Session( - region_name=self.backend_region, - aws_access_key_id=self.access_key_id, - aws_secret_access_key=self.secret_access_key, - aws_session_token=self.session_token, - ) - @property - def _session_state_args(self): - state_args = dict() +class AWSAuthenticator(BaseAuthenticator): + """ + The AWS authenticator is used to authenticate to AWS and generate environment variables + + Attributes: + tag (str): the tag for the authenticator, used by other methods to lookup the authenticator + config_model (AWSAuthenticatorConfig): the configuration model for the authenticator + session (boto3.session): the primary session + backend_session (boto3.session): the backend session + session_credentials (Dict[str, str]): the credentials for the primary session + backend_session_credentials (Dict[str, str]): the credentials for the 
backend session + """ - if self.profile: - state_args["profile_name"] = self.profile + tag: str = "aws" + config_model: BaseAuthenticatorConfig = AWSAuthenticatorConfig - if self.access_key_id: - state_args["aws_access_key_id"] = self.access_key_id + def __init__(self, auth_config: AWSAuthenticatorConfig) -> None: + """ + Initialize the AWS authenticator - if self.secret_access_key: - state_args["aws_secret_access_key"] = self.secret_access_key + Args: + auth_config (AWSAuthenticatorConfig): the configuration for the authenticator - if self.session_token is not None: - state_args["aws_session_token"] = self.session_token + Raises: + TFWorkerException: if there is an error authenticating to AWS + """ + self._backend_session: boto3.session = None + self._session: boto3.session = None + + log.debug(f"authenticating to AWS, in region {auth_config.aws_region}") + # The initial session is used to create any other sessions, or use directly if no role is assumed + try: + log.trace("authenticating to AWS for initial session") + init_session: boto3.session = boto3.Session( + region_name=auth_config.aws_region, + **_get_init_session_args(auth_config), + ) + except Exception as e: + raise TFWorkerException(f"error authenticating to AWS: {e}") from e - return state_args + # handle role assumption if necessary + if not auth_config.aws_role_arn: + log.trace("no role to assume, using initial session") + self._session = init_session + else: + log.info(f"assuming role: {auth_config.aws_role_arn}") + self._session = _assume_role_session(init_session, auth_config) + + # handle backend session if necessary + if not _need_backend_session(auth_config): + log.trace("backend session and regular session are the same") + self._backend_session = self._session + else: + log.trace( + f"gathering backend session in region {auth_config.backend_region}" + ) + self._backend_session = _get_backend_session(auth_config, init_session) + + @property + def region(self) -> str: + return self._session.region_name @property - def backend_session(self): + def backend_region(self) -> str: + return self._backend_session.region_name + + @property + def backend_session(self) -> boto3.session: return self._backend_session @property - def session(self): + def backend_session_credentials(self) -> Dict[str, str]: + return self._backend_session.get_credentials() + + @property + def session(self) -> boto3.session: return self._session - def env(self): + @property + def session_credentials(self) -> Dict[str, str]: + return self._session.get_credentials() + + def env(self, backend: bool = False) -> Dict[str, str]: + """ + env returns a dictionary of environment variables that should be set + + Args: + backend (bool): whether this is for the backend. Defaults to False. 
+ + Returns: + Dict[str, str]: the environment variables + """ result = {} - if self.access_key_id: - result["AWS_ACCESS_KEY_ID"] = shlex.quote(self.access_key_id) - if self.region: - result["AWS_DEFAULT_REGION"] = shlex.quote(self.region) - if self.secret_access_key: - result["AWS_SECRET_ACCESS_KEY"] = shlex.quote(self.secret_access_key) - if self.session_token: - result["AWS_SESSION_TOKEN"] = shlex.quote(self.session_token) + + if backend: + session_ref = self.backend_session + else: + session_ref = self.session + + creds: Credentials = session_ref.get_credentials() + result["AWS_DEFAULT_REGION"] = shlex.quote(session_ref.region_name) + result["AWS_ACCESS_KEY_ID"] = shlex.quote(creds.access_key) + result["AWS_SECRET_ACCESS_KEY"] = shlex.quote(creds.secret_key) + + if creds.token: + result["AWS_SESSION_TOKEN"] = shlex.quote(creds.token) + return result - @staticmethod - def get_assumed_role_session( - session, - role_arn, - session_name="AssumedRoleSession1", - duration=3600, - external_id="", - ): - """get_assumed_role_session returns a boto3 session updated with assumed role credentials""" - sts_client = session.client("sts") + +def _assume_role_session( + session: boto3.session, auth_config: AWSAuthenticatorConfig, backend=False +) -> boto3.session: + """ + Uses the provided session to assume a role + + Args: + session (boto3.session): the session to use for the assumption + backend (bool): whether this is for the backend. Defaults to False. + + Returns: + boto3.session: the new session + + Raises: + TFWorkerException: if there is an error assuming the role + """ + sts_client = session.client("sts") + + if backend: + assume_args = { + "RoleArn": auth_config.backend_role_arn, + "RoleSessionName": auth_config.session_name, + "DurationSeconds": auth_config.duration, + } + region = auth_config.backend_region + else: assume_args = { - "RoleArn": role_arn, - "RoleSessionName": session_name, - "DurationSeconds": duration, + "RoleArn": auth_config.aws_role_arn, + "RoleSessionName": auth_config.session_name, + "DurationSeconds": auth_config.duration, } - if external_id: - assume_args["ExternalId"] = external_id - role_creds = sts_client.assume_role(**assume_args)["Credentials"] + region = auth_config.aws_region + if auth_config.aws_external_id: + assume_args["ExternalId"] = auth_config.aws_external_id + + role_creds = sts_client.assume_role(**assume_args)["Credentials"] + try: new_session = boto3.Session( aws_access_key_id=role_creds["AccessKeyId"], aws_secret_access_key=role_creds["SecretAccessKey"], aws_session_token=role_creds["SessionToken"], + region_name=region, ) + except Exception as e: + raise TFWorkerException(f"error assuming role: {e}") from e + + return new_session + + +def _get_backend_session( + auth_config: AWSAuthenticatorConfig, init_session: boto3.session +) -> boto3.session: + """ + Gets the backend session + + Args: + auth_config (AWSAuthenticatorConfig): the configuration for the authenticator + init_session (boto3.session): the initial session + + Raises: + TFWorkerException: if there is an error getting the backend session + """ + try: + if auth_config.backend_role_arn: + log.info(f"assuming backend role: {auth_config.backend_role_arn}") + backend_session = _assume_role_session( + init_session, auth_config, backend=True + ) + else: + log.debug( + f"authenticating to AWS for backend session, in region {auth_config.backend_region}" + ) + backend_session = boto3.Session( + region_name=auth_config.backend_region, + **_get_init_session_args(auth_config), + ) + except 
Exception as e: + raise TFWorkerException( + f"error authenticating to AWS for backend session: {e}" + ) from e + + return backend_session + + +def _get_init_session_args(auth_config: AWSAuthenticatorConfig) -> Dict[str, str]: + """ + Returns a dictionary of arguments to pass to the initial boto3 session + + Args: + auth_config (AWSAuthenticatorConfig): the configuration for the authenticator + + Returns: + Dict[str, str]: the arguments to pass to the session + """ + session_args = dict() + + if auth_config.aws_profile is not None: + session_args["profile_name"] = auth_config.aws_profile + + if auth_config.aws_access_key_id is not None: + session_args["aws_access_key_id"] = auth_config.aws_access_key_id + + if auth_config.aws_secret_access_key is not None: + session_args["aws_secret_access_key"] = auth_config.aws_secret_access_key + + if auth_config.aws_session_token is not None: + session_args["aws_session_token"] = auth_config.aws_session_token + + return session_args + + +def _need_backend_session(auth_config: AWSAuthenticatorConfig) -> bool: + """ + Returns whether a backend session is needed + + Args: + auth_config (AWSAuthenticatorConfig): the configuration for the authenticator - return new_session, role_creds + Returns: + bool: whether a backend session is needed + """ + # the conditions in which a backend session is needed: + # - backend_region is different from aws_region + # - backend_role_arn is provided + return ( + auth_config.aws_region != auth_config.backend_region + or auth_config.backend_role_arn is not None + ) diff --git a/tfworker/authenticators/base.py b/tfworker/authenticators/base.py index 2253231..f81c131 100644 --- a/tfworker/authenticators/base.py +++ b/tfworker/authenticators/base.py @@ -1,33 +1,26 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class BaseAuthenticator: - tag = "base" - - def __init__(self, state_args, **kwargs): - self._args = state_args - self.clean = kwargs.get("clean") - self.create_backend_bucket = self._resolve_arg("create_backend_bucket") - - def _resolve_arg(self, name): - return getattr(self._args, name) if hasattr(self._args, name) else None - - def env(self): - return {} - - -class UnknownAuthenticator(Exception): - def __init__(self, provider): - super().__init__(f"{provider} is not a known authenticator.") +from abc import ABC, abstractmethod + +from pydantic import BaseModel + + +class BaseAuthenticatorConfig(BaseModel): + """ + Base class for all authenticator configurations. + """ + + ... + + +class BaseAuthenticator(ABC): + """ + Base class for all authenticators. + """ + + tag: str + config_model: BaseAuthenticatorConfig + + @abstractmethod + def __init__(self, auth_config: BaseAuthenticatorConfig): ... # noqa + + @abstractmethod + def env(self): ... 
# noqa diff --git a/tfworker/authenticators/collection.py b/tfworker/authenticators/collection.py new file mode 100644 index 0000000..9821012 --- /dev/null +++ b/tfworker/authenticators/collection.py @@ -0,0 +1,77 @@ +import collections +import threading +from typing import TYPE_CHECKING + +from pydantic import ValidationError + +import tfworker.util.log as log +from tfworker.exceptions import FrozenInstanceError, UnknownAuthenticator + +from .aws import AWSAuthenticator # noqa +from .base import BaseAuthenticator # noqa +from .google import GoogleAuthenticator, GoogleBetaAuthenticator # noqa + +if TYPE_CHECKING: + from tfworker.cli_options import CLIOptionsRoot # pragma: no cover # noqa + +ALL = [AWSAuthenticator, GoogleAuthenticator, GoogleBetaAuthenticator] + + +class AuthenticatorsCollection(collections.abc.Mapping): + """ + A thread safe, singleton collection of all authenticators that have an appropriate configuration + + Attributes: + _instance (AuthenticatorsCollection): The singleton instance of the collection + _lock (threading.Lock): A lock to ensure thread safety + _authenticators (dict): The collection of authenticators + """ + + _instance = None + _lock = threading.Lock() + _frozen = False + + def __new__(cls, *args, **kwargs): + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self, root_args: "CLIOptionsRoot"): + if not hasattr(self, "_initialized"): + self._authenticators = {} + for auth in ALL: + try: + config = auth.config_model(**root_args.model_dump()) + self._authenticators[auth.tag] = auth(config) + log.debug(f"authenticator {auth.tag} created") + except ValidationError: + log.trace( + f"authenticator {auth.tag} not created, configuration not supplied" + ) + self._initialized = True + + def __len__(self) -> int: + return len(self._authenticators) + + def __getitem__(self, value) -> BaseAuthenticator: + try: + if isinstance(value, int): + return self._authenticators[list(self._authenticators.keys())[value]] + return self._authenticators[value] + except KeyError: + raise UnknownAuthenticator(provider=value) + + def __iter__(self) -> iter: + return iter(self._authenticators.values()) + + def __setitem__(self, key, value): + if self._frozen: + raise FrozenInstanceError("Cannot modify a frozen instance.") + self._authenticators[key] = value + + def freeze(self): + self._frozen = True + + def get(self, value) -> BaseAuthenticator: + return self[value] diff --git a/tfworker/authenticators/google.py b/tfworker/authenticators/google.py index dc0562a..0bd6048 100644 --- a/tfworker/authenticators/google.py +++ b/tfworker/authenticators/google.py @@ -1,42 +1,41 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
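To make the validation flow above concrete, here is a small usage sketch of the `AWSAuthenticatorConfig` model; the region and profile values are placeholders, and the import relies on the re-export shown in `tfworker/authenticators/__init__.py`:

    from pydantic import ValidationError
    from tfworker.authenticators import AWSAuthenticatorConfig

    # backend_region defaults to aws_region via the set_backend_region validator
    cfg = AWSAuthenticatorConfig(aws_region="us-east-1", aws_profile="default")
    assert cfg.backend_region == "us-east-1"

    # neither static keys nor a profile: check_valid_aws_auth rejects the config
    try:
        AWSAuthenticatorConfig(aws_region="us-east-1")
    except ValidationError as exc:
        print(exc)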
- import shlex -from tfworker import constants as const +from .base import BaseAuthenticator, BaseAuthenticatorConfig -from .base import BaseAuthenticator +class GoogleAuthenticatorConfig(BaseAuthenticatorConfig): + """ + Configuration for the Google Authenticator. -class GoogleAuthenticator(BaseAuthenticator): - tag = "google" + Attributes: + gcp_creds_path (str): The path to the Google Cloud Platform credentials file. + gcp_region (str): The region to use for the Google Cloud Platform. + project (str): The Google Cloud Platform project to USE. + """ + + gcp_creds_path: str + gcp_region: str + project: str - def __init__(self, state_args, **kwargs): - super(GoogleAuthenticator, self).__init__(state_args, **kwargs) - self.bucket = self._resolve_arg("backend_bucket") - self.creds_path = self._resolve_arg("gcp_creds_path") - self.prefix = self._resolve_arg("backend_prefix") - self.project = self._resolve_arg("gcp_project") - self.region = self._resolve_arg("gcp_region") +class GoogleAuthenticator(BaseAuthenticator): + """ + Authenticator for Google Cloud Platform. Authentication is only supported using + a service account key file. + + Attributes: + creds_path (str): The path to the Google Cloud Platform credentials file. + project (str): The Google Cloud Platform project to USE. + region (str): The region to use for the Google Cloud Platform. + """ - self.deployment = kwargs.get("deployment") + tag = "google" + config_model = GoogleAuthenticatorConfig - if self.prefix == const.DEFAULT_BACKEND_PREFIX: - self.prefix = const.DEFAULT_BACKEND_PREFIX.format( - deployment=self.deployment - ) + def __init__(self, auth_config: GoogleAuthenticatorConfig): + self.creds_path = auth_config.gcp_creds_path + self.project = auth_config.project + self.region = auth_config.gcp_region def env(self): result = {} @@ -46,4 +45,8 @@ def env(self): class GoogleBetaAuthenticator(GoogleAuthenticator): + """ + The Google Beta Authenticator is the same as the Google Authenticator, but with a different tag. + """ + tag = "google-beta" diff --git a/tfworker/backends/__init__.py b/tfworker/backends/__init__.py index bcb5e4c..d155313 100644 --- a/tfworker/backends/__init__.py +++ b/tfworker/backends/__init__.py @@ -1,24 +1,4 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from .base import BackendError, Backends, BaseBackend # noqa -from .gcs import GCSBackend # noqa -from .s3 import S3Backend # noqa - - -def select_backend(backend, deployment, authenticators, definitions): - if backend == Backends.s3: - return S3Backend(authenticators, definitions, deployment=deployment) - elif backend == Backends.gcs: - return GCSBackend(authenticators, definitions, deployment=deployment) +from .backends import Backends # pragma: no cover # noqa +from .base import BaseBackend # pragma: no cover # noqa +from .gcs import GCSBackend # pragma: no cover # noqa +from .s3 import S3Backend # pragma: no cover # noqa diff --git a/tfworker/backends/backends.py b/tfworker/backends/backends.py new file mode 100644 index 0000000..0218895 --- /dev/null +++ b/tfworker/backends/backends.py @@ -0,0 +1,20 @@ +from enum import Enum +from typing import List + +from tfworker.backends.gcs import GCSBackend +from tfworker.backends.s3 import S3Backend + + +class Backends(Enum): + S3 = S3Backend + GCS = GCSBackend + + @classmethod + def names(cls) -> List[str]: + """ + List of the names of the available backends + + Returns: + List[str]: List of the names of the available backends + """ + return [i.name for i in cls] diff --git a/tfworker/backends/base.py b/tfworker/backends/base.py index 8744b52..be35581 100644 --- a/tfworker/backends/base.py +++ b/tfworker/backends/base.py @@ -1,63 +1,95 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
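The Backends enum replaces the removed select_backend() helper: the backend name selects the enum member, and the member's value is the backend class itself. A short sketch of the lookup, assuming the tfworker package (and the provider SDKs it imports) is installed; constructing a backend still requires a live AuthenticatorsCollection, so that step is left commented out:

from tfworker.backends import Backends

print(Backends.names())              # ['S3', 'GCS']
backend_cls = Backends["S3"].value   # the S3Backend class
# backend = backend_cls(authenticators, deployment="example-deployment")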
- from abc import ABCMeta, abstractmethod +from typing import TYPE_CHECKING +from tfworker.exceptions import BackendError from tfworker.types import JSONType +if TYPE_CHECKING: + from tfworker.authenticators import ( # pragma: no cover # noqa + AuthenticatorsCollection, + ) -class BackendError(Exception): - # add custom "help" parameter to the exception - def __init__(self, message, help=None): - super().__init__(message) - self._help = help - @property - def help(self): - if self._help is None: - return "No help available" - return self._help +class BaseBackend(metaclass=ABCMeta): + """ + The base backend is an abstract class that defines the interface for backends. + A backend provides the mechanisms and functions for interacting with a Terraform backend. -class BaseBackend(metaclass=ABCMeta): - plan_storage = False - tag = "base" + Attributes: + plan_storage (bool): A flag to indicate whether the backend supports plan storage + auth_tag (str): The tag of the authenticator that is required for this backend + tag (str): A unique identifier for the backend + """ + + auth_tag: str + tag: str + plan_storage: bool = False @abstractmethod - def hcl(self, name: str) -> str: - pass + def __init__( + self, authenticators: "AuthenticatorsCollection", deployment: str = None + ): + """ + The __init__ method initializes the backend + + Args: + authenticators (AuthenticatorsCollection): The collection of authenticators + deployment (str): The deployment name + + Raises: + BackendError: If there is an error during initialization + """ + pass # pragma: no cover @abstractmethod - def data_hcl(self, exclude: list) -> str: - pass + def hcl(self, deployment: str) -> str: + """ + The HCL method returns the configuration that belongs in the "terraform" configuration block + + Args: + deployment (str): The deployment name + + Returns: + str: The HCL configuration + """ + pass # pragma: no cover @abstractmethod - def clean(self, deployment: str, limit: tuple) -> str: - pass + def data_hcl(self, remotes: list) -> str: + """ + The data_hcl method returns the configuration that is used to configure this backend as a remote + data source.
+ + Args: + remotes (list): A list of remote sources to provide a configuration for + + Returns: + str: The HCL configuration for the remote data source + """ + pass # pragma: no cover @abstractmethod - def remotes(self) -> list: - pass + def clean(self, deployment: str, limit: tuple) -> None: + """ + Clean is called to remove any resources that are no longer needed - @property - def handlers(self) -> dict: - return {} + Args: + deployment (str): The deployment name + limit (tuple): A tuple with a list of resources to limit execution to + """ + pass # pragma: no cover + @property + @abstractmethod + def remotes(self) -> list: + """ + Remotes returns a list of the remote data sources that may be configured for a deployment -class Backends: - s3 = "s3" - gcs = "gcs" + Returns: + list: A list of remote data sources + """ + pass # pragma: no cover def validate_backend_empty(state: JSONType) -> bool: @@ -72,4 +104,6 @@ def validate_backend_empty(state: JSONType) -> bool: else: return True except KeyError: - raise BackendError("resources key does not exist in state!") + raise BackendError("resources key does not exist in state") + except TypeError: + raise BackendError("state is not valid JSON") diff --git a/tfworker/backends/gcs.py b/tfworker/backends/gcs.py index a55be1f..0f8b725 100644 --- a/tfworker/backends/gcs.py +++ b/tfworker/backends/gcs.py @@ -1,18 +1,5 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import json +from typing import TYPE_CHECKING import click from google.api_core import page_iterator @@ -20,16 +7,29 @@ from google.cloud import storage from google.cloud.exceptions import Conflict, NotFound -from .base import BackendError, BaseBackend, validate_backend_empty +import tfworker.util.log as log +from tfworker.exceptions import BackendError + +from .base import BaseBackend, validate_backend_empty + +if TYPE_CHECKING: + from tfworker.authenticators import ( # pragma: no cover # noqa + AuthenticatorsCollection, + ) class GCSBackend(BaseBackend): tag = "gcs" auth_tag = "google" - def __init__(self, authenticators, definitions, deployment=None): + def __init__( + self, authenticators: "AuthenticatorsCollection", deployment: str = None + ): + log.warn( + "The GCS Backend has not been updated and tested in a long time; it may not work as expected." 
+ ) + self._authenticator = authenticators[self.auth_tag] - self._definitions = definitions self._gcs_bucket = None self._gcs_prefix = None @@ -99,6 +99,7 @@ def _clean_prefix(self, prefix: str) -> None: else: raise BackendError(f"state file at: {b.name} is not empty") + @property def remotes(self) -> list: """this is unimplemented here""" raise NotImplementedError diff --git a/tfworker/backends/s3.py b/tfworker/backends/s3.py index 704a32d..5c3b4bf 100644 --- a/tfworker/backends/s3.py +++ b/tfworker/backends/s3.py @@ -1,41 +1,58 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import json import os from contextlib import closing -from pathlib import Path -from uuid import uuid4 -from zipfile import ZipFile +from typing import TYPE_CHECKING, Generator -import boto3 +import boto3.dynamodb import botocore +import botocore.errorfactory +import botocore.paginate import click -from ..handlers import BaseHandler, HandlerError -from .base import BackendError, BaseBackend, validate_backend_empty +import tfworker.util.log as log +from tfworker.exceptions import BackendError + +from .base import BaseBackend, validate_backend_empty + +if TYPE_CHECKING: + import boto3 # pragma: no cover # noqa + + from tfworker.app_state import AppState # pragma: no cover # noqa + from tfworker.authenticators import ( # pragma: no cover # noqa + AuthenticatorsCollection, + AWSAuthenticator, + ) class S3Backend(BaseBackend): - tag = "s3" + """ + Defines how to interact with the S3 backend + + Attributes: + auth_tag (str): The tag for the authenticator to use + plan_storage (bool): A flag to indicate whether the backend supports plan storage + remotes (list): A list of remote data sources based on the deployment + tag (str): A unique identifier for the backend + _authenticator (Authenticator): The authenticator for the backend + _ctx (Context): The current click context + _app_state (AppState): The current application state + _s3_client (botocore.client.S3): The boto3 S3 client + _deployment (str): The deployment name or "undefined" + _ddb_client (botocore.client.DynamoDB): The boto3 DynamoDB client + _bucket_files (set): A set of the keys in the bucket that correspond to all the definitions in a deployment + """ + auth_tag = "aws" - plan_storage = False + plan_storage = True + tag = "s3" + + def __init__( + self, authenticators: "AuthenticatorsCollection", deployment: str = None + ): + self._authenticator: "AWSAuthenticator" = authenticators[self.auth_tag] + self._ctx: click.Context = click.get_current_context() + self._app_state: "AppState" = self._ctx.obj - def __init__(self, authenticators, definitions, deployment=None): - # print the module name for debugging - self._definitions = definitions - self._authenticator = authenticators[self.auth_tag] if not self._authenticator.session: raise BackendError( "AWS session not available", @@ -49,41 +66,199 @@ def __init__(self, authenticators, definitions, deployment=None): if deployment is None: 
self._deployment = "undefined" - else: - self._deployment = deployment + return - # Setup AWS clients and ensure backend resources are available - self._ddb_client = self._authenticator.backend_session.client("dynamodb") - self._s3_client = self._authenticator.backend_session.client("s3") + self._deployment = deployment + self._ddb_client: botocore.client.DynamodDB = ( + self._authenticator.backend_session.client("dynamodb") + ) + self._s3_client: botocore.client.S3 = ( + self._authenticator.backend_session.client("s3") + ) self._ensure_locking_table() self._ensure_backend_bucket() - self._bucket_files = self._get_bucket_files() - - try: - self._handlers = S3Handler(self._authenticator) - self.plan_storage = True - except HandlerError as e: - click.secho(f"Error initializing S3Handler: {e}") - raise SystemExit(1) + self._bucket_files: list = self._list_bucket_definitions() @property - def handlers(self) -> dict: + def remotes(self) -> list: + return list(self._bucket_files) + + def clean(self, deployment: str, limit: tuple = None) -> None: """ - handlers returns a dictionary of handlers for the backend, ensure a singleton + clean handles determining the desired items to clean and acts as a director to the + internal methods which handle actual resource removal + + Args: + deployment (str): The deployment name + limit (tuple): A tuple with a list of resources to limit execution to + + Raises: + BackendError: An error occurred while cleaning the backend + """ + if limit: + for limit_item in limit: + log.warn( + "when using limit, dynamodb tables won't be completely dropped" + ) + try: + self._clean_bucket_state(definition=limit_item) + self._clean_locking_state(deployment, definition=limit_item) + except BackendError as e: + raise BackendError(f"error deleting state: {e}") + else: + try: + self._clean_bucket_state() + except BackendError as e: + raise BackendError(f"error deleting state: {e}") + self._clean_locking_state(deployment) + + def data_hcl(self, remotes: list) -> str: """ - return {self.tag: self._handlers} + data_hcl returns the terraform configuration for the remote data sources - def remotes(self) -> list: - """return a list of the remote bucket keys""" - return list(self._bucket_files) + Args: + remotes (list): A list of remote sources to provide a configuration for - def _clean_bucket_state(self, definition=None): + Returns: + str: The HCL configuration for the remote data source + """ + + rendered_prefix = self._app_state.root_options.backend_prefix.format( + deployment=self._app_state.deployment + ) + remote_data_config = [] + if type(remotes) is not list: + raise ValueError("remotes must be a list") + + for remote in set(remotes): + remote_data_config.append(f'data "terraform_remote_state" "{remote}" {{') + remote_data_config.append(' backend = "s3"') + remote_data_config.append(" config = {") + remote_data_config.append( + f' region = "{self._app_state.root_options.backend_region}"' + ) + remote_data_config.append( + f' bucket = "{self._app_state.root_options.backend_bucket}"' + ) + remote_data_config.append( + " key =" f' "{rendered_prefix}/{remote}/terraform.tfstate"' + ) + remote_data_config.append(" }") + remote_data_config.append("}\n") + return "\n".join(remote_data_config) + + def hcl(self, deployment: str) -> str: + """ + hcl returns the configuration that belongs inside the "terraform" configuration block + + Args: + deployment (str): The deployment name + + Returns: + str: The HCL configuration + """ + rendered_prefix = 
self._app_state.root_options.backend_prefix.format( + deployment=self._app_state.deployment + ) + state_config = [] + state_config.append(' backend "s3" {') + state_config.append( + f' region = "{self._app_state.root_options.backend_region}"' + ) + state_config.append( + f' bucket = "{self._app_state.root_options.backend_bucket}"' + ) + state_config.append( + f' key = "{rendered_prefix}/{deployment}/terraform.tfstate"' + ) + state_config.append(f' dynamodb_table = "terraform-{self._deployment}"') + state_config.append(' encrypt = "true"') + state_config.append(" }\n") + return "\n".join(state_config) + + @staticmethod + def filter_keys( + paginator: botocore.paginate.Paginator, + bucket_name: str, + prefix: str = "/", + delimiter: str = "/", + start_after: str = "", + ) -> Generator[str, None, None]: + """ + Filters the keys in a bucket based on the prefix + + adapted from: https://stackoverflow.com/questions/30249069/listing-contents-of-a-bucket-with-boto3 + + Args: + paginator (botocore.paginate.Paginator): The paginator object + bucket_name (str): The name of the bucket + prefix (str): The prefix to filter by + delimiter (str): The delimiter to use + start_after (str): The key to start after + + Yields: + str: Any object key in the bucket contents for all pages + """ + + prefix = prefix[1:] if prefix.startswith(delimiter) else prefix + start_after = ( + (start_after or prefix) if prefix.endswith(delimiter) else start_after + ) + for page in paginator.paginate( + Bucket=bucket_name, Prefix=prefix, StartAfter=start_after + ): + for content in page.get("Contents", ()): + yield content["Key"] + + def _check_bucket_exists(self, name: str) -> bool: + """ + check if a supplied bucket exists + + Args: + name (str): The name of the bucket + + Returns: + bool: True if the bucket exists, False otherwise + """ + try: + self._s3_client.head_bucket(Bucket=name) + return True + except botocore.exceptions.ClientError as err: + err_str = str(err) + if "Not Found" in err_str: + return False + log.error(f"Error checking for bucket: {err}") + click.get_current_context().exit(1) + + def _check_table_exists(self, name: str) -> bool: + """ + check if a supplied dynamodb table exists + + Args: + name (str): The name of the table + + Returns: + bool: True if the table exists, False otherwise + """ + try: + log.trace(f"checking for table: {name}") + if name in self._ddb_client.list_tables()["TableNames"]: + return True + except botocore.exceptions.ClientError as err: + log.error(f"Error checking for table: {err}") + click.get_current_context().exit(1) + return False + + def _clean_bucket_state(self, definition: str = None) -> None: """ clean_state validates all of the terraform states are empty, and then removes the backend objects from S3 - optionally definition can be passed to limit the cleanup - to a single definition + Args: + definition (str): The definition + + Raises: + BackendError: An error occurred while cleaning the state """ s3_paginator = self._s3_client.get_paginator("list_objects_v2") @@ -104,66 +279,115 @@ def _clean_bucket_state(self, definition=None): if validate_backend_empty(backend): self._delete_with_versions(s3_object) - click.secho(f"backend file removed: {s3_object}", fg="yellow") + log.info(f"backend file removed: {s3_object}") else: raise BackendError(f"state file at: {s3_object} is not empty") - def _clean_locking_state(self, deployment, definition=None): + def _clean_locking_state(self, deployment: str, definition: str = None) -> None: """ - clean_locking_state when called removes 
the dynamodb table - that holds all of the state checksums and locking table - entries + Remove the table, or items from the locking table + + Args: + deployment (str): The deployment name + definition (str): The definition, if provided, only an item will be removed """ + bucket = self._ctx.obj.root_options.backend_bucket + prefix = self._ctx.obj.root_options.backend_prefix.format(deployment=deployment) + dynamo_client = self._authenticator.backend_session.resource("dynamodb") if definition is None: table = dynamo_client.Table(f"terraform-{deployment}") table.delete() - click.secho(f"locking table: terraform-{deployment} removed", fg="yellow") + log.info(f"locking table: terraform-{deployment} removed") else: # delete only the entry for a single state resource + item = f"{bucket}/{prefix}/{definition}/terraform.tfstate-md5" + log.info(f"removing locking table key: {item} if it exists") table = dynamo_client.Table(f"terraform-{deployment}") - table.delete_item( - Key={ - "LockID": f"{self._authenticator.bucket}/{self._authenticator.prefix}/{definition}/terraform.tfstate-md5" - } - ) - click.secho( - f"locking table key: '{self._authenticator.bucket}/{self._authenticator.prefix}/{definition}/terraform.tfstate-md5' removed", - fg="yellow", - ) + table.delete_item(Key={"LockID": item}) - def _ensure_locking_table(self) -> None: + def _create_bucket(self, name: str) -> None: """ - _ensure_locking_table checks for the existence of the locking table, and - creates it if it doesn't exist + Create the S3 locking bucket + + Args: + name (str): The name of the bucket """ - # get dynamodb client from backend session - locking_table_name = f"terraform-{self._deployment}" + create_bucket_args = { + "Bucket": name, + "ACL": "private", + } + if self._authenticator.backend_session.region_name != "us-east-1": + create_bucket_args["CreateBucketConfiguration"] = { + "LocationConstraint": self._authenticator.backend_region + } + try: + log.info(f"Creating backend bucket: {name}") + self._s3_client.create_bucket(**create_bucket_args) + except botocore.exceptions.ClientError as err: + err_str = str(err) + log.trace(f"Error creating bucket: {err}") + if "InvalidLocationConstraint" in err_str: + log.error( + "InvalidLocationConstraint raised when trying to create a bucket. 
" + "Verify the AWS backend region passed to the worker matches the " + "backend AWS region in the profile.", + ) + click.get_current_context().exit(1) + elif "BucketAlreadyExists" in err_str: + # Ignore when testing + if "PYTEST_CURRENT_TEST" not in os.environ: + log.error( + f"Bucket {name} already exists, this is not expected since a moment ago it did not" + ) # pragma: no cover + click.get_current_context().exit(1) + elif "BucketAlreadyOwnedByYou" in err_str: + log.error(f"Bucket {name} already owned by you: {err}") + self._ctx.exit(1) + else: + log.error(f"Unknown error creating bucket: {err}") + self._ctx.exit(1) + + def _create_bucket_versioning(self, name: str) -> None: + """ + Enable versioning on the bucket - # Check locking table for aws backend - click.secho( - f"Checking backend locking table: {locking_table_name}", fg="yellow" + Args: + name (str): The name of the bucket + """ + log.info(f"Enabling versioning on bucket: {name}") + self._s3_client.put_bucket_versioning( + Bucket=name, VersioningConfiguration={"Status": "Enabled"} ) - if self._check_table_exists(locking_table_name): - click.secho("DynamoDB lock table found, continuing.", fg="yellow") - else: - click.secho( - "DynamoDB lock table not found, creating, please wait...", fg="yellow" - ) - self._create_table(locking_table_name) + def _create_bucket_public_access_block(self, name: str) -> None: + """ + Block public access to the bucket - def _check_table_exists(self, name: str) -> bool: - """check if a supplied dynamodb table exists""" - if name in self._ddb_client.list_tables()["TableNames"]: - return True - return False + Args: + name (str): The name of the bucket + """ + log.info(f"Blocking public access to bucket: {name}") + self._s3_client.put_public_access_block( + Bucket=name, + PublicAccessBlockConfiguration={ + "BlockPublicAcls": True, + "IgnorePublicAcls": True, + "BlockPublicPolicy": True, + "RestrictPublicBuckets": True, + }, + ) def _create_table( self, name: str, read_capacity: int = 1, write_capacity: int = 1 ) -> None: """ Create a dynamodb locking table. + + Args: + name (str): The name of the table + read_capacity (int): The read capacity units + write_capacity (int): The write capacity units """ table_key = "LockID" self._ddb_client.create_table( @@ -182,21 +406,33 @@ def _create_table( TableName=name, WaiterConfig={"Delay": 10, "MaxAttempts": 30} ) + def _delete_with_versions(self, key: str) -> None: + """ + _delete_with_versions should handle object deletions, and all references / versions of the object + + note: in initial testing this isn't required, but is inconsistent with how S3 delete markers, and the boto + delete object call work there may be some configurations that require extra handling. 
+ """ + self._s3_client.delete_object(Bucket=self._authenticator.bucket, Key=key) + def _ensure_backend_bucket(self) -> None: """ _ensure_backend_bucket checks for the existence of the backend bucket, and creates it if it doesn't exist, along with setting the appropriate bucket permissions + + Raises: + BackendError: An error occurred while ensuring the backend bucket """ - bucket_present = self._check_bucket_exists(self._authenticator.bucket) + bucket = click.get_current_context().obj.root_options.backend_bucket + create_bucket = self._app_state.root_options.create_backend_bucket + bucket_present = self._check_bucket_exists(bucket) if bucket_present: - click.secho( - f"Backend bucket: {self._authenticator.bucket} found", fg="yellow" - ) + log.debug(f"backend bucket {bucket} found") return - if not self._authenticator.create_backend_bucket and not bucket_present: + if not create_bucket: raise BackendError( "Backend bucket not found and --no-create-backend-bucket specified." ) @@ -205,85 +441,34 @@ def _ensure_backend_bucket(self) -> None: self._create_bucket_versioning(self._authenticator.bucket) self._create_bucket_public_access_block(self._authenticator.bucket) - def _create_bucket(self, name: str) -> None: - """ - _create_bucket creates a new s3 bucket - """ - try: - click.secho(f"Creating backend bucket: {name}", fg="yellow") - self._s3_client.create_bucket( - Bucket=name, - CreateBucketConfiguration={ - "LocationConstraint": self._authenticator.backend_region - }, - ACL="private", - ) - except botocore.exceptions.ClientError as err: - err_str = str(err) - if "InvalidLocationConstraint" in err_str: - click.secho( - "InvalidLocationConstraint raised when trying to create a bucket. " - "Verify the AWS backend region passed to the worker matches the " - "backend AWS region in the profile.", - fg="red", - ) - raise SystemExit(1) - elif "BucketAlreadyExists" in err_str: - # Ignore when testing - if "PYTEST_CURRENT_TEST" not in os.environ: - click.secho( - f"Bucket {name} already exists, this is not expected since a moment ago it did not", - fg="red", - ) - raise SystemExit(1) - elif "BucketAlreadyOwnedByYou" not in err_str: - raise err - - def _create_bucket_versioning(self, name: str) -> None: - """ - _create_bucket_versioning enables versioning on the bucket - """ - click.secho(f"Enabling versioning on bucket: {name}", fg="yellow") - self._s3_client.put_bucket_versioning( - Bucket=name, VersioningConfiguration={"Status": "Enabled"} - ) - - def _create_bucket_public_access_block(self, name: str) -> None: + def _ensure_locking_table(self) -> None: """ - _create_bucket_public_access_block blocks public access to the bucket + _ensure_locking_table checks for the existence of the locking table, and + creates it if it doesn't exist """ - click.secho(f"Blocking public access to bucket: {name}", fg="yellow") - self._s3_client.put_public_access_block( - Bucket=name, - PublicAccessBlockConfiguration={ - "BlockPublicAcls": True, - "IgnorePublicAcls": True, - "BlockPublicPolicy": True, - "RestrictPublicBuckets": True, - }, - ) + locking_table_name = f"terraform-{self._deployment}" + log.debug(f"checking for locking table: {locking_table_name}") - def _check_bucket_exists(self, name: str) -> bool: - """ - check if a supplied bucket exists - """ - try: - self._s3_client.head_bucket(Bucket=name) - return True - except botocore.exceptions.ClientError as err: - err_str = str(err) - if "Not Found" in err_str: - return False - raise err + if self._check_table_exists(locking_table_name): + 
log.debug(f"DynamoDB lock table {locking_table_name} found, continuing.") + else: + log.info( + f"DynamoDB lock table {locking_table_name} not found, creating, please wait..." + ) + self._create_table(locking_table_name) - def _get_bucket_files(self) -> set: + def _list_bucket_definitions(self) -> set: """ - _get_bucket_files returns a set of the keys in the bucket + _get_bucket_files returns a set of the keys in the bucket that correspond + to all the definitions in a deployment, the function is poorly named. """ bucket_files = set() + root_options = click.get_current_context().obj.root_options + bucket = root_options.backend_bucket + prefix = root_options.backend_prefix.format(deployment=self._deployment) + log.trace(f"listing definition prefixes in: {bucket}/{prefix}") s3_paginator = self._s3_client.get_paginator("list_objects_v2").paginate( - Bucket=self._authenticator.bucket, - Prefix=self._authenticator.prefix, + Bucket=bucket, Prefix=prefix ) for page in s3_paginator: @@ -292,304 +477,6 @@ def _get_bucket_files(self) -> set: # just append the last part of the prefix to the list, as they # are relative to the base path, and deployment name bucket_files.add(key["Key"].split("/")[-2]) + log.trace(f"bucket files: {bucket_files}") return bucket_files - - def _delete_with_versions(self, key): - """ - _delete_with_versions should handle object deletions, and all references / versions of the object - - note: in initial testing this isn't required, but is inconsistent with how S3 delete markers, and the boto - delete object call work there may be some configurations that require extra handling. - """ - self._s3_client.delete_object(Bucket=self._authenticator.bucket, Key=key) - - def clean(self, deployment: str, limit: tuple = None) -> None: - """ - clean handles determining the desired items to clean and acts as a director to the - internal methods which handle actual resource removal - """ - if limit: - for limit_item in limit: - click.secho( - "when using limit, dynamodb tables won't be completely dropped", - fg="yellow", - ) - try: - # the bucket state deployment is part of the s3 prefix - self._clean_bucket_state(definition=limit_item) - # deployment name needs specified to determine the dynamo table - self._clean_locking_state(deployment, definition=limit_item) - except BackendError as e: - click.secho(f"error deleting state: {e}", fg="red") - raise SystemExit(1) - else: - try: - self._clean_bucket_state() - except BackendError as e: - click.secho(f"error deleting state: {e}") - raise SystemExit(1) - self._clean_locking_state(deployment) - - def hcl(self, name: str) -> str: - state_config = [] - state_config.append(' backend "s3" {') - state_config.append(f' region = "{self._authenticator.backend_region}"') - state_config.append(f' bucket = "{self._authenticator.bucket}"') - state_config.append( - f' key = "{self._authenticator.prefix}/{name}/terraform.tfstate"' - ) - state_config.append(f' dynamodb_table = "terraform-{self._deployment}"') - state_config.append(' encrypt = "true"') - state_config.append(" }") - return "\n".join(state_config) - - def data_hcl(self, remotes: list) -> str: - remote_data_config = [] - if type(remotes) is not list: - raise ValueError("remotes must be a list") - - for remote in set(remotes): - remote_data_config.append(f'data "terraform_remote_state" "{remote}" {{') - remote_data_config.append(' backend = "s3"') - remote_data_config.append(" config = {") - remote_data_config.append( - f' region = "{self._authenticator.backend_region}"' - ) - 
remote_data_config.append(f' bucket = "{self._authenticator.bucket}"') - remote_data_config.append( - " key =" - f' "{self._authenticator.prefix}/{remote}/terraform.tfstate"' - ) - remote_data_config.append(" }") - remote_data_config.append("}\n") - return "\n".join(remote_data_config) - - @staticmethod - def filter_keys(paginator, bucket_name, prefix="/", delimiter="/", start_after=""): - """ - filter_keys returns just they keys that are needed - primarily from: https://stackoverflow.com/questions/30249069/listing-contents-of-a-bucket-with-boto3 - """ - - prefix = prefix[1:] if prefix.startswith(delimiter) else prefix - start_after = ( - (start_after or prefix) if prefix.endswith(delimiter) else start_after - ) - try: - for page in paginator.paginate( - Bucket=bucket_name, Prefix=prefix, StartAfter=start_after - ): - for content in page.get("Contents", ()): - yield content["Key"] - except TypeError: - pass - - -class S3Handler(BaseHandler): - """The S3Handler class is a handler for the s3 backend""" - - actions = ["plan", "apply"] - required_vars = [] - _is_ready = False - - def __init__(self, authenticator): - try: - self.execution_functions = { - "plan": { - "check": self._check_plan, - "post": self._post_plan, - }, - "apply": { - "pre": self._pre_apply, - }, - } - - self._authenticator = authenticator - self._s3_client = self._authenticator.backend_session.client("s3") - - except Exception as e: - raise HandlerError(f"Error initializing S3Handler: {e}") - - def is_ready(self): - if not self._is_ready: - filename = str(uuid4().hex[:6].upper()) - if self._s3_client.list_objects( - Bucket=self._authenticator.bucket, - Prefix=f"{self._authenticator.prefix}/{filename}", - ).get("Contents"): - raise HandlerError( - f"Error initializing S3Handler, remote file already exists: {filename}" - ) - try: - self._s3_client.upload_file( - "/dev/null", - self._authenticator.bucket, - f"{self._authenticator.prefix}/{filename}", - ) - except boto3.exceptions.S3UploadFailedError as e: - raise HandlerError( - f"Error initializing S3Handler, could not create file: {e}" - ) - try: - self._s3_client.delete_object( - Bucket=self._authenticator.bucket, - Key=f"{self._authenticator.prefix}/{filename}", - ) - except boto3.exceptions.S3UploadFailedError as e: - raise HandlerError( - f"Error initializing S3Handler, could not delete file: {e}" - ) - self._is_ready = True - return self._is_ready - - def execute(self, action, stage, **kwargs): - # save a copy of the planfile to the backend state bucket - if action in self.execution_functions.keys(): - if stage in self.execution_functions[action].keys(): - self.execution_functions[action][stage](**kwargs) - return None - - def _check_plan(self, planfile: Path, definition: str, **kwargs): - """check_plan runs while the plan is being checked, it should fetch a file from the backend and store it in the local location""" - # ensure planfile does not exist or is zero bytes if it does - remotefile = f"{self._authenticator.prefix}/{definition}/{planfile.name}" - statefile = f"{self._authenticator.prefix}/{definition}/terraform.tfstate" - if planfile.exists(): - if planfile.stat().st_size == 0: - planfile.unlink() - else: - raise HandlerError(f"planfile already exists: {planfile}") - - if self._s3_get_plan(planfile, remotefile): - if not planfile.exists(): - raise HandlerError(f"planfile not found after download: {planfile}") - # verify the lineage and serial from the planfile matches the statefile - if not self._verify_lineage(planfile, statefile): - click.secho( - "planfile 
lineage does not match statefile, remote plan is unsuitable and will be removed", - fg="red", - ) - self._s3_delete_plan(remotefile) - planfile.unlink() - else: - click.secho( - f"remote planfile downloaded: s3://{self._authenticator.bucket}/{remotefile} -> {planfile}", - fg="yellow", - ) - return None - - def _post_plan( - self, planfile: Path, definition: str, changes: bool = False, **kwargs - ): - """post_apply runs after the apply is complete, it should upload the planfile to the backend""" - logfile = planfile.with_suffix(".log") - remotefile = f"{self._authenticator.prefix}/{definition}/{planfile.name}" - remotelog = remotefile.replace(".tfplan", ".log") - if "text" in kwargs.keys(): - with open(logfile, "w") as f: - f.write(kwargs["text"]) - if planfile.exists() and changes: - if self._s3_put_plan(planfile, remotefile): - click.secho( - f"remote planfile uploaded: {planfile} -> s3://{self._authenticator.bucket}/{remotefile}", - fg="yellow", - ) - if self._s3_put_plan(logfile, remotelog): - click.secho( - f"remote logfile uploaded: {logfile} -> s3://{self._authenticator.bucket}/{remotelog}", - fg="yellow", - ) - return None - - def _pre_apply(self, planfile: Path, definition: str, **kwargs): - """_pre_apply runs before the apply is started, it should remove the planfile from the backend""" - remotefile = f"{self._authenticator.prefix}/{definition}/{planfile.name}" - remotelog = remotefile.replace(".tfplan", ".log") - if self._s3_delete_plan(remotefile): - click.secho( - f"remote planfile removed: s3://{self._authenticator.bucket}/{remotefile}", - fg="yellow", - ) - if self._s3_delete_plan(remotelog): - click.secho( - f"remote logfile removed: s3://{self._authenticator.bucket}/{remotelog}", - fg="yellow", - ) - return None - - def _s3_get_plan(self, planfile: Path, remotefile: str) -> bool: - """_get_plan downloads the file from s3""" - # fetch the planfile from the backend - downloaded = False - try: - self._s3_client.download_file( - self._authenticator.bucket, remotefile, planfile - ) - # make sure the local file exists, and is greater than 0 bytes - downloaded = True - except botocore.exceptions.ClientError as e: - if e.response["Error"]["Code"] == "404": - click.secho(f"remote plan {remotefile} not found", fg="yellow") - pass - else: - raise HandlerError(f"Error downloading planfile: {e}") - return downloaded - - def _s3_put_plan(self, planfile: Path, remotefile: str) -> bool: - """_put_plan uploads the file to s3""" - uploaded = False - # don't upload empty plans - if planfile.stat().st_size == 0: - return uploaded - try: - self._s3_client.upload_file( - str(planfile), self._authenticator.bucket, remotefile - ) - uploaded = True - except botocore.exceptions.ClientError as e: - raise HandlerError(f"Error uploading planfile: {e}") - return uploaded - - def _s3_delete_plan(self, remotefile: str) -> bool: - """_delete_plan removes a remote plan file""" - deleted = False - try: - self._s3_client.delete_object( - Bucket=self._authenticator.bucket, Key=remotefile - ) - deleted = True - except botocore.exceptions.ClientError as e: - raise HandlerError(f"Error deleting planfile: {e}") - return deleted - - def _verify_lineage(self, planfile: Path, statefile: str) -> bool: - # load the statefile as a json object from the backend - state = None - try: - state = json.loads( - self._s3_client.get_object( - Bucket=self._authenticator.bucket, Key=statefile - )["Body"].read() - ) - except botocore.exceptions.ClientError as e: - raise HandlerError(f"Error downloading statefile: {e}") - - # load 
the planfile as a json object - plan = None - try: - with ZipFile(str(planfile), "r") as zip: - with zip.open("tfstate") as f: - plan = json.loads(f.read()) - except Exception as e: - raise HandlerError(f"Error loading planfile: {e}") - - # compare the lineage and serial from the planfile to the statefile - if not (state and plan): - return False - if state["serial"] != plan["serial"]: - return False - if state["lineage"] != plan["lineage"]: - return False - - return True diff --git a/tfworker/cli.py b/tfworker/cli.py index b93862a..89037de 100644 --- a/tfworker/cli.py +++ b/tfworker/cli.py @@ -1,230 +1,149 @@ #!/usr/bin/env python -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import sys - import click from pydantic import ValidationError -import tfworker.types as tf_types -from tfworker.commands import ( - CleanCommand, - EnvCommand, - RootCommand, - TerraformCommand, - VersionCommand, +import tfworker.util.log as log +from tfworker.app_state import AppState +from tfworker.cli_options import CLIOptionsClean, CLIOptionsRoot, CLIOptionsTerraform +from tfworker.commands.clean import CleanCommand +from tfworker.commands.config import log_limiter +from tfworker.commands.env import EnvCommand +from tfworker.commands.root import RootCommand +from tfworker.commands.terraform import TerraformCommand +from tfworker.util.cli import ( + handle_option_error, + pydantic_to_click, + validate_deployment, + validate_host, ) -from tfworker.util.cli import pydantic_to_click -from tfworker.util.system import get_platform - -def validate_deployment(ctx, deployment, name): - """Validate the deployment is no more than 32 characters.""" - if len(name) > 32: - click.secho("deployment must be less than 32 characters", fg="red") - raise SystemExit(1) - if " " in name: - click.secho("deployment must not contain spaces", fg="red") - raise SystemExit(1) - return name +@click.group() +@pydantic_to_click(CLIOptionsRoot) +@click.version_option(package_name="terraform-worker") +@click.pass_context +def cli(ctx: click.Context, **kwargs): + """ + The terraform worker is a command line utility to orchestrate terraform + + The goal is to reduce the amount of boiler plate terraform code, and allow + for both a more dynamic execution, as well as a more controlled execution. + Through a combinatin of definitions and providers specified in the configuration + file, the worker can be used to build, destroy, and manage terraform deployments. 
+ """ + try: + validate_host() + except NotImplementedError as e: + log.msg(str(e), log.LogLevel.ERROR) + ctx.exit(1) -def validate_host(): - """Ensure that the script is being run on a supported platform.""" - supported_opsys = ["darwin", "linux"] - supported_machine = ["amd64", "arm64"] + try: + options = CLIOptionsRoot.model_validate(kwargs) + except ValidationError as e: + handle_option_error(e) - opsys, machine = get_platform() + log.log_level = log.LogLevel[options.log_level] + log.msg(f"set log level to {options.log_level}", log.LogLevel.DEBUG) + app_state = AppState(root_options=options) + ctx.obj = app_state + register_plugins() + RootCommand() + log.trace("finished intializing root command") - if opsys not in supported_opsys: - click.secho( - f"running on {opsys} is not supported", - fg="red", - ) - raise SystemExit(1) - if machine not in supported_machine: - click.secho( - f"running on {machine} machines is not supported", - fg="red", - ) - raise SystemExit(1) +@cli.command() +@pydantic_to_click(CLIOptionsClean) +@click.argument("deployment", envvar="WORKER_DEPLOYMENT", callback=validate_deployment) +@click.pass_context +def clean(ctx: click.Context, deployment: str, **kwargs): # noqa: E501 + """ + Clean up remnants of a deployment + + Once a deployment is destroyed via terraform, there are traces left in + the backend such as S3 buckets, DynamoDB tables, etc. This command will + verify the state is empty, and then remove those traces from the backend. + """ + try: + options = CLIOptionsClean.model_validate(kwargs) + except ValidationError as e: + handle_option_error(e) - return True + ctx.obj.clean_options = options + log.info(f"cleaning Deployment: {deployment}") + log_limiter() + cc = CleanCommand(deployment=deployment) + cc.exec() -class CSVType(click.types.StringParamType): - name = "csv" - envvar_list_splitter = "," - def __repr__(self): - return "CSV" +@cli.command() +@pydantic_to_click(CLIOptionsTerraform) +@click.argument("deployment", envvar="WORKER_DEPLOYMENT", callback=validate_deployment) +@click.pass_context +def terraform(ctx: click.Context, deployment: str, **kwargs): + """ + Execute terraform orchestration on all or a subset of definitions in a deployment + The terraform command is used to plan, apply, and destroy terraform deployments. It + dynamically creates and breaks down large states into smaller subsets of deployments + which can share common parameters and a fixed set of providers. 
+ """ + # @TODO: Add support for a --target flag to target specific IDs in a definition -@click.group() -@pydantic_to_click(tf_types.CLIOptionsRoot) -@click.pass_context -def cli(ctx, **kwargs): - """CLI for the worker utility.""" try: - options = tf_types.CLIOptionsRoot(**kwargs) - validate_host() - ctx.obj = RootCommand(options) + options = CLIOptionsTerraform.model_validate(kwargs) except ValidationError as e: - click.echo(f"Error in options: {e}") - ctx.exit(1) + handle_option_error(e) + ctx.obj.terraform_options = options + log.info(f"building Deployment: {deployment}") + log_limiter() + tfc = TerraformCommand(deployment=deployment) -@cli.command() -@click.option( - "--limit", - help="limit operations to a single definition", - envvar="WORKER_LIMIT", - multiple=True, - type=CSVType(), -) -@click.argument("deployment", callback=validate_deployment) -@click.pass_obj -def clean(rootc, *args, **kwargs): # noqa: E501 - """clean up terraform state""" - # clean just items if limit supplied, or everything if no limit - CleanCommand(rootc, *args, **kwargs).exec() + # Prepare the provider cache + tfc.prep_providers() + # @TODO: Determine how much of this should be executed here, versus + # orchestrated in the TerraformCommand classes .exec method + tfc.terraform_init() + tfc.terraform_plan() + tfc.terraform_apply_or_destroy() @cli.command() -def version(): - """display program version""" - VersionCommand().exec() - sys.exit(0) +@click.pass_context +def env(ctx: click.Context, **kwargs): + """ + Export environment variables for the configured backend + + This command can be useful to setup environment credentials, that the + worker will use. It handles configuration for the different backends + allowing you to `eval` the output to have terraform commands work as + the worker will execute them. This can be helpful when doing manual + state management + """ + env = EnvCommand() + env.exec() -@cli.command() -@click.option( - "--plan-file-path", - default=None, - envvar="WORKER_PLAN_FILE_PATH", - help="path to plan files, with plan it will save to this location, apply will read from it", -) -@click.option( - "--apply/--no-apply", - "tf_apply", - envvar="WORKER_APPLY", - default=False, - help="apply the terraform configuration", -) -@click.option( - "--plan/--no-plan", - "tf_plan", - envvar="WORKER_PLAN", - type=bool, - default=True, - help="toggle running a plan, plan will still be skipped if using a saved plan file with apply", -) -@click.option( - "--force/--no-force", - "force", - default=False, - envvar="WORKER_FORCE", - help="force apply/destroy without plan change", -) -@click.option( - "--destroy/--no-destroy", - default=False, - envvar="WORKER_DESTROY", - help="destroy a deployment instead of create it", -) -@click.option( - "--show-output/--no-show-output", - default=True, - envvar="WORKER_SHOW_OUTPUT", - help="show output from terraform commands", -) -@click.option( - "--terraform-bin", - envvar="WORKER_TERRAFORM_BIN", - help="The complate location of the terraform binary", -) -@click.option( - "--b64-encode-hook-values/--no--b64-encode-hook-values", - "b64_encode", - default=False, - envvar="WORKER_B64_ENCODE_HOOK_VALUES", - help=( - "Terraform variables and outputs can be complex data structures, setting this" - " open will base64 encode the values for use in hook scripts" - ), -) -@click.option( - "--terraform-modules-dir", - envvar="WORKER_TERRAFORM_MODULES_DIR", - default="", - help=( - "Absolute path to the directory where terraform modules will be stored." 
- "If this is not set it will be relative to the repository path at ./terraform-modules" - ), -) -@click.option( - "--limit", - help="limit operations to a single definition", - envvar="WORKER_LIMIT", - multiple=True, - type=CSVType(), -) -@click.option( - "--provider-cache", - envvar="WORKER_PROVIDER_CACHE", - default=None, - help="if provided this directory will be used as a cache for provider plugins", -) -@click.option( - "--stream-output/--no-stream-output", - help="stream the output from terraform command", - envvar="WORKER_STREAM_OUTPUT", - default=True, -) -@click.option( - "--color/--no-color", - help="colorize the output from terraform command", - envvar="WORKER_COLOR", - default=False, -) -@click.argument("deployment", envvar="WORKER_DEPLOYMENT", callback=validate_deployment) -@click.pass_obj -def terraform(rootc, *args, **kwargs): - """execute terraform orchestration""" - try: - tfc = TerraformCommand(rootc, *args, **kwargs) - except FileNotFoundError as e: - click.secho(f"terraform binary not found: {e.filename}", fg="red", err=True) - raise SystemExit(1) +# @TODO: Command to list all definitions in the backend for a given deployment +# @TODO: Command to pull the remote state for a given deployment - click.secho(f"building deployment {kwargs.get('deployment')}", fg="green") - click.secho(f"working in directory: {tfc.temp_dir}", fg="yellow") - tfc.exec() - sys.exit(0) +def register_plugins(): + """ + Register the plugins + """ + # Register Handlers + log.trace("registering handlers") + import tfworker.handlers # noqa: F401 -@cli.command() -@click.pass_obj -def env(rootc, *args, **kwargs): - # provide environment variables from backend to configure shell environment - env = EnvCommand(rootc, *args, **kwargs) - env.exec() - sys.exit(0) + # from tfworker.handlers.bitbucket import BitbucketHandler # noqa: F401 + # from tfworker.handlers.s3 import S3Handler # noqa: F401 + # from tfworker.handlers.trivy import TrivyHandler # noqa: F401 + # Register Copiers + log.trace("registering copiers") + import tfworker.copier # noqa: F401 if __name__ == "__main__": diff --git a/tfworker/cli_options.py b/tfworker/cli_options.py new file mode 100644 index 0000000..048b997 --- /dev/null +++ b/tfworker/cli_options.py @@ -0,0 +1,499 @@ +import os +import shutil +from typing import List, Optional, Union + +import click +from pydantic import ( + ConfigDict, + Field, + ValidationError, + field_validator, + model_validator, +) +from pydantic_core import InitErrorDetails + +import tfworker.util.log as log +from tfworker import constants as const +from tfworker.backends import Backends +from tfworker.types import FreezableBaseModel +from tfworker.util.terraform import get_terraform_version + + +class CLIOptionsRoot(FreezableBaseModel): + """ + CLIOptionsRoot is a Pydantic model that represents the root options for the CLI. 
+ """ + + model_config = ConfigDict(extra="forbid", validate_assignment=True) + + aws_access_key_id: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_ACCESS_KEY_ID"}, + description="AWS Access key", + ) + aws_external_id: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_EXTERNAL_ID"}, + description="If provided, will be used to assume the role specified by --aws-role-arn", + ) + aws_profile: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_PROFILE"}, + description="The AWS/Boto3 profile to use", + ) + aws_role_arn: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_ROLE_ARN"}, + description="If provided, credentials will be used to assume this role (complete ARN)", + ) + aws_secret_access_key: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_SECRET_ACCESS_KEY"}, + description="AWS access key secret", + ) + aws_session_token: Optional[str] = Field( + None, + json_schema_extra={"env": "AWS_SESSION_TOKEN"}, + description="AWS access key token", + ) + aws_region: str = Field( + const.DEFAULT_AWS_REGION, + json_schema_extra={"env": "AWS_DEFAULT_REGION"}, + description="AWS Region to build in", + ) + backend: Backends = Field( + Backends.S3.name.lower(), + json_schema_extra={"env": "WORKER_BACKEND"}, + description="State/locking provider. One of: s3, gcs", + ) + backend_bucket: Optional[str] = Field( + None, + json_schema_extra={"env": "WORKER_BACKEND_BUCKET"}, + description="Bucket (must exist) where all terraform states are stored", + ) + backend_plans: bool = Field( + False, + json_schema_extra={"env": "WORKER_BACKEND_PLANS"}, + description="Store plans in the backend", + ) + backend_prefix: str = Field( + const.DEFAULT_BACKEND_PREFIX, + json_schema_extra={"env": "WORKER_BACKEND_PREFIX"}, + description="Prefix to use in backend storage bucket for all terraform states", + ) + backend_region: str = Field( + const.DEFAULT_AWS_REGION, + description="Region where terraform root/lock bucket exists", + ) + backend_use_all_remotes: bool = Field( + False, + json_schema_extra={"env": "WORKER_BACKEND_USE_ALL_REMOTES"}, + description="Generate remote data sources based on all definition paths present in the backend", + ) + create_backend_bucket: bool = Field( + True, description="Create the backend bucket if it does not exist" + ) + config_file: str = Field( + const.DEFAULT_CONFIG, + json_schema_extra={"env": "WORKER_CONFIG_FILE"}, + description="Path to the configuration file", + ) + config_var: Optional[List[str]] = Field( + [], + description='key=value to be supplied as jinja variables in config_file under "var" dictionary, can be specified multiple times', + ) + log_level: str = Field( + "INFO", + description="The level to use for logging/output", + json_schema_extra={"env": "LOG_LEVEL"}, + ) + gcp_region: str = Field( + const.DEFAULT_GCP_REGION, + json_schema_extra={"env": "GCP_REGION"}, + description="Region to build in", + ) + gcp_creds_path: Optional[str] = Field( + None, + json_schema_extra={"env": "GCP_CREDS_PATH"}, + description="Relative path to the credentials JSON file for the service account to be used.", + ) + gcp_project: Optional[str] = Field( + None, + json_schema_extra={"env": "GCP_PROJECT"}, + description="GCP project name to which work will be applied", + ) + repository_path: str = Field( + const.DEFAULT_REPOSITORY_PATH, + json_schema_extra={"env": "WORKER_REPOSITORY_PATH"}, + description="The root repository/working path, any relative paths will be resolved from here", + ) + working_dir: Optional[str] = Field( 
+ None, + json_schema_extra={"env": "WORKER_WORKING_DIR"}, + description="Specify the path to use instead of a temporary directory, must exist, be empty, and be writeable, --clean applies to this directory as well", + ) + + @field_validator("backend", mode="before") + @classmethod + def validate_backend(cls, backend: Union[Backends, str]) -> Backends: + """Validate the backend type. + + Args: + backend: The backend type. + + Returns: + The validated backend type. + + Raises: + ValueError: If the backend is not supported. + """ + # convert the backend str to the corresponding enum + if isinstance(backend, str): + try: + selected_backend = Backends[backend.upper()] + except (ValueError, KeyError): + raise ValueError( + f"Backend {backend} is not supported, must be one of {' | '.join(Backends.names())}" + ) + return selected_backend + if isinstance(backend, Backends): + return backend + + @field_validator("config_file") + @classmethod + def validate_config_file(cls, fpath: str) -> str: + """Validates the config file exists, and is readable + + Args: + fpath (str): The path to the config file + + Returns: + str: The absolute path to the config file + + Raises: + ValueError: If the file does not exist or is not readable + """ + if not os.path.isabs(fpath): + fpath = os.path.abspath(fpath) + if os.path.isdir(fpath): + raise ValueError(f"Config file {fpath} is a directory!") + if not os.path.isfile(fpath): + raise ValueError(f"Config file {fpath} does not exist!") + if not os.access(fpath, os.R_OK): + raise ValueError(f"Config file {fpath} is not readable!") + return fpath + + @field_validator("gcp_creds_path") + @classmethod + def validate_gcp_creds_path(cls, fpath: Union[str, None]) -> Union[str, None]: + """Validate the GCP credentials path. + + Args: + fpath (str): Path to the GCP credentials file. + + Returns: + Fully resolved path to the GCP credentials file. + + Raises: + ValueError: If the path does not exist or is not a file. + """ + if fpath is None: + return + if not os.path.isabs(fpath): + fpath = os.path.abspath(fpath) + if os.path.isfile(fpath): + return fpath + raise ValueError(f"Path {fpath} is not a file!") + + @field_validator("log_level") + @classmethod + def validate_log_level(cls, level: str) -> str: + """Validate the log level. + + Args: + level(str): The log level. + + Returns: + The normalized/validated log level. + + Raises: + ValueError: If the log level is invalid. + """ + try: + _ = log.LogLevel[level.upper()] + return level.upper() + except KeyError: + raise ValueError("Invalid log level") + + @field_validator("backend_prefix") + @classmethod + def validate_backend_prefix(cls, prefix: str) -> str: + """Mutate the backend prefix to ensure there are no leading or trailing slashes, or double slashes. + + Args: + prefix (str): The backend prefix. + + Returns: + The validated backend prefix. + """ + if prefix.startswith("/"): + prefix = prefix[1:] + if prefix.endswith("/"): + prefix = prefix[:-1] + if "//" in prefix: + prefix = prefix.replace("//", "/") + + return prefix + + @field_validator("repository_path") + @classmethod + def validate_repository_path(cls, fpath: str) -> str: + return validate_existing_dir(fpath) + + @field_validator("working_dir") + @classmethod + def validate_working_dir(cls, fpath: Union[str, None]) -> Union[str, None]: + return validate_existing_dir(fpath, empty=True) + + +class CLIOptionsClean(FreezableBaseModel): + """ + CLIOptionsClean is a Pydantic model that represents the options for the clean command. 
+    """
+
+    model_config = ConfigDict(extra="forbid", validate_assignment=True)
+
+    limit: Optional[List[str]] = Field(
+        [],
+        description="limit operations to a single definition; may be specified multiple times or as comma-separated values",
+        json_schema_extra={"short_arg": "l", "env": "WORKER_LIMIT"},
+    )
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_limit(cls, values):
+        return validate_limit(values)
+
+
+class CLIOptionsTerraform(FreezableBaseModel):
+    """
+    CLIOptionsTerraform is a Pydantic model that represents the options for the terraform command.
+    """
+
+    model_config = ConfigDict(extra="forbid", validate_assignment=True)
+
+    apply: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_APPLY"},
+        description="Apply the terraform configuration",
+    )
+    destroy: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_DESTROY"},
+        description="Destroy a deployment instead of creating it",
+    )
+    plan: bool = Field(
+        True,
+        json_schema_extra={"env": "WORKER_PLAN"},
+        description="Toggle running a plan; a plan is still skipped when applying a saved plan file",
+    )
+    force: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_FORCE"},
+        description="Force apply/destroy without plan change",
+    )
+    plan_file_path: Optional[str] = Field(
+        None,
+        json_schema_extra={"env": "WORKER_PLAN_FILE_PATH"},
+        description="Path to plan files; plan will save to this location and apply will read from it",
+    )
+    show_output: bool = Field(
+        True,
+        json_schema_extra={"env": "WORKER_SHOW_OUTPUT"},
+        description="Show output from terraform commands",
+    )
+    terraform_bin: Optional[str] = Field(
+        None,
+        json_schema_extra={"env": "WORKER_TERRAFORM_BIN"},
+        description="The complete location of the terraform binary",
+    )
+    b64_encode: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_B64_ENCODE"},
+        description="Base64 encode Terraform variables and outputs for use in hook scripts",
+    )
+    limit: Optional[List[str]] = Field(
+        None,
+        json_schema_extra={"env": "WORKER_LIMIT"},
+        description="Limit operations to a single definition",
+    )
+    provider_cache: Optional[str] = Field(
+        None,
+        json_schema_extra={"env": "WORKER_PROVIDER_CACHE"},
+        description="Directory to be used as a cache for provider plugins",
+    )
+    strict_locking: bool = Field(
+        True,
+        json_schema_extra={"env": "WORKER_STRICT_LOCKING"},
+        description="Enable strict locking for terraform providers (fail if pinned providers do not meet constraints)",
+    )
+    stream_output: bool = Field(
+        True,
+        json_schema_extra={"env": "WORKER_STREAM_OUTPUT"},
+        description="Stream the output from terraform command",
+    )
+    color: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_COLOR"},
+        description="Colorize the output from terraform command",
+    )
+    backend_use_all_remotes: bool = Field(
+        False,
+        json_schema_extra={"env": "WORKER_BACKEND_USE_ALL_REMOTES"},
+        description="Generate remote data sources based on all definition paths present in the backend",
+    )
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_apply_and_destroy(cls, values):
+        errors = []
+        if values.get("apply") and values.get("destroy"):
+            errors.append(
+                InitErrorDetails(
+                    loc=("--apply", "--apply"),
+                    input=(values.get("apply")),
+                    ctx={"error": "apply and destroy cannot both be true"},
+                    type="value_error",
+                )
+            )
+            errors.append(
+                InitErrorDetails(
+                    loc=("--destroy", "--destroy"),
+                    input=(values.get("destroy")),
+                    ctx={"error": "apply and destroy cannot both be true"},
+                    type="value_error",
+                )
+            )
+        if errors:
+            raise ValidationError.from_exception_data("apply_and_destroy", errors)
+        return values
+
+    @field_validator("terraform_bin")
+    @classmethod
+    def validate_terraform_bin(cls, fpath: Union[str, None]) -> Union[str, None]:
+        """Validate the terraform binary path.
+
+        Args:
+            fpath: Path to the terraform binary.
+
+        Returns:
+            Path to the terraform binary.
+
+        Raises:
+            ValueError: If the path does not exist or is not a file.
+        """
+        if fpath is None:
+            config = click.get_current_context().obj.loaded_config
+            if config is not None:
+                fpath = config.worker_options.get("terraform_bin")
+                log.trace(f"Using terraform binary from config: {fpath}")
+        else:
+            log.trace(f"Using terraform binary from CLI: {fpath}")
+        if fpath is None:
+            fpath = shutil.which("terraform")
+            log.trace(f"Using terraform binary from PATH: {fpath}")
+        if fpath is None:
+            raise ValueError(
+                "Terraform binary not found in PATH; specify it in the config or with --terraform-bin"
+            )
+        if not os.path.isabs(fpath):
+            fpath = os.path.abspath(fpath)
+        if not os.path.isfile(fpath):
+            raise ValueError(f"Terraform binary {fpath} does not exist!")
+        if not os.access(fpath, os.X_OK):
+            raise ValueError(f"Terraform binary {fpath} is not executable!")
+        get_terraform_version(fpath, validation=True)
+        log.trace(f"Terraform binary path validated: {fpath}")
+        return fpath
+
+    @field_validator("provider_cache")
+    @classmethod
+    def validate_provider_cache(cls, fpath: Union[str, None]) -> Union[str, None]:
+        return validate_existing_dir(fpath)
+
+    @field_validator("plan_file_path")
+    @classmethod
+    def validate_plan_file_path(cls, fpath: Union[str, None]) -> Union[str, None]:
+        return validate_existing_dir(fpath)
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_limit(cls, values):
+        return validate_limit(values)
+
+
+def validate_existing_dir(fpath: Union[str, None], empty=False) -> Union[str, None]:
+    """
+    validate_existing_dir is called by multiple validators; it ensures
+    a writable directory exists at the provided path, and optionally that
+    it is empty
+
+    Args:
+        fpath (str): The path to the directory
+        empty (bool): If the directory must be empty
+
+    Returns:
+        str: The absolute path to the directory
+
+    Raises:
+        ValueError: If the directory does not exist, is not a directory, is not writeable, or is not empty
+    """
+    if fpath is None:
+        return
+    if not os.path.isabs(fpath):
+        fpath = os.path.abspath(fpath)
+    if not os.path.isdir(fpath):
+        raise ValueError(f"path {fpath} does not exist!")
+    if not os.access(fpath, os.W_OK):
+        raise ValueError(f"path {fpath} is not writeable!")
+    if not os.access(fpath, os.R_OK):
+        raise ValueError(f"path {fpath} is not readable!")
+    if empty and any(os.listdir(fpath)):
+        raise ValueError(f"path {fpath} must be empty!")
+    return fpath
+
+
+def validate_limit(values):
+    """
+    validate_limit is called by multiple CLIOptions models to validate the limit field
+    """
+    if values.get("limit") is None:
+        return values
+
+    if isinstance(values["limit"], str):
+        values["limit"] = values["limit"].split(",")
+
+    new_items = []
+    # accept comma separated values and convert to list, same as passing --limit item_one --limit item_two
+    for item in values["limit"]:
+        if "," in item:
+            new_items.extend(item.split(","))
+        else:
+            new_items.append(item)
+
+    values["limit"] = new_items
+
+    errors = []
+    config = click.get_current_context().obj.loaded_config
+    if config is not None:
+        for item in values["limit"]:
+            if item not in config.definitions.keys():
+                errors.append(
+                    InitErrorDetails(
+                        loc=("--limit", "--limit"),
+                        
input=item, + ctx={"error": f"definition {item} not found in config"}, + type="value_error", + ) + ) + if errors: + raise ValidationError.from_exception_data("invalid_limit", errors) + return values diff --git a/tfworker/commands/__init__.py b/tfworker/commands/__init__.py deleted file mode 100644 index 0b790e5..0000000 --- a/tfworker/commands/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .base import BaseCommand # noqa -from .clean import CleanCommand # noqa -from .env import EnvCommand # noqa -from .root import RootCommand # noqa -from .terraform import TerraformCommand # noqa -from .version import VersionCommand # noqa diff --git a/tfworker/commands/base.py b/tfworker/commands/base.py index 0201003..84be81d 100644 --- a/tfworker/commands/base.py +++ b/tfworker/commands/base.py @@ -1,191 +1,358 @@ -# Copyright 2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib +from typing import TYPE_CHECKING, Any, Dict, Union import click +from pydantic import BaseModel, ValidationError -from tfworker.authenticators import AuthenticatorsCollection -from tfworker.backends import BackendError, select_backend -from tfworker.definitions import DefinitionsCollection -from tfworker.handlers import HandlersCollection -from tfworker.handlers.exceptions import HandlerError, UnknownHandler +import tfworker.commands.config as c +import tfworker.util.log as log +from tfworker.exceptions import BackendError, HandlerError, TFWorkerException +from tfworker.util.cli import handle_config_error -# from tfworker.plugins import PluginsCollection -from tfworker.providers.providers_collection import ProvidersCollection -from tfworker.util.system import get_version, which -from tfworker.util.terraform import get_terraform_version +if TYPE_CHECKING: + from tfworker.app_state import AppState # pragma: no cover # noqa + from tfworker.authenticators.collection import ( # pragma: no cover # noqa + AuthenticatorsCollection, + ) + from tfworker.backends import Backends, BaseBackend # pragma: no cover + from tfworker.definitions.collection import ( # pragma: no cover # noqa + DefinitionsCollection, + ) + from tfworker.handlers.collection import ( # pragma: no cover # noqa + HandlersCollection, + ) + from tfworker.providers.collection import ( # pragma: no cover # noqa + ProvidersCollection, + ) - -class MissingDependencyException(Exception): - pass + from ..cli_options import CLIOptionsRoot # pragma: no cover # noqa class BaseCommand: - def __init__(self, rootc, deployment="undefined", limit=tuple(), **kwargs): - self._rootc = rootc - self._args_dict = dict(kwargs) - self._args_dict.update(self._rootc.args.__dict__) - - self._version = get_version() - self._providers = None - self._definitions = None - self._backend = None - # self._plugins = None - self._terraform_vars = dict() - self._remote_vars = dict() - self._temp_dir = rootc.temp_dir - self._repository_path = rootc.args.repository_path - - rootc.add_arg("deployment", deployment) - rootc.load_config() - - self._provider_cache = self._resolve_arg("provider_cache") - if self._provider_cache is not None: - self._provider_cache = pathlib.Path(self._provider_cache).resolve() - - (self._tf_version_major, self._tf_version_minor) = self._resolve_arg( - "tf_version" - ) or (None, None) - - self._terraform_bin = self._resolve_arg("terraform_bin") or which("terraform") - if not self._terraform_bin: - raise MissingDependencyException( - "Cannot find terraform in arguments or on PATH" - ) - if self._tf_version_major is None or self._tf_version_minor is None: - ( - self._tf_version_major, - self._tf_version_minor, - ) = get_terraform_version(self._terraform_bin) - - self._authenticators = AuthenticatorsCollection( - rootc.args, deployment=deployment, **kwargs + """ + Base command class that initializes the application state + """ + + def __init__( + self, + deployment: str | None = None, + ctx: click.Context | None = None, + app_state: Union["AppState", None] = None, + ) -> None: + """ + initialize the base command with the deployment, exceptions are handled + in all of the _init methods + + Args: + deployment (str | None): The deployment name + + """ + self._ctx: click.Context + self._app_state: "AppState" + + if ctx is not None: + self._ctx = ctx + else: + self._ctx = click.get_current_context() + + if app_state is not None: + self._app_state = app_state + else: + self._app_state = self._ctx.obj + + self._app_state.deployment = 
deployment + c.resolve_model_with_cli_options(self._app_state) + log.log_level = log.LogLevel[self._app_state.root_options.log_level] + + self._app_state.authenticators = _init_authenticators( + self._app_state.root_options + ) + self._app_state.providers = _init_providers( + self._app_state.loaded_config.providers, self._app_state.authenticators ) - self._providers = ProvidersCollection( - rootc.providers_odict, self._authenticators + self._app_state.backend = _init_backend_(self._app_state) + self._app_state.definitions = _init_definitions( + self._app_state.loaded_config.definitions ) - self._plan_for = "destroy" if self._resolve_arg("destroy") else "apply" - self._definitions = DefinitionsCollection( - rootc.definitions_odict, - deployment, - limit, - self._plan_for, - self._providers, - self._repository_path, - rootc, - self._temp_dir, - self._tf_version_major, - provider_cache=self._provider_cache, + self._app_state.handlers = _init_handlers( + self._app_state.loaded_config.handlers ) - # plugins_odict = dict() - for provider in rootc.providers_odict: - try: - raw_version = rootc.providers_odict[provider]["requirements"]["version"] - except KeyError: - click.secho( - "providers must have a version constraint specified", fg="red" - ) - raise SystemExit() - version = raw_version.split(" ")[-1] - vals = {"version": version} - base_url = rootc.providers_odict[provider].get("baseURL") - if base_url: - vals["baseURL"] = base_url - source = rootc.providers_odict[provider].get("source") - if source: - vals["source"] = source - try: - self._backend = select_backend( - self._resolve_arg("backend"), - deployment, - self._authenticators, - self._definitions, - ) - except BackendError as e: - click.secho(e, fg="red") - click.secho(e.help, fg="red") - raise SystemExit(1) - - # if backend_plans is requested, check if backend supports it - self._backend_plans = self._resolve_arg("backend_plans") - if self._backend_plans: - if not self._backend.plan_storage: - click.secho( - f"backend {self._backend.tag} does not support backend_plans", - fg="red", - ) - raise SystemExit(1) - - # initialize handlers collection - click.secho("Initializing handlers", fg="green") - try: - self._handlers = HandlersCollection(rootc.handlers_odict) - except (UnknownHandler, HandlerError, TypeError) as e: - click.secho(e, fg="red") - raise SystemExit(1) - - # allow a backend to implement handlers as well since they already control the provider session - if self._backend.handlers and self._backend_plans: - self._handlers.update(self._backend.handlers) - - # list enabled handlers - click.secho("Enabled handlers:", fg="green") - for h in self._handlers: - click.secho(f" {h}", fg="green") - @property - def authenticators(self): - return self._authenticators + # with deployment name known, update the root options + self._app_state.root_options.backend_prefix = ( + self._app_state.root_options.backend_prefix.format(deployment=deployment) + ) + self._app_state.freeze() @property - def backend(self): - return self._backend + def ctx(self) -> click.Context: + return self._ctx @property - def providers(self): - return self._providers + def app_state(self) -> "AppState": + return self._app_state - @property - def definitions(self): - return self._definitions - # @property - # def plugins(self): - # return self._plugins +def _init_authenticators( + root_options: "CLIOptionsRoot", +) -> "AuthenticatorsCollection": + """ + Initialize the authenticators collection for the application state - @property - def temp_dir(self): - return 
self._temp_dir + Args: + root_options (CLIOptionsRoot): The root options object - @property - def repository_path(self): - return self._repository_path - - def _execute_handlers(self, action, stage, **kwargs): - """Execute all ready handlers for supported actions""" - for h in self._handlers: - if action in h.actions and h.is_ready(): - h.execute(action, stage, **kwargs) - - def _resolve_arg(self, name): - """Resolve argument in order of precedence: - 1) CLI argument - 2) Config file - """ - if name in self._args_dict and self._args_dict[name] is not None: - return self._args_dict[name] - if name in self._rootc.worker_options_odict: - return self._rootc.worker_options_odict[name] - return None + Returns: + AuthenticatorsCollection: The initialized authenticators collection + """ + # from tfworker.authenticators.collection import AuthenticatorsCollection + import tfworker.authenticators.collection as c + + try: + authenticators = c.AuthenticatorsCollection(root_options) + except TFWorkerException as e: + log.error(e) + click.get_current_context().exit(1) + + log.debug( + f"initialized authenticators {[x.tag for x in authenticators.keys()]}", + ) + return authenticators + + +def _init_providers( + providers_config: "ProvidersCollection", + authenticators: "AuthenticatorsCollection", +) -> "ProvidersCollection": + """ + Initialize the providers collection based on the provided configuration, it will + add information for providers that require authentication configurations + + Args: + providers_config (ProvidersCollection): The providers configuration + authenticators (AuthenticatorsCollection): The authenticators collection + + Returns: + ProvidersCollection: The initialized providers collection + """ + from tfworker.providers.collection import ProvidersCollection + + try: + providers = ProvidersCollection(providers_config, authenticators) + except ValidationError as e: + handle_config_error(e) + + log.debug( + f"initialized providers {[x for x in providers.keys()]}", + ) + return providers + + +def _init_definitions(definitions_config: Dict[str, Any]) -> "DefinitionsCollection": + """ + Initialize the definitions collection based on the provided configuration, + + Args: + definitions_config (Dict[str, Any]): The definitions configuration + """ + # look for any limit options on the app_state + from tfworker.definitions.collection import DefinitionsCollection + + try: + definitions = DefinitionsCollection( + definitions_config, limiter=c.find_limiter() + ) + log.debug( + f"initialized definitions {[x for x in definitions.keys()]}", + ) + except ValueError as e: + log.error(e) + click.get_current_context().exit(1) + + return definitions + + +def _init_backend_(app_state: "AppState") -> "BaseBackend": + """ + Returns the initialized backend. + + Args: + app_state (AppState): The current application state. + + Returns: + BaseBackend: The initialized backend. + + """ + backend_config = app_state.root_options.backend + + be = _select_backend( + backend_config, + app_state.deployment, + app_state.authenticators, + ) + + _check_backend_plans(app_state.root_options.backend_plans, be) + + log.debug(f"initialized backend {be.tag}") + return be + + +def _select_backend( + backend: "Backends", deployment: str, authenticators: "AuthenticatorsCollection" +) -> "BaseBackend": + """ + Selects and initializes the backend. + + Args: + backend_config (dict): Configuration for the backend. + deployment (str): The deployment name. + authenticators (AuthenticatorsCollection): The authenticators collection. 
+ definitions (DefinitionsCollection): The definitions collection. + + Returns: + BaseBackend: The initialized backend. + + Raises: + BackendError: If there is an error selecting the backend. + """ + try: + return backend.value(authenticators, deployment=deployment) + except BackendError as e: + log.error(e) + log.error(e.help) + click.get_current_context().exit(1) + + +def _check_backend_plans(backend_plans, backend) -> None: + """ + Checks if backend plans are supported by the backend. + + Args: + backend_plans (bool): Flag indicating if backend plans are requested. + backend (BaseBackend): The initialized backend. + + """ + if backend_plans: + log.trace(f"backend_plans requested, checking if {backend.tag} supports it") + if not backend.plan_storage: + log.error(f"backend {backend.tag} does not support backend_plans") + click.get_current_context().exit(1) + + +def _init_handlers(handlers_config: Dict[str, Any]) -> "HandlersCollection": + """ + Initialize the handlers collection based on the provided configuration. + + Args: + handlers_config (Dict[str, Any]): Configuration for the handlers. + + Returns: + HandlersCollection: The initialized handlers collection. + + """ + from tfworker.handlers.collection import HandlersCollection + + parsed_handlers = _parse_handlers(handlers_config) + _add_universal_handlers(parsed_handlers) + + log.trace(f"parsed handlers {parsed_handlers}") + + handlers = HandlersCollection(parsed_handlers) + log.debug(f"initialized handlers {[x for x in handlers.keys()]}") + + _check_handlers_ready(handlers) + return handlers + + +def _parse_handlers(handlers_config: Dict[str, Any]) -> Dict[str, Any]: + """ + Parses the handlers configuration into handler instances. + + Args: + handlers_config (Dict[str, Any]): Configuration for the handlers. + + Returns: + Dict[str, Any]: Parsed handler instances. + + """ + parsed_handlers = {} + for k, v in handlers_config.items(): + log.trace(f"initializing handler {k}") + log.trace(f"handler config: {v}") + config = _validate_handler_config(k, v) + parsed_handlers[k] = _initialize_handler(k, config) + return parsed_handlers + + +def _validate_handler_config( + handler_name: str, handler_config: Dict[str, Any] +) -> BaseModel: + """ + Validates the configuration for a handler. + + Args: + handler_name (str): The name of the handler. + handler_config (Dict[str, Any]): The configuration for the handler. + + Returns: + BaseModel: The validated configuration model. + + Raises: + ValidationError: If the configuration is invalid. + """ + from tfworker.handlers.registry import HandlerRegistry as hr + + try: + return hr.get_handler_config_model(handler_name).model_validate(handler_config) + except ValidationError as e: + handle_config_error(e) + + +def _initialize_handler(handler_name: str, config: BaseModel) -> Any: + """ + Initializes a handler with the given configuration. + + Args: + handler_name (str): The name of the handler. + config (BaseModel): The validated configuration model. + + Returns: + Any: The initialized handler. + """ + from tfworker.handlers.registry import HandlerRegistry as hr + + try: + return hr.get_handler(handler_name)(config) + except HandlerError as e: + log.error(e) + click.get_current_context().exit(1) + + +def _add_universal_handlers(parsed_handlers: Dict[str, Any]): + """ + Adds universal handlers to the parsed handlers. + + Args: + parsed_handlers (Dict[str, Any]): The parsed handlers. 
+ + """ + from tfworker.handlers.registry import HandlerRegistry as hr + + for h in hr.list_universal_handlers(): + log.trace(f"initializing universal handler {h}") + if h not in parsed_handlers.keys(): + parsed_handlers[h] = hr.get_handler(h)() + + +def _check_handlers_ready(handlers: "HandlersCollection"): + """ + Checks if all handlers are ready. + + Args: + handlers (HandlersCollection): The handlers collection. + + """ + for h, v in handlers.items(): + log.trace(f"checking if handler {h} is ready") + if not v.is_ready: + log.debug(f"handler {h} is not ready, removing it") + handlers.pop(h) diff --git a/tfworker/commands/clean.py b/tfworker/commands/clean.py index d742d49..1ba5032 100644 --- a/tfworker/commands/clean.py +++ b/tfworker/commands/clean.py @@ -1,33 +1,18 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import click - -from tfworker.backends.base import BackendError from tfworker.commands.base import BaseCommand class CleanCommand(BaseCommand): def __init__(self, rootc, **kwargs): - super(CleanCommand, self).__init__(rootc, **kwargs) - self._deployment = self._resolve_arg("deployment") - self._limit = self._resolve_arg("limit") + # super(CleanCommand, self).__init__(rootc, **kwargs) + # self._deployment = self._resolve_arg("deployment") + # self._limit = self._resolve_arg("limit") + pass def exec(self): - try: - self._backend.clean(deployment=self._deployment, limit=self._limit) - except BackendError as e: - click.secho(f"error while cleaning: {e}", fg="red") - raise SystemExit(1) - click.secho("backend cleaning completed", fg="green") + # try: + # self._backend.clean(deployment=self._deployment, limit=self._limit) + # except BackendError as e: + # click.secho(f"error while cleaning: {e}", fg="red") + # raise SystemExit(1) + # click.secho("backend cleaning completed", fg="green") + pass diff --git a/tfworker/commands/config.py b/tfworker/commands/config.py new file mode 100644 index 0000000..e0182c4 --- /dev/null +++ b/tfworker/commands/config.py @@ -0,0 +1,232 @@ +import inspect +import io +import json +import os +import pathlib +from typing import Any, Dict, List, Type, Union + +import click +import hcl2 +import jinja2 +import yaml +from jinja2.runtime import StrictUndefined +from pydantic import BaseModel, ValidationError + +import tfworker.util.log as log +from tfworker.app_state import AppState +from tfworker.types.config_file import ConfigFile +from tfworker.util.cli import handle_config_error + +from .. import cli_options + + +def load_config(config_file: str, config_vars: Dict[str, str]) -> ConfigFile: + """ + Load the configuration file. + + Args: + config_file (str): The path to the configuration file. + config_vars (Dict[str, str]): A dictionary of configuration variables. + + Returns: + Dict[str, Any]: The loaded configuration. 
+    """
+    log.trace(f"loading config file: {config_file}")
+    rendered_config = _process_template(config_file, _get_full_config_vars(config_vars))
+    log.safe_trace(f"rendered config: {json.dumps(rendered_config)}")
+    if config_file.endswith(".hcl"):
+        loaded_config: Dict[Any, Any] = hcl2.loads(rendered_config)["terraform"]
+    else:
+        loaded_config: Dict[Any, Any] = yaml.safe_load(rendered_config)["terraform"]
+
+    try:
+        parsed_config = ConfigFile.model_validate(loaded_config)
+    except ValidationError as e:
+        handle_config_error(e)
+
+    return parsed_config
+
+
+def find_limiter() -> List[str] | None:
+    """
+    Find if any of the CLIOptions have a limit option.
+
+    Returns:
+        List[str] | None: The limiter if found, otherwise None
+    """
+    model_classes = get_cli_options_model_classes()
+    available_classes = []
+
+    for model_class in model_classes:
+        log.trace(f"checking model class: {model_class.__name__} for limit")
+        if model_class.model_fields.get("limit", None) is None:
+            continue
+        available_classes.append(model_class.__name__)
+
+    app_state = click.get_current_context().obj
+    for field in app_state.model_fields_set:
+        model = getattr(app_state, field)
+        if model.__class__.__name__ in available_classes:
+            return model.limit
+
+
+def log_limiter() -> None:
+    """Log the limiter."""
+    limiter = find_limiter()
+    if limiter:
+        log.info(f"limiting to {', '.join(limiter)}")
+
+
+def get_cli_options_model_classes() -> List[Type[BaseModel]]:
+    """
+    Get all model classes from tfworker.cli_options that inherit from BaseModel
+    and have names prefixed with 'CLIOptions'.
+
+    Returns:
+        List[Type[BaseModel]]: List of model classes.
+    """
+    cli_options_module = cli_options
+    model_classes = []
+
+    for name, obj in inspect.getmembers(cli_options_module, inspect.isclass):
+        if name.startswith("CLIOptions") and issubclass(obj, BaseModel):
+            model_classes.append(obj)
+
+    return model_classes
+
+
+def resolve_model_with_cli_options(
+    app_state: AppState, model_classes: Union[List[Type[BaseModel]], None] = None
+) -> None:
+    """
+    Resolve the model with the CLI options.
+
+    Args:
+        app_state (AppState): The application state.
+        model_classes (List[Type[BaseModel]]): The model classes to resolve.
+    """
+    if model_classes is None:
+        model_classes = get_cli_options_model_classes()
+
+    if not hasattr(app_state, "loaded_config") or app_state.loaded_config is None:
+        raise ValueError("loaded_config is not set on the AppState object")
+
+    skip_param_sources = [
+        click.core.ParameterSource.ENVIRONMENT,
+        click.core.ParameterSource.COMMANDLINE,
+    ]
+    log.trace(f"not overwriting param sources: {skip_param_sources}")
+
+    for field in app_state.model_fields_set:
+        model = getattr(app_state, field)
+        _update_model_if_match(
+            app_state, model_classes, field, model, skip_param_sources
+        )
+
+
+def _update_model_if_match(
+    app_state: AppState,
+    model_classes: List[Type[BaseModel]],
+    field: str,
+    model: BaseModel,
+    skip_param_sources: List[click.core.ParameterSource],
+):
+    """
+    Update the model if it matches any of the model classes.
+
+    Args:
+        app_state (AppState): The application state.
+        model_classes (List[Type[BaseModel]]): The model classes to check against.
+        field (str): The field name.
+        model (BaseModel): The model instance.
+        skip_param_sources (List[click.core.ParameterSource]): List of parameter sources to skip.
+    """
+    log.trace(f"checking field: {field}")
+    if any(isinstance(model, model_class) for model_class in model_classes):
+        for model_class in model_classes:
+            if isinstance(model, model_class):
+                log.trace(f"model {field}: matches model_class {model_class.__name__}")
+                _set_model_parameters(app_state, model, field, skip_param_sources)
+
+
+def _set_model_parameters(
+    app_state: AppState,
+    model: BaseModel,
+    field: str,
+    skip_param_sources: List[click.core.ParameterSource],
+):
+    """
+    Set the parameters on the model.
+
+    Args:
+        app_state (AppState): The application state.
+        model (BaseModel): The model instance.
+        field (str): The field name.
+        skip_param_sources (List[click.core.ParameterSource]): List of parameter sources to skip.
+    """
+    ctx = click.get_current_context()
+    for k, v in app_state.loaded_config.worker_options.items():
+        if k in model.model_fields:
+            if ctx.get_parameter_source(k) in skip_param_sources:
+                log.trace(
+                    f"skipping {k} as it is set via {ctx.get_parameter_source(k)}"
+                )
+                continue
+            log.trace(f"Setting {k} to {v} on {field}")
+            try:
+                setattr(model, k, v)
+            except ValidationError as e:
+                handle_config_error(e)
+    # Also need to add all the worker_options to the loaded_config
+    for k in model.model_fields.keys():
+        # if k not in app_state.loaded_config.worker_options:
+        value = getattr(model, k)
+        log.trace(
+            f"setting {k}={value} to worker_options via {model.__class__.__name__}"
+        )
+        app_state.loaded_config.worker_options[k] = value
+
+
+def _process_template(config_file: str, config_vars: Dict[str, str]) -> str:
+    """
+    Process the Jinja2 template.
+    """
+    try:
+        template_reader = io.StringIO()
+        jinja_env = jinja2.Environment(
+            undefined=StrictUndefined,
+            loader=jinja2.FileSystemLoader(pathlib.Path(config_file).parents[0]),
+        )
+        template_config = jinja_env.get_template(pathlib.Path(config_file).name)
+        template_config.stream(**config_vars).dump(template_reader)
+    except jinja2.exceptions.UndefinedError as e:
+        log.safe_error(f"Jinja2 Environment\n{json.dumps(config_vars, indent=2)}")
+        log.error(f"configuration file contains invalid template substitutions: {e}")
+        click.get_current_context().exit(1)
+    except jinja2.exceptions.TemplateNotFound as e:
+        log.error(f"configuration file {config_file} not found: {e}")
+        click.get_current_context().exit(1)
+    except jinja2.exceptions.TemplateSyntaxError as e:
+        log.error("configuration file contains invalid template syntax")
+        log.error(f"File: {e.filename}; Line: {e.lineno}; Message: {e.message}")
+        click.get_current_context().exit(1)
+
+    return template_reader.getvalue()
+
+
+def _get_full_config_vars(config_vars: Dict[str, str]) -> Dict[str, Any]:
+    """
+    Get the full configuration variables.
+    """
+    original_config_vars = dict(config_vars)
+    config_vars["var"] = dict()
+    for k, v in original_config_vars.items():
+        config_vars["var"][k] = v
+    del original_config_vars
+
+    # add os.environ to config_vars
+    config_vars["env"] = dict()
+    for k, v in os.environ.items():
+        config_vars["env"][k] = v
+
+    return config_vars
diff --git a/tfworker/commands/env.py b/tfworker/commands/env.py
index 22389c7..fa7af65 100644
--- a/tfworker/commands/env.py
+++ b/tfworker/commands/env.py
@@ -1,21 +1,6 @@
-# Copyright 2023 Richard Maynard (richard.maynard@gmail.com)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import click -from tfworker.authenticators import AuthenticatorsCollection -from tfworker.commands.base import BaseCommand +from .base import BaseCommand class EnvCommand(BaseCommand): @@ -26,20 +11,7 @@ class EnvCommand(BaseCommand): executing commands against the rendered terraform definitions such as `terraform import` """ - def __init__(self, rootc, **kwargs): - # Initialize the base command - self._rootc = rootc - self._args_dict = dict(kwargs) - self._args_dict.update(self._rootc.args.__dict__) - - # parse the configuration - rootc.add_arg("deployment", "env") - rootc.load_config() - - # initialize any authenticators - self._authenticators = AuthenticatorsCollection(rootc.args, deployment=None) - def exec(self): - for auth in self._authenticators: + for auth in self.app_state.authenticators: for k, v in auth.env().items(): click.secho(f"export {k}={v}") diff --git a/tfworker/commands/root.py b/tfworker/commands/root.py index 7264d66..508fdc2 100644 --- a/tfworker/commands/root.py +++ b/tfworker/commands/root.py @@ -1,284 +1,101 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os -import pathlib import tempfile from pathlib import Path -from typing import Union +from typing import Any, Dict import click -import hcl2 -import jinja2 -import yaml -from jinja2.runtime import StrictUndefined - -from tfworker.types import CLIOptionsRoot - -class RootCommand: - def __init__(self, options: CLIOptionsRoot): - """ - Initialize the RootCommand with the given arguments. - - Args: - args (dict, optional): A dictionary of arguments to initialize the RootCommand with. Defaults to {}. - """ +import tfworker.util.log as log +from tfworker.cli_options import CLIOptionsRoot - # To avoid refactoring everything all at once, take items from CLIOptionsRoot and assign them to self - # This is a temporary measure to allow for a gradual transition to the new CLIOptionsRoot class - self.working_dir = options.working_dir - self.clean = options.clean - self.config_file = options.config_file - self.tf = None +from .config import load_config, resolve_model_with_cli_options - if self.working_dir is not None: - self.temp_dir = pathlib.Path(self.working_dir).resolve() - else: - self.temp_dir = tempfile.mkdtemp() - self.args = self.StateArgs() - self.add_args(options.dict()) - - def __del__(self): - """ - Cleanup the temporary directory after execution. 
- """ - - # Temporary for refactoring - if not hasattr(self, "clean"): - if hasattr(self, "temp_dir"): - print(f"self.temp_dir: {self.temp_dir} may be abandoned!!!") - return - - if self.clean: - # the affect of remove_top being true is removing the top level directory, for a temporary - # directory this is desirable however when a working-dir is specified it's likely a volume - # mount in a container, so empty the files if clean is desired but do not remove the top level - remove_top = True if self.working_dir is None else False +class RootCommand: + """ + The RootCommand class is the main entry point for the CLI. - try: - rm_tree(self.temp_dir, inner=remove_top) - except FileNotFoundError: - pass + It is only responsible for setting up the root/global options shared by + all sub-commands. + """ - def add_args(self, args): + def __init__(self) -> None: """ - Add a dictionary of args. + Initliaze the RootCommand object; this is the main entry point for the CLI. Args: - args (dict): A dictionary of arguments to add. - """ - for k, v in args.items(): - self.add_arg(k, v) - - def add_arg(self, k, v): + args (dict, optional): A dictionary of arguments to initialize the RootCommand with. Defaults to {}. """ - Add an argument to the state args. + log.trace("initializing root command object") + app_state = click.get_current_context().obj + options = app_state.root_options + app_state.working_dir = self._resolve_working_dir(options.working_dir) + log.debug(f"working directory: {app_state.working_dir}") + log.debug(f"loading config file: {options.config_file}") + app_state.loaded_config = load_config( + options.config_file, self._prepare_template_vars(options) + ) + log.safe_trace(f"loaded config: {app_state.loaded_config}") + # update the app_config with configuration from the command line + resolve_model_with_cli_options(app_state) + log.trace("finished initializing root command object") + + @staticmethod + def _resolve_working_dir(working_dir: str | None) -> Path: + """ + Resolve the working directory. Args: - k (str): The key of the argument. - v (any): The value of the argument. - """ - setattr(self.args, k, v) - return None - - def load_config(self): - """ - Load the configuration file. - """ - if not self.config_file: - return - - self._config_file_exists() - rendered_config = self._process_template() - - if self.config_file.endswith(".hcl"): - self.config = ordered_config_load_hcl(rendered_config) - else: - self.config = ordered_config_load(rendered_config) - - # Decorate the RootCommand with the config values - self.tf = self.config.get("terraform", dict()) - self._pullup_keys() - self._merge_args() - - def _config_file_exists(self): - """ - Check if the configuration file exists. - """ - if not os.path.exists(self.config_file): - click.secho( - f"configuration file does not exist: {self.config_file}", fg="red" - ) - raise SystemExit(1) - - def _process_template(self) -> str: - """ - Process the Jinja2 template. 
- """ - try: - template_reader = io.StringIO() - jinja_env = jinja2.Environment( - undefined=StrictUndefined, - loader=jinja2.FileSystemLoader( - pathlib.Path(self.config_file).parents[0] - ), - ) - template_config = jinja_env.get_template( - pathlib.Path(self.config_file).name - ) - template_config.stream( - **self.args.template_items(return_as_dict=True, get_env=True) - ).dump(template_reader) - except jinja2.exceptions.UndefinedError as e: - click.secho( - f"configuration file contains invalid template substitutions: {e}", - fg="red", - ) - raise SystemExit(1) + working_dir (str): The working directory. - return template_reader.getvalue() - - def _pullup_keys(self): - """ - A utility function to place keys from the loaded config file directly on the RootCommand instance. - """ - for k in [ - "definitions", - "providers", - "handlers", - "remote_vars", - "template_vars", - "terraform_vars", - "worker_options", - ]: - if self.tf: - setattr(self, f"{k}_odict", self.tf.get(k, dict())) - else: - setattr(self, f"{k}_odict", None) - - def _merge_args(self): - """ - Merge the worker options from the config file with the command line arguments. + Returns: + pathlib.Path: The resolved working directory. """ - for k, v in self.worker_options_odict.items(): - self.add_arg(k, v) + if working_dir is None: + log.trace("working directory not provided, using temporary directory") + return Path(tempfile.TemporaryDirectory().name) + log.trace(f"working directory provided: {working_dir}") + return Path(working_dir).resolve() - class StateArgs: + @staticmethod + def _prepare_template_vars(options: CLIOptionsRoot) -> Dict[str, Any]: """ - A class to hold arguments in the state for easier access. - """ - - def __iter__(self): - return iter(self.__dict__) - - def __getitem__(self, name): - return self.__dict__[name] - - def __repr__(self): - return str(self.__dict__) - - def keys(self): - return self.__dict__.keys() - - def items(self): - return self.__dict__.items() - - def values(self): - return self.__dict__.values() - - def template_items(self, return_as_dict=False, get_env=False): - rvals = {} - for k, v in self.__dict__.items(): - if k == "config_var": - try: - rvals["var"] = get_config_var_dict(v) - except ValueError as e: - click.secho( - f'Invalid config-var specified: "{e}" must be in format key=value', - fg="red", - ) - raise SystemExit(1) - else: - rvals[k] = v - if get_env is True: - rvals["env"] = dict() - for k, v in os.environ.items(): - rvals["env"][k] = v - if return_as_dict: - return rvals - return rvals.items() - - -def get_config_var_dict(config_vars): - """ - Returns a dictionary of of key=value for each item provided as a command line substitution. - - Args: - config_vars (list): A list of command line substitutions. + Prepare the template variables. - Returns: - dict: A dictionary of key=value pairs. - """ - return_vars = dict() - for cv in config_vars: - try: - k, v = tuple(cv.split("=")) - return_vars[k] = v - except ValueError: - raise ValueError(cv) - return return_vars - - -def ordered_config_load_hcl(config: str) -> dict: - """ - Load an hcl config, and replace templated items. 
- """ - return hcl2.loads(config) - - -def ordered_config_load(config: str) -> dict: - """ - since python 3.7 the yaml loader is deterministic, so we can - use the standard yaml loader - """ - try: - return yaml.load(config, Loader=yaml.FullLoader) - except yaml.YAMLError as e: - click.secho(f"error loading yaml/json: {e}", fg="red") - click.secho("the configuration that caused the error was\n:", fg="red") - for i, line in enumerate(config.split("\n")): - click.secho(f"{i + 1}: {line}", fg="red") - raise SystemExit(1) - - -def rm_tree(base_path: Union[str, Path], inner: bool = False) -> None: - """ - Recursively removes all files and directories. - - Args: - base_path (Union[str, Path]): The base path to start removing files and directories from. - inner (bool, optional): Controls recrusion, if True only the inner files and directories are removed. Defaults to False. - """ - parent: Path = Path(base_path) - - for child in parent.glob("*"): - if child.is_file() or child.is_symlink(): - child.unlink() - else: - rm_tree(child, inner=True) - if inner: - parent.rmdir() + Args: + options (CLIOptionsRoot): The root options. + + Returns: + Dict[str, Any]: The template variables. + """ + template_items = {} + log.trace("preparing template items") + for k, v in options.model_dump().items(): + + if v is None: + log.trace(f"skipping {k} as it is None") + continue + + if isinstance(v, str): + log.trace(f"adding {k}={v}") + template_items[k] = v + continue + + if isinstance(v, list): + log.trace(f"attempting to add list of strings {k}={v}") + for i in v: + if isinstance(i, str): + subs = i.split("=") + if len(subs) == 2: + log.trace(f"adding list item {subs[0]}={subs[1]}") + template_items[subs[0]] = subs[1] + else: + log.trace( + f"skipping invalid list item {i}; not valid k=v pair" + ) + continue + else: + log.trace(f"skipping {i} as it is not a string") + + log.trace(f"skipping {k} as it is not a string or list of strings") + log.trace(f"template_items: {template_items}") + return template_items diff --git a/tfworker/commands/terraform.py b/tfworker/commands/terraform.py index 44362b6..2ed33a8 100644 --- a/tfworker/commands/terraform.py +++ b/tfworker/commands/terraform.py @@ -1,581 +1,483 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
import os -import pathlib - -import click +from typing import TYPE_CHECKING, Dict, Union import tfworker.util.hooks as hooks +import tfworker.util.log as log import tfworker.util.terraform as tf_util from tfworker.commands.base import BaseCommand from tfworker.definitions import Definition -from tfworker.exceptions import HookError, PlanChange, TerraformError -from tfworker.handlers.exceptions import HandlerError -from tfworker.util.system import pipe_exec, strip_ansi +from tfworker.exceptions import HandlerError, HookError, TFWorkerException +from tfworker.types.terraform import TerraformAction, TerraformStage +from tfworker.util.system import pipe_exec + +if TYPE_CHECKING: + from tfworker.app_state import AppState class TerraformCommand(BaseCommand): - def __init__(self, rootc, **kwargs): - super(TerraformCommand, self).__init__(rootc, **kwargs) - self._destroy = self._resolve_arg("destroy") - self._tf_apply = self._resolve_arg("tf_apply") - self._tf_plan = self._resolve_arg("tf_plan") - self._plan_file_path = self._resolve_arg("plan_file_path") - - if self._tf_apply and self._destroy: - click.secho("can not apply and destroy at the same time", fg="red") - raise SystemExit(1) - - if self._backend_plans and not self._plan_file_path: - # create a plan file path in the tmp dir - self._plan_file_path = f"{self._temp_dir}/plans" - pathlib.Path(self._plan_file_path).mkdir(parents=True, exist_ok=True) - - self._b64_encode = self._resolve_arg("b64_encode") - self._deployment = kwargs["deployment"] - self._force = self._resolve_arg("force") - self._show_output = self._resolve_arg("show_output") - # streaming doesn't allow for distinction between stderr and stdout, but allows - # terraform operations to be viewed before the process is completed - self._stream_output = self._resolve_arg("stream_output") - self._use_colors = True if self._resolve_arg("color") else False - self._terraform_modules_dir = self._resolve_arg("terraform_modules_dir") - self._terraform_output = dict() - - ############## - # Properties # - ############## - @property - def plan_for(self): - """plan_for will either be apply or destroy, indicating what action is being planned for""" - return self._plan_for + """ + The TerraformCommand class is called by the top level CLI + as part of the `terraform` sub-command. It inherits from + BaseCommand which sets up the application state. - @property - def tf_version_major(self): - return self._tf_version_major + This class may contain various methods that are used to + orchestrate the terraform workflow. The methods in this + class should be limited to providing error handling and + orchestration of the terraform workflow. 
- ################## - # Public methods # - ################## - def exec(self) -> None: - """exec handles running the terraform chain, it the primary method called by the CLI + If you are tempted to override the `__init__` method, + reconsider the strategy for what you're about to add + """ - Returns: - None + @property + def terraform_config(self): + if hasattr(self, "_terraform_config"): + return self._terraform_config + else: + self._terraform_config = TerraformCommandConfig(self._app_state) + return self._terraform_config + + def prep_providers(self) -> None: """ - # generate an iterator for the specified definitions (or all if no limit is specified) + Prepare / Mirror the providers + """ + if self.app_state.terraform_options.provider_cache is None: + log.debug("no provider cache specified; using temporary cache") + local_cache = self.app_state.working_dir / "terraform-plugins" + local_cache.mkdir(exist_ok=True) + self.app_state.terraform_options.provider_cache = str(local_cache) + + log.trace( + f"using provider cache path: {self.app_state.terraform_options.provider_cache}" + ) try: - # convert the definitions iterator to a list to allow reusing the iterator - def_iter = list(self.definitions.limited()) - except ValueError as e: - click.secho(f"Error with supplied limit: {e}", fg="red") - raise SystemExit(1) - - if self._provider_cache is not None: tf_util.mirror_providers( - self._providers, - self._terraform_bin, - self._temp_dir, - self._provider_cache, + self.app_state.providers, + self.app_state.terraform_options.terraform_bin, + self.app_state.root_options.working_dir, + self.app_state.terraform_options.provider_cache, ) + except TFWorkerException as e: + log.error(f"error mirroring providers: {e}") + self.ctx.exit(1) - # prepare the modules, they are required if the modules dir is specified, otherwise they are optional - tf_util.prep_modules( - self._terraform_modules_dir, - self._temp_dir, - required=(self._terraform_modules_dir != ""), - ) + def terraform_init(self) -> None: + from tfworker.definitions.prepare import DefinitionPrepare - # prepare the definitions and run terraform init - self._prep_and_init(def_iter) + def_prep = DefinitionPrepare(self.app_state) - for definition in def_iter: - # Execute plan if needed - changes = ( - self._exec_plan(definition) if self._check_plan(definition) else None + for name in self.app_state.definitions.keys(): + log.info(f"initializing definition: {name}") + def_prep.copy_files(name=name) + try: + def_prep.render_templates(name=name) + def_prep.create_local_vars(name=name) + def_prep.create_terraform_vars(name=name) + def_prep.create_worker_tf(name=name) + def_prep.download_modules( + name=name, stream_output=self.terraform_config.stream_output + ) + def_prep.create_terraform_lockfile(name=name) + except TFWorkerException as e: + log.error(f"error rendering templates for definition {name}: {e}") + self.ctx.exit(1) + + self._exec_terraform_action(name=name, action=TerraformAction.INIT) + + def terraform_plan(self) -> None: + if not self.app_state.terraform_options.plan: + log.debug("--no-plan option specified; skipping plan") + return + + from tfworker.definitions.plan import DefinitionPlan + + needed: bool + reason: str + def_plan: DefinitionPlan = DefinitionPlan(self.ctx, self.app_state) + + for name in self.app_state.definitions.keys(): + log.info(f"running pre-plan for definition: {name}") + def_plan.set_plan_file(self.app_state.definitions[name]) + self._exec_terraform_pre_plan(name=name) + needed, reason = 
def_plan.needs_plan(self.app_state.definitions[name]) + + if not needed: + if "plan file exists" in reason: + self.app_state.definitions[name].needs_apply = True + log.info(f"definition {name} does not need a plan: {reason}") + continue + + log.info(f"definition {name} needs a plan: {reason}") + self._exec_terraform_plan(name=name) + + def terraform_apply_or_destroy(self) -> None: + if self.app_state.terraform_options.destroy: + action: TerraformAction = TerraformAction.DESTROY + elif self.app_state.terraform_options.apply: + action: TerraformAction = TerraformAction.APPLY + else: + log.debug("neither apply nor destroy specified; skipping") + return + + for name in self.app_state.definitions.keys(): + if action == TerraformAction.DESTROY: + if self.app_state.terraform_options.limit: + if name not in self.app_state.terraform_options.limit: + log.info(f"skipping destroy for definition: {name}") + continue + log.trace( + f"running {action} for definition: {name} if needs_apply is True, value is: {self.app_state.definitions[name].needs_apply}" + ) + if self.app_state.definitions[name].needs_apply: + log.info(f"running apply for definition: {name}") + self._exec_terraform_action(name=name, action=action) + + def _exec_terraform_action(self, name: str, action: TerraformAction) -> None: + """ + Execute terraform action + """ + if action == TerraformAction.PLAN: + raise TFWorkerException( + "use _exec_terraform_pre_plan & _exec_terraform_plan method to run plan" ) - # execute apply or destroy if needed - if self._check_apply_or_destroy(changes, definition): - self._exec_apply_or_destroy(definition) + definition: Definition = self.app_state.definitions[name] - ################### - # Private methods # - ################### + try: + log.trace( + f"executing {TerraformStage.PRE} {action.value} handlers for definition {name}" + ) + self._app_state.handlers.exec_handlers( + action=action, + stage=TerraformStage.PRE, + deployment=self.app_state.deployment, + definition=definition, + working_dir=self.app_state.working_dir, + ) + except HandlerError as e: + log.error(f"handler error on definition {name}: {e}") + self.ctx.exit(2) - ########################################### - # Methods for dealing with terraform init # - ########################################### - def _prep_and_init(self, def_iter: list[Definition]) -> None: - """Prepares the definition and runs terraform init + log.trace( + f"executing {TerraformStage.PRE} {action.value} hooks for definition {name}" + ) + self._exec_hook( + definition, + action, + TerraformStage.PRE, + ) - Args: - def_iter: an iterator of definitions to prepare + log.trace(f"running terraform {action.value} for definition {name}") + result = self._run(name, action) + if result.exit_code: + log.error(f"error running terraform {action.value} for {name}") + self.ctx.exit(1) - Returns: - None - """ - for definition in def_iter: - click.secho(f"preparing definition: {definition.tag}", fg="green") - definition.prep(self._backend) + try: + log.trace( + f"executing {TerraformStage.POST.value} {action.value} handlers for definition {name}" + ) + self._app_state.handlers.exec_handlers( + action=action, + stage=TerraformStage.POST, + deployment=self.app_state.deployment, + definition=definition, + working_dir=self.app_state.working_dir, + result=result, + ) + except HandlerError as e: + log.error(f"handler error on definition {name}: {e}") + self.ctx.exit(2) - try: - self._run(definition, "init", debug=self._show_output) - except TerraformError: - click.secho("error running 
terraform init", fg="red") - raise SystemExit(1) - - ########################################### - # Methods for dealing with terraform plan # - ########################################### - def _check_plan(self, definition: Definition) -> bool: + log.trace( + f"executing {TerraformStage.POST.value} {action.value} hooks for definition {name}" + ) + self._exec_hook( + definition, + action, + TerraformStage.POST, + result, + ) + + def _exec_terraform_pre_plan(self, name: str) -> None: """ - Determines if a plan is needed for the provided definition + Execute terraform pre plan with hooks and handlers for the given definition + """ + definition: Definition = self.app_state.definitions[name] - Args: - definition: the definition to check for a plan + log.trace(f"executing pre plan handlers for definition {name}") + try: + self._app_state.handlers.exec_handlers( + action=TerraformAction.PLAN, + stage=TerraformStage.PRE, + deployment=self.app_state.deployment, + definition=definition, + working_dir=self.app_state.working_dir, + ) + except HandlerError as e: + log.error(f"handler error on definition {name}: {e}") + self.ctx.exit(2) + + log.trace(f"executing pre plan hooks for definition {name}") + self._exec_hook( + self._app_state.definitions[name], + TerraformAction.PLAN, + TerraformStage.PRE, + ) - Returns: - bool: True if a plan is needed, False otherwise + def _exec_terraform_plan(self, name: str) -> None: + """ + Execute terraform plan with hooks and handlers for the given definition """ - if not self._plan_file_path: - return self._handle_no_plan_path(definition) + definition: Definition = self.app_state.definitions[name] - plan_file = self._prepare_plan_file(definition) - self._validate_plan_path(plan_file.parent) - self._run_handlers(definition, "plan", "check", plan_file=plan_file) + log.trace(f"running terraform plan for definition {name}") + result = self._run(name, TerraformAction.PLAN) - return self._should_plan(definition, plan_file) + if result.exit_code == 0: + log.debug(f"no changes for definition {name}") + definition.needs_apply = False - def _handle_no_plan_path(self, definition: Definition) -> bool: - """Handles the case where no plan path is specified, saved plans are not possible + if result.exit_code == 1: + log.error(f"error running terraform plan for {name}") + self.ctx.exit(1) - Args: - definition: the definition to check for a plan + if result.exit_code == 2: + log.debug(f"terraform plan for {name} indicates changes") + definition.needs_apply = True + + try: + log.trace(f"executing post plan handlers for definition {name}") + self._app_state.handlers.exec_handlers( + action=TerraformAction.PLAN, + stage=TerraformStage.POST, + deployment=self.app_state.deployment, + definition=definition, + working_dir=self.app_state.working_dir, + result=result, + ) + except HandlerError as e: + log.error(f"handler error on definition {name}: {e}") + self.ctx.exit(2) + + log.trace(f"executing post plan hooks for definition {name}") + self._exec_hook( + self._app_state.definitions[name], + TerraformAction.PLAN, + TerraformStage.POST, + result, + ) - Returns: - bool: True if a plan is needed, False otherwise + def _run( + self, + definition_name: str, + action: TerraformAction, + ) -> "TerraformResult": """ + run terraform + """ + log.debug( + f"handling terraform command: {action} for definition {definition_name}" + ) + definition: Definition = self.app_state.definitions[definition_name] + params: dict = self.terraform_config.get_params( + action, plan_file=definition.plan_file + ) - if 
not self._tf_plan: - definition._ready_to_apply = True - return False - definition._ready_to_apply = False - return True + working_dir: str = definition.get_target_path( + self.app_state.root_options.working_dir + ) - def _prepare_plan_file(self, definition: Definition) -> pathlib.Path: - """Prepares the plan file for the definition + log.debug( + f"cmd: {self.app_state.terraform_options.terraform_bin} {action} {params}" + ) - Args: - definition: the definition to prepare the plan file for + result: TerraformResult = TerraformResult( + *pipe_exec( + f"{self.app_state.terraform_options.terraform_bin} {action} {params}", + cwd=working_dir, + env=self.terraform_config.env, + stream_output=self.terraform_config.stream_output, + ) + ) - Returns: - pathlib.Path: the path to the plan file - """ - plan_path = pathlib.Path(self._plan_file_path).resolve() - plan_file = plan_path / f"{self._deployment}_{definition.tag}.tfplan" - definition.plan_file = plan_file - click.secho(f"using plan file:{plan_file}", fg="yellow") - return plan_file + log.debug(f"exit code: {result.exit_code}") + return result - def _validate_plan_path(self, plan_path: pathlib.Path) -> None: - """Validates the plan path + def _exec_hook( + self, + definition: Definition, + action: TerraformAction, + stage: TerraformStage, + result: Union["TerraformResult", None] = None, + ) -> None: + """ + Find and execute the appropriate hooks for a supplied definition Args: - plan_path: the path to the plan file - - Returns: - None + definition (Definition): the definition to execute the hooks for + action (TerraformAction): the action to execute the hooks for + stage (TerraformStage): the stage to execute the hooks for + result (TerraformResult): the result of the terraform command """ - if not (plan_path.exists() and plan_path.is_dir()): - click.secho( - f'plan path "{plan_path}" is not suitable, it is not an existing directory' - ) - raise SystemExit(1) + hook_dir = definition.get_target_path(self.app_state.working_dir) - def _exec_plan(self, definition) -> bool: - """_exec_plan executes a terraform plan, returns true if a plan has changes""" - changes = False - - # call handlers for pre plan try: - self._execute_handlers( - action="plan", - stage="pre", - deployment=self._deployment, - definition=definition.tag, - definition_path=definition.fs_path, - ) - except HandlerError as e: - if e.terminate: - click.secho(f"terminating due to fatal handler error {e}", fg="red") - raise SystemExit(1) - click.secho(f"handler error: {e}", fg="red") - - click.secho( - f"planning definition for {self._plan_for}: {definition.tag}", - fg="green", - ) + if not hooks.check_hooks(stage, hook_dir, action): + log.trace( + f"no {stage}-{action} hooks found for definition {definition.name}" + ) + return - try: - self._run( - definition, - "plan", - debug=self._show_output, - plan_action=self._plan_for, - plan_file=str(definition.plan_file), + log.info( + f"executing {stage}-{action} hooks for definition {definition.name}" ) - except PlanChange: - # on destroy, terraform ALWAYS indicates a plan change - click.secho(f"plan changes for {self._plan_for} {definition.tag}", fg="red") - definition._ready_to_apply = True - changes = True - except TerraformError: - click.secho( - f"error planning terraform definition: {definition.tag}!", - fg="red", + hooks.hook_exec( + stage, + action, + hook_dir, + self.terraform_config.env, + self.terraform_config.terraform_bin, + b64_encode=self.terraform_config.b64_encode, + debug=self.terraform_config.debug, + 
extra_vars=definition.get_template_vars( + self.app_state.loaded_config.global_vars.template_vars + ), ) - raise SystemExit(2) + except HookError as e: + log.error(f"hook execution error on definition {definition.name}: \n{e}") + self.ctx.exit(2) - try: - self._execute_handlers( - action="plan", - stage="post", - deployment=self._deployment, - definition=definition.tag, - definition_path=definition.fs_path, - text=strip_ansi(self._terraform_output["stdout"].decode()), - planfile=definition.plan_file, - changes=changes, - ) - except HandlerError as e: - click.secho(f"{e}", fg="red") - if e.terminate: - click.secho("error is fatal, terminating", fg="red") - raise SystemExit(1) - - if not changes: - click.secho(f"no plan changes for {definition.tag}", fg="yellow") - - return changes - - def _should_plan(self, definition: Definition, plan_file: pathlib.Path) -> bool: - if not self._tf_plan: - definition._ready_to_apply = True - return False - - if plan_file.exists(): - if plan_file.stat().st_size == 0: - click.secho( - f"exiting plan file {plan_file} exists but is empty; planning again", - fg="green", - ) - definition._ready_to_apply = False - return True - click.secho( - f"existing plan file {plan_file} is suitable for apply; not planning again; remove plan file to allow planning", - fg="green", - ) - definition._ready_to_apply = True - return False - - definition._ready_to_apply = False - return True - - #################################################### - # Methods for dealing with terraform apply/destroy # - #################################################### - def _check_apply_or_destroy(self, changes, definition) -> bool: - """_check_apply_or_destroy determines if a terraform execution is needed""" - # never apply if --no-apply is used - if self._tf_apply is not True: - return False - - # if not changes and not force, skip apply - if not (changes or definition._ready_to_apply) and not self._force: - click.secho("no changes, skipping terraform apply", fg="yellow") - return False - - # if the definition plan file exists, and is not empty then apply - if self._plan_file_path is not None: - if not definition.plan_file.exists(): - click.secho( - f"plan file {definition.plan_file} does not exist, can't apply", - fg="red", - ) - return False - # if --force is specified, always apply - if self._force: - click.secho( - f"--force specified, proceeding with apply for {definition.tag} anyway", - ) - return True +class TerraformResult: + """ + Hold the results of a terraform run + """ - # All of the false conditions have been returned - return True + def __init__(self, exit_code: int, stdout: bytes, stderr: bytes): + self.exit_code = exit_code + self.stdout = stdout + self.stderr = stderr - def _exec_apply_or_destroy(self, definition) -> None: - """_exec_apply_or_destroy executes a terraform apply or destroy""" - # call handlers for pre apply - try: - self._execute_handlers( - action=self._plan_for, - stage="pre", - deployment=self._deployment, - definition=definition.tag, - definition_path=definition.fs_path, - planfile=definition.plan_file, - ) - except HandlerError as e: - if e.terminate: - click.secho(f"terminating due to fatal handler error {e}", fg="red") - raise SystemExit(1) - click.secho(f"handler error: {e}", fg="red") + @property + def stdout_str(self) -> str: + return self.stdout.decode() - # execute terraform apply or destroy - tf_error = False - try: - self._run( - definition, - self._plan_for, - debug=self._show_output, - plan_file=definition.plan_file, - ) - except TerraformError: 
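The new flow is driven by the `TerraformAction` and `TerraformStage` types imported from `tfworker.types.terraform`, which this diff references but does not show. A minimal sketch of what those enums plausibly look like, inferred only from how they are used above (string values that interpolate into the command line, plus `.value` access), is:

```python
# Illustrative only: tfworker.types.terraform is not part of this diff; this sketch
# assumes str-valued enums, which matches usages such as
# f"{terraform_bin} {action} {params}" and TerraformStage.POST.value above.
from enum import Enum


class TerraformAction(str, Enum):
    INIT = "init"
    PLAN = "plan"
    APPLY = "apply"
    DESTROY = "destroy"

    def __str__(self) -> str:
        return self.value


class TerraformStage(str, Enum):
    PRE = "pre"
    POST = "post"

    def __str__(self) -> str:
        return self.value
```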
-            tf_error = True
+    @property
+    def stderr_str(self) -> str:
+        return self.stderr.decode()
-        # remove the plan file if it exists
-        if definition.plan_file is not None and definition.plan_file.exists():
-            definition.plan_file.unlink()
+    def log_stdout(self, action: TerraformAction) -> None:
+        log_method = TerraformCommandConfig.get_config().get_log_method(action)
+        for line in self.stdout.decode().splitlines():
+            log_method(f"stdout: {line}")
-        # call handlers for post apply/destroy
-        try:
-            self._execute_handlers(
-                action=self._plan_for,
-                stage="post",
-                deployment=self._deployment,
-                definition=definition.tag,
-                definition_path=definition.fs_path,
-                planfile=definition.plan_file,
-                error=tf_error,
-            )
-        except HandlerError as e:
-            if e.terminate:
-                click.secho(f"terminating due to fatal handler error {e}", fg="red")
-                raise SystemExit(1)
-            click.secho(f"handler error: {e}", fg="red")
-
-        if tf_error is True:
-            click.secho(
-                f"error executing terraform {self._plan_for} for {definition.tag}",
-                fg="red",
-            )
-            raise SystemExit(2)
-        else:
-            click.secho(
-                f"terraform {self._plan_for} complete for {definition.tag}",
-                fg="green",
-            )
+    def log_stderr(self, action: TerraformAction) -> None:
+        log_method = TerraformCommandConfig.get_config().get_log_method(action)
+        for line in self.stderr.decode().splitlines():
+            log_method(f"stderr: {line}")
-    #####################################
-    # Methods for dealing with handlers #
-    #####################################
-    def _run_handlers(
-        self, definition, action, stage, plan_file=None, **kwargs
-    ) -> None:
-        """Runs the handlers for the given action and stage
+    def log_file(self, filename: str) -> None:
+        with open(filename, "w+") as f:
+            f.write(self.stdout.decode())
+            f.write(self.stderr.decode())
-        Args:
-            definition: the definition to run the handlers for
-            action: the action to run the handlers for
-            stage: the stage to run the handlers for
-            plan_file: the plan file to pass to the handlers
-            kwargs: additional keyword arguments to pass to the handlers
-
-        Returns:
-            None
-        """
-        try:
-            self._execute_handlers(
-                action=action,
-                stage=stage,
-                deployment=self._deployment,
-                definition=definition.tag,
-                definition_path=definition.fs_path,
-                planfile=plan_file,
-                **kwargs,
-            )
-        except HandlerError as e:
-            if e.terminate:
-                click.secho(f"terminating due to fatal handler error {e}", fg="red")
-                raise SystemExit(1)
-            click.secho(f"handler error: {e}", fg="red")
-
-    ########################################
-    # Common methods for running terraform #
-    ########################################
-    def _run(
-        self, definition, command, debug=False, plan_action="init", plan_file=None
-    ):
-        """Run terraform."""
+    def has_changes(self) -> bool:
+        return self.exit_code == 2
-        if self._provider_cache is None:
-            plugin_dir = f"{self._temp_dir}/terraform-plugins"
-        else:
-            plugin_dir = self._provider_cache
-
-        color_str = "-no-color" if self._use_colors is False else ""
-        params = {
-            "init": f"-input=false {color_str} -plugin-dir={plugin_dir}",
-            # -lockfile=readonly is ideal, but many of our modules are not
-            # only partially defining the required providers; they need to specify all
-            # required providers, or none, and let the worker generate the requirements
-            # based on the deployment_config.yaml.j2
-            # "init": f"-input=false {color_str} -plugin-dir={plugin_dir} -lockfile=readonly",
-            "plan": f"-input=false -detailed-exitcode {color_str}",
-            "apply": f"-input=false {color_str} -auto-approve",
-            "destroy": f"-input=false {color_str} -auto-approve",
-        }
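Because `plan` always runs with `-detailed-exitcode`, the exit code itself carries the result: 0 means no changes, 1 means an error, and 2 means changes are present. A usage sketch showing how the new `TerraformResult` wraps the `(exit_code, stdout, stderr)` tuple returned by `pipe_exec`, as `_run` does above (the command string and working directory below are hypothetical):

```python
from tfworker.util.system import pipe_exec

# wrap the raw pipe_exec tuple, exactly as _run does above
result = TerraformResult(
    *pipe_exec(
        "terraform plan -input=false -detailed-exitcode -out example.tfplan",
        cwd="/tmp/work/definitions/example",  # hypothetical working directory
        stream_output=False,
    )
)

# -detailed-exitcode semantics: 0 = no changes, 1 = error, 2 = changes present
if result.exit_code == 1:
    print(result.stderr_str)  # the real code logs this and exits via ctx.exit(1)
needs_apply = result.has_changes()  # True only when exit_code == 2
```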
- - if plan_action == "destroy": - params["plan"] += " -destroy" - - if plan_file is not None: - params["plan"] += f" -out {plan_file}" - params["apply"] += f" {plan_file}" - env = os.environ.copy() +class TerraformCommandConfig: + """ + A class to hold parameters for terraform commands + + this class is meant to be a singleton + """ + + _instance = None + + def __new__(cls, app_state: "AppState"): + if cls._instance is None: + cls._instance = super(TerraformCommandConfig, cls).__new__(cls) + cls._instance._app_state = app_state + return cls._instance + + def __init__(self, app_state: "AppState"): + self._app_state = app_state + self._env = None + + @classmethod + def get_config(cls) -> "TerraformCommandConfig": + return cls._instance + @property + def stream_output(self): + return self._app_state.terraform_options.stream_output + + @property + def terraform_bin(self): + return self._app_state.terraform_options.terraform_bin + + @property + def env(self): + if self._env is None: + self._env = self._get_env() + return self._env + + @property + def b64_encode(self): + return self._app_state.terraform_options.b64_encode + + @property + def debug(self): + if ( + log.LogLevel[self._app_state.root_options.log_level].value + <= log.LogLevel.DEBUG.value + ): + return True + return False + + @property + def action(self): + if self._app_state.terraform_options.destroy: + return TerraformAction.DESTROY + return TerraformAction.APPLY + + @property + def strict_locking(self): + return self._app_state.terraform_options.strict_locking + + @staticmethod + def get_log_method(command: str) -> callable: + return { + "init": log.debug, + "plan": log.info, + "apply": log.info, + "destroy": log.info, + }[command] + + def get_params(self, command: TerraformAction, plan_file: str) -> str: + """Return the parameters for a given command""" + color_str = ( + "-no-color" if self._app_state.terraform_options.color is False else "" + ) + + plan_action = " -destroy" if self.action == TerraformAction.DESTROY else "" + read_only = "-lockfile=readonly" if self.strict_locking else "" + + return { + TerraformAction.INIT: f"-input=false {color_str} {read_only} -plugin-dir={self._app_state.terraform_options.provider_cache}", + TerraformAction.PLAN: f"-input=false {color_str} {plan_action} -detailed-exitcode -out {plan_file}", + TerraformAction.APPLY: f"-input=false {color_str} -auto-approve {plan_file}", + TerraformAction.DESTROY: f"-input=false {color_str} -auto-approve", + }[command] + + def _get_env(self) -> Dict[str, str]: + env = os.environ.copy() # acknowledge that we are using a plugin cache; and compute the lockfile each run env["TF_PLUGIN_CACHE_MAY_BREAK_DEPENDENCY_LOCK_FILE"] = "1" # reduce non essential terraform output env["TF_IN_AUTOMATION"] = "1" - for auth in self._authenticators: + for auth in self._app_state.authenticators: env.update(auth.env()) - - working_dir = f"{self._temp_dir}/definitions/{definition.tag}" - command_params = params.get(command) - if not command_params: - raise ValueError( - f"invalid command passed to terraform, {command} has no defined params!" 
- ) - - # only execute hooks for plan/apply/destroy - try: - if hooks.check_hooks("pre", working_dir, command) and command in [ - "apply", - "destroy", - "plan", - ]: - # pre exec hooks - # want to pass remotes - # want to pass tf_vars - click.secho( - f"found pre-{command} hook script for definition {definition.tag}," - " executing ", - fg="yellow", - ) - hooks.hook_exec( - "pre", - command, - working_dir, - env, - self._terraform_bin, - debug=debug, - b64_encode=self._b64_encode, - extra_vars=definition.template_vars, - ) - except HookError as e: - click.secho( - f"hook execution error on definition {definition.tag}: {e}", - fg="red", - ) - raise SystemExit(2) - - click.secho( - f"cmd: {self._terraform_bin} {command} {command_params}", fg="yellow" - ) - (exit_code, stdout, stderr) = pipe_exec( - f"{self._terraform_bin} {command} {command_params}", - cwd=working_dir, - env=env, - stream_output=self._stream_output, - ) - click.secho(f"exit code: {exit_code}", fg="blue") - ( - self._terraform_output["exit_code"], - self._terraform_output["stdout"], - self._terraform_output["stderr"], - ) = (exit_code, stdout, stderr) - - if debug and not self._stream_output: - for line in stdout.decode().splitlines(): - click.secho(f"stdout: {line}", fg="blue") - for line in stderr.decode().splitlines(): - click.secho(f"stderr: {line}", fg="red") - - # If a plan file was saved, write the plan output - if plan_file is not None: - plan_log = f"{os.path.splitext(plan_file)[0]}.log" - - with open(plan_log, "w") as pl: - pl.write("STDOUT:\n") - for line in stdout.decode().splitlines(): - pl.write(f"{line}\n") - pl.write("\nSTDERR:\n") - for line in stderr.decode().splitlines(): - pl.write(f"{line}\n") - - # special handling of the exit codes for "plan" operations - if command == "plan": - if exit_code == 0: - return True - if exit_code == 1: - raise TerraformError - if exit_code == 2: - raise PlanChange - - if exit_code: - raise TerraformError - - # only execute hooks for plan/destroy - try: - if hooks.check_hooks("post", working_dir, command) and command in [ - "apply", - "destroy", - "plan", - ]: - click.secho( - f"found post-{command} hook script for definition {definition.tag}," - " executing ", - fg="yellow", - ) - hooks.hook_exec( - "post", - command, - working_dir, - env, - self._terraform_bin, - debug=debug, - b64_encode=self._b64_encode, - extra_vars=definition.template_vars, - ) - except HookError as e: - click.secho( - f"hook execution error on definition {definition.tag}: {e}", fg="red" - ) - raise SystemExit(2) - return True + return env diff --git a/tfworker/commands/version.py b/tfworker/commands/version.py deleted file mode 100644 index e8c1c52..0000000 --- a/tfworker/commands/version.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2021 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
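For reference, the flag strings assembled by the new `TerraformCommandConfig.get_params` above reduce to the following for a run with color enabled and strict locking disabled; the plan file name is hypothetical and spacing is approximate:

```python
config = TerraformCommandConfig.get_config()  # singleton built during command start-up

config.get_params(TerraformAction.INIT, plan_file=None)
# -> "-input=false  -plugin-dir=<provider_cache>"
config.get_params(TerraformAction.PLAN, plan_file="example.tfplan")
# -> "-input=false  -detailed-exitcode -out example.tfplan"
# (when --destroy is set, the PLAN string additionally carries " -destroy")
config.get_params(TerraformAction.APPLY, plan_file="example.tfplan")
# -> "-input=false  -auto-approve example.tfplan"
config.get_params(TerraformAction.DESTROY, plan_file="example.tfplan")
# -> "-input=false  -auto-approve"
```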
-from tfworker.commands.base import BaseCommand -from tfworker.util.system import get_version - - -class VersionCommand(BaseCommand): - def __init__(self): - self._version = get_version() - - def exec(self): - print(f"terraform-worker version {self._version}") diff --git a/tfworker/constants.py b/tfworker/constants.py index 10c4535..69b44b5 100644 --- a/tfworker/constants.py +++ b/tfworker/constants.py @@ -1,25 +1,10 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - import os _CWD = os.getcwd() - DEFAULT_BACKEND_PREFIX = "terraform/state/{deployment}" -DEFAULT_CONFIG = f"{_CWD}/worker.yaml" DEFAULT_REPOSITORY_PATH = _CWD +DEFAULT_CONFIG = f"{DEFAULT_REPOSITORY_PATH}/worker.yaml" DEFAULT_AWS_REGION = "us-east-1" DEFAULT_GCP_REGION = "us-east-1a" @@ -27,6 +12,9 @@ TF_PROVIDER_DEFAULT_NAMESPACE = "hashicorp" TF_PROVIDER_DEFAULT_LOCKFILE = ".terraform.lock.hcl" +# Items to refact from CLI / Logging output +REDACTED_ITEMS = ["aws_secret_access_key", "aws_session_token"] + TF_STATE_CACHE_NAME = "worker_state_cache.json" WORKER_LOCALS_FILENAME = "worker_generated_locals.tf" WORKER_TF_FILENAME = "worker_generated_terraform.tf" diff --git a/tfworker/copier/__init__.py b/tfworker/copier/__init__.py new file mode 100644 index 0000000..75db306 --- /dev/null +++ b/tfworker/copier/__init__.py @@ -0,0 +1,3 @@ +from .factory import Copier, CopyFactory # pragma: no cover # noqa +from .fs_copier import FileSystemCopier # pragma: no cover # noqa +from .git_copier import GitCopier # pragma: no cover # noqa diff --git a/tfworker/copier/factory.py b/tfworker/copier/factory.py new file mode 100644 index 0000000..171230c --- /dev/null +++ b/tfworker/copier/factory.py @@ -0,0 +1,144 @@ +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Callable, Type + + +class CopyFactory: + """The factory class for creating copiers""" + + registry = {} + + @classmethod + def register(cls, name: str) -> Callable[[Type["Copier"]], Type["Copier"]]: + """Class method to register copiers""" + + def inner_wrapper(wrapped_class: Type["Copier"]) -> Type["Copier"]: + if name in cls.registry: + raise ValueError(f"Executor {name} already exists") + cls.registry[name] = wrapped_class + return wrapped_class + + return inner_wrapper + + @classmethod + def create(cls, source: str, **kwargs) -> "Copier": + """ + create creates a copier based on the provided source + + Args: + source (str): the source path to copy + **kwargs: additional keyword arguments + + Returns: + Copier: the copier instance + """ + copier_class = cls.registry[cls.get_copier_type(source, **kwargs)] + copier = copier_class(source, **kwargs) + return copier + + @classmethod + def get_copier_type(cls, source: str, **kwargs) -> str: + """ + get_copier_type returns the copier type for a given source + + Args: + source (str): the source path to copy + **kwargs: additional keyword arguments + + Returns: + str: the copier type + """ + for copier_type, copier_class in cls.registry.items(): + 
if copier_class.type_match(source, **kwargs): + return copier_type + raise NotImplementedError(f"no valid copier for {source}") + + +class Copier(ABC): + """The base class for definition copiers""" + + _register_name: str = None + + def __init__(self, source: str, **kwargs): + self._source = source + self._kwargs = {} + + for k, v in kwargs.items(): + if k in ["conflicts", "destination", "root_path"]: + setattr(self, f"_{k}", v) + else: + self._kwargs[k] = v + + self._kwargs = kwargs + + if hasattr(self, "_conflicts"): + if type(self._conflicts) is not list: + raise ValueError("Conflicts must be a list of filenames to disallow") + + @staticmethod + @abstractmethod + def type_match(source: str, **kwargs) -> bool: # pragma: no cover + """type_match determines if the source is supported/handled by a copier""" + pass + + @abstractmethod + def copy(self, **kwargs) -> None: # pragma: no cover + """copy executes the copy from the source, into the working path""" + pass + + @property + def root_path(self): + """root_path returns an optional root path to use for relative file operations""" + if hasattr(self, "_root_path"): + return self._root_path + else: + return "" + + @property + def conflicts(self): + """conflicts returns a list of disallowed files""" + if hasattr(self, "_conflicts"): + return self._conflicts + else: + return [] + + @property + def source(self): + """source contains the source path provided""" + return self._source + + def get_destination(self, make_dir: bool = True, **kwargs) -> str: + """get_destination returns the destination path, and optionally makes the destination directory""" + if not (hasattr(self, "_destination") or "destination" in kwargs.keys()): + raise ValueError("no destination provided") + if "destination" in kwargs: + d = kwargs["destination"] + else: + d = self._destination + + if make_dir: + make_d = Path(d) + make_d.mkdir(parents=True, exist_ok=True) + + return d + + def check_conflicts(self, path: str) -> None: + """Checks for files with conflicting names in a path""" + conflicting = [] + if self.conflicts: + check_path = Path(path) + for check_file in check_path.glob("*"): + if check_file.name in self.conflicts: + conflicting.append(check_file.name) + + if conflicting: + raise FileExistsError(f"{','.join(conflicting)}") + + def __init_subclass__(cls, **kwargs): + """ + Whenever a subclass is created, register it with the CopyFactory + """ + super().__init_subclass__(**kwargs) + copier_name = getattr(cls, "_register_name", None) + if copier_name is not None: + CopyFactory.register(copier_name)(cls) diff --git a/tfworker/copier/fs_copier.py b/tfworker/copier/fs_copier.py new file mode 100644 index 0000000..8d10cca --- /dev/null +++ b/tfworker/copier/fs_copier.py @@ -0,0 +1,67 @@ +import os +import re +import shutil + +import tfworker.util.log as log + +from .factory import Copier + + +class FileSystemCopier(Copier): + _register_name = "fs" + + def copy(self, **kwargs) -> None: + """copy copies files from a local source on the file system to a destination path""" + dest = self.get_destination(**kwargs) + self.check_conflicts(self.local_path) + if "sub_path" in kwargs and kwargs["sub_path"]: + source_path = f"{self.local_path}/{kwargs['sub_path']}".rstrip("/") + else: + source_path = self.local_path + if not os.path.exists(source_path): + raise FileNotFoundError(f"{source_path} does not exist") + shutil.copytree(source_path, dest, dirs_exist_ok=True) + + @property + def local_path(self): + """local_path returns a complete local file system path""" + if not 
hasattr(self, "_local_path"): + # try with the root path explicitly provided + local_path = self.make_local_path(self.source, self.root_path) + if os.path.exists(local_path): + self._local_path = local_path + return self._local_path + + # try without a root path (this is when an absolute path is provided) + local_path = self.make_local_path(self.source, "") + if os.path.exists(local_path): + self._local_path = local_path + return self._local_path + + if not hasattr(self, "_local_path"): + raise FileNotFoundError(f"unable to find {self.source}") + + return self._local_path + + @staticmethod + def type_match(source: str, **kwargs) -> bool: + # check if the source was provided as an absolute path + log.trace(f"type_matching fs copier for {source}") + if os.path.isdir(source) or os.path.isfile(source): + return True + + # check if the source is relative to the root path + if "root_path" in kwargs: + source = FileSystemCopier.make_local_path(source, kwargs["root_path"]) + + if os.path.isdir(source) or os.path.isfile(source): + return True + + return False + + @staticmethod + def make_local_path(source: str, root_path: str) -> str: + """make_local_path appends together known path objects to provide a local path""" + full_path = f"{root_path}/{source}" + full_path = re.sub(r"/+", "/", full_path) + return full_path diff --git a/tfworker/copier/git_copier.py b/tfworker/copier/git_copier.py new file mode 100644 index 0000000..5fed97d --- /dev/null +++ b/tfworker/copier/git_copier.py @@ -0,0 +1,107 @@ +import os +import re +import shutil +import tempfile + +from tfworker.util.system import pipe_exec + +from .factory import Copier + + +class GitCopier(Copier): + _register_name = "git" + + def copy(self, **kwargs) -> None: + """copy clones a remote git repo, and puts the requested files into the destination""" + dest = self.get_destination(**kwargs) + branch = "master" + git_cmd = "git" + git_args = "" + reset_repo = False + + sub_path = "" + if "sub_path" in kwargs and kwargs["sub_path"]: + sub_path = kwargs["sub_path"].strip("/") + + if "branch" in kwargs and kwargs["branch"]: + branch = kwargs["branch"] + if "git_cmd" in kwargs and kwargs["git_cmd"]: + git_cmd = kwargs["git_cmd"] + if "git_args" in kwargs and kwargs["git_args"]: + git_args = kwargs["git_args"] + if "reset_repo" in kwargs and kwargs["reset_repo"]: + reset_repo = kwargs["reset_repo"] + + self.make_temp() + temp_path = f"{self._temp_dir}/{sub_path}" + exitcode, stdout, stderr = pipe_exec( + re.sub( + r"\s+", + " ", + f"{git_cmd} {git_args} clone {self._source} --branch {branch} --single-branch ./", + ), + cwd=self._temp_dir, + ) + + if exitcode != 0: + self.clean_temp() + raise RuntimeError( + f"unable to clone {self._source}, {stderr.decode('utf-8')}" + ) + + try: + self.check_conflicts(temp_path) + except FileExistsError as e: + self.clean_temp() + raise e + + if reset_repo: + self.repo_clean(f"{temp_path}") + + shutil.copytree(temp_path, dest, dirs_exist_ok=True) + self.clean_temp() + + @staticmethod + def type_match(source: str, **kwargs) -> bool: + # if the remote is a local file, then it's not a git repo + if os.path.exists(source): + return False + + """type matches uses git to see if the source is a valid git remote""" + git_cmd = "git" + git_args = "" + + if "git_cmd" in kwargs: + git_cmd = kwargs["git_cmd"] + if "git_args" in kwargs: + git_args = kwargs["git_args"] + + try: + (return_code, _, _) = pipe_exec(f"{git_cmd} {git_args} ls-remote {source}") + + except (PermissionError, FileNotFoundError): + return False + if 
return_code == 0: + return True + return False + + def make_temp(self) -> None: + if hasattr(self, "_temp_dir"): + pass + else: + self._temp_dir = tempfile.mkdtemp() + + def clean_temp(self) -> None: + """clean_temp removes the temporary path used by this copier""" + if hasattr(self, "_temp_dir"): + shutil.rmtree(self._temp_dir, ignore_errors=True) + del self._temp_dir + + @staticmethod + def repo_clean(p: str) -> None: + """repo_clean removes git and github files from a clone before doing the copy""" + for f in [".git", ".github"]: + try: + shutil.rmtree(f"{p}/{f}") + except FileNotFoundError: + pass diff --git a/tfworker/definitions.py b/tfworker/definitions.py deleted file mode 100644 index ed88aad..0000000 --- a/tfworker/definitions.py +++ /dev/null @@ -1,399 +0,0 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import copy -import json -from pathlib import Path, PosixPath, WindowsPath - -import click -import jinja2 -from mergedeep import merge - -from tfworker import constants as const -from tfworker.constants import ( - TF_PROVIDER_DEFAULT_LOCKFILE, - WORKER_LOCALS_FILENAME, - WORKER_TF_FILENAME, - WORKER_TFVARS_FILENAME, -) -from tfworker.exceptions import ReservedFileError -from tfworker.util.copier import CopyFactory -from tfworker.util.terraform import find_required_providers, generate_terraform_lockfile - -TERRAFORM_TPL = """\ -terraform {{ -{0} -{1} -}} -""" - - -class Definition: - _plan_file = None - _ready_to_apply = False - - def __init__( - self, - definition, - body, - deployment, - global_remote_vars, - global_template_vars, - global_terraform_vars, - providers, - repository_path, - temp_dir, - tf_version_major, - limited=False, - template_callback=None, - use_backend_remotes=False, - provider_cache=None, - ): - self.tag = definition - self._body = body - self._path = body.get("path") - self._remote_vars = self.make_vars( - body.get("remote_vars", dict()), global_remote_vars - ) - self._terraform_vars = self.make_vars( - body.get("terraform_vars", dict()), global_terraform_vars - ) - self._template_vars = self.make_vars( - body.get("template_vars", dict()), global_template_vars - ) - - self._always_apply = body.get("always_apply", False) - self._deployment = deployment - self._repository_path = repository_path - self._providers = providers - self._temp_dir = temp_dir - self._tf_version_major = tf_version_major - self._limited = limited - self._provider_cache = provider_cache - - self._target = f"{self._temp_dir}/definitions/{self.tag}".replace("//", "/") - self._template_callback = template_callback - - self._use_backend_remotes = use_backend_remotes - - @property - def body(self): - return self._body - - @property - def limited(self): - return self._limited - - @property - def path(self): - return self._path - - @property - def fs_path(self): - return Path(f"{self._temp_dir}/definitions/{self.tag}").resolve() - - @property - def provider_names(self): - try: - return 
list(find_required_providers(self.fs_path).keys()) - except AttributeError: - return None - - @property - def plan_file(self): - return self._plan_file - - @property - def template_vars(self): - return self._template_vars - - @plan_file.setter - def plan_file(self, value: Path): - if type(value) not in [PosixPath, WindowsPath, Path]: - raise TypeError("plan_file must be a Path like object") - self._plan_file = value - - def prep(self, backend): - """prepare the definitions for running""" - - # prep the definitions - try: - c = CopyFactory.create( - self.path, - root_path=self._repository_path, - conflicts=const.RESERVED_FILES, - ) - except NotImplementedError: - click.secho( - f"could not handle source path {self.path} for definition {self.tag}, either file does not exist or could not handle remote URI", - fg="red", - ) - raise SystemExit(1) - - remote_options = dict(self.body.get("remote_path_options", {})) - - try: - c.copy(destination=self._target, **remote_options) - except FileNotFoundError as e: - if remote_options.get("sub_path", False): - click.secho( - f"could not find sub_path {remote_options['sub_path']} for definition {self.tag}", - fg="red", - ) - raise SystemExit(1) - else: - raise e - except FileExistsError as e: - raise ReservedFileError(e) - except RuntimeError as e: - click.secho( - f"could not copy source path {self.path} for definition {self.tag}, error details:\n\n{e}", - fg="red", - ) - raise SystemExit(1) - - # render the templates - if self._template_callback is not None: - self._template_callback(self._target, template_vars=self._template_vars) - - # Create local vars from remote data sources - if len(list(self._remote_vars.keys())) > 0: - with open(f"{self._target}/{WORKER_LOCALS_FILENAME}", "w+") as tflocals: - tflocals.write("locals {\n") - for k, v in self._remote_vars.items(): - tflocals.write(f" {k} = data.terraform_remote_state.{v}\n") - tflocals.write("}\n\n") - - # create remote data sources, and required providers - if self._use_backend_remotes: - remotes = backend.remotes() - else: - remotes = list(map(lambda x: x.split(".")[0], self._remote_vars.values())) - - required_providers_content = ( - "" - if self.provider_names is not None - else self._providers.required_hcl(self.provider_names) - ) - - with open(f"{self._target}/{WORKER_TF_FILENAME}", "w+") as tffile: - tffile.write(f"{self._providers.provider_hcl(self.provider_names)}\n\n") - tffile.write( - TERRAFORM_TPL.format( - f"{backend.hcl(self.tag)}", - required_providers_content, - ) - ) - tffile.write(backend.data_hcl(remotes)) - - # Create the variable definitions - with open(f"{self._target}/{WORKER_TFVARS_FILENAME}", "w+") as varfile: - for k, v in self._terraform_vars.items(): - varfile.write(f"{k} = {self.vars_typer(v)}\n") - - self._prep_terraform_lockfile() - - def _prep_terraform_lockfile(self): - """ - Write a terraform lockfile in the definition directory - """ - if self._provider_cache is None: - return - - result = generate_terraform_lockfile( - providers=self._providers, - included_providers=self.provider_names, - cache_dir=self._provider_cache, - ) - - if result is not None: - with open( - f"{self._target}/{TF_PROVIDER_DEFAULT_LOCKFILE}", "w" - ) as lockfile: - lockfile.write(result) - - @staticmethod - def quote_str(some_string): - """Put literal quotes around a string.""" - return f'"{some_string}"' - - def make_vars(self, local_vars, global_vars): - """Make a variables dictionary based on default vars, as well as specific vars for an item.""" - global_vars = global_vars or dict() - 
item_vars = copy.deepcopy(global_vars) - for k, v in local_vars.items(): - item_vars[k] = v - return item_vars - - @staticmethod - def vars_typer(v, inner=False): - """ - vars_typer is used to assemble variables as they are parsed from the yaml configuration - into the required format to be used in terraform - """ - if v is True: - return "true" - elif v is False: - return "false" - elif isinstance(v, list): - rval = [] - for val in v: - result = Definition.vars_typer(val, inner=True) - try: - rval.append(result.strip('"').strip("'")) - except AttributeError: - rval.append(result) - if inner: - return rval - else: - return json.dumps(rval) - elif isinstance(v, dict): - rval = {} - for k, val in v.items(): - result = Definition.vars_typer(val, inner=True) - try: - rval[k] = result.strip('"').strip("'") - except AttributeError: - rval[k] = result - if inner: - return rval - else: - return json.dumps(rval) - return f'"{v}"' - - -class DefinitionsCollection(collections.abc.Mapping): - def __init__( - self, - definitions, - deployment, - limit, - plan_for, - providers, - repository_path, - rootc, - temp_dir, - tf_version_major, - provider_cache=None, - ): - self._body = definitions - self._plan_for = plan_for - self._definitions = dict() - self._limit = True if len(limit) > 0 else False - self._limit_size = len(limit) - self._root_args = rootc.args - - for definition, body in definitions.items(): - self._definitions[definition] = Definition( - definition, - body, - deployment, - rootc.remote_vars_odict, - rootc.template_vars_odict, - rootc.terraform_vars_odict, - providers, - repository_path, - temp_dir, - tf_version_major, - True if limit and definition in limit else False, - template_callback=self.render_templates, - use_backend_remotes=self._root_args.backend_use_all_remotes, - provider_cache=provider_cache, - ) - - def __len__(self): - return len(self._definitions) - - def __getitem__(self, value): - if type(value) is int: - return self._definitions[list(self._definitions.keys())[value]] - return self._definitions[value] - - def __iter__(self): - return self.iter(honor_destroy=True) - - def iter(self, honor_destroy=False): - if honor_destroy: - if self._plan_for == "destroy": - return iter(reversed(list(self._definitions.values()))) - return iter(self._definitions.values()) - - def limited(self): - # handle the case where nothing is filtered - iter_size = len( - list(filter(lambda d: d.limited, self.iter(honor_destroy=True))) - ) - if iter_size == 0: - # a limit was supplied, but not matched, raise an error - if self._limit: - raise ValueError("no definitions matching --limit") - # the run is not limited to anything, so return everything - else: - return self.iter(honor_destroy=True) - elif iter_size < self._limit_size: - # not all limit items are matched - raise ValueError("not all definitions match --limit") - else: - return iter(filter(lambda d: d.limited, self.iter(honor_destroy=True))) - - def render_templates(self, template_path, template_vars={}): - """render all the .tf.j2 files in a path, and rename them to .tf""" - - def filter_templates(filename): - """a small function to filter the list of files down to only j2 templates""" - return filename.endswith(".tf.j2") - - jinja_env = jinja2.Environment( - undefined=jinja2.StrictUndefined, - loader=jinja2.FileSystemLoader(template_path), - ) - jinja_env.globals = merge( - {}, - self._root_args.template_items(return_as_dict=True, get_env=True), - {"var": template_vars}, - ) - - for template_file in 
jinja_env.list_templates(filter_func=filter_templates): - template_target = ( - f"{template_path}/{'.'.join(template_file.split('.')[:-1])}" - ) - - try: - f = open(template_target, "x") - except FileExistsError: - click.secho( - f"ERROR: {template_target} already exists! Make sure there's not a .tf and .tf.j2 copy of this file", - fg="red", - ) - raise SystemExit(1) - - try: - f.writelines(jinja_env.get_template(template_file).generate()) - click.secho( - f"rendered {template_file} into {template_target}", - fg="yellow", - ) - except jinja2.exceptions.UndefinedError as e: - click.secho( - f"file contains invalid template substitutions: {e}", - fg="red", - ) - raise SystemExit(1) - finally: - f.close() - - @property - def body(self): - return self._body diff --git a/tfworker/definitions/__init__.py b/tfworker/definitions/__init__.py new file mode 100644 index 0000000..f33aeec --- /dev/null +++ b/tfworker/definitions/__init__.py @@ -0,0 +1,4 @@ +from .collection import DefinitionsCollection # pragma: no cover # noqa +from .model import Definition, DefinitionRemoteOptions # pragma: no cover # noqa +from .plan import DefinitionPlan # pragma: no cover # noqa +from .prepare import DefinitionPrepare # pragma: no cover # noqa diff --git a/tfworker/definitions/collection.py b/tfworker/definitions/collection.py new file mode 100644 index 0000000..9ab425a --- /dev/null +++ b/tfworker/definitions/collection.py @@ -0,0 +1,93 @@ +import threading +from collections.abc import Mapping +from typing import Dict, List + +from pydantic import GetCoreSchemaHandler, ValidationError +from pydantic_core import CoreSchema, core_schema + +import tfworker.util.log as log +from tfworker.exceptions import FrozenInstanceError +from tfworker.util.cli import handle_config_error + +from .model import Definition + + +class DefinitionsCollection(Mapping): + """ + The DefinitionsCollection holds information about all of the definitions that will need + to be managed during the execution for a particular deployment. The collection should be + used to pass resources to independent functions rather than containing all of the logic. 
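A usage sketch of the collection described here (the definition bodies are hypothetical): each body is validated with `Definition.model_validate`, the `--limit` list is applied at construction time, and `always_apply` / `always_include` definitions bypass the limiter:

```python
definitions = DefinitionsCollection(
    {
        "network": {"path": "definitions/network"},
        "database": {"path": "definitions/database", "always_apply": True},
    },
    limiter=["network"],
)

list(definitions)     # ["network", "database"] -- "database" is kept via always_apply
definitions.freeze()  # subsequent item assignment raises FrozenInstanceError
```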
+ """ + + _instance = None + _lock = threading.Lock() + _frozen: bool = False + + def __new__(cls, *args, **kwargs): + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__( + self, definitions: Dict[str, "Definition"], limiter: List[str] | None = None + ) -> None: + if not hasattr(self, "_initialized"): + log.trace("initializing DefinitionsCollection") + self._definitions = {} + if limiter is None: + limiter = [] + for definition, body in definitions.items(): + # disallow commas in definition names + if "," in definition: + raise ValueError( + f"definition {definition} contains a comma, and commas are not allowed, aborting" + ) + # validation the definition regardless of inclusion + try: + log.trace(f"validating definition: {definition}") + body["name"] = definition + config = Definition.model_validate(body) + except ValidationError as e: + handle_config_error(e) + + if config.always_apply or config.always_include: + log.trace( + f"definition {definition} is set to always_[apply|include]" + ) + elif len(limiter) > 0 and definition not in limiter: + log.trace(f"definition {definition} not in limiter, skipping") + continue + + log.trace(f"adding definition {definition} to definitions") + self._definitions[definition] = config + self._initialized = True + + def __len__(self): + return len(self._definitions) + + def __getitem__(self, key: str) -> "Definition": + return self._definitions[key] + + def __iter__(self): + return iter(self._definitions) + + def __setitem__(self, key: str, value: "Definition"): + if self._frozen: + raise FrozenInstanceError("Cannot modify a frozen instance.") + self._definitions[key] = value + + def freeze(self): + self._frozen = True + + @classmethod + def reset(cls): + with cls._lock: + cls._instance = None + cls._frozen = False + + @classmethod + def __get_pydantic_core_schema__( + cls, _, handler: GetCoreSchemaHandler + ) -> CoreSchema: + return core_schema.no_info_after_validator_function(cls, handler(dict)) diff --git a/tfworker/definitions/model.py b/tfworker/definitions/model.py new file mode 100644 index 0000000..022f000 --- /dev/null +++ b/tfworker/definitions/model.py @@ -0,0 +1,226 @@ +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel, ConfigDict, Field + +import tfworker.util.log as log + + +class DefinitionRemoteOptions(BaseModel): + """ + Model to define the remote_options of a definition + """ + + model_config = ConfigDict(extra="forbid") + + branch: Optional[str] = None + sub_path: Optional[str] = None + + +class Definition(BaseModel): + """ + Model to define a definition + """ + + model_config = ConfigDict(extra="forbid") + + name: str + path: str + always_apply: bool = False + always_include: bool = False + remote_path_options: Optional[DefinitionRemoteOptions] = Field( + default_factory=DefinitionRemoteOptions, + description="Options for the remote path of the definition", + ) + ignore_global_vars: bool = False + ignored_global_terraform_vars: Optional[List[str]] = Field( + [], description="List of global vars to ignore." + ) + ignored_global_remote_vars: Optional[List[str]] = Field( + [], description="List of global remote vars to ignore." + ) + ignored_global_template_vars: Optional[List[str]] = Field( + [], description="List of global template vars to ignore." + ) + use_global_terraform_vars: Optional[List[str]] = Field( + [], description="List of global vars to use." 
+ ) + use_global_remote_vars: Optional[List[str]] = Field( + [], description="List of global remote vars to use." + ) + use_global_template_vars: Optional[List[str]] = Field( + [], description="List of global template vars to use." + ) + terraform_vars: Optional[Dict[str, Any]] = Field( + {}, description="Variables to pass to terraform via a generated .tfvars file." + ) + remote_vars: Optional[Dict[str, str]] = Field( + {}, + description="Variables which are used to generate local references to remote state vars.", + ) + template_vars: Optional[Dict[str, str]] = Field( + {}, description="Variables which are suppled to any jinja templates." + ) + + # Internals, these should not be set by the user + ready: bool = False + needs_apply: bool = False + plan_file: Optional[Union[str, None]] = None + + def get_target_path(self, working_dir: str) -> str: + """ + Get the target path of the definition + """ + return Path(f"{working_dir}/definitions/{self.name}").resolve() + + def get_template_vars(self, global_vars: Dict[str, str]) -> Dict[str, str]: + """ + get complete template vars for the definition + + Args: + global_vars (Dict[str, str]): the global vars to use + + Returns: + Dict[str, str]: the complete template vars + """ + full_vars = self.template_vars.copy() + log.trace(f"initial template vars: {full_vars}") + if self.ignore_global_vars: + log.trace("ignoring global vars, not adding to definition template vars") + return full_vars + for key, value in global_vars.items(): + if key in full_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition template vars, already exists" + ) + continue + if key in self.ignored_global_template_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition template vars, ignored" + ) + continue + if ( + self.use_global_template_vars + and key not in self.use_global_template_vars + ): + log.trace( + f"not adding global key: {key}, value: {value} to definition template vars, use list set, not in list" + ) + continue + log.trace( + f"adding global key: {key}, value: {value} to definition template vars" + ) + full_vars[key] = value + + return full_vars + + def get_remote_vars(self, global_vars: Dict[str, str]) -> Dict[str, str]: + """ + get complete local to remote var mappsings for the definition + + Args: + global_vars (Dict[str, str]): the global vars to use + + Returns: + Dict[str, str]: the complete local vars + """ + full_vars = self.remote_vars.copy() + log.trace(f"initial remote vars: {full_vars}") + if self.ignore_global_vars: + log.trace("ignoring global vars, not adding to definition template vars") + return full_vars + for key, value in global_vars.items(): + if key in full_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition remote vars, already exists" + ) + continue + if key in self.ignored_global_remote_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition remote vars, ignored" + ) + continue + if self.use_global_remote_vars and key not in self.use_global_remote_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition remote vars, use list set, not in list" + ) + continue + log.trace( + f"adding global key: {key}, value: {value} to definition remote vars" + ) + full_vars[key] = value + + return full_vars + + def get_terraform_vars(self, global_vars: Dict[str, str]) -> Dict[str, Any]: + """ + get complete terraform vars for the definition + + Args: + global_vars (Dict[str, str]): the global vars to use + + Returns: + 
Dict[str, Any]: the complete terraform vars + """ + full_vars = self.terraform_vars.copy() + log.trace(f"initial terraform vars: {full_vars}") + if self.ignore_global_vars: + log.trace("ignoring global vars, not adding to definition template vars") + return full_vars + for key, value in global_vars.items(): + if key in full_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition terraform vars, already exists" + ) + continue + if key in self.ignored_global_terraform_vars: + log.trace( + f"not adding global key: {key}, value: {value} to definition terraform vars, ignored" + ) + continue + if ( + self.use_global_terraform_vars + and key not in self.use_global_terraform_vars + ): + log.trace( + f"not adding global key: {key}, value: {value} to definition terraform vars, use list set, not in list" + ) + continue + log.trace( + f"adding global key: {key}, value: {value} to definition terraform vars" + ) + full_vars[key] = value + + return full_vars + + def get_used_providers(self, working_dir: str) -> Union[List[str], None]: + """ + Get the providers used by the definition + + Args: + working_dir (str): The working directory + invalidate_cache (int): Passing a random int here will ensure the LRU cache is bypassed + + Returns: + Union[List[str], None]: The list of providers used by the definition or none + """ + return cached_get_used_providers(self.get_target_path(working_dir)) + + +def cached_get_used_providers(working_dir: str) -> Union[List[str], None]: + """ + Get the providers used by the definition + + Args: + working_dir (str): The working directory + invalidate_cache (int): Passing a random int here will ensure the LRU cache is bypassed + + Returns: + Union[List[str], None]: The list of providers used by the definition or none + """ + from tfworker.util.terraform import find_required_providers + + try: + return list(find_required_providers(working_dir).keys()) + except AttributeError: + return None diff --git a/tfworker/definitions/plan.py b/tfworker/definitions/plan.py new file mode 100644 index 0000000..fde7eef --- /dev/null +++ b/tfworker/definitions/plan.py @@ -0,0 +1,77 @@ +from pathlib import Path +from typing import TYPE_CHECKING, Tuple + +from tfworker.types.terraform import TerraformAction + +if TYPE_CHECKING: + from click import Context # pragma: no cover # noqa: F401 + + from tfworker.app_state import AppState # pragma: no cover # noqa: F401 + from tfworker.definitions.model import Definition # pragma: no cover # noqa: F401 + + +class DefinitionPlan: + """ + DefinitionPlan is a class to help working with definitions to get everything + ready to execute terraform plan, after terraform init has been run + """ + + def __init__(self, ctx: "Context", app_state: "AppState"): + self._ctx: "Context" = ctx + self._app_state: "AppState" = app_state + + @property + def plan_for(self) -> TerraformAction: + if self._app_state.terraform_options.destroy: + return TerraformAction.DESTROY + return TerraformAction.APPLY + + def set_plan_file(self, definition: "Definition") -> str: + """ + Get the plan file for a definition + + Args: + name (str): The name of the definition + + Returns + str: The absolute path to the plan file + """ + if self._app_state.terraform_options.plan_file_path: + plan_base: str = Path( + f"{self._app_state.terraform_options.plan_file_path}/{self._app_state.deployment}" + ).resolve() + else: + plan_base: str = Path(f"{self._app_state.working_dir}/plans").resolve() + + plan_base.mkdir(parents=True, exist_ok=True) + plan_file: Path = plan_base / 
f"{definition.name}.tfplan" + definition.plan_file = plan_file.resolve() + + def needs_plan(self, definition: "Definition") -> Tuple[bool, str]: + """ + Check if a definition needs a plan + + Args: + name (str): The name of the definition + + Returns: + Tuple[bool, str]: A tuple with a boolean indicating if a plan is needed + and a string with the reason why a plan is or is not needed + """ + # no saved plans possible + if not ( + self._app_state.terraform_options.plan_file_path + or self._app_state.root_options.backend_plans + ): + return True, "no saved plans possible" + + plan_file: Path = Path(definition.plan_file) + + if plan_file.exists() and plan_file.stat().st_size > 0: + return False, "plan file exists" + + if plan_file.exists() and plan_file.stat().st_size == 0: + plan_file.unlink() + return True, "empty plan file" + + return True, "no plan file" diff --git a/tfworker/definitions/prepare.py b/tfworker/definitions/prepare.py new file mode 100644 index 0000000..3ff76e4 --- /dev/null +++ b/tfworker/definitions/prepare.py @@ -0,0 +1,377 @@ +import json +from os import environ +from typing import TYPE_CHECKING, Dict, Union + +import jinja2 + +import tfworker.util.log as log +from tfworker.constants import ( + RESERVED_FILES, + TF_PROVIDER_DEFAULT_LOCKFILE, + WORKER_LOCALS_FILENAME, + WORKER_TF_FILENAME, + WORKER_TFVARS_FILENAME, +) +from tfworker.copier import Copier, CopyFactory +from tfworker.exceptions import ReservedFileError, TFWorkerException +from tfworker.util.system import pipe_exec +from tfworker.util.terraform import generate_terraform_lockfile + +if TYPE_CHECKING: + from tfworker.app_state import AppState # pragma: no cover # noqa: F401 + from tfworker.definitions import Definition # pragma: no cover # noqa: F401 + + +TERRAFORM_TPL = """\ +terraform {{ +{0} +{1} +}} + +""" + + +class DefinitionPrepare: + """ + TerraformPrepare is a class that prepares a definition to be ready for terraform init + """ + + def __init__(self, app_state: "AppState"): + self._app_state: "AppState" = app_state + + def copy_files(self, name: str) -> None: + """ + Prepare is an orchestration function that prepares a definition to be ready for terraform init + """ + definition = self._app_state.definitions[name] + + log.trace( + f"fetching copier for definition {name} with path {definition.path} and repo_path {self._app_state.root_options.repository_path}" + ) + try: + c = get_coppier( + definition.path, self._app_state.root_options.repository_path + ) + except NotImplementedError as e: + raise TFWorkerException( + f"could not handle source path {definition.path} for definition {name}, either file does not exist or could not handle remote URI" + ) from e + + log.trace( + f"putting definition {name} in {definition.get_target_path(self._app_state.working_dir)} with copier {c.__class__.__name__}" + ) + try: + copy( + copier=c, + destination=definition.get_target_path(self._app_state.working_dir), + options=definition.remote_path_options.model_dump(), + ) + except (FileNotFoundError, ReservedFileError) as e: + raise TFWorkerException(e) from e + + def render_templates(self, name: str) -> None: + """render all the .tf.j2 files in a path, and rename them to .tf""" + definition = self._app_state.definitions[name] + template_path = definition.get_target_path(self._app_state.working_dir) + jinja_env = get_jinja_env( + template_path=template_path, jinja_globals=self._get_template_vars(name) + ) + for template_file in jinja_env.list_templates(filter_func=filter_templates): + 
write_template_file(jinja_env=jinja_env, template_path=template_path, template_file=template_file) + + def create_local_vars(self, name: str) -> None: + """Create local vars from remote data sources""" + definition = self._app_state.definitions[name] + log.trace(f"creating local vars for definition {name}") + with open( + f"{definition.get_target_path(self._app_state.working_dir)}/{WORKER_LOCALS_FILENAME}", + "w+", + ) as tflocals: + tflocals.write("locals {\n") + for k, v in definition.get_remote_vars( + global_vars=self._app_state.loaded_config.global_vars.remote_vars + ).items(): + tflocals.write(f" {k} = data.terraform_remote_state.{v}\n") + tflocals.write("}\n\n") + + def create_worker_tf(self, name: str) -> None: + """Create remote data sources, and required providers""" + log.trace(f"creating remote data sources for definition {name}") + remotes = self._get_remotes(name) + provider_content = self._get_provider_content(name) + self._write_worker_tf(name, remotes, provider_content) + + def create_terraform_vars(self, name: str) -> None: + """Create the variable definitions""" + definition = self._app_state.definitions[name] + log.trace(f"creating terraform vars for definition {name}") + with open( + f"{definition.get_target_path(self._app_state.working_dir)}/{WORKER_TFVARS_FILENAME}", + "w+", + ) as varfile: + for k, v in definition.get_terraform_vars( + global_vars=self._app_state.loaded_config.global_vars.terraform_vars + ).items(): + varfile.write(f"{k} = {vars_typer(v)}\n") + + def create_terraform_lockfile(self, name: str) -> None: + """Create the terraform lockfile""" + if ( + self._app_state.providers is None + or self._app_state.terraform_options.provider_cache is None + ): + log.trace( + f"no providers or provider cache, skipping lockfile creation for {name}" + ) + return + + definition = self._app_state.definitions[name] + log.trace(f"creating terraform lockfile for definition {name}") + result = generate_terraform_lockfile( + providers=self._app_state.providers, + included_providers=definition.get_used_providers( + self._app_state.working_dir + ), + cache_dir=self._app_state.terraform_options.provider_cache, + ) + + if result is not None: + with open( + f"{definition.get_target_path(self._app_state.working_dir)}/{TF_PROVIDER_DEFAULT_LOCKFILE}", + "w", + ) as lockfile: + lockfile.write(result) + + def download_modules(self, name: str, stream_output: bool = True) -> None: + """Download the modules""" + from tfworker.commands.terraform import TerraformResult + + definition = self._app_state.definitions[name] + log.trace(f"downloading modules for definition {name}") + result: TerraformResult = TerraformResult( + *pipe_exec( + "terraform get", + cwd=definition.get_target_path(self._app_state.working_dir), + stream_output=stream_output, + ) + ) + if not stream_output: + log.debug(f"terraform get result: {result.stdout}") + log.debug(f"terraform get error: {result.stderr}") + if result.exit_code != 0: + raise TFWorkerException( + f"could not download modules for definition {name}: {result.stderr}" + ) + + def _get_provider_content(self, name: str) -> str: + """Get the provider content""" + definition = self._app_state.definitions[name] + provider_names = definition.get_used_providers(self._app_state.working_dir) + + if provider_names is not None: + return "" + return self._app_state.providers.required_hcl(provider_names) + + def _get_remotes(self, name: str) -> list: + """Get the remote data sources""" + definition = self._app_state.definitions[name] + log.trace(f"getting remotes 
for definition {name}") + if self._app_state.terraform_options.backend_use_all_remotes: + log.trace(f"using all remotes for definition {name}") + remotes = self._app_state.backend.remotes + else: + remotes = list( + map(lambda x: x.split(".")[0], definition.remote_vars.values()) + ) + log.trace(f"using remotes {remotes} for definition {name}") + return remotes + + def _write_worker_tf(self, name: str, remotes: list, provider_content: str) -> None: + """Write the worker.tf file""" + definition = self._app_state.definitions[name] + + with open( + f"{definition.get_target_path(self._app_state.working_dir)}/{WORKER_TF_FILENAME}", + "w+", + ) as tffile: + # Write out the provider configurations for each provider + tffile.write( + f"{self._app_state.providers.provider_hcl(includes=definition.get_used_providers(self._app_state.working_dir))}\n\n" + ) + tffile.write( + TERRAFORM_TPL.format( + # the backend configuration + f"{self._app_state.backend.hcl(name)}", + # the required providers + provider_content, + ) + ) + tffile.write(self._app_state.backend.data_hcl(remotes)) + + def _get_template_vars(self, name: str) -> Dict[str, str]: + """ + Prepares the vars for rendering in a jinja template + + Creates a dictionary of vars from the following sources: + - definition vars + - root_command config-vars + - OS environment vars + - root_command template-vars + + Args: + name (str): the name of the definition + + Returns: + Dict[str, str]: the template vars + """ + definition: "Definition" = self._app_state.definitions[name] + template_vars = definition.get_template_vars( + self._app_state.loaded_config.global_vars.template_vars + ).copy() + + for item in self._app_state.root_options.config_var: + k, v = item.split("=") + template_vars[k] = v + + return { + "var": template_vars, + "env": dict(environ), + } + + +def get_coppier(path: str, root_path: str) -> Copier: + """ + Returns an appropriate copier for the definition path + + Args: + path (str): the path to the definition + root_path (str): the root path of the repository + + Returns: + Copier: the copier to use + + Raises: + NotImplementedError: if there is no copier to handle the path + """ + copier = CopyFactory.create(path, root_path=root_path, conflicts=RESERVED_FILES) + return copier + + +def copy( + copier: Copier, destination: str, options: Union[Dict[str, str], None] +) -> None: + """ + Copy the source to the destination + + Args: + copier (Copier): the copier to use + destination (str): the destination to copy to + options (Dict[str, str]): the options to pass to the copier + + Raises: + FileNotFoundError: if the source file does not exist + """ + if options is None: + options = {} + + try: + copier.copy(destination=destination, **options) + except FileNotFoundError: + raise + + +def get_jinja_env( + template_path: str, jinja_globals: Dict[str, str] +) -> jinja2.Environment: + """ + Get a jinja environment + + Args: + jinja_globals (Dict[str, str]): the globals to add to the environment + + Returns: + jinja2.Environment: the jinja environment + """ + jinja_env = jinja2.Environment( + undefined=jinja2.StrictUndefined, + loader=jinja2.FileSystemLoader(template_path), + ) + jinja_env.globals = jinja_globals + return jinja_env + + +def write_template_file( + jinja_env: jinja2.Environment, template_path: str, template_file: str +) -> None: + """ + Write a template file to disk + + Args: + jinja_env (jinja2.Environment): the jinja environment + template_path (str): the path to the template + template_file (str): the file to render + + Raises: + 
TFWorkerException: if the file already exists, or contains invalid template substitutions
+    """
+    template_target = f"{template_path}/{'.'.join(template_file.split('.')[:-1])}"
+
+    try:
+        f = open(template_target, "x")
+    except FileExistsError as e:
+        raise TFWorkerException(
+            f"{template_target} already exists! Make sure there's not a .tf and .tf.j2 copy of this file"
+        ) from e
+    try:
+        f.writelines(jinja_env.get_template(template_file).generate())
+        log.debug(f"rendered {template_file} into {template_target}")
+    except (
+        jinja2.exceptions.UndefinedError,
+        jinja2.exceptions.TemplateSyntaxError,
+    ) as e:
+        raise TFWorkerException(
+            f"{template_path}/{template_file} could not be rendered: {e}"
+        ) from e
+    finally:
+        f.close()
+
+
+def filter_templates(filename):
+    """a small function to filter the list of files down to only j2 templates"""
+    return filename.endswith(".tf.j2")
+
+
+def vars_typer(v, inner=False):
+    """
+    vars_typer is used to assemble variables as they are parsed from the yaml configuration
+    into the required format to be used in terraform
+    """
+    if v is True:
+        return "true"
+    elif v is False:
+        return "false"
+    elif isinstance(v, list):
+        rval = []
+        for val in v:
+            result = vars_typer(val, inner=True)
+            try:
+                rval.append(result.strip('"').strip("'"))
+            except AttributeError:
+                rval.append(result)
+        if inner:
+            return rval
+        else:
+            return json.dumps(rval)
+    elif isinstance(v, dict):
+        rval = {}
+        for k, val in v.items():
+            result = vars_typer(val, inner=True)
+            try:
+                rval[k] = result.strip('"').strip("'")
+            except AttributeError:
+                rval[k] = result
+        if inner:
+            return rval
+        else:
+            return json.dumps(rval)
+    return f'"{v}"'
diff --git a/tfworker/exceptions.py b/tfworker/exceptions.py
index b06e91a..e70a294 100644
--- a/tfworker/exceptions.py
+++ b/tfworker/exceptions.py
@@ -1,4 +1,29 @@
-class HookError(Exception):
+class TFWorkerException(Exception):
+    """
+    All exceptions raised by tfworker should inherit from this class.
+    """
+
+    pass
+
+
+class MissingDependencyException(TFWorkerException):
+    pass
+
+
+class BackendError(TFWorkerException):
+    # add custom "help" parameter to the exception
+    def __init__(self, message, help=None):
+        super().__init__(message)
+        self._help = help
+
+    @property
+    def help(self):
+        if self._help is None:
+            return "No help available"
+        return self._help
+
+
+class HookError(TFWorkerException):
     """
     Exception is raised when a hook fails, or has execution issues.
     """
@@ -6,7 +31,7 @@ class HookError(Exception):
     pass
 
 
-class PlanChange(Exception):
+class PlanChange(TFWorkerException):
     """
     Exception is raised when a terraform plan has changes.
     """
@@ -22,12 +47,12 @@ class PlanChange(Exception):
     # pass
 
 
-class UnknownProvider(Exception):
+class UnknownProvider(TFWorkerException):
     def __init__(self, provider):
         super().__init__(f"{provider} is not a known value.")
 
 
-class ReservedFileError(Exception):
+class ReservedFileError(TFWorkerException):
     """
     Exception is raised when a reserved file is found in the repository;
 
@@ -38,9 +63,48 @@ class ReservedFileError(Exception):
     pass
 
 
-class TerraformError(Exception):
+class TerraformError(TFWorkerException):
     """
     Exception is raised when a terraform command fails.
     """
 
     pass
+
+
+class UnknownHandler(TFWorkerException):
+    """
+    This is an exception that indicates configuration was attempted for a handler that is not supported.
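With every error now rooted at TFWorkerException, failures can be funnelled through one place at the command boundary. A minimal sketch of that pattern, not the actual cli.py wiring, using only the exception classes defined in this module:

import click

from tfworker.exceptions import BackendError, HandlerError, TFWorkerException


def run_guarded(func, *args, **kwargs):
    # Hypothetical wrapper around a command entry point.
    try:
        return func(*args, **kwargs)
    except BackendError as e:
        # BackendError carries an optional "help" hint for the operator
        click.echo(f"backend error: {e}")
        click.echo(e.help)
        raise SystemExit(1)
    except HandlerError as e:
        # a handler decides whether its failure should stop the run
        click.echo(str(e))
        if e.terminate:
            raise SystemExit(1)
    except TFWorkerException as e:
        # anything else raised inside tfworker lands here
        click.echo(f"unexpected worker error: {e}")
        raise SystemExit(1)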
+ """ + + def __init__(self, provider: str) -> None: + self.provider = provider + + def __str__(self) -> str: + return f"Unknown handler: {self.provider}" + + +class HandlerError(TFWorkerException): + """ + This is an exception that indicates an error occurred while attempting to execute a handler. + """ + + def __init__(self, message: str, terminate: bool = True) -> None: + self.message = message + self.terminate = terminate + + def __str__(self) -> str: + return f"Handler error: {self.message}" + + +class FrozenInstanceError(TFWorkerException): + """ + This is an exception that indicates an attempt to modify a frozen instance. + """ + + def __str__(self) -> str: + return "Cannot modify a frozen instance." + + +class UnknownAuthenticator(Exception): + def __init__(self, provider): + super().__init__(f"{provider} is not a known authenticator.") diff --git a/tfworker/handlers/__init__.py b/tfworker/handlers/__init__.py index d8ef323..f7d2d2a 100644 --- a/tfworker/handlers/__init__.py +++ b/tfworker/handlers/__init__.py @@ -1,59 +1,5 @@ -import collections - -# Make all of the handlers available from the handlers module -from .base import BaseHandler # noqa: F401 -from .bitbucket import BitbucketHandler # noqa: F401 -from .exceptions import HandlerError, UnknownHandler # noqa: F401 -from .trivy import TrivyHandler # noqa: F401 - - -class HandlersCollection(collections.abc.Mapping): - """ - The HandlersCollection class is a collection of handlers. It is meant to be used as a singleton. - """ - - def __init__(self, handlers_config): - """ - Initialize the HandlersCollection object, only add handlers which have a provider key in the handlers_config dict. - """ - self._handlers = dict() - - for k in handlers_config: - if k in self._handlers.keys(): - raise TypeError(f"Duplicate handler: {k}") - if k == "bitbucket": - self._handlers["bitbucket"] = BitbucketHandler(handlers_config[k]) - elif k == "trivy": - self._handlers["trivy"] = TrivyHandler(handlers_config[k]) - else: - raise UnknownHandler(provider=k) - - def __len__(self): - return len(self._handlers) - - def __getitem__(self, value): - if type(value) is int: - return self._handlers[list(self._handlers.keys())[value]] - return self._handlers[value] - - def __iter__(self): - return iter(self._handlers.values()) - - def __setitem__(self, key, value): - self._handlers[key] = value - - def update(self, handlers_config): - """ - update is used to update the handlers collection with new handlers - """ - for k in handlers_config: - if k in self._handlers.keys(): - raise TypeError(f"Duplicate handler: {k}") - self._handlers[k] = handlers_config[k] - - def get(self, value): - try: - return self[value] - except Exception: - raise UnknownHandler(provider=value) - return None +from .base import BaseHandler # pragma: no cover # noqa +from .bitbucket import BitbucketConfig, BitbucketHandler # pragma: no cover # noqa +from .collection import HandlersCollection # pragma: no cover # noqa +from .s3 import S3Handler # pragma: no cover # noqa +from .trivy import TrivyConfig, TrivyHandler # pragma: no cover # noqa diff --git a/tfworker/handlers/base.py b/tfworker/handlers/base.py index 9a31adf..9ba450b 100644 --- a/tfworker/handlers/base.py +++ b/tfworker/handlers/base.py @@ -1,13 +1,29 @@ from abc import ABCMeta, abstractmethod +from typing import TYPE_CHECKING, Union + +from pydantic import BaseModel + +if TYPE_CHECKING: + from tfworker.commands.terraform import TerraformResult + from tfworker.definitions.model import Definition + from tfworker.types import 
TerraformAction, TerraformStage + + +class BaseConfig(BaseModel): ... # noqa class BaseHandler(metaclass=ABCMeta): """The base handler class should be extended by all handler classes.""" actions = [] - required_vars = [] + config_model = BaseConfig + _ready = False + + @abstractmethod + def __init__(self, config: BaseModel) -> None: + pass - def is_ready(self): # pragma: no cover + def is_ready(self) -> bool: # pragma: no cover """is_ready is called to determine if a handler is ready to be executed""" try: return self._ready @@ -15,7 +31,15 @@ def is_ready(self): # pragma: no cover return False @abstractmethod - def execute(self, action: str, stage: str, **kwargs) -> None: # pragma: no cover + def execute( + self, + action: "TerraformAction", + stage: "TerraformStage", + deployment: str, + definition: "Definition", + working_dir: str, + result: Union["TerraformResult", None] = None, + ) -> None: # pragma: no cover """ execute is called when a handler should trigger, it accepts to parameters action: the action that triggered the handler (one of plan, clean, apply, destroy) diff --git a/tfworker/handlers/bitbucket.py b/tfworker/handlers/bitbucket.py index 4a3e7a5..a339233 100644 --- a/tfworker/handlers/bitbucket.py +++ b/tfworker/handlers/bitbucket.py @@ -1,11 +1,38 @@ -import os +from typing import TYPE_CHECKING, Union from atlassian.bitbucket import Cloud +from pydantic import BaseModel, Field +from pydantic_settings import SettingsConfigDict + +from tfworker.exceptions import HandlerError +from tfworker.types.terraform import TerraformAction, TerraformStage from .base import BaseHandler -from .exceptions import HandlerError +from .registry import HandlerRegistry + +if TYPE_CHECKING: + from tfworker.commands.terraform import TerraformResult + from tfworker.definitions.model import Definition + + +class BitbucketConfig(BaseModel): + model_config = SettingsConfigDict(env_prefix="BITBUCKET_") + username: str + password: str + workspace: str + project: str + repository: str + pull_request: str = Field( + description="The pull request number to add the comment to.", + json_schema_extra={"env": "PULL_REQUEST"}, + ) + pr_text: str = ( + "Terraform Plan Output for {deployment} / {definition} \n---\n\n```\n{text}\n```" + ) + +@HandlerRegistry.register("bitbucket") class BitbucketHandler(BaseHandler): """ The BitbucketHandler class is meant to interact with bitbucket cloud. 
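The register decorator above is the new extension point: a handler pairs a pydantic config model with a name, so the worker can validate its configuration and construct it generically. A rough sketch of what a hypothetical third-party handler would look like under this scheme (the "slack" name and its fields are invented):

from pydantic import BaseModel

from tfworker.handlers.base import BaseHandler
from tfworker.handlers.registry import HandlerRegistry
from tfworker.types.terraform import TerraformAction, TerraformStage


class SlackConfig(BaseModel):
    webhook_url: str
    channel: str = "#terraform"


@HandlerRegistry.register("slack")
class SlackHandler(BaseHandler):
    actions = [TerraformAction.PLAN]
    config_model = SlackConfig

    def __init__(self, config: SlackConfig) -> None:
        self.config = config
        self._ready = True

    def execute(self, action, stage, deployment, definition, working_dir, result=None) -> None:
        if action == TerraformAction.PLAN and stage == TerraformStage.POST and result is not None:
            ...  # post result.stdout_str to the configured webhook


# the command layer would then do something along the lines of:
# HandlerRegistry.make_handler("slack", SlackConfig(webhook_url="https://hooks.example.invalid/abc"))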
@@ -14,41 +41,29 @@ class BitbucketHandler(BaseHandler): """ # define supported actions - actions = ["plan"] - # define required variables / since different handlers may have different requirements we must rely on kwargs - required_vars = [ - "username", - "password", - "workspace", - "project", - "repository", - "pull_request", - ] - # define the text to be added to the pull request - pr_text = "Terraform Plan Output for {deployment} / {definition} \n---\n\n```\n{text}\n```" - - def __init__(self, kwargs): - # set ready to false until we are able to successfully get the pull request - self._ready = False + actions = [TerraformAction.PLAN] + config_model = BitbucketConfig + ready = False + def __init__(self, config: BitbucketConfig) -> None: # ensure all of the required variables are set - for var in self.required_vars: - if var not in kwargs: - raise HandlerError( - f"Missing required variable: {var}, required variabls are {','.join(sorted(self.required_vars))}" - ) + # for var in self.required_vars: + # if var not in kwargs: + # raise HandlerError( + # f"Missing required variable: {var}, required variabls are {','.join(sorted(self.required_vars))}" + # ) # initialize the bitbucket object - self._bb = Cloud( - username=kwargs["username"], password=kwargs["password"], cloud=True - ) + self._bb = Cloud(username=config.username, password=config.password, cloud=True) + + self.config: BitbucketConfig = config # get the workspace, project, and repository objects from bitbucket try: - self._workspace = self._bb.workspaces.get(kwargs["workspace"]) - self._project = self._workspace.projects.get(kwargs["project"], by="name") + self._workspace = self._bb.workspaces.get(config.workspace) + self._project = self._workspace.projects.get(config.project, by="name") self._repository = self._project.repositories.get( - kwargs["repository"], by="name" + config.repository, by="name" ) # TODO: catch specific exceptions # the exceptions raised by the bitbucket module are not well defined, so we will catch all exceptions for now @@ -57,12 +72,9 @@ def __init__(self, kwargs): # In the future more logic may be needed if we want to support sommething other than adding PR text as a comment # or if we want to support other mechanisms for supplying a pull request number other than from an environment variable - p = kwargs["pull_request"].get("envvar", None) - if p is not None: - prnum = os.environ.get(p, None) - if prnum is not None: - self._pull_request = self._repository.pullrequests.get(prnum) - self._ready = True + prnum = config.pull_request + self._pull_request = self._repository.pullrequests.get(prnum) + self._ready = True def is_ready(self): """ @@ -70,23 +82,27 @@ def is_ready(self): """ return self._ready - def execute(self, action, stage, **kwargs): + def execute( + self, + action: "TerraformAction", + stage: "TerraformStage", + deployment: str, + definition: "Definition", + working_dir: str, + result: Union["TerraformResult", None] = None, + ) -> None: """ execute is a generic method that will execute the specified action with the provided arguments. 
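When the post-plan stage fires, the comment body is built from the configurable pr_text template shown in BitbucketConfig above. A quick illustration of how those placeholders are expected to expand (the values are invented):

pr_text = (
    "Terraform Plan Output for {deployment} / {definition} \n---\n\n```\n{text}\n```"
)

comment = pr_text.format(
    deployment="my-deployment",   # invented deployment name
    definition="network",         # invented definition name
    text="Plan: 2 to add, 0 to change, 0 to destroy.",
)
# comment is now a markdown snippet with the plan output in a fenced block,
# ready to be posted on the pull request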
""" - if action == "plan": - for v in ["text", "planfile", "deployment", "definition"]: - if v not in kwargs: - raise HandlerError(f"Missing required argument: {v}") + if action == TerraformAction.PLAN and stage == TerraformStage.POST: self.plan( - kwargs["deployment"], - kwargs["definition"], - kwargs["text"], - kwargs["planfile"], + deployment=definition.deployment, + definition=definition.name, + text=result.stdout_str, ) - def plan(self, deployment, definition, text, planfile): + def plan(self, deployment, definition, text): """ plan will post a comment to the pull request with the output of a terraform plan. """ diff --git a/tfworker/handlers/collection.py b/tfworker/handlers/collection.py new file mode 100644 index 0000000..e18768e --- /dev/null +++ b/tfworker/handlers/collection.py @@ -0,0 +1,116 @@ +import threading +from collections.abc import Mapping +from typing import TYPE_CHECKING, Dict, Union + +import tfworker.util.log as log +from tfworker.exceptions import FrozenInstanceError, HandlerError, UnknownHandler + +if TYPE_CHECKING: + from tfworker.commands.terraform import TerraformResult + from tfworker.definitions.model import Definition + from tfworker.types import TerraformAction, TerraformStage + + from .base import BaseHandler # noqa: F401 + + +class HandlersCollection(Mapping): + """ + The HandlersCollection class is a collection of handlers that are active in a various execution. + """ + + _instance = None + _lock = threading.Lock() + _frozen: bool = False + + def __new__(cls, *args, **kwargs): + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self, handlers: Dict[str, Union["BaseHandler", None]] = None): + """ + Initialize the HandlersCollection object, only add handlers which have a provider key in the handlers_config dict. + """ + if not hasattr(self, "_initialized"): + self._handlers = dict() + if handlers: + for k, v in handlers.items(): + log.trace(f"Adding handler {k} to handlers collection") + log.trace(f"Handler cls: {v}") + self._handlers[k] = v + self._initialized = True + + def __len__(self): + return len(self._handlers) + + def __getitem__(self, value): + if isinstance(value, int): + return self._handlers[list(self._handlers.keys())[value]] + return self._handlers[value] + + def __iter__(self): + return iter(self._handlers.keys()) + + def __setitem__(self, key, value): + if self._frozen: + raise FrozenInstanceError("Cannot modify a frozen instance.") + self._handlers[key] = value + + def freeze(self): + """ + freeze is used to prevent further modification of the handlers collection. + """ + self._frozen = True + + def update(self, handlers_config): + """ + update is used to update the handlers collection with new handlers. + """ + for k in handlers_config: + if k in self._handlers.keys(): + raise TypeError(f"Duplicate handler: {k}") + self._handlers[k] = handlers_config[k] + + def get(self, value): + try: + return self[value] + except KeyError: + raise UnknownHandler(provider=value) + + def exec_handlers( + self, + action: "TerraformAction", + stage: "TerraformStage", + deployment: str, + definition: "Definition", + working_dir: str, + result: Union["TerraformResult", None] = None, + ): + """ + exec_handlers is used to execute a specific action on all handlers. 
+ """ + from tfworker.types import TerraformAction, TerraformStage + + handler: BaseHandler + + if action not in TerraformAction: + raise HandlerError(f"Invalid action {action}") + if stage not in TerraformStage: + raise HandlerError(f"Invalid stage {stage}") + for name, handler in self._handlers.items(): + if handler is not None: + if action in handler.actions and handler.is_ready(): + log.trace( + f"Executing handler {name} for {definition.name} action {action} and stage {stage}" + ) + handler.execute( + action=action, + stage=stage, + deployment=deployment, + definition=definition, + working_dir=working_dir, + result=result, + ) + else: + log.trace(f"Handler {name} is not ready for action {action}") diff --git a/tfworker/handlers/exceptions.py b/tfworker/handlers/exceptions.py deleted file mode 100644 index 2ba40e5..0000000 --- a/tfworker/handlers/exceptions.py +++ /dev/null @@ -1,23 +0,0 @@ -class UnknownHandler(Exception): - """ - This is an excpetion that indicates configuration was attempted for a handler that is not supported. - """ - - def __init__(self, provider: str) -> None: - self.provider = provider - - def __str__(self) -> str: - return f"Unknown handler: {self.provider}" - - -class HandlerError(Exception): - """ - This is an exception that indicates an error occurred while attempting to execute a handler. - """ - - def __init__(self, message: str, terminate: bool = True) -> None: - self.message = message - self.terminate = terminate - - def __str__(self) -> str: - return f"Handler error: {self.message}" diff --git a/tfworker/handlers/registry.py b/tfworker/handlers/registry.py new file mode 100644 index 0000000..f8174c9 --- /dev/null +++ b/tfworker/handlers/registry.py @@ -0,0 +1,87 @@ +from typing import Callable, Dict, List + +from pydantic import BaseModel + +from tfworker.exceptions import HandlerError + +from .base import BaseHandler + + +class HandlerRegistry: + """ + All handlers must be registered in order to be available for selection in an execution + """ + + _registry = {} + _universal = [] + + @classmethod + def register(cls, name: str, always: bool = False) -> Callable: + """ + Class method to register handlers + """ + + def inner_wrapper(wrapped_class: BaseHandler) -> Callable: + if name in cls._registry: + raise ValueError(f"Handler {name} already exists") + cls._registry[name] = wrapped_class + if always: + cls._universal.append(name) + return wrapped_class + + return inner_wrapper + + @classmethod + def list_universal_handlers(cls) -> List[str]: + """ + get_universal_handlers returns all of the registered universal handlers + """ + return cls._universal + + @classmethod + def get_handler(cls, name: str) -> BaseHandler: + """ + get_handler returns a handler type that supports the provided name + """ + return cls._registry[name] + + @classmethod + def get_handlers(cls) -> Dict[str, BaseHandler]: + """ + get_handlers returns all of the registered handlers + """ + return cls._registry + + @classmethod + def get_handler_names(cls) -> List[str]: + """ + get_handler_names returns all of the registered handler names + """ + return list(cls._registry.keys()) + + @classmethod + def get_handler_config_model(cls, name: str) -> BaseModel: + """ + get_handler_config_model returns the config model for the handler + """ + try: + return cls._registry[name].config_model + except KeyError: + raise HandlerError(f"Handler {name} not found") + + @classmethod + def match_handler(cls, config_model: BaseModel) -> BaseHandler: + """ + match_handler returns the handler that matches 
the config model + """ + for handler in cls._registry.values(): + if handler.config_model == config_model: + return handler + raise ValueError("No handler found for config model") + + @classmethod + def make_handler(cls, name: str, config: BaseModel) -> BaseHandler: + """ + make_handler returns a new handler instance + """ + return cls._registry[name](config) diff --git a/tfworker/handlers/s3.py b/tfworker/handlers/s3.py new file mode 100644 index 0000000..27dda70 --- /dev/null +++ b/tfworker/handlers/s3.py @@ -0,0 +1,274 @@ +import json +from pathlib import Path +from typing import TYPE_CHECKING, Union +from uuid import uuid4 +from zipfile import ZipFile + +import boto3 +import botocore +import click + +import tfworker.util.log as log +from tfworker.backends import Backends +from tfworker.exceptions import HandlerError +from tfworker.types.terraform import TerraformAction, TerraformStage + +from .base import BaseConfig, BaseHandler +from .registry import HandlerRegistry + +if TYPE_CHECKING: + from tfworker.commands.terraform import TerraformResult + from tfworker.definitions.model import Definition + + +@HandlerRegistry.register("s3", always=True) +class S3Handler(BaseHandler): + """The S3Handler class is a handler for the s3 backend""" + + actions = [TerraformAction.PLAN, TerraformAction.APPLY] + config_model = BaseConfig + _ready = False + + def __init__(self, _: BaseConfig = None): + # defer properties until they are loaded + + self._bucket = None + self._prefix = None + self._s3_client = None + self._app_state = None + + self.execution_functions = { + TerraformAction.PLAN: { + TerraformStage.PRE: self._check_plan, + TerraformStage.POST: self._post_plan, + }, + TerraformAction.APPLY: { + TerraformStage.PRE: self._pre_apply, + }, + } + + @property + def bucket(self): + if self._bucket is None: + self._bucket = self.app_state.root_options.backend_bucket + return self._bucket + + @property + def prefix(self): + if self._prefix is None: + self._prefix = self.app_state.root_options.backend_prefix + return self._prefix + + @property + def s3_client(self): + if self._s3_client is None: + self._s3_client = self.app_state.authenticators[ + "aws" + ].backend_session.client("s3") + return self._s3_client + + @property + def app_state(self): + if self._app_state is None: + self._app_state = click.get_current_context().obj + return self._app_state + + def is_ready(self) -> bool: + """ + is_ready performs a test to ensure that the handler is able to perform + the required operations in s3 + """ + if self.app_state.root_options.backend != Backends.S3: + return False + if self.app_state.root_options.backend_plans is not True: + return False + if self.app_state.authenticators.get("aws") is None: + return False + + if self._ready is not True: + filename = str(uuid4().hex[:6].upper()) + if self.s3_client.list_objects( + Bucket=self.bucket, + Prefix=f"{self.prefix}/{filename}", + ).get("Contents"): + raise HandlerError( + f"Error initializing S3Handler, remote file already exists: {filename}" + ) + try: + self.s3_client.upload_file( + "/dev/null", + self._bucket, + f"{self.prefix}/{filename}", + ) + except boto3.exceptions.S3UploadFailedError as e: + raise HandlerError( + f"Error initializing S3Handler, could not create file: {e}" + ) + try: + self.s3_client.delete_object( + Bucket=self.bucket, + Key=f"{self.prefix}/{filename}", + ) + except boto3.exceptions.S3UploadFailedError as e: + raise HandlerError( + f"Error initializing S3Handler, could not delete file: {e}" + ) + self._ready = True + return 
self._ready + + def get_remote_file(self, name: str) -> str: + """get_remote_file returns the remote file path for a given name""" + return f"{self.prefix}/{name}/terraform.tfplan" + + def execute( + self, + action: "TerraformAction", + stage: "TerraformStage", + deployment: str, + definition: "Definition", + working_dir: str, + result: Union["TerraformResult", None] = None, + ) -> None: # pragma: no cover + # save a copy of the planfile to the backend state bucket + if action in self.execution_functions.keys(): + if stage in self.execution_functions[action].keys(): + self.execution_functions[action][stage]( + deployment=deployment, + definition=definition, + working_dir=working_dir, + result=result, + ) + + def _check_plan(self, deployment: str, definition: "Definition", **kwargs): + """check_plan runs while the plan is being checked, it should fetch a file from the backend and store it in the local location""" + # ensure planfile does not exist or is zero bytes if it does + remotefile = self.get_remote_file(definition.name) + statefile = f"{self.prefix}/{definition.name}/terraform.tfstate" + planfile = Path(definition.plan_file) + if planfile.exists(): + if planfile.stat().st_size == 0: + planfile.unlink() + else: + raise HandlerError(f"planfile already exists: {planfile}") + + if self._s3_get_plan(planfile.resolve(), remotefile): + if not planfile.exists(): + raise HandlerError(f"planfile not found after download: {planfile}") + # verify the lineage and serial from the planfile matches the statefile + if not self._verify_lineage(planfile, statefile): + log.warn( + f"planfile {remotefile} lineage does not match statefile, remote plan is unsuitable and will be removed" + ) + self._s3_delete_plan(remotefile) + planfile.unlink() + else: + log.info( + f"remote planfile downloaded: s3://{self.bucket}/{remotefile} -> {planfile}" + ) + return None + + def _post_plan(self, definition: str, result: "TerraformResult", **kwargs): + """ + post_plan runs after the plan is completed, it should upload the planfile to the backend + """ + planfile = Path(definition.plan_file) + logfile = planfile.with_suffix(".log") + remotefile = self.get_remote_file(definition.name) + remotelog = remotefile.replace(".tfplan", ".log") + + result.log_file(logfile.resolve()) + try: + if planfile.exists() and result.exit_code == 2: + if self._s3_put_plan(planfile, remotefile): + log.info( + f"remote planfile uploaded: {planfile} -> s3://{self.bucket}/{remotefile}" + ) + if self._s3_put_plan(logfile, remotelog): + log.debug( + f"remote logfile uploaded: {logfile} -> s3://{self.bucket}/{remotelog}" + ) + return None + except Exception as e: + raise HandlerError(f"Error uploading planfile: {e}") + finally: + logfile.unlink() + + def _pre_apply(self, definition: "Definition", **kwargs): + """_pre_apply runs before the apply is started, it should remove the planfile from the backend""" + remotefile = self.get_remote_file(definition.name) + remotelog = remotefile.replace(".tfplan", ".log") + if self._s3_delete_plan(remotefile): + log.debug(f"remote planfile removed: s3://{self.bucket}/{remotefile}") + if self._s3_delete_plan(remotelog): + log.debug(f"remote logfile removed: s3://{self.bucket}/{remotelog}") + return None + + def _s3_get_plan(self, planfile: Path, remotefile: str) -> bool: + """_get_plan downloads the file from s3""" + # fetch the planfile from the backend + downloaded = False + try: + self.s3_client.download_file(self.bucket, remotefile, planfile) + # make sure the local file exists, and is greater than 0 bytes + 
downloaded = True + except botocore.exceptions.ClientError as e: + if e.response["Error"]["Code"] == "404": + log.debug(f"remote plan {remotefile} not found") + pass + else: + raise HandlerError(f"Error downloading planfile: {e}") + return downloaded + + def _s3_put_plan(self, planfile: Path, remotefile: str) -> bool: + """_put_plan uploads the file to s3""" + uploaded = False + # don't upload empty plans + if planfile.stat().st_size == 0: + return uploaded + try: + self.s3_client.upload_file(str(planfile), self.bucket, remotefile) + uploaded = True + except botocore.exceptions.ClientError as e: + raise HandlerError(f"Error uploading planfile: {e}") + return uploaded + + def _s3_delete_plan(self, remotefile: str) -> bool: + """_delete_plan removes a remote plan file""" + deleted = False + try: + self.s3_client.delete_object(Bucket=self.bucket, Key=remotefile) + deleted = True + except botocore.exceptions.ClientError as e: + raise HandlerError(f"Error deleting planfile: {e}") + return deleted + + def _verify_lineage(self, planfile: Path, statefile: str) -> bool: + # load the statefile as a json object from the backend + state = None + try: + state = json.loads( + self.s3_client.get_object(Bucket=self.bucket, Key=statefile)[ + "Body" + ].read() + ) + except botocore.exceptions.ClientError as e: + raise HandlerError(f"Error downloading statefile: {e}") + + # load the planfile as a json object + plan = None + try: + with ZipFile(str(planfile), "r") as zip: + with zip.open("tfstate") as f: + plan = json.loads(f.read()) + except Exception as e: + raise HandlerError(f"Error loading planfile: {e}") + + # compare the lineage and serial from the planfile to the statefile + if not (state and plan): + return False + if state["serial"] != plan["serial"]: + return False + if state["lineage"] != plan["lineage"]: + return False + + return True diff --git a/tfworker/handlers/trivy.py b/tfworker/handlers/trivy.py index da503d1..6cb9749 100644 --- a/tfworker/handlers/trivy.py +++ b/tfworker/handlers/trivy.py @@ -1,63 +1,76 @@ import os from pathlib import Path +from typing import TYPE_CHECKING, Union -import click +from pydantic import BaseModel + +import tfworker.util.log as log +from tfworker.exceptions import HandlerError +from tfworker.types.terraform import TerraformAction, TerraformStage from ..util.system import pipe_exec, strip_ansi from .base import BaseHandler -from .exceptions import HandlerError - - +from .registry import HandlerRegistry + +if TYPE_CHECKING: + from tfworker.commands.terraform import TerraformResult + from tfworker.definitions.model import Definition + + +class TrivyConfig(BaseModel): + args: dict = {} + cache_dir: str = "/tmp/trivy_cache" + debug: bool = False + exit_code: str = "1" + format: str = None + handler_debug: bool = False + path: str = "/usr/bin/trivy" + quiet: bool = True + required: bool = False + severity: str = "HIGH,CRITICAL" + skip_dirs: list = ["**/examples"] + template: str = ( + '\'ERRORS: {{ range . }}{{ range .Misconfigurations}}{{ .Severity }} - {{ .ID }} - {{ .AVDID }} - {{ .Title -}} - {{ .Description }} - {{ .Message }} - {{ .Resolution }} - {{ .PrimaryURL }} - {{ range .References }}{{ . 
}}{{ end }}{{ "\\n" }}{{ end }}{{ "\\n" }}{{ end }}{{ "\\n" }}\''
+    )
+    skip_planfile: bool = False
+    skip_definition: bool = False
+    stream_output: bool = True
+
+
+@HandlerRegistry.register("trivy")
 class TrivyHandler(BaseHandler):
     """
     The TrivyHandler will execute a trivy scan on a specified terraform plan file
     """
 
-    actions = ["plan"]
-
-    defaults = {
-        "args": {},
-        "cache_dir": "/tmp/trivy_cache",
-        "debug": False,
-        "exit_code": "1",
-        "format": None,
-        "handler_debug": False,
-        "path": "/usr/bin/trivy",
-        "quiet": True,
-        "required": False,
-        "severity": "HIGH,CRITICAL",
-        "skip_dirs": ["**/examples"],
-        "template": '\'ERRORS: {{ range . }}{{ range .Misconfigurations}}{{ .Severity }} - {{ .ID }} - {{ .AVDID }} - {{ .Title -}} - {{ .Description }} - {{ .Message }} - {{ .Resolution }} - {{ .PrimaryURL }} - {{ range .References }}{{ . }}{{ end }}{{ "\\n" }}{{ end }}{{ "\\n" }}{{ end }}{{ "\\n" }}\'',
-        "skip_planfile": False,
-        "skip_definition": False,
-        "stream_output": True,
-    }
-
-    def __init__(self, kwargs):
-        self._ready = False
+    actions = [TerraformAction.PLAN]
+    config_model = TrivyConfig
+    _ready = False
 
+    def __init__(self, config: BaseModel) -> None:
         # configure the handler
-        for k in self.defaults:
-            if k in kwargs:
-                setattr(self, f"_{k}", kwargs[k])
-            else:
-                setattr(self, f"_{k}", self.defaults[k])
+        for k in config.model_fields:
+            setattr(self, f"_{k}", getattr(config, k))
 
         # ensure trivy is runnable
         if not self._trivy_runable(self._path):
+            if self._required:
+                raise HandlerError(
+                    f"Trivy is not runnable at {self._path}", terminate=True
+                )
             raise HandlerError(f"Trivy is not runnable at {self._path}")
         self._ready = True
 
     def execute(
         self,
-        action: str,
-        stage: str,
-        planfile: str = None,
-        definition_path: Path = None,
-        changes: bool = False,
-        **kwargs,
-    ) -> None:
+        action: "TerraformAction",
+        stage: "TerraformStage",
+        deployment: str,
+        definition: "Definition",
+        working_dir: str,
+        result: Union["TerraformResult", None] = None,
+    ) -> None:  # pragma: no cover
         """execute is called when a handler should trigger, if this is run post plan
         and there are changes, a scan will be executed
 
@@ -73,7 +86,8 @@ def execute(
             None
         """
         # pre plan; trivy scan the definition if its applicable
-        if action == "plan" and stage == "pre":
+        definition_path = definition.get_target_path(working_dir=working_dir)
+        if action == TerraformAction.PLAN and stage == TerraformStage.PRE:
             if definition_path is None:
                 raise HandlerError(
                     "definition_path is not provided, can't scan",
@@ -81,16 +95,19 @@
                 )
 
             if self._skip_definition:
-                click.secho("Skipping trivy scan of definition", fg="yellow")
+                log.info(f"Skipping trivy scan of definition: {definition_path}")
                 return None
 
-            click.secho(
-                f"scanning definition with trivy: {definition_path}", fg="green"
-            )
+            log.info(f"scanning definition with trivy: {definition_path}")
            self._scan(definition_path)
 
         # post plan; trivy scan the planfile if its applicable
-        if action == "plan" and stage == "post" and changes:
+        if (
+            action == TerraformAction.PLAN
+            and stage == TerraformStage.POST
+            and result.has_changes()
+        ):
+            planfile = definition.plan_file
             if planfile is None:
                 raise HandlerError(
                     "planfile is not provided, can't scan", terminate=self._required
@@ -103,10 +120,10 @@
                 )
 
             if self._skip_planfile:
-                click.secho("Skipping trivy scan of planfile", fg="yellow")
+                log.info(f"Skipping trivy scan of planfile: {planfile}")
                 return None
 
-            click.secho(f"scanning planfile with trivy: {planfile}", fg="green")
+            log.info(f"scanning 
planfile with trivy: {planfile}") self._scan(definition_path, planfile) def _scan(self, definition_path: Path, planfile: Path = None): @@ -167,7 +184,7 @@ def _scan(self, definition_path: Path, planfile: Path = None): try: if self._debug: - click.secho(f"cmd: {' '.join(trivy_args)}", fg="yellow") + log.debug(f"cmd: {' '.join(trivy_args)}") (exit_code, stdout, stderr) = pipe_exec( f"{' '.join(trivy_args)}", cwd=str(definition_path), @@ -190,14 +207,14 @@ def _handle_results(self, exit_code, stdout, stderr, planfile): None """ if exit_code != 0: - click.secho(f"trivy scan failed with exit code {exit_code}", fg="red") + log.error(f"trivy scan failed with exit code {exit_code}") if self._stream_output is False: - click.secho(strip_ansi(f"stdout: {stdout.decode('UTF-8')}"), fg="red") - click.secho(strip_ansi(f"stderr: {stderr.decode('UTF-8')}"), fg="red") + log.error(strip_ansi(f"stdout: {stdout.decode('UTF-8')}")) + log.error(strip_ansi(f"stderr: {stderr.decode('UTF-8')}")) if self._required: if planfile is not None: - click.secho(f"Removing planfile: {planfile}", fg="yellow") + log.warn(f"Removing planfile: {planfile}") os.remove(planfile) raise HandlerError( "trivy scan required; aborting execution", terminate=True diff --git a/tfworker/providers/__init__.py b/tfworker/providers/__init__.py new file mode 100644 index 0000000..292c32b --- /dev/null +++ b/tfworker/providers/__init__.py @@ -0,0 +1,11 @@ +from .base import BaseProvider # pragma: no cover # noqa +from .collection import ProvidersCollection # pragma: no cover # noqa +from .generic import GenericProvider # pragma: no cover # noqa +from .google import GoogleProvider # pragma: no cover # noqa +from .google_beta import GoogleBetaProvider # pragma: no cover # noqa +from .model import ( # pragma: no cover # noqa + Provider, + ProviderConfig, + ProviderGID, + ProviderRequirements, +) diff --git a/tfworker/providers/base.py b/tfworker/providers/base.py index a7d7143..e5db041 100644 --- a/tfworker/providers/base.py +++ b/tfworker/providers/base.py @@ -1,24 +1,17 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from tfworker.types import ProviderConfig, ProviderGID +from typing import TYPE_CHECKING + +from pydantic import GetCoreSchemaHandler +from pydantic_core import CoreSchema, core_schema + +if TYPE_CHECKING: + from .model import ProviderConfig, ProviderGID class BaseProvider: tag = None requires_auth = False - def __init__(self, config: ProviderConfig) -> None: + def __init__(self, config: "ProviderConfig") -> None: self.vars = config.vars or {} self.config_blocks = config.config_blocks or {} self.version = config.requirements.version @@ -29,7 +22,7 @@ def __str__(self): return self.tag @property - def gid(self) -> ProviderGID: + def gid(self) -> "ProviderGID": from tfworker.util.terraform import get_provider_gid_from_source return get_provider_gid_from_source(self.source) @@ -66,7 +59,7 @@ def hcl(self) -> str: result.append(self._hclify(self.config_blocks[k], depth=4)) result.append(" }") - result.append("}") + result.append("}\n") return "\n".join(result) def required(self): @@ -75,7 +68,7 @@ def required(self): f" {self.tag} = {{", f' source = "{self.source}"', f' version = "{self.version}"', - " }", + " }\n", ] ) @@ -116,11 +109,11 @@ def _hclify(self, s, depth=4): return "\n".join(result) - -def validate_backend_region(state): - """ - validate_backend_region validates that a statefile - was previously used in the region the current - deployment is being created for - """ - raise NotImplementedError("validate_backend_region is not implemented") + @classmethod + def __get_pydantic_core_schema__( + cls, _, handler: GetCoreSchemaHandler + ) -> CoreSchema: + """ + Allow this class to be used as a Pydantic model type + """ + return core_schema.is_instance_schema(cls) diff --git a/tfworker/providers/collection.py b/tfworker/providers/collection.py new file mode 100644 index 0000000..dcd07eb --- /dev/null +++ b/tfworker/providers/collection.py @@ -0,0 +1,149 @@ +import copy +import threading +from collections.abc import Mapping +from typing import TYPE_CHECKING, Dict, List + +from pydantic import GetCoreSchemaHandler, ValidationError +from pydantic_core import CoreSchema, core_schema + +import tfworker.util.log as log +from tfworker.exceptions import FrozenInstanceError + +if TYPE_CHECKING: + from tfworker.providers.model import Provider + + +class ProvidersCollection(Mapping): + _instance = None + _lock = threading.Lock() + _frozen: bool = False + + @classmethod + def get_named_providers(cls): + from .google import GoogleProvider + from .google_beta import GoogleBetaProvider + + NAMED_PROVIDERS = [GoogleProvider, GoogleBetaProvider] + return NAMED_PROVIDERS + + def __new__(cls, *args, **kwargs): + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self, providers_odict=None, authenticators: Dict = dict()): + if not hasattr(self, "_initialized"): + from .generic import GenericProvider + from .model import Provider, ProviderConfig + + provider_map = dict( + [(prov.tag, prov) for prov in ProvidersCollection.get_named_providers()] + ) + self._providers = copy.deepcopy(providers_odict) if providers_odict else {} + for k, v in self._providers.items(): + try: + config = ProviderConfig.model_validate(v) + except ValidationError as e: + e.ctx = ("provider", k) + raise e + + if k in provider_map: + obj = provider_map[k](config) + else: + obj = GenericProvider(config, tag=k) + + if obj.requires_auth: + obj.add_authenticators(authenticators) + + log.trace(f"Adding provider {k} to providers collection") + self._providers[k] = 
Provider.model_validate( + {"name": k, "obj": obj, "config": config} + ) + log.trace( + f"Provider Attributes: Name:{self._providers[k].name}, GID:{self._providers[k].gid}, Class:{type(self._providers[k].obj)}, Config:{self._providers[k].config}" + ) + self._initialized = True + + def __len__(self): + return len(self._providers) + + def __getitem__(self, key: str) -> "Provider": + return self._providers[key] + + def __iter__(self): + return iter(self._providers) + + def __str__(self): + return str([f"{x.name}: {str(x.gid)}" for x in self._providers.values()]) + + def __setitem__(self, key, value): + if self._frozen: + raise FrozenInstanceError("Cannot modify a frozen instance.") + self._providers[key] = value + + def freeze(self): + self._frozen = True + + @classmethod + def delete_instance(cls): + cls._instance = None + + @classmethod + def __get_pydantic_core_schema__( + cls, _, handler: GetCoreSchemaHandler + ) -> CoreSchema: + return core_schema.no_info_after_validator_function(cls, handler(dict)) + + def items(self): + return self._providers.items() + + def keys(self): + return self._providers.keys() + + def provider_hcl(self, includes: List[str] = None) -> str: + """ + Returns a string of HCL code for the specified providers. + + If no providers are specified, HCL code for all providers is returned. + + Args: + includes (List[str], optional): List of provider keys to include. + Defaults to None, which includes all providers. + + Returns: + str: HCL code for the specified providers. + """ + if includes is None: + includes = list(self._providers.keys()) + + return "\n".join( + [prov.obj.hcl() for k, prov in self._providers.items() if k in includes] + ) + + def required_hcl(self, includes: List[str] = None) -> str: + """ + Returns a string of HCL code for the terraform "required" block for the specified providers. + + If no providers are specified, HCL code for all providers is returned. + + Args: + includes (List[str], optional): List of provider keys to include. + Defaults to None, which includes all providers. + + Returns: + str: HCL code for the specified providers. + """ + if includes is None: + includes = list(self._providers.keys()) + + return_str = " required_providers {\n" + return_str += "\n".join( + [ + prov.obj.required() + for k, prov in self._providers.items() + if k in includes + ] + ) + return_str += "\n }\n" + return return_str diff --git a/tfworker/providers/generic.py b/tfworker/providers/generic.py index 78ecc82..47f6b58 100644 --- a/tfworker/providers/generic.py +++ b/tfworker/providers/generic.py @@ -1,19 +1,5 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
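To make the HCL generation concrete, here is roughly what the collection produces for a single generic provider; the version number is invented and the exact indentation comes from BaseProvider.required():

from tfworker.providers import ProvidersCollection

providers = ProvidersCollection(
    providers_odict={
        "aws": {"requirements": {"version": "5.54.0", "source": "hashicorp/aws"}}
    }
)
print(providers.required_hcl())
#   required_providers {
#     aws = {
#       source = "hashicorp/aws"
#       version = "5.54.0"
#     }
#   }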
- -from tfworker.providers.base import BaseProvider -from tfworker.types import ProviderConfig +from .base import BaseProvider +from .model import ProviderConfig class GenericProvider(BaseProvider): diff --git a/tfworker/providers/google.py b/tfworker/providers/google.py index 2cb5905..5362c1c 100644 --- a/tfworker/providers/google.py +++ b/tfworker/providers/google.py @@ -1,20 +1,10 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from tfworker.authenticators import AuthenticatorsCollection -from tfworker.providers.base import BaseProvider -from tfworker.types import ProviderConfig +from typing import TYPE_CHECKING + +from .base import BaseProvider +from .model import ProviderConfig + +if TYPE_CHECKING: + from tfworker.authenticators import AuthenticatorsCollection class GoogleProvider(BaseProvider): @@ -26,7 +16,7 @@ def __init__(self, body: ProviderConfig): self._authenticator = None - def add_authenticators(self, authenticators: AuthenticatorsCollection): + def add_authenticators(self, authenticators: "AuthenticatorsCollection"): self._authenticator = authenticators.get(self.tag) # if there is a creds file, tuck it into the provider vars diff --git a/tfworker/providers/google_beta.py b/tfworker/providers/google_beta.py index a6a2fa6..1634d98 100644 --- a/tfworker/providers/google_beta.py +++ b/tfworker/providers/google_beta.py @@ -1,18 +1,4 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
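The google provider above illustrates the import pattern applied throughout this changeset: imports that would otherwise create cycles move under TYPE_CHECKING and the annotations become strings. In generic form (the class name here is a placeholder):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # only evaluated by type checkers, so the circular import never happens at runtime
    from tfworker.authenticators import AuthenticatorsCollection


class ExampleProvider:
    tag = "example"

    def add_authenticators(self, authenticators: "AuthenticatorsCollection") -> None:
        # the quoted annotation is resolved lazily; at runtime the object is simply used as passed
        self._authenticator = authenticators.get(self.tag)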
- -from tfworker.providers.google import GoogleProvider +from .google import GoogleProvider class GoogleBetaProvider(GoogleProvider): diff --git a/tfworker/providers/model.py b/tfworker/providers/model.py new file mode 100644 index 0000000..2e4e74d --- /dev/null +++ b/tfworker/providers/model.py @@ -0,0 +1,91 @@ +from typing import Any, Dict, Optional + +from pydantic import BaseModel, ConfigDict, model_validator + +from tfworker.constants import ( + TF_PROVIDER_DEFAULT_HOSTNAME, + TF_PROVIDER_DEFAULT_NAMESPACE, +) + + +class ProviderRequirements(BaseModel): + model_config = ConfigDict(extra="forbid") + + version: str + source: Optional[str] = None + + +class ProviderConfig(BaseModel): + model_config = ConfigDict(extra="forbid") + + requirements: ProviderRequirements + vars: Optional[Dict[str, Any]] = None + config_blocks: Optional[Dict[str, Any]] = None + + +class ProviderGID(BaseModel): + """ + The provider global identifier + """ + + model_config = ConfigDict(extra="forbid") + + hostname: Optional[str] = TF_PROVIDER_DEFAULT_HOSTNAME + namespace: Optional[str] = TF_PROVIDER_DEFAULT_NAMESPACE + type: str + + def __str__(self): + return f"{self.hostname}/{self.namespace}/{self.type}" + + +class Provider(BaseModel): + model_config = ConfigDict(extra="forbid", allow_arbitrary_types=True) + + name: str + gid: ProviderGID + config: ProviderConfig + obj: "BaseProvider" # noqa: F821 + + # When the model is created, the gid is created from requirements.source, or the name + @model_validator(mode="before") + @classmethod + def create_gid(cls, v: Dict[str, Any]) -> Dict[str, Any]: + if v.get("gid", None) is not None: + print("GID already exists") + return v + if v.get("config", None) is None: + raise ValueError("config is required for provider") + + if v["config"].requirements.source is None: + v["gid"] = ProviderGID(type=v["name"]) + return v + + # parse the source to get the hostname, namespace, and type + gid_parts = v["config"].requirements.source.split("/") + if len(gid_parts) > 4: + raise ValueError( + f"Invalid source for provider {v['name']}: {v.config.source}" + ) + if len(gid_parts) == 3: + v["gid"] = ProviderGID( + hostname=gid_parts[0], namespace=gid_parts[1], type=gid_parts[2] + ) + elif len(gid_parts) == 2: + v["gid"] = ProviderGID(namespace=gid_parts[0], type=gid_parts[1]) + else: + v["gid"] = ProviderGID(type=gid_parts[0]) + + return v + + def __str__(self): + return self.name + + +def init_forward_refs(): + # this is required to prevent circular imports + from tfworker.providers.base import BaseProvider # noqa: F401 + + Provider.model_rebuild() + + +init_forward_refs() diff --git a/tfworker/providers/providers_collection.py b/tfworker/providers/providers_collection.py deleted file mode 100644 index 24d7d9e..0000000 --- a/tfworker/providers/providers_collection.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
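The create_gid validator above turns requirements.source into a fully qualified provider address by filling in whatever parts are missing. Assuming the constants default to the public registry hostname and the hashicorp namespace, the pieces combine like this:

from tfworker.providers import ProviderGID

# only the type: hostname and namespace come from the defaults in tfworker.constants
print(ProviderGID(type="aws"))
# -> registry.terraform.io/hashicorp/aws   (assuming the default constants)

# a two part source such as "acme/rabbitmq" supplies namespace and type
print(ProviderGID(namespace="acme", type="rabbitmq"))
# -> registry.terraform.io/acme/rabbitmq

# a three part source supplies everything explicitly
print(ProviderGID(hostname="tf.example.com", namespace="acme", type="widget"))
# -> tf.example.com/acme/widget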
- -import collections -import copy -from typing import List - -from tfworker.providers.generic import GenericProvider -from tfworker.providers.google import GoogleProvider -from tfworker.providers.google_beta import GoogleBetaProvider -from tfworker.types import ProviderConfig - -NAMED_PROVIDERS = [GoogleProvider, GoogleBetaProvider] - - -class ProvidersCollection(collections.abc.Mapping): - def __init__(self, providers_odict, authenticators): - provider_map = dict([(prov.tag, prov) for prov in NAMED_PROVIDERS]) - self._providers = copy.deepcopy(providers_odict) - for k, v in self._providers.items(): - config = ProviderConfig.parse_obj(v) - - if k in provider_map: - self._providers[k] = provider_map[k](config) - if self._providers[k].requires_auth: - self._providers[k].add_authenticators(authenticators) - else: - self._providers[k] = GenericProvider(config, tag=k) - - def __len__(self): - return len(self._providers) - - def __getitem__(self, value): - if type(value) is int: - return self._providers[list(self._providers.keys())[value]] - return self._providers[value] - - def __iter__(self): - return iter(self._providers.values()) - - def __str__(self): - return str([f"{x.tag}: {str(x.gid)}" for x in self._providers.values()]) - - def keys(self): - return self._providers.keys() - - def provider_hcl(self, includes: List[str] = None) -> str: - """ - Returns a string of HCL code for the specified providers. - - If no providers are specified, HCL code for all providers is returned. - - Args: - includes (List[str], optional): List of provider keys to include. - Defaults to None, which includes all providers. - - Returns: - str: HCL code for the specified providers. - """ - if includes is None: - includes = list(self._providers.keys()) - - return "\n".join( - [prov.hcl() for k, prov in self._providers.items() if k in includes] - ) - - def required_hcl(self, includes: List[str] = None) -> str: - """ - Returns a string of HCL code for the terraform "required" block for the specified providers. - - If no providers are specified, HCL code for all providers is returned. - - Args: - includes (List[str], optional): List of provider keys to include. - Defaults to None, which includes all providers. - - Returns: - str: HCL code for the specified providers. 
- """ - if includes is None: - includes = list(self._providers.keys()) - - return_str = " required_providers {\n" - return_str += "\n".join( - [prov.required() for k, prov in self._providers.items() if k in includes] - ) - return_str += "\n }" - return return_str diff --git a/tfworker/types/__init__.py b/tfworker/types/__init__.py index 8dc00ab..28357d6 100644 --- a/tfworker/types/__init__.py +++ b/tfworker/types/__init__.py @@ -1,8 +1,4 @@ -from tfworker.types.cli_options import CLIOptionsRoot # noqa: F401 -from tfworker.types.json import JSONType # noqa: F401 -from tfworker.types.provider import ProviderConfig, Requirements # noqa: F401 -from tfworker.types.terraform import ( # noqa: F401 - ProviderGID, - TerraformAction, - TerraformStage, -) +from .config_file import ConfigFile, GlobalVars # pragma: no cover # noqa +from .freezable_basemodel import FreezableBaseModel # pragma: no cover # noqa +from .json import JSONType # pragma: no cover # noqa +from .terraform import TerraformAction, TerraformStage # pragma: no cover # noqa diff --git a/tfworker/types/cli_options.py b/tfworker/types/cli_options.py deleted file mode 100644 index 4152458..0000000 --- a/tfworker/types/cli_options.py +++ /dev/null @@ -1,163 +0,0 @@ -import os -from pathlib import Path -from typing import List, Optional, Union - -from pydantic import BaseModel, Field, field_validator, root_validator - -from tfworker import constants as const - - -class CLIOptionsRoot(BaseModel): - aws_access_key_id: Optional[str] = Field( - None, env="AWS_ACCESS_KEY_ID", description="AWS Access key" - ) - aws_secret_access_key: Optional[str] = Field( - None, env="AWS_SECRET_ACCESS_KEY", description="AWS access key secret" - ) - aws_session_token: Optional[str] = Field( - None, env="AWS_SESSION_TOKEN", description="AWS access key token" - ) - aws_role_arn: Optional[str] = Field( - None, - env="AWS_ROLE_ARN", - description="If provided, credentials will be used to assume this role (complete ARN)", - ) - aws_external_id: Optional[str] = Field( - None, - env="AWS_EXTERNAL_ID", - description="If provided, will be used to assume the role specified by --aws-role-arn", - ) - aws_region: str = Field( - const.DEFAULT_AWS_REGION, - env="AWS_DEFAULT_REGION", - description="AWS Region to build in", - ) - aws_profile: Optional[str] = Field( - None, env="AWS_PROFILE", description="The AWS/Boto3 profile to use" - ) - gcp_region: str = Field( - const.DEFAULT_GCP_REGION, env="GCP_REGION", description="Region to build in" - ) - gcp_creds_path: Optional[str] = Field( - None, - env="GCP_CREDS_PATH", - description="Relative path to the credentials JSON file for the service account to be used.", - ) - gcp_project: Optional[str] = Field( - None, - env="GCP_PROJECT", - description="GCP project name to which work will be applied", - ) - config_file: str = Field( - const.DEFAULT_CONFIG, - env="WORKER_CONFIG_FILE", - description="Path to the configuration file", - required=True, - ) - repository_path: str = Field( - const.DEFAULT_REPOSITORY_PATH, - env="WORKER_REPOSITORY_PATH", - description="The path to the terraform module repository", - required=True, - ) - backend: Optional[str] = Field( - None, - env="WORKER_BACKEND", - description="State/locking provider. 
One of: s3, gcs", - ) - backend_bucket: Optional[str] = Field( - None, - env="WORKER_BACKEND_BUCKET", - description="Bucket (must exist) where all terraform states are stored", - ) - backend_prefix: str = Field( - const.DEFAULT_BACKEND_PREFIX, - env="WORKER_BACKEND_PREFIX", - description="Prefix to use in backend storage bucket for all terraform states", - ) - backend_region: str = Field( - const.DEFAULT_AWS_REGION, - description="Region where terraform root/lock bucket exists", - ) - backend_use_all_remotes: bool = Field( - True, - env="WORKER_BACKEND_USE_ALL_REMOTES", - description="Generate remote data sources based on all definition paths present in the backend", - ) - create_backend_bucket: bool = Field( - True, description="Create the backend bucket if it does not exist" - ) - config_var: Optional[List[str]] = Field( - [], - description='key=value to be supplied as jinja variables in config_file under "var" dictionary, can be specified multiple times', - ) - working_dir: Optional[str] = Field( - None, - env="WORKER_WORKING_DIR", - description="Specify the path to use instead of a temporary directory, must exist, be empty, and be writeable, --clean applies to this directory as well", - ) - clean: Optional[bool] = Field( - None, - env="WORKER_CLEAN", - description="Clean up the temporary directory created by the worker after execution", - ) - backend_plans: bool = Field( - False, env="WORKER_BACKEND_PLANS", description="Store plans in the backend" - ) - - @root_validator(pre=True) - def set_default_clean(cls, values): - if values.get("working_dir") is not None: - if "clean" not in values or values["clean"] is None: - values["clean"] = False - else: - if "clean" not in values or values["clean"] is None: - values["clean"] = True - return values - - @field_validator("working_dir") - @classmethod - def validate_working_dir(cls, fpath: Union[str, None]) -> Union[str, None]: - """Validate the working directory path. - - Args: - fpath: Path to the working directory. - - Returns: - Path to the working directory. - - Raises: - ValueError: If the path does not exist, is not a directory, or is not empty. - """ - if fpath is None: - return - with Path(fpath) as wpath: - if not wpath.exists(): - raise ValueError(f"Working path {fpath} does not exist!") - if not wpath.is_dir(): - raise ValueError(f"Working path {fpath} is not a directory!") - if any(wpath.iterdir()): - raise ValueError(f"Working path {fpath} must be empty!") - return fpath - - @field_validator("gcp_creds_path") - @classmethod - def validate_gcp_creds_path(cls, fpath: Union[str, None]) -> Union[str, None]: - """Validate the GCP credentials path. - - Args: - fpath: Path to the GCP credentials file. - - Returns: - Fully resolved path to the GCP credentials file. - - Raises: - ValueError: If the path does not exist or is not a file. - """ - if fpath is None: - return - if not os.path.isabs(fpath): - fpath = os.path.abspath(fpath) - if os.path.isfile(fpath): - return fpath - raise ValueError(f"Path {fpath} is not a file!") diff --git a/tfworker/types/config_file.py b/tfworker/types/config_file.py new file mode 100644 index 0000000..f1887ea --- /dev/null +++ b/tfworker/types/config_file.py @@ -0,0 +1,51 @@ +from typing import Any, Dict, Optional + +from pydantic import ConfigDict, Field + +from .freezable_basemodel import FreezableBaseModel + + +class GlobalVars(FreezableBaseModel): + """ + Global Variables can be defined inside of the configuration file, this is a model for those variables. 
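A short sketch of the intended life cycle of this model: the commands module validates the parsed YAML into a ConfigFile, freezes it, and later code can only read from it. The example dict below is invented:

from tfworker.exceptions import FrozenInstanceError
from tfworker.types import ConfigFile

raw = {
    "definitions": {"network": {"path": "/definitions/network"}},
    "global_vars": {"terraform_vars": {"region": "us-east-1"}},
    "worker_options": {"backend": "s3"},
}

config = ConfigFile.model_validate(raw)
config.freeze()  # also freezes the nested GlobalVars model

try:
    config.worker_options = {}
except FrozenInstanceError:
    print("loaded config is read-only after freeze()")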
+ """ + + model_config = ConfigDict(extra="forbid") + + terraform_vars: Dict[str, str | bool] = Field( + {}, description="Variables to pass to terraform via a generated .tfvars file." + ) + remote_vars: Dict[str, str | bool] = Field( + {}, + description="Variables which are used to generate local references to remote state vars.", + ) + template_vars: Dict[str, str | bool] = Field( + {}, description="Variables which are suppled to any jinja templates." + ) + + +class ConfigFile(FreezableBaseModel): + """ + This model is used to validate and deserialize the configuration file. + """ + + model_config = ConfigDict(extra="forbid") + + definitions: Dict[str, Any] = Field( + {}, description="The definition configurations." + ) + global_vars: Optional[GlobalVars] = Field( + default_factory=GlobalVars, + description="Global variables that are used in the configuration file.", + ) + providers: Dict[str, Any] = Field({}, description="The provider configurations.") + worker_options: Dict[str, str | bool] = Field( + {}, description="The base worker options, overlaps with command line options" + ) + handlers: Dict[str, Any] = Field({}, description="The handler configurations.") + + def freeze(self): + super().freeze() + if self.global_vars: + self.global_vars.freeze() + return self diff --git a/tfworker/types/freezable_basemodel.py b/tfworker/types/freezable_basemodel.py new file mode 100644 index 0000000..cca1f06 --- /dev/null +++ b/tfworker/types/freezable_basemodel.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel, PrivateAttr + +from tfworker.exceptions import FrozenInstanceError + + +class FreezableBaseModel(BaseModel): + _is_frozen: bool = PrivateAttr(default=False) + + def __setattr__(self, name, value): + if self._is_frozen and name != "_is_frozen": + raise FrozenInstanceError(f"{self.__class__.__name__} is frozen") + super().__setattr__(name, value) + + def freeze(self): + object.__setattr__(self, "_is_frozen", True) diff --git a/tfworker/types/provider.py b/tfworker/types/provider.py deleted file mode 100644 index de1734f..0000000 --- a/tfworker/types/provider.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Any, Dict, Optional - -from pydantic import BaseModel - - -class Requirements(BaseModel): - version: str - source: Optional[str] = None - - -class ProviderConfig(BaseModel): - requirements: Requirements - vars: Optional[Dict[str, Any]] = None - config_blocks: Optional[Dict[str, Any]] = None diff --git a/tfworker/types/terraform.py b/tfworker/types/terraform.py index 44f800b..1c56bdc 100644 --- a/tfworker/types/terraform.py +++ b/tfworker/types/terraform.py @@ -1,12 +1,4 @@ from enum import Enum -from typing import Optional - -from pydantic import BaseModel - -from tfworker.constants import ( - TF_PROVIDER_DEFAULT_HOSTNAME, - TF_PROVIDER_DEFAULT_NAMESPACE, -) class TerraformAction(Enum): @@ -33,16 +25,3 @@ class TerraformStage(Enum): def __str__(self): return self.value - - -class ProviderGID(BaseModel): - """ - The provider global identifier - """ - - hostname: Optional[str] = TF_PROVIDER_DEFAULT_HOSTNAME - namespace: Optional[str] = TF_PROVIDER_DEFAULT_NAMESPACE - type: str - - def __str__(self): - return f"{self.hostname}/{self.namespace}/{self.type}" diff --git a/tfworker/util/cli.py b/tfworker/util/cli.py index 0b197e4..9aa5df2 100644 --- a/tfworker/util/cli.py +++ b/tfworker/util/cli.py @@ -1,13 +1,79 @@ import typing as t +from enum import Enum import click -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError from pydantic.fields import 
PydanticUndefined +import tfworker.util.log as log +from tfworker.util.system import get_platform + + +def handle_option_error(e: ValidationError) -> None: + """Handle a Pydantic validation error. + + Rather than raising a click.BackOptionUsage, this function will capture and report + all validation errors, instead of just the first one encountered. + + Args: + e: Pydantic validation error. + + Raises: + click.ClickBadOption: Pydantic validation error. + """ + error_message = ["options error(s):"] + try: + for error in e.errors(): + # pydantic adds "Value error," to the beginning of the error message, so we remove it + error_message.append( + f"{error['loc'][0]}: {error['msg'].split(',', 1)[1].strip()}" + ) + except IndexError: + error_message.append(str(e)) + + # use .format to work around python f-string limitation of not being able to use \n + # log.msg(f"{'\\n '.join(error_message)}", log.LogLevel.ERROR) + log.error("{}".format("\n ".join(error_message))) + click.get_current_context().exit(1) + + +def handle_config_error(e: ValidationError) -> None: + """Handle a Pydantic validation error. + + Args: + e: Pydantic validation error. + + Raises: + click.ClickBadOption: Pydantic validation error. + """ + if e.error_count() == 1: + error_message = ["config error:"] + else: + error_message = ["config errors:"] + + if hasattr(e, "ctx"): + error_message.append( + f"validation error while loading {e.ctx[0]} named {e.ctx[1]}" + ) + for error in e.errors(): + error_message.append(" Details:") + error_message.append(f" Error Type: {error['type']}") + error_message.append(f" Error Loc: {error['loc']}") + error_message.append(f" Error Msg: {error['msg']}") + error_message.append(f" Input Value: {error['input']}") + + # use .format to work around python f-string limitation of not being able to use \n + log.error("{}".format("\n ".join(error_message))) + click.get_current_context().exit(1) + def pydantic_to_click(pydantic_model: t.Type[BaseModel]) -> click.Command: """Convert a Pydantic model to a Click command. + There are some limitations on types that are supported, custom validation + needs done on the model for ENUM types in order to keep this generic enough + to be usable and easily extendable + Args: pydantic_model: Pydantic model to convert. 
@@ -17,10 +83,19 @@ def pydantic_to_click(pydantic_model: t.Type[BaseModel]) -> click.Command: def decorator(func): model_types = t.get_type_hints(pydantic_model) - for fname, fdata in pydantic_model.model_fields.items(): - description = fdata.description or "" + for fname, fdata in reversed(sorted(pydantic_model.model_fields.items())): default = fdata.default multiple = False + has_extra = fdata.json_schema_extra is not None + + c_option_kwargs = { + "help": fdata.description, + "required": fdata.is_required(), + } + + if has_extra and fdata.json_schema_extra.get("env"): + c_option_kwargs["envvar"] = fdata.json_schema_extra["env"] + c_option_kwargs["show_envvar"] = True if model_types[fname] in [str, t.Optional[str]]: option_type = click.STRING @@ -35,25 +110,64 @@ def decorator(func): multiple = True if default is PydanticUndefined: default = [] + elif isinstance(model_types[fname], type) and issubclass( + model_types[fname], Enum + ): + option_type = click.STRING else: raise ValueError(f"Unsupported type {model_types[fname]}") + c_option_kwargs["type"] = option_type + c_option_kwargs["multiple"] = multiple + c_option_kwargs["default"] = default c_option_args = [f"--{fname.replace('_', '-')}"] - c_option_kwargs = { - "help": description, - "default": default, - "type": option_type, - "required": fdata.is_required(), - "multiple": multiple, - } + if has_extra and fdata.json_schema_extra.get("short_arg"): + c_option_args.append(f"-{fdata.json_schema_extra['short_arg']}") if option_type == click.BOOL: c_option_args = [ f"--{fname.replace('_', '-')}/--no-{fname.replace('_', '-')}" ] del c_option_kwargs["type"] - + log.msg( + f'generated option "{fname}" with params {c_option_args}, {c_option_kwargs} from {fdata}', + log.LogLevel.TRACE, + ) func = click.option(*c_option_args, **c_option_kwargs)(func) return func return decorator + + +def validate_deployment(ctx, deployment, name): + """Validate the deployment is no more than 32 characters.""" + if len(name) > 32: + log.msg("deployment must be less than 32 characters", log.LogLevel.ERROR) + raise SystemExit(1) + if " " in name: + log.msg("deployment must not contain spaces", log.LogLevel.ERROR) + raise SystemExit(1) + return name + + +def validate_host() -> None: + """Ensure that the script is being run on a supported platform. + + Raises: + NotImplemented: If the script is being run on an unsupported platform. + """ + + supported_opsys = ["darwin", "linux"] + supported_machine = ["amd64", "arm64"] + + opsys, machine = get_platform() + message = [] + + if opsys not in supported_opsys: + message.append(f"running on {opsys} is not supported") + + if machine not in supported_machine: + message.append(f"running on {machine} is not supported") + + if message: + raise NotImplementedError("\n".join(message)) diff --git a/tfworker/util/copier.py b/tfworker/util/copier.py deleted file mode 100644 index 708e1c8..0000000 --- a/tfworker/util/copier.py +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright 2021-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License.import collections -import os -import re -import shutil -import tempfile -from abc import ABCMeta, abstractmethod, abstractstaticmethod -from pathlib import Path -from typing import Callable - -from tfworker.util.system import pipe_exec - - -class CopyFactory: - """The factory class for creating copiers""" - - registry = {} - - @classmethod - def register(cls, name: str) -> Callable: - """Class method to register copiers""" - - def inner_wrapper(wrapped_class: Copier) -> Callable: - if name in cls.registry: - raise ValueError(f"Executor {name} already exists") - cls.registry[name] = wrapped_class - return wrapped_class - - return inner_wrapper - - @classmethod - def create(cls, source: str, **kwargs) -> "Copier": - """create returns a copier type that supports handling the provided source""" - copier_class = cls.registry[cls.get_copier_type(source, **kwargs)] - copier = copier_class(source, **kwargs) - return copier - - @classmethod - def get_copier_type(cls, source: str, **kwargs) -> str: - """get_copier_type tries to find a supported copier based on the provided source""" - for copier_type, copier_class in cls.registry.items(): - if copier_class.type_match(source, **kwargs): - return copier_type - raise NotImplementedError(f"no valid copier for {source}") - - -class Copier(metaclass=ABCMeta): - """The base class for definition copiers""" - - def __init__(self, source: str, **kwargs): - self._source = source - self._kwargs = {} - - for k, v in kwargs.items(): - if k in ["conflicts", "destination", "root_path"]: - setattr(self, f"_{k}", v) - else: - self._kwargs[k] = v - - self._kwargs = kwargs - - if hasattr(self, "_conflicts"): - if type(self._conflicts) is not list: - raise ValueError("Conflicts must be a list of filenames to disallow") - - @abstractstaticmethod - def type_match(source: str, **kwargs) -> bool: # pragma: no cover - """type_match determins if the source is supported/handled by a copier""" - pass - - @abstractmethod - def copy(self, **kwargs) -> None: # pragma: no cover - """copy executes the copy from the source, into the working path""" - pass - - @property - def root_path(self): - """root_path returns an optional root path to use for relative file operations""" - if hasattr(self, "_root_path"): - return self._root_path - else: - return "" - - @property - def conflicts(self): - """conflicts returns a list of disallowed files""" - if hasattr(self, "_conflicts"): - return self._conflicts - else: - return [] - - @property - def source(self): - """source contains the source path providede""" - return self._source - - def get_destination(self, make_dir: bool = True, **kwargs) -> str: - """get_destination returns the destination path, and optionally makes the destinatination directory""" - if not (hasattr(self, "_destination") or "destination" in kwargs.keys()): - raise ValueError("no destination provided") - if "destination" in kwargs: - d = kwargs["destination"] - else: - d = self._destination - - if make_dir: - make_d = Path(d) - make_d.mkdir(parents=True, exist_ok=True) - - return d - - def check_conflicts(self, path: str) -> None: - """Checks for files with conflicting names in a path""" - conflicting = [] - if self.conflicts: - check_path = Path(path) - for check_file in check_path.glob("*"): - if check_file.name in self.conflicts: - conflicting.append(check_file.name) - - if conflicting: - raise FileExistsError(f"{','.join(conflicting)}") - - 
-@CopyFactory.register("git") -class GitCopier(Copier): - def copy(self, **kwargs) -> None: - """copy clones a remote git repo, and puts the requested files into the destination""" - dest = self.get_destination(**kwargs) - branch = "master" - git_cmd = "git" - git_args = "" - reset_repo = False - - sub_path = "" - if "sub_path" in kwargs: - sub_path = kwargs["sub_path"].strip("/") - - if "branch" in kwargs: - branch = kwargs["branch"] - if "git_cmd" in kwargs: - git_cmd = kwargs["git_cmd"] - if "git_args" in kwargs: - git_args = kwargs["git_args"] - if "reset_repo" in kwargs: - reset_repo = kwargs["reset_repo"] - - self.make_temp() - temp_path = f"{self._temp_dir}/{sub_path}" - exitcode, stdout, stderr = pipe_exec( - re.sub( - r"\s+", - " ", - f"{git_cmd} {git_args} clone {self._source} --branch {branch} --single-branch ./", - ), - cwd=self._temp_dir, - ) - - if exitcode != 0: - self.clean_temp() - raise RuntimeError( - f"unable to clone {self._source}, {stderr.decode('utf-8')}" - ) - - try: - self.check_conflicts(temp_path) - except FileExistsError as e: - self.clean_temp() - raise e - - if reset_repo: - self.repo_clean(f"{temp_path}") - - shutil.copytree(temp_path, dest, dirs_exist_ok=True) - self.clean_temp() - - @staticmethod - def type_match(source: str, **kwargs) -> bool: - # if the remote is a local file, then it's not a git repo - if os.path.exists(source): - return False - - """type matches uses git to see if the source is a valid git remote""" - git_cmd = "git" - git_args = "" - - if "git_cmd" in kwargs: - git_cmd = kwargs["git_cmd"] - if "git_args" in kwargs: - git_args = kwargs["git_args"] - - try: - (return_code, _, _) = pipe_exec(f"{git_cmd} {git_args} ls-remote {source}") - - except (PermissionError, FileNotFoundError): - return False - if return_code == 0: - return True - return False - - def make_temp(self) -> None: - if hasattr(self, "_temp_dir"): - pass - else: - self._temp_dir = tempfile.mkdtemp() - - def clean_temp(self) -> None: - """clean_temp removes the temporary path used by this copier""" - if hasattr(self, "_temp_dir"): - shutil.rmtree(self._temp_dir, ignore_errors=True) - del self._temp_dir - - @staticmethod - def repo_clean(p: str) -> None: - """repo_clean removes git and github files from a clone before doing the copy""" - for f in [".git", ".github"]: - try: - shutil.rmtree(f"{p}/{f}") - except FileNotFoundError: - pass - - -@CopyFactory.register("fs") -class FileSystemCopier(Copier): - def copy(self, **kwargs) -> None: - """copy copies files from a local source on the file system to a destination path""" - dest = self.get_destination(**kwargs) - self.check_conflicts(self.local_path) - source_path = f"{self.local_path}/{kwargs.get('sub_path', '')}".rstrip("/") - if not os.path.exists(source_path): - raise FileNotFoundError(f"{kwargs.get('sub_path')} does not exist") - shutil.copytree(source_path, dest, dirs_exist_ok=True) - - @property - def local_path(self): - """local_path returns a complete local file system path""" - if not hasattr(self, "_local_path"): - # try with the root path explicitly provided - local_path = self.make_local_path(self.source, self.root_path) - if os.path.exists(local_path): - self._local_path = local_path - return self._local_path - - # try without a root path (this is when an absolute path is provided) - local_path = self.make_local_path(self.source, "") - if os.path.exists(local_path): - self._local_path = local_path - return self._local_path - - if not hasattr(self, "_local_path"): - raise FileNotFoundError(f"unable to find 
{self.source}") - - return self._local_path - - @staticmethod - def type_match(source: str, **kwargs) -> bool: - # check if the source was provided as an absolute path - if os.path.isdir(source) or os.path.isfile(source): - return True - - # check if the source is relative to the root path - if "root_path" in kwargs: - source = FileSystemCopier.make_local_path(source, kwargs["root_path"]) - - if os.path.isdir(source) or os.path.isfile(source): - return True - - return False - - @staticmethod - def make_local_path(source: str, root_path: str) -> str: - """make_local_path appends together known path objects to provide a local path""" - full_path = f"{root_path}/{source}" - full_path = re.sub(r"/+", "/", full_path) - return full_path diff --git a/tfworker/util/hooks.py b/tfworker/util/hooks.py index 03683c2..bafe1f3 100644 --- a/tfworker/util/hooks.py +++ b/tfworker/util/hooks.py @@ -7,20 +7,21 @@ import os import re from enum import Enum -from typing import Any, Dict +from typing import TYPE_CHECKING, Any, Dict -import click - -from tfworker.backends import BaseBackend +import tfworker.util.log as log from tfworker.constants import ( TF_STATE_CACHE_NAME, WORKER_LOCALS_FILENAME, WORKER_TFVARS_FILENAME, ) from tfworker.exceptions import HookError -from tfworker.types import TerraformAction, TerraformStage +from tfworker.types.terraform import TerraformAction, TerraformStage from tfworker.util.system import pipe_exec +if TYPE_CHECKING: + from tfworker.backends.base import BaseBackend + class TFHookVarType(Enum): """ @@ -41,7 +42,7 @@ def get_state_item( terraform_bin: str, state: str, item: str, - backend: BaseBackend = None, + backend: "BaseBackend" = None, ) -> str: """ General handler function for getting a state item. First tries to get the item from another definition's output, @@ -62,12 +63,11 @@ def get_state_item( """ try: - click.secho(f"Getting state item {state}.{item} from output", fg="blue") + log.trace(f"Getting state item {state}.{item} from output") return _get_state_item_from_output(working_dir, env, terraform_bin, state, item) except FileNotFoundError: - click.secho( - f"Remote state not setup, falling back to getting state item {state}.{item} from remote", - fg="blue", + log.trace( + "Remote state not setup, falling back to getting state item from remote" ) return _get_state_item_from_remote(working_dir, env, terraform_bin, state, item) @@ -109,7 +109,7 @@ def _get_state_item_from_output( if exit_code != 0: raise HookError( - f"Error reading remote state item {state}.{item}, details: {stderr}" + f"Error reading remote state item {state}.{item}, details: {stderr.decode()}" ) if stdout is None: @@ -144,7 +144,6 @@ def check_hooks( """ hook_dir = f"{working_dir}/hooks" if not os.path.isdir(hook_dir): - # there is no hooks dir return False for f in os.listdir(hook_dir): if os.path.splitext(f)[0] == f"{phase}_{command}": @@ -339,7 +338,12 @@ def _set_hook_env_var( key = key.replace(k, v) for k, v in val_replace_items.items(): - value = value.replace(k, v) + if isinstance(value, str): + value = value.replace(k, v) + if isinstance(value, bytes): + value = value.decode().replace(k, v) + if isinstance(value, bool): + value = str(value).upper() if b64_encode: value = base64.b64encode(value.encode()) @@ -354,6 +358,7 @@ def _execute_hook_script( working_dir: str, local_env: Dict[str, str], debug: bool, + stream_output: bool = False, ) -> None: """ Executes the hook script and handles its output. 
@@ -371,16 +376,20 @@ def _execute_hook_script( """ hook_dir = os.path.join(working_dir, "hooks") exit_code, stdout, stderr = pipe_exec( - f"{hook_script} {phase} {command}", cwd=hook_dir, env=local_env + f"{hook_script} {phase} {command}", + cwd=hook_dir, + env=local_env, + stream_output=stream_output, ) if debug: - click.secho(f"Results from hook script: {hook_script}", fg="blue") - click.secho(f"exit code: {exit_code}", fg="blue") - for line in stdout.decode().splitlines(): - click.secho(f"stdout: {line}", fg="blue") - for line in stderr.decode().splitlines(): - click.secho(f"stderr: {line}", fg="red") + log.debug(f"Results from hook script: {hook_script}") + log.debug(f"exit code: {exit_code}") + if not stream_output: + for line in stdout.decode().splitlines(): + log.debug(f"stdout: {line}") + for line in stderr.decode().splitlines(): + log.debug(f"stderr: {line}") if exit_code != 0: raise HookError( diff --git a/tfworker/util/log.py b/tfworker/util/log.py new file mode 100644 index 0000000..86d420e --- /dev/null +++ b/tfworker/util/log.py @@ -0,0 +1,198 @@ +import re +from enum import Enum +from functools import partial +from typing import Any, Dict, List, Union + +from click import secho + +from tfworker.constants import REDACTED_ITEMS + + +class LogLevel(Enum): + TRACE = 0 + DEBUG = 1 + INFO = 2 + WARN = 3 + ERROR = 4 + + +log_level = LogLevel.ERROR + + +def log( + msg: Union[str | Dict[str, Any]], level: LogLevel = LogLevel.INFO, redact=False +) -> None: + """ + Out currently emits to stdout via click, but exists to easily + change to a python logger for alternate output options in the + future + + Args: + msg () + """ + global log_level + level_colors = { + log_level.TRACE: "cyan", + log_level.DEBUG: "blue", + log_level.INFO: "green", + log_level.WARN: "yellow", + log_level.ERROR: "red", + } + + if redact: + msg = redact_items_token(msg) + + if level.value >= log_level.value: + secho(msg, fg=level_colors[level]) + return + + +def redact_items_token( + items: Union[Dict[str, Any], str], redact: List[str] = REDACTED_ITEMS +) -> Union[Dict[str, Any], str]: + """ + Redact items from a dictionary or string using tokenization + + Args: + items (Union[Dict[str, Any], str]): Items to redact + redact (List[str]): List of items to redact + + Returns: + Union[Dict[str, Any], str]: Redacted items + + Raises: + ValueError: If passed an item that is not a dictionary or string + """ + + if isinstance(items, str): + """ + Redacting items from a string is a bit more complex, since the items + in the string could have many variations. For example, a key could be + followed by a space, a colon, an equals sign, or a tab. The value could + be enclosed in quotes or not. The value could be a string, a number, or + a boolean. This function will attempt to redact the items in the string + while preserving the structure of the string. 
+ """ + result = [] + i = 0 + while i < len(items): + # check if a key has been found + found = False + for key in redact: + # check if the key is in the items + if items[i : i + len(key)] == key: # noqa: E203 + found = True + result.append(key) + i += len(key) + # Include delimiters after the key (spaces, tabs, colons, equals signs) + while i < len(items) and items[i] in " \t=:": + result.append(items[i]) + i += 1 + # Check if the value is enclosed in quotes + if i < len(items) and items[i] in "\"'": + # handle quoted values + quote = items[i] + result.append(quote) + i += 1 + # skip chars until the closing quote + while i < len(items) and items[i] != quote: + i += 1 + result.append("REDACTED") + # include the closing quote if present + if i < len(items) and items[i] == quote: + result.append(quote) + i += 1 + else: + # handle unquoted values + while i < len(items) and items[i] not in " \t,:;\n": + i += 1 + result.append("REDACTED") + if not found: + # the token was not in the redact list + result.append(items[i]) + i += 1 + return "".join(result) + + elif isinstance(items, dict): + for k, v in items.items(): + if isinstance(v, dict): + items[k] = redact_items_token(v, redact) + elif k in redact: + items[k] = "REDACTED" + elif isinstance(v, str): + items[k] = redact_items_token(v, redact) + return items + + else: + raise ValueError("Items must be a dictionary or a string") + + +def redact_items_re( + items: Union[Dict[str, Any], str], redact: List[str] = REDACTED_ITEMS +) -> Union[Dict[str, Any], str]: + """ + Redact items from a dictionary or string using regex + + Args: + items (Union[Dict[str, Any], str]): Items to redact + redact (List[str]): List of items to redact + + Returns: + Union[Dict[str, Any], str]: Redacted items + + Raises: + ValueError: If passed an item that is not a dictionary or string + """ + if isinstance(items, str): + # The regex pattern is designed to match and redact sensitive information from a string, preserving the original key, delimiter, and quote style. + # + # Pattern Components: + # r'(' + '|'.join(re.escape(key) for key in redact) + r')': This part dynamically constructs a regex group that matches any of the keys specified in the 'redact' list. 're.escape' ensures that any special characters in the keys are treated as literals. + # + # (\s*[:=]\s*|\s+): This group matches the delimiter that follows the key. It accounts for zero or more spaces (\s*) followed by either a colon (:) or an equals sign (=), again followed by zero or more spaces. Alternatively, it matches one or more spaces (\s+), allowing for different styles of key-value separation. + # + # (["\']?): This optional group matches either a single quote ('), a double quote ("), or no quote at all, capturing the opening quote style if present. + # + # (.*?): This non-greedy group matches the value associated with the key. The non-greedy qualifier (?) ensures that it stops matching at the first instance of the following group, which is the closing quote or the end of the value. + # + # (\3): This group is a backreference to the third group, matching the same quote style as the opening quote to ensure the closing quote is identical. If the opening quote was absent, this group matches nothing. + # + # (?=\s|$): This positive lookahead asserts that the character following the value (or closing quote if present) is either a whitespace character (\s) or the end of the string ($). 
This ensures that the match ends at the correct point without consuming any characters, allowing for subsequent matches to proceed correctly. + # + # The 'pattern.sub(r'\1\2\3REDACTED\5', items)' call replaces the matched value with 'REDACTED', preserving the key, delimiter, and quote style. The replacement string uses backreferences (\1, \2, \3, \5) to reconstruct the original text around 'REDACTED'. + pattern = re.compile( + r"(" + + "|".join(re.escape(key) for key in redact) + + r')(\s*[:=]\s*|\s+)(["\']?)(.*?)(\3)(?=\s|$)', + re.IGNORECASE, + ) + return pattern.sub(r"\1\2\3REDACTED\5", items) + + elif isinstance(items, dict): + for k, v in items.items(): + if isinstance(v, dict): + items[k] = redact_items_re(v, redact) + elif k in redact: + items[k] = "REDACTED" + elif isinstance(v, str): + items[k] = redact_items_re(v, redact) + return items + + else: + raise ValueError("Items must be a dictionary or a string") + + +# Allow a non stuttering method when importing the library to print +msg = log + +# these partials allow easy logging at a level via: log.("message") +trace = partial(log, level=LogLevel["TRACE"]) +safe_trace = partial(log, level=LogLevel["TRACE"], redact=True) +debug = partial(log, level=LogLevel["DEBUG"]) +safe_debug = partial(log, level=LogLevel["DEBUG"], redact=True) +info = partial(log, level=LogLevel["INFO"]) +safe_info = partial(log, level=LogLevel["INFO"], redact=True) +warn = partial(log, level=LogLevel["WARN"]) +safe_warn = partial(log, level=LogLevel["WARN"], redact=True) +error = partial(log, level=LogLevel["ERROR"]) +safe_error = partial(log, level=LogLevel["ERROR"], redact=True) diff --git a/tfworker/util/system.py b/tfworker/util/system.py index ddca7bc..f45d3d9 100644 --- a/tfworker/util/system.py +++ b/tfworker/util/system.py @@ -1,17 +1,3 @@ -# Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import importlib.metadata import os import platform import re @@ -19,8 +5,6 @@ import subprocess from typing import Dict, List, Tuple, Union -import click - def strip_ansi(line: str) -> str: """ @@ -101,7 +85,7 @@ def pipe_exec( if lastloop and stream_output: popen_kwargs["stderr"] = subprocess.STDOUT - commands.append(subprocess.Popen(shlex.split(args[i]), **popen_kwargs)) + commands.append(subprocess.Popen(shlex.split(cmd_str), **popen_kwargs)) # close stdout on the command before we just added to allow recieving SIGPIPE commands[-2].stdout.close() @@ -128,7 +112,7 @@ def pipe_exec( # for a single command this will be the only command, for a pipeline reading from the # last command will trigger all of the commands, communicating through their pipes for line in iter(commands[-1].stdout.readline, ""): - click.secho(line.rstrip()) + print(line.rstrip()) stdout += line # for streaming output stderr will be included with stdout, there's no way to make @@ -154,45 +138,6 @@ def pipe_exec( return (returncode, stdout, stderr) -def which(program: str) -> Union[str, None]: - """ - A function to mimic the behavior of the `which` command in Unix-like systems. - - Args: - program (str): The program to search for in the PATH. - - Returns: - str: The full path to the program if found, otherwise None. - """ - - def is_exe(fpath): - return os.path.isfile(fpath) and os.access(fpath, os.X_OK) - - fpath, fname = os.path.split(program) - if fpath: - if is_exe(program): - return program - else: - for path in os.environ["PATH"].split(os.pathsep): - exe_file = os.path.join(path, program) - if is_exe(exe_file): - return exe_file - return None - - -def get_version() -> str: - """ - Get the version of the current package - - Returns: - str: The version of the package - """ - try: - return importlib.metadata.version("terraform-worker") - except importlib.metadata.PackageNotFoundError: - return "unknown" - - def get_platform() -> Tuple[str, str]: """ Returns a formatted operating system / architecture tuple that is consistent with common distribution creation tools. diff --git a/tfworker/util/terraform.py b/tfworker/util/terraform.py index 11bb8f1..1dfe219 100644 --- a/tfworker/util/terraform.py +++ b/tfworker/util/terraform.py @@ -1,93 +1,59 @@ # This file contains functions primarily used by the "TerraformCommand" class # the goal of moving these functions here is to reduce the responsibility of # the TerraformCommand class, making it easier to test and maintain -import pathlib import re -import shutil from functools import lru_cache from typing import Dict, List, Union import click +import tfworker.util.log as log import tfworker.util.terraform_helpers as tfhelpers from tfworker.constants import ( - DEFAULT_REPOSITORY_PATH, TF_PROVIDER_DEFAULT_HOSTNAME, TF_PROVIDER_DEFAULT_NAMESPACE, ) -from tfworker.providers.providers_collection import ProvidersCollection -from tfworker.types import ProviderGID +from tfworker.exceptions import TFWorkerException +from tfworker.providers import Provider, ProviderGID, ProvidersCollection from tfworker.util.system import pipe_exec -def prep_modules( - module_path: str, - target_path: str, - ignore_patterns: list[str] = None, - required: bool = False, -) -> None: - """This puts any terraform modules from the module path in place. By default - it will not generate an error if the module path is not found. If required - is set to True, it will raise an error if the module path is not found. 
- - Args: - module_path (str): The path to the terraform modules directory - target_path (str): The path to the target directory, /terraform-modules will be appended - ignore_patterns (list(str)): A list of patterns to ignore - required (bool): If the terraform modules directory is required - """ - if module_path == "": - module_path = f"{DEFAULT_REPOSITORY_PATH}/terraform-modules" - - module_path = pathlib.Path(module_path) - target_path = pathlib.Path(f"{target_path}/terraform-modules".replace("//", "/")) - - if not module_path.exists() and required: - click.secho( - f"The specified terraform-modules directory '{module_path}' does not exists", - fg="red", - ) - raise SystemExit(1) - - if not module_path.exists(): - return - - if ignore_patterns is None: - ignore_patterns = ["test", ".terraform", "terraform.tfstate*"] - - click.secho(f"copying modules from {module_path} to {target_path}", fg="yellow") - shutil.copytree( - module_path, - target_path, - symlinks=True, - ignore=shutil.ignore_patterns(*ignore_patterns), - ) - - @lru_cache -def get_terraform_version(terraform_bin: str) -> tuple[int, int]: +def get_terraform_version(terraform_bin: str, validation=False) -> tuple[int, int]: """ Get the terraform version and return the major and minor version. Args: terraform_bin (str): The path to the terraform binary. + validation (bool, optional): A boolean indicating if the function should raise an error if the version cannot be determined. Defaults to False. """ + # @TODO: instead of exiting, raise an error to handle it in the caller + def click_exit(): + log.error(f"unable to get terraform version from {terraform_bin} version") + click.get_current_context().exit(1) + + def validation_exit(): + raise ValueError( + f"unable to get terraform version from {terraform_bin} version" + ) + (return_code, stdout, stderr) = pipe_exec(f"{terraform_bin} version") if return_code != 0: - click.secho(f"unable to get terraform version\n{stderr}", fg="red") - raise SystemExit(1) + if validation: + validation_exit() + click_exit() version = stdout.decode("UTF-8").split("\n")[0] version_search = re.search(r".*\s+v(\d+)\.(\d+)\.(\d+)", version) if version_search: - click.secho( + log.debug( f"Terraform Version Result: {version}, using major:{version_search.group(1)}, minor:{version_search.group(2)}", - fg="yellow", ) return (int(version_search.group(1)), int(version_search.group(2))) else: - click.secho(f"unable to get terraform version\n{stderr}", fg="red") - raise SystemExit(1) + if validation: + validation_exit() + click_exit() def mirror_providers( @@ -102,22 +68,20 @@ def mirror_providers( working_dir (str): The working directory. cache_dir (str): The cache directory. 
""" - click.secho(f"Mirroring providers to {cache_dir}", fg="yellow") - tfhelpers._validate_cache_dir(cache_dir) + log.debug(f"Mirroring providers to {cache_dir}") try: with tfhelpers._write_mirror_configuration( providers, working_dir, cache_dir ) as temp_dir: - (return_code, stdout, stderr) = pipe_exec( + (return_code, _, stderr) = pipe_exec( f"{terraform_bin} providers mirror {cache_dir}", cwd=temp_dir, stream_output=True, ) if return_code != 0: - click.secho(f"Unable to mirror providers\n{stderr.decode()}", fg="red") - raise SystemExit(1) + raise TFWorkerException(f"Unable to mirror providers: {stderr}") except IndexError: - click.secho("All providers in cache", fg="yellow") + log.debug("All providers in cache") def generate_terraform_lockfile( @@ -139,21 +103,32 @@ def generate_terraform_lockfile( Union[None, str]: The content of the .terraform.lock.hcl file or None if any required providers are not in the cache """ lockfile = [] - click.secho( - f"Generating lockfile for providers: {included_providers or [x.tag for x in providers]}", - fg="yellow", + provider: Provider + + log.trace( + f"generating lockfile for providers: {included_providers or [x.name for x in providers.values()]}" ) - for provider in providers: - if tfhelpers._not_in_cache(provider.gid, provider.version, cache_dir): + for provider in providers.values(): + log.trace(f"checking provider {provider} / {provider.gid}") + if tfhelpers._not_in_cache( + provider.gid, provider.config.requirements.version, cache_dir + ): + log.trace( + f"Provider {provider.gid} not in cache, skipping lockfile generation" + ) return None - if included_providers is not None and provider.tag not in included_providers: + if included_providers is not None and provider.name not in included_providers: + log.trace( + f"Provider {provider.gid} not in included_providers, not adding to lockfile" + ) continue + log.trace(f"Provider {provider.gid} is in cache, adding to lockfile") lockfile.append(f'provider "{str(provider.gid)}" {{') - lockfile.append(f' version = "{provider.version}"') - lockfile.append(f' constraints = "{provider.version}"') + lockfile.append(f' version = "{provider.config.requirements.version}"') + lockfile.append(f' constraints = "{provider.config.requirements.version}"') lockfile.append(" hashes = [") for hash in tfhelpers._get_cached_hash( - provider.gid, provider.version, cache_dir + provider.gid, provider.config.requirements.version, cache_dir ): lockfile.append(f' "{hash}",') lockfile.append(" ]") @@ -197,10 +172,10 @@ def get_provider_gid_from_source(source: str) -> ProviderGID: return ProviderGID(hostname=hostname, namespace=namespace, type=ptype) -@lru_cache +# @lru_cache def find_required_providers( search_dir: str, -) -> Union[None, Dict[str, [Dict[str, str]]]]: +) -> Union[None, Dict[str, List[Dict[str, str]]]]: """ Find all the required providers in the search directory. 
diff --git a/tfworker/util/terraform_helpers.py b/tfworker/util/terraform_helpers.py index 8d43be7..da08545 100644 --- a/tfworker/util/terraform_helpers.py +++ b/tfworker/util/terraform_helpers.py @@ -2,18 +2,27 @@ import os import pathlib from tempfile import TemporaryDirectory -from typing import Dict, List, Union +from typing import TYPE_CHECKING, Dict, List -import click import hcl2 from lark.exceptions import UnexpectedToken +from packaging.specifiers import InvalidSpecifier, SpecifierSet -from tfworker.providers.providers_collection import ProvidersCollection -from tfworker.types import ProviderGID +import tfworker.util.log as log +from tfworker.exceptions import TFWorkerException from tfworker.util.system import get_platform +if TYPE_CHECKING: + from tfworker.providers.collection import ( # pragma: no cover # noqa: F401 + ProvidersCollection, + ) + from tfworker.providers.model import ( # pragma: no cover # noqa: F401 + ProviderGID, + ProviderRequirements, + ) -def _not_in_cache(gid: ProviderGID, version: str, cache_dir: str) -> bool: + +def _not_in_cache(gid: "ProviderGID", version: str, cache_dir: str) -> bool: """ Check if the provider is not in the cache directory. @@ -40,7 +49,7 @@ def _not_in_cache(gid: ProviderGID, version: str, cache_dir: str) -> bool: return False -def _get_cached_hash(gid: ProviderGID, version: str, cache_dir: str) -> str: +def _get_cached_hash(gid: "ProviderGID", version: str, cache_dir: str) -> str: """ Get the hash of the cached provider. @@ -65,7 +74,7 @@ def _get_cached_hash(gid: ProviderGID, version: str, cache_dir: str) -> str: def _write_mirror_configuration( - providers: ProvidersCollection, working_dir: str, cache_dir: str + providers: "ProvidersCollection", working_dir: str, cache_dir: str ) -> TemporaryDirectory: """ Write the mirror configuration to a temporary directory in the working directory. @@ -80,10 +89,16 @@ def _write_mirror_configuration( Raises: IndexError: If there are no providers to mirror. """ - includes = [x.tag for x in providers if _not_in_cache(x.gid, x.version, cache_dir)] + includes = [ + x.name + for x in providers.values() + if _not_in_cache(x.gid, x.config.requirements.version, cache_dir) + ] + if len(includes) == 0: raise IndexError("No providers to mirror") + log.info(f"mirroring providers: {', '.join(includes)}") mirror_configuration = _create_mirror_configuration( providers=providers, includes=includes ) @@ -95,7 +110,7 @@ def _write_mirror_configuration( def _create_mirror_configuration( - providers: ProvidersCollection, includes: List[str] = [] + providers: "ProvidersCollection", includes: List[str] = [] ) -> str: """ Generate a terraform configuration file with all of the providers @@ -108,32 +123,7 @@ def _create_mirror_configuration( return "\n".join(tf_string) -def _validate_cache_dir(cache_dir: str) -> None: - """ - Validate the cache directory, it should exist and be writable. - - Args: - cache_dir (str): The cache directory. 
- """ - cache_dir = pathlib.Path(cache_dir) - if not cache_dir.exists(): - click.secho(f"Cache directory {cache_dir} does not exist", fg="red") - raise SystemExit(1) - if not cache_dir.is_dir(): - click.secho(f"Cache directory {cache_dir} is not a directory", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.W_OK): - click.secho(f"Cache directory {cache_dir} is not writable", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.R_OK): - click.secho(f"Cache directory {cache_dir} is not readable", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.X_OK): - click.secho(f"Cache directory {cache_dir} is not executable", fg="red") - raise SystemExit(1) - - -def _get_provider_cache_dir(gid: ProviderGID, cache_dir: str) -> str: +def _get_provider_cache_dir(gid: "ProviderGID", cache_dir: str) -> str: """ Get the cache directory for a provider. @@ -147,9 +137,52 @@ def _get_provider_cache_dir(gid: ProviderGID, cache_dir: str) -> str: return pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type -def _parse_required_providers(content: dict) -> Union[None, Dict[str, Dict[str, str]]]: +def _find_required_providers( + search_dir: str, +) -> Dict[str, Dict[str, "ProviderRequirements"]]: + """ + Find all of the specified required providers in the search directory. + + Args: + search_dir (str): The directory to search for required providers. + + Returns: + Dict[str, Dict[str, ProviderRequirements]]: A dictionary of required providers. + """ + providers = {} + for root, _, files in os.walk(search_dir, followlinks=True): + for file in files: + if file.endswith(".tf"): + with open(f"{root}/{file}", "r") as f: + try: + content = hcl2.load(f) + except UnexpectedToken as e: + log.info( + f"not processing {root}/{file} for required providers; see debug output for HCL parsing errors" + ) + log.debug(f"HCL processing errors in {root}/{file}: {e}") + continue + _update_parsed_providers( + providers, _parse_required_providers(content) + ) + log.trace( + f"Found required providers: {[x for x in providers.keys()]} in {search_dir}" + ) + return providers + + +def _parse_required_providers(content: dict) -> Dict[str, "ProviderRequirements"]: + """ + Parse the required providers from the content. + + Args: + content (dict): The content to parse. + + Returns: + Dict[str, Dict[str, str]]: The required providers. + """ if "terraform" not in content: - return None + return {} providers = {} terraform_blocks = content["terraform"] @@ -159,25 +192,54 @@ def _parse_required_providers(content: dict) -> Union[None, Dict[str, Dict[str, for required_provider in block["required_providers"]: for k, v in required_provider.items(): providers[k] = v + return providers - if len(providers.keys()) == 0: - return None - return providers +def _update_parsed_providers(providers: dict, parsed_providers: dict): + """ + Update the providers with the parsed providers. + Args: + providers (dict): The providers to update. + parsed_providers (dict): The parsed providers to update with. 
-def _find_required_providers(search_dir: str) -> Dict[str, [Dict[str, str]]]: - providers = {} - for root, _, files in os.walk(search_dir, followlinks=True): - for file in files: - if file.endswith(".tf"): - with open(f"{root}/{file}", "r") as f: - try: - content = hcl2.load(f) - except UnexpectedToken as e: - click.secho(f"skipping {root}/{file}: {e}", fg="blue") - continue - new_providers = _parse_required_providers(content) - if new_providers is not None: - providers.update(new_providers) + Raises: + TFWorkerException: If there are conflicting sources for the same provider. + """ + for k, v in parsed_providers.items(): + if k not in providers: + new_provider = { + "source": v.get("source", ""), + "version": _get_specifier_set(v.get("version", "")), + } + providers[k] = new_provider + continue + if v.get("source") is not None and providers[k].get("source") is not None: + if v["source"] != providers[k]["source"]: + raise TFWorkerException( + f"provider {k} has conflicting sources: {v['source']} and {providers[k]['source']}" + ) + if v.get("version") is not None: + providers[k]["version"] = providers[k]["version"] & _get_specifier_set( + v["version"] + ) return providers + + +def _get_specifier_set(version: str) -> SpecifierSet: + """ + Get the SpecifierSet for the version. + + Args: + version (str): The version to get the SpecifierSet for. + + Returns: + SpecifierSet: The SpecifierSet for the version. + """ + try: + return SpecifierSet(version) + except InvalidSpecifier: + try: + return SpecifierSet(f"=={version}") + except InvalidSpecifier: + raise TFWorkerException(f"Invalid version specifier: {version}")
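
A rough usage sketch of the constraint-merging behavior introduced above in _update_parsed_providers() and _get_specifier_set(); the version strings below are invented for illustration and the helper is a simplified stand-in (it omits the TFWorkerException fallback), not the module's actual code:

from packaging.specifiers import InvalidSpecifier, SpecifierSet

def to_specifier(version: str) -> SpecifierSet:
    # Simplified mirror of _get_specifier_set(): accept a PEP 440 specifier,
    # otherwise treat the string as an exact version pin.
    try:
        return SpecifierSet(version)
    except InvalidSpecifier:
        return SpecifierSet(f"=={version}")

# Two definitions declaring the same provider with different constraints are
# merged by intersecting their specifier sets, as _update_parsed_providers() does.
merged = to_specifier(">=5.0") & to_specifier("<6.0")

print("5.31.0" in merged)  # True: satisfies both constraints
print("6.1.0" in merged)   # False: rejected by the "<6.0" constraint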
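
The new redaction helpers in tfworker/util/log.py can also be exercised directly; this is a hypothetical example with made-up keys and values, passing an explicit redact list rather than relying on the REDACTED_ITEMS default (whose contents live in tfworker.constants and are not shown in this diff):

from tfworker.util.log import redact_items_token

secrets = ["aws_secret_access_key"]

# String input: the key, delimiter, and quoting style are preserved, only the value is masked.
print(redact_items_token('aws_secret_access_key = "abc123" region = "us-east-1"', secrets))
# aws_secret_access_key = "REDACTED" region = "us-east-1"

# Dict input: matching keys are masked, nested dicts and string values are walked recursively.
print(redact_items_token({"aws_secret_access_key": "abc123", "region": "us-east-1"}, secrets))
# {'aws_secret_access_key': 'REDACTED', 'region': 'us-east-1'}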