From 4676f020931bf329a628c4a87718429d9ed0c180 Mon Sep 17 00:00:00 2001 From: Joel Klinger Date: Thu, 12 Oct 2023 16:03:06 +0100 Subject: [PATCH] [feature/PI-49-logging_with_step_chain] --- .pre-commit-config.yaml | 2 + poetry.lock | 193 +++++++++++++++++- pyproject.toml | 3 + src/api/createProduct/index.py | 37 ++++ src/api/createProduct/src/v1/steps.py | 9 + src/api/createProduct/tests/test_index.py | 14 ++ .../tests/test_placeholder_createProduct.py | 19 -- src/conftest.py | 26 +++ .../domain/response/error_collections.py | 1 + src/layers/event/environment/__init__.py | 25 +++ .../environment/tests/test_environment.py | 12 ++ src/layers/event/json/__init__.py | 20 ++ src/layers/event/json/errors.py | 2 + src/layers/event/json/test/test_json.py | 18 ++ src/layers/event/logging/log_reference.py | 12 ++ src/layers/event/logging/logger.py | 10 + src/layers/event/logging/models.py | 28 +++ src/layers/event/logging/step_decorators.py | 34 +++ .../event/logging/tests/test_log_reference.py | 14 ++ src/layers/event/logging/tests/test_logger.py | 25 +++ .../logging/tests/test_step_decorators.py | 124 +++++++++++ src/layers/event/placeholder.py | 2 - src/layers/event/response/steps.py | 10 + src/layers/event/step_chain/__init__.py | 71 +++++++ src/layers/event/step_chain/errors.py | 2 + .../step_chain/tests/test_frozen_dict.py | 15 ++ .../event/step_chain/tests/test_step_chain.py | 100 +++++++++ src/layers/event/step_chain/tests/utils.py | 10 + src/layers/event/step_chain/types.py | 42 ++++ .../event/tests/test_placeholder_event.py | 16 -- src/layers/event/versioning/constants.py | 6 + src/layers/event/versioning/errors.py | 2 + src/layers/event/versioning/models.py | 11 + src/layers/event/versioning/steps.py | 62 ++++++ src/layers/event/versioning/tests/__init__.py | 0 .../versioning/tests/example_api/__init__.py | 0 .../versioning/tests/example_api/index.py | 0 .../tests/example_api/src/v0/steps.py | 1 + .../tests/example_api/src/v1/steps.py | 1 + 
.../tests/example_api/src/v3/steps.py | 1 + .../event/versioning/tests/test_steps.py | 98 +++++++++ .../event/versioning/tests/test_steps_e2e.py | 43 ++++ 42 files changed, 1083 insertions(+), 38 deletions(-) create mode 100644 src/api/createProduct/src/v1/steps.py create mode 100644 src/api/createProduct/tests/test_index.py delete mode 100644 src/api/createProduct/tests/test_placeholder_createProduct.py create mode 100644 src/layers/domain/response/error_collections.py create mode 100644 src/layers/event/environment/__init__.py create mode 100644 src/layers/event/environment/tests/test_environment.py create mode 100644 src/layers/event/json/__init__.py create mode 100644 src/layers/event/json/errors.py create mode 100644 src/layers/event/json/test/test_json.py create mode 100644 src/layers/event/logging/log_reference.py create mode 100644 src/layers/event/logging/logger.py create mode 100644 src/layers/event/logging/models.py create mode 100644 src/layers/event/logging/step_decorators.py create mode 100644 src/layers/event/logging/tests/test_log_reference.py create mode 100644 src/layers/event/logging/tests/test_logger.py create mode 100644 src/layers/event/logging/tests/test_step_decorators.py delete mode 100644 src/layers/event/placeholder.py create mode 100644 src/layers/event/response/steps.py create mode 100644 src/layers/event/step_chain/__init__.py create mode 100644 src/layers/event/step_chain/errors.py create mode 100644 src/layers/event/step_chain/tests/test_frozen_dict.py create mode 100644 src/layers/event/step_chain/tests/test_step_chain.py create mode 100644 src/layers/event/step_chain/tests/utils.py create mode 100644 src/layers/event/step_chain/types.py delete mode 100644 src/layers/event/tests/test_placeholder_event.py create mode 100644 src/layers/event/versioning/constants.py create mode 100644 src/layers/event/versioning/errors.py create mode 100644 src/layers/event/versioning/models.py create mode 100644 src/layers/event/versioning/steps.py 
create mode 100644 src/layers/event/versioning/tests/__init__.py create mode 100644 src/layers/event/versioning/tests/example_api/__init__.py create mode 100644 src/layers/event/versioning/tests/example_api/index.py create mode 100644 src/layers/event/versioning/tests/example_api/src/v0/steps.py create mode 100644 src/layers/event/versioning/tests/example_api/src/v1/steps.py create mode 100644 src/layers/event/versioning/tests/example_api/src/v3/steps.py create mode 100644 src/layers/event/versioning/tests/test_steps.py create mode 100644 src/layers/event/versioning/tests/test_steps_e2e.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 823521b1a..b479c1350 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -78,6 +78,7 @@ repos: entry: json\.loads language: pygrep types: [python] + exclude: src/layers/event/json/__init__.py - repo: local hooks: @@ -86,6 +87,7 @@ repos: entry: json\.load language: pygrep types: [python] + exclude: src/layers/event/json/__init__.py - repo: local hooks: diff --git a/poetry.lock b/poetry.lock index a6c778124..b79468d4e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,47 @@ files = [ {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, ] +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] 
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "aws-lambda-powertools" +version = "2.25.1" +description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." +optional = false +python-versions = ">=3.7.4,<4.0.0" +files = [ + {file = "aws_lambda_powertools-2.25.1-py3-none-any.whl", hash = "sha256:e241eb74163cd4b066762359fcdf9cd5e709641669655afaa6c99e970509c07c"}, + {file = "aws_lambda_powertools-2.25.1.tar.gz", hash = "sha256:35f117005c4853829b905b57507200c08df998f1ad35f9e7d01d9361b239cca4"}, +] + +[package.dependencies] +boto3 = {version = ">=1.20.32,<2.0.0", optional = true, markers = "extra == \"aws-sdk\""} +typing-extensions = ">=4.6.2,<5.0.0" + +[package.extras] +all = ["aws-xray-sdk (>=2.8.0,<3.0.0)", "fastjsonschema (>=2.14.5,<3.0.0)", "pydantic (>=1.8.2,<2.0.0)"] +aws-sdk = ["boto3 (>=1.20.32,<2.0.0)"] +datadog = ["datadog-lambda (>=4.77.0,<5.0.0)"] +parser = ["pydantic (>=1.8.2,<2.0.0)"] +tracer = ["aws-xray-sdk (>=2.8.0,<3.0.0)"] +validation = ["fastjsonschema (>=2.14.5,<3.0.0)"] + [[package]] name = "behave" version = "1.2.6" @@ -75,6 +116,44 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.28.62" +description = "The AWS SDK for Python" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "boto3-1.28.62-py3-none-any.whl", hash = "sha256:0dfa2fc96ccafce4feb23044d6cba8b25075ad428a0c450d369d099c6a1059d2"}, + {file = "boto3-1.28.62.tar.gz", hash = "sha256:148eeba0f1867b3db5b3e5ae2997d75a94d03fad46171374a0819168c36f7ed0"}, +] + +[package.dependencies] +botocore = ">=1.31.62,<1.32.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.7.0,<0.8.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" 
+version = "1.31.62" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">= 3.7" +files = [ + {file = "botocore-1.31.62-py3-none-any.whl", hash = "sha256:be792d806afc064694a2d0b9b25779f3ca0c1584b29a35ac32e67f0064ddb8b7"}, + {file = "botocore-1.31.62.tar.gz", hash = "sha256:272b78ac65256b6294cb9cdb0ac484d447ad3a85642e33cb6a3b1b8afee15a4c"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.16.26)"] + [[package]] name = "cfgv" version = "3.4.0" @@ -154,6 +233,37 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.1.0,<3.2.0" +[[package]] +name = "hypothesis" +version = "6.87.3" +description = "A library for property-based testing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hypothesis-6.87.3-py3-none-any.whl", hash = "sha256:684a7b56a4a2e990cb0efb3124c2d886c5138453550b6f4f4a3b75bfc8ef24d4"}, + {file = "hypothesis-6.87.3.tar.gz", hash = "sha256:e67391efb9e6f663031f493d04b5edfb2e47bfc5a6ea56190aed3bc7993d5899"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +sortedcontainers = ">=2.1.0,<3.0.0" + +[package.extras] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.3)"] +cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] +codemods = ["libcst (>=0.3.16)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=3.2)"] +dpcontracts = ["dpcontracts (>=0.4)"] +ghostwriter = ["black (>=19.10b0)"] +lark = ["lark (>=0.10.1)"] +numpy = ["numpy (>=1.17.3)"] +pandas = ["pandas (>=1.1)"] +pytest = ["pytest (>=4.6)"] +pytz = ["pytz (>=2014.1)"] +redis = 
["redis (>=3.0.0)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] + [[package]] name = "identify" version = "2.5.30" @@ -179,6 +289,17 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "mccabe" version = "0.7.0" @@ -201,6 +322,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nhs-context-logging" +version = "0.2.7" +description = "" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "nhs_context_logging-0.2.7-py3-none-any.whl", hash = "sha256:4a07c4b06fee5b5dc5d5d41dab9f517d994260a7dd163ee7381719b1d4fc737e"}, + {file = "nhs_context_logging-0.2.7.tar.gz", hash = "sha256:cce84172d19e2d6702a428c2d73bc2bd0d62c04e7ec748e385f77fcca1152995"}, +] + [[package]] name = "nodeenv" version = "1.8.0" @@ -509,6 +641,20 @@ files = [ [package.dependencies] pytest = ">=4.0.2" +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pyyaml" version = "6.0.1" @@ -558,6 +704,23 
@@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "s3transfer" +version = "0.7.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, + {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + [[package]] name = "setuptools" version = "68.2.2" @@ -596,6 +759,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + [[package]] name = "typing-extensions" version = "4.8.0" @@ -607,6 +781,23 @@ files = [ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] +[[package]] +name = "urllib3" +version = "2.0.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "virtualenv" version = "20.24.5" @@ -630,4 +821,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "3.11.4" -content-hash = "fd39b2d84c08b434b6786ea477f96f1adb8447f0cfcfafd6af52b01c864985fa" +content-hash = "f06a9860fada0cef1a329fdd39b7f36625269a17d0a4940897d64311c43c41dd" diff --git a/pyproject.toml b/pyproject.toml index ead7f7280..ac9d1cc59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ packages = [ [tool.poetry.dependencies] python = "3.11.4" pydantic = "^2.4.2" +nhs-context-logging = "^0.2.7" [tool.poetry.group.dev.dependencies] pre-commit = "^3.4.0" @@ -25,6 +26,8 @@ behave = "^1.2.6" pytest = "^7.4.2" pytest-custom-exit-code = "^0.3.0" sh = "^2.0.6" +hypothesis = "^6.87.3" +aws-lambda-powertools = { extras = ["aws-sdk"], version = "^2.25.1" } [build-system] requires = ["poetry-core"] diff --git a/src/api/createProduct/index.py b/src/api/createProduct/index.py index e69de29bb..e21609a1a 100644 --- a/src/api/createProduct/index.py +++ b/src/api/createProduct/index.py @@ -0,0 +1,37 @@ +from event.environment import BaseEnvironment +from event.logging.logger import setup_logger +from event.logging.step_decorators import logging_step_decorators +from event.response.steps import response_steps +from event.step_chain import StepChain +from event.versioning.steps import 
get_steps_for_requested_version, versioning_steps + + +class Environment(BaseEnvironment): + SOMETHING: str + + +cache = {**Environment.model_construct().model_dump()} +step_decorators = [*logging_step_decorators] +pre_steps = [*versioning_steps] +post_steps = [*response_steps] + + +def handler(event: dict, context=None): + setup_logger(service_name=__file__) + + pre_step_chain = StepChain( + step_chain=versioning_steps, step_decorators=step_decorators + ) + pre_step_chain.run(init={"event": event, "__file__": __file__}) + + if isinstance(pre_step_chain.result, Exception): + result = pre_step_chain.result + else: + steps = pre_step_chain.data[get_steps_for_requested_version] + step_chain = StepChain(step_chain=steps, step_decorators=step_decorators) + step_chain.run(cache=cache, init=event) + result = step_chain.result + + post_step_chain = StepChain(step_chain=post_steps, step_decorators=step_decorators) + post_step_chain.run(init=result) + return post_step_chain.result diff --git a/src/api/createProduct/src/v1/steps.py b/src/api/createProduct/src/v1/steps.py new file mode 100644 index 000000000..d21b5b0eb --- /dev/null +++ b/src/api/createProduct/src/v1/steps.py @@ -0,0 +1,9 @@ +def a(data, cache): + return {"blah", "hi"} + + +def b(data, cache): + return {"a's result": data[a]} + + +steps = [a, b] diff --git a/src/api/createProduct/tests/test_index.py b/src/api/createProduct/tests/test_index.py new file mode 100644 index 000000000..378a78f9a --- /dev/null +++ b/src/api/createProduct/tests/test_index.py @@ -0,0 +1,14 @@ +import pytest + +from api.createProduct.index import handler + + +@pytest.mark.parametrize( + "version", + [ + "1", + ], +) +def test_index(version): + result = handler(event={"headers": {"version": version}}) + assert result == "OK" diff --git a/src/api/createProduct/tests/test_placeholder_createProduct.py b/src/api/createProduct/tests/test_placeholder_createProduct.py deleted file mode 100644 index 3e5dfe81c..000000000 --- 
a/src/api/createProduct/tests/test_placeholder_createProduct.py +++ /dev/null @@ -1,19 +0,0 @@ -import pydantic # noqa: F401 -import pytest -from domain.placeholder import placeholder - -import api.createProduct.index # noqa: F401 - - -@pytest.mark.integration -def test_dummy_integration(): - placeholder() - - -def test_dummy_unit(): - placeholder() - - -@pytest.mark.smoke -def test_dummy_smoke(): - raise Exception("delete me") diff --git a/src/conftest.py b/src/conftest.py index a8004e075..231ff24a9 100644 --- a/src/conftest.py +++ b/src/conftest.py @@ -1,5 +1,31 @@ +import json + +from event.logging.logger import setup_logger +from nhs_context_logging.fixtures import log_capture, log_capture_global # noqa: F401 +from nhs_context_logging.formatters import json_serializer +from pytest import FixtureRequest, fixture + + def pytest_collection_modifyitems(items, config): # add `unit` marker to all unmarked items for item in items: if not any(item.iter_markers()): item.add_marker("unit") + + +@fixture(autouse=True) +def log_on_failure(request: FixtureRequest, log_capture): + setup_logger(request.node.name) + + exception = None + try: + yield + except Exception as exception: + pass + + std_out, std_err = log_capture + for log in (*std_out, *std_err): + print(json.dumps(log, indent=2, default=json_serializer)) # noqa: T201 + + if isinstance(exception, Exception): + raise exception diff --git a/src/layers/domain/response/error_collections.py b/src/layers/domain/response/error_collections.py new file mode 100644 index 000000000..f1aca5913 --- /dev/null +++ b/src/layers/domain/response/error_collections.py @@ -0,0 +1 @@ +NonFatalErrorCollection = () # Add non-fatal (i.e. 
"2XX" and "4XX") errors here diff --git a/src/layers/event/environment/__init__.py b/src/layers/event/environment/__init__.py new file mode 100644 index 000000000..7a8c0a927 --- /dev/null +++ b/src/layers/event/environment/__init__.py @@ -0,0 +1,25 @@ +import os +from abc import ABC +from typing import TypeVar + +from pydantic import BaseModel + +Model = TypeVar("Model") + + +class BaseEnvironment(BaseModel, ABC): + """ + Automatically parse environmental variables, + and additionally validate that the members of this model + have been set. For example, the following will raise an + error if 'SOMETHING' has not been set in the environment: + + class Environment(BaseEnvironment): + SOMETHING: str + + Environment.model_construct() + """ + + @classmethod + def model_construct(cls: Model) -> Model: + return super().model_construct(**os.environ) diff --git a/src/layers/event/environment/tests/test_environment.py b/src/layers/event/environment/tests/test_environment.py new file mode 100644 index 000000000..a1f82e622 --- /dev/null +++ b/src/layers/event/environment/tests/test_environment.py @@ -0,0 +1,12 @@ +import os +from unittest import mock + +from event.environment import BaseEnvironment + + +def test_base_environment(): + class Environment(BaseEnvironment): + FOO: str + + with mock.patch.dict(os.environ, {"FOO": "bar"}): + Environment.model_construct() diff --git a/src/layers/event/json/__init__.py b/src/layers/event/json/__init__.py new file mode 100644 index 000000000..ce4c6bc1a --- /dev/null +++ b/src/layers/event/json/__init__.py @@ -0,0 +1,20 @@ +import json + +from .errors import DuplicateKeyError + + +def dict_raise_on_duplicates(list_of_pairs): + checked_pairs = {} + for k, v in list_of_pairs: + if k in checked_pairs: + raise DuplicateKeyError("Duplicate key: %r" % (k,)) + checked_pairs[k] = v + return checked_pairs + + +def json_loads(json_string): + return json.loads(json_string, object_pairs_hook=dict_raise_on_duplicates) + + +def 
json_load(json_file_obj): + return json.load(json_file_obj, object_pairs_hook=dict_raise_on_duplicates) diff --git a/src/layers/event/json/errors.py b/src/layers/event/json/errors.py new file mode 100644 index 000000000..610cb361a --- /dev/null +++ b/src/layers/event/json/errors.py @@ -0,0 +1,2 @@ +class DuplicateKeyError(Exception): + pass diff --git a/src/layers/event/json/test/test_json.py b/src/layers/event/json/test/test_json.py new file mode 100644 index 000000000..a149d424a --- /dev/null +++ b/src/layers/event/json/test/test_json.py @@ -0,0 +1,18 @@ +from tempfile import TemporaryFile + +import pytest +from event.json import json_load, json_loads +from event.json.errors import DuplicateKeyError + + +def test_dict_raise_on_duplicates_loads(): + with pytest.raises(DuplicateKeyError): + json_loads('{"a": "foo", "a": "bar"}') + + +def test_dict_raise_on_duplicates_load(): + with TemporaryFile() as f: + f.write(b'{"a": "foo", "a": "bar"}') + f.seek(0) + with pytest.raises(DuplicateKeyError): + json_load(f) diff --git a/src/layers/event/logging/log_reference.py b/src/layers/event/logging/log_reference.py new file mode 100644 index 000000000..16c2a35da --- /dev/null +++ b/src/layers/event/logging/log_reference.py @@ -0,0 +1,12 @@ +import re + +NON_CAPS_RE = re.compile(r"[^a-zA-Z0-9]") +SEPARATOR = "-" +DOUBLE_SEPARATOR = f"{SEPARATOR}{SEPARATOR}" + + +def make_log_reference(name): + stripped_name = NON_CAPS_RE.sub(SEPARATOR, name) + while DOUBLE_SEPARATOR in stripped_name: + stripped_name = stripped_name.replace(DOUBLE_SEPARATOR, SEPARATOR) + return stripped_name.upper() diff --git a/src/layers/event/logging/logger.py b/src/layers/event/logging/logger.py new file mode 100644 index 000000000..39ccb6b0f --- /dev/null +++ b/src/layers/event/logging/logger.py @@ -0,0 +1,10 @@ +from uuid import uuid4 + +from nhs_context_logging import app_logger + + +def setup_logger(service_name: str, uuid: str = None): + if uuid is None: + uuid = str(uuid4()) + app_logger._is_setup = 
False + app_logger.setup(service_name="-".join((service_name, uuid))) diff --git a/src/layers/event/logging/models.py b/src/layers/event/logging/models.py new file mode 100644 index 000000000..6759613f6 --- /dev/null +++ b/src/layers/event/logging/models.py @@ -0,0 +1,28 @@ +from typing import Any, Literal + +from pydantic import BaseModel, Field, FilePath + + +class LogInfoTemplate(BaseModel): + level: Literal["INFO", "DEBUG", "WARNING", "ERROR"] + path: FilePath + line_no: int + func: str + pid: int + thread: int + + +class LogTemplate(BaseModel): + timestamp: float + log_reference: str = (Field(pattern=r"^[A-Z]+$"),) + internal_id: str = Field(pattern=r"^[a-z0-9]{32}+$") + action: str + action_duration: float + action_status: Literal["succeeded", "failed", "error"] + log_info: LogInfoTemplate + + +class StepLog(LogTemplate): + data: dict + cache: dict + result: Any diff --git a/src/layers/event/logging/step_decorators.py b/src/layers/event/logging/step_decorators.py new file mode 100644 index 000000000..3561128c0 --- /dev/null +++ b/src/layers/event/logging/step_decorators.py @@ -0,0 +1,34 @@ +from ast import FunctionType +from functools import wraps + +from domain.response.error_collections import NonFatalErrorCollection +from nhs_context_logging import add_fields, log_action + +from .log_reference import make_log_reference + + +def modify_logger(function): + @wraps(function) + def wrapper(data, cache): + try: + result = function(data=data, cache=cache) + except Exception as exception: + add_fields(result=exception) + raise + else: + add_fields(result=result) + return result + + return wrapper + + +def log_step(function): + return log_action( + action=f"{function.__module__}.{function.__name__}", + log_reference=make_log_reference(name=function.__name__), + log_args=["data", "cache"], + expected_errors=NonFatalErrorCollection, + )(function) + + +logging_step_decorators: list[FunctionType] = [log_step, modify_logger] diff --git 
a/src/layers/event/logging/tests/test_log_reference.py b/src/layers/event/logging/tests/test_log_reference.py new file mode 100644 index 000000000..c344dda0f --- /dev/null +++ b/src/layers/event/logging/tests/test_log_reference.py @@ -0,0 +1,14 @@ +import pytest +from event.logging.log_reference import make_log_reference + + +@pytest.mark.parametrize( + "input_name, expected_output", + [ + ("This is a Test!@#123", "THIS-IS-A-TEST-123"), + ("ONLYUPPERCASELETTERS", "ONLYUPPERCASELETTERS"), + ("onlylowercaseletters", "ONLYLOWERCASELETTERS"), + ], +) +def test_make_log_reference(input_name, expected_output): + assert make_log_reference(input_name) == expected_output diff --git a/src/layers/event/logging/tests/test_logger.py b/src/layers/event/logging/tests/test_logger.py new file mode 100644 index 000000000..918b50308 --- /dev/null +++ b/src/layers/event/logging/tests/test_logger.py @@ -0,0 +1,25 @@ +from event.logging.logger import setup_logger + + +def test_setup_logger_with_default_uuid(): + from nhs_context_logging import app_logger + + setup_logger(service_name="foo") + foo_logger = app_logger.logger() + + setup_logger(service_name="foo") + foo2_logger = app_logger.logger() + + assert foo_logger is not foo2_logger + + +def test_setup_logger_with_same_uuid(): + from nhs_context_logging import app_logger + + setup_logger(service_name="foo", uuid="bar") + foo_logger = app_logger.logger() + + setup_logger(service_name="foo", uuid="bar") + foo2_logger = app_logger.logger() + + assert foo_logger is foo2_logger diff --git a/src/layers/event/logging/tests/test_step_decorators.py b/src/layers/event/logging/tests/test_step_decorators.py new file mode 100644 index 000000000..51f0ab994 --- /dev/null +++ b/src/layers/event/logging/tests/test_step_decorators.py @@ -0,0 +1,124 @@ +from unittest import mock + +from event.logging.models import StepLog +from event.logging.step_decorators import logging_step_decorators +from event.step_chain import StepChain +from 
event.step_chain.tests.utils import step_data +from nhs_context_logging.fixtures import log_capture, log_capture_global # noqa: F401 + + +def test_logging_step_decorators(log_capture): + return_value = "return value!" + init_data = "init data!" + cache = {"foo": "bar"} + + # Define a step + def a_function(data, cache): + return return_value + + # Run the step, with logging + step_chain = StepChain( + step_chain=[a_function], step_decorators=logging_step_decorators + ) + step_chain.run(init=init_data, cache=cache) + assert step_chain.result == return_value + + # Assert no error messages + std_out, std_err = log_capture + assert len(std_err) == 0 + + # Validate the log structure + (log,) = std_out + parsed_log = StepLog(**log) + + # Validate the log data + assert parsed_log.data == dict(step_data(init=init_data)) + assert ( + parsed_log.cache is not cache + ) # Make sure that the log doesn't have a direct reference to global data + assert parsed_log.cache == cache + assert parsed_log.result == return_value + assert parsed_log.action == "test_step_decorators.a_function" + assert parsed_log.log_reference == "A-FUNCTION" + assert parsed_log.action_status == "succeeded" + + +def test_logging_step_decorators_with_fatal_error(log_capture): + init_data = "init data!" + cache = {"foo": "bar"} + error_message = "oops!" 
+ + class MyException(Exception): + pass + + # Define a step + def a_function(data, cache): + raise MyException(error_message) + + # Run the step, with logging + step_chain = StepChain( + step_chain=[a_function], step_decorators=logging_step_decorators + ) + step_chain.run(init=init_data, cache=cache) + assert isinstance(step_chain.result, MyException) + + # Assert only error messages + std_out, std_err = log_capture + assert len(std_out) == 0 + + # Validate the log structure + (log,) = std_err + parsed_log = StepLog(**log) + + # Validate the log data + assert parsed_log.data == dict(step_data(init=init_data)) + assert ( + parsed_log.cache is not cache + ) # Make sure that the log doesn't have a direct reference to global data + assert parsed_log.cache == cache + assert isinstance(parsed_log.result, MyException) + assert parsed_log.action == "test_step_decorators.a_function" + assert parsed_log.log_reference == "A-FUNCTION" + assert parsed_log.action_status == "failed" + + +def test_logging_step_decorators_with_non_fatal_error(log_capture): + init_data = "init data!" + cache = {"foo": "bar"} + error_message = "oops!" 
+ + class MyException(Exception): + pass + + # Define a step + def a_function(data, cache): + raise MyException(error_message) + + # Run the step, with logging + with mock.patch( + "event.logging.step_decorators.NonFatalErrorCollection", (MyException,) + ): + step_chain = StepChain( + step_chain=[a_function], step_decorators=logging_step_decorators + ) + step_chain.run(init=init_data, cache=cache) + assert isinstance(step_chain.result, MyException) + + # Assert no error messages + std_out, std_err = log_capture + assert len(std_err) == 0 + + # Validate the log structure + (log,) = std_out + parsed_log = StepLog(**log) + + # Validate the log data + assert parsed_log.data == dict(step_data(init=init_data)) + assert ( + parsed_log.cache is not cache + ) # Make sure that the log doesn't have a direct reference to global data + assert parsed_log.cache == cache + assert isinstance(parsed_log.result, MyException) + assert parsed_log.action == "test_step_decorators.a_function" + assert parsed_log.log_reference == "A-FUNCTION" + assert parsed_log.action_status == "error" diff --git a/src/layers/event/placeholder.py b/src/layers/event/placeholder.py deleted file mode 100644 index b7a44a7c4..000000000 --- a/src/layers/event/placeholder.py +++ /dev/null @@ -1,2 +0,0 @@ -def placeholder(): - pass diff --git a/src/layers/event/response/steps.py b/src/layers/event/response/steps.py new file mode 100644 index 000000000..bbe84abca --- /dev/null +++ b/src/layers/event/response/steps.py @@ -0,0 +1,10 @@ +from types import FunctionType + + +def render_response(data, cache) -> dict: + return "OK" + + +response_steps: list[FunctionType] = [ + render_response, +] diff --git a/src/layers/event/step_chain/__init__.py b/src/layers/event/step_chain/__init__.py new file mode 100644 index 000000000..615501140 --- /dev/null +++ b/src/layers/event/step_chain/__init__.py @@ -0,0 +1,71 @@ +from types import FunctionType + +from event.step_chain.errors import StepChainError + +from .types import 
FrozenDict + + +class StepChain: + """ + Function ("step") chaining with the following features: + * Steps run in sequence + * Steps expected to have the signature `f(data, cache) -> any` + * Can access results of previous steps using `data[step]` + * Can access "global" data from `cache` + * Can apply decorators to all steps + * Execute the pipeline with `StepChain.run` + * Retrieve the final step's result from the `result` member + + Example: + + def a(data, cache): + return {"blah", "hi"} + + def b(data, cache): + return {"a's result": data[a]} + + step_chain = StepChain([a, b], step_decorators=[]) + step_chain.run(cache={}, init={"event": None}) + print(step_chain.data) + """ + + INIT = "INIT" + + def __init__( + self, step_chain: list[FunctionType], step_decorators: list[FunctionType] = None + ): + if step_decorators is None: + step_decorators = [] + + if len(step_chain) != len(set(step_chain)): + raise StepChainError( + f"Duplicate step detected in step chain '{[step.__name__ for step in step_chain]}'" + ) + + # Decorate the steps in "reverse" order, which actually means that + # they get applied in the logical order + decorated_steps = step_chain + for deco in reversed(step_decorators): + decorated_steps = list(map(deco, decorated_steps)) + self.step_chain = decorated_steps + + # Store a mapping to the original unwrapped steps, so that the user + # may look up data by the original step reference + self.naked_step_lookup = dict(zip(decorated_steps, step_chain)) + + def run(self, cache: dict = None, init: any = None): + if cache is None: + cache = {} + + data = FrozenDict(**{self.INIT: init}) + for step in self.step_chain: + try: + result = step(data=data, cache=cache) + except Exception as exception: + result = exception + break + naked_step = self.naked_step_lookup[step] # unwrap decorators off the step + data = FrozenDict({**data, naked_step: result}) + + self.data = data + self.result = result diff --git a/src/layers/event/step_chain/errors.py 
def test_step_chain():
    """Two chained steps: the second reads the first's result via `data[a]`,
    and both mutate the shared cache, in order."""
    shared_cache = {"foo": "FOO"}

    def a(data, cache):
        cache["foo"] = "OOF"
        cache["bar"] = "BAR"
        return {"blah", "hi"}

    def b(data, cache):
        cache["foo"] = "fool"
        return {"a's result": data[a]}

    chain = StepChain(step_chain=[a, b], step_decorators=[])
    chain.run(cache=shared_cache, init={"event": None})

    expected_data = {
        StepChain.INIT: {"event": None},
        a: {"blah", "hi"},
        b: {"a's result": {"blah", "hi"}},
    }
    assert chain.result == {"a's result": {"blah", "hi"}}
    assert chain.data == FrozenDict(expected_data)
    assert shared_cache == {"foo": "fool", "bar": "BAR"}
def step_data(init=None, kwargs=None):
    """Build a FrozenDict shaped like StepChain's internal `data` mapping.

    `init` is stored under StepChain.INIT; any extra step->result pairs in
    `kwargs` are merged on top (and may override the INIT entry, exactly as
    dict.update would).
    """
    entries = {StepChain.INIT: init}
    entries.update(kwargs or {})
    return FrozenDict(entries)
from typing import Mapping


class FrozenDict(Mapping):
    """An immutable, hashable mapping, lifted from
    https://stackoverflow.com/a/2704866/1571593.

    Wraps a plain dict and exposes only the read-only Mapping interface,
    so instances can be used as dict keys or set members once hashed.
    """

    def __init__(self, *args, **kwargs):
        # Same constructor signature as dict()
        self._store = dict(*args, **kwargs)
        self._cached_hash = None  # computed lazily on first hash()

    def __getitem__(self, key):
        return self._store[key]

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

    def __eq__(self, other: object) -> bool:
        # Only another FrozenDict can compare equal; a plain dict never does
        return isinstance(other, FrozenDict) and self._store == other._store

    def __hash__(self):
        # XOR of per-item hashes: O(n), order-independent, and cached so
        # repeated hashing is free. (Cheaper than hashing a sorted tuple
        # of items, which would be O(n log n).)
        if self._cached_hash is None:
            digest = 0
            for item in self._store.items():
                digest ^= hash(item)
            self._cached_hash = digest
        return self._cached_hash

    def __str__(self):
        return str(self._store)

    def __repr__(self):
        return repr(self._store)
def _module_path_from_file_path(file_path: Path) -> str:
    """Convert a versioned steps-file path into a dotted module path,
    relative to the API root directory (API_ROOT_DIRNAME)."""
    path = str(file_path.parent / file_path.stem)
    path_relative_to_api_root = Path(path[path.find(API_ROOT_DIRNAME) :])
    return ".".join(path_relative_to_api_root.parts)


def get_requested_version(data, cache=None) -> str:
    """Step: extract the API version requested in the lambda event headers.

    Raises pydantic's ValidationError if the event lacks a valid version header.
    """
    event = LambdaEventForVersioning(**data[StepChain.INIT]["event"])
    return event.headers.version


def get_steps_by_version(data, cache=None) -> dict[str, list[FunctionType]]:
    """Step: discover versioned handlers (src/v*/steps.py) next to the API
    index file and import each one's `steps` object, keyed by version number."""
    api_index_file_path = data[StepChain.INIT]["api_index_file_path"]
    versions_paths = Path(api_index_file_path).parent.glob(VERSIONED_HANDLER_GLOB)
    versioned_steps = {}
    for file_path in versions_paths:
        (version_number,) = VERSION_RE.match(file_path.parent.name).groups()
        module_path = _module_path_from_file_path(file_path)
        versioned_handler = import_module(module_path)
        versioned_steps[version_number] = versioned_handler.steps
    return versioned_steps


def get_largest_possible_version(data, cache=None) -> str:
    """Step: pick the largest implemented version not exceeding the requested one.

    Fixes: the requested-version conversion was recomputed inside the
    comprehension (loop-invariant — now hoisted), and `possible_versions`
    was confusingly rebound from the implemented-version map to the
    filtered list — now distinct names.

    Raises:
        VersionException: if no implemented version is <= the requested version.
    """
    requested_version = int(float(data[get_requested_version]))
    implemented_versions = map(int, data[get_steps_by_version])
    compatible_versions = [
        version for version in implemented_versions if version <= requested_version
    ]
    # `math.inf` as the default marks "no compatible version found"
    largest_possible_version = max(compatible_versions, default=math.inf)
    if not math.isfinite(largest_possible_version):
        raise VersionException("Version not supported")
    return str(largest_possible_version)


def get_steps_for_requested_version(data, cache=None):
    """Step: return the step list for the version chosen by
    get_largest_possible_version."""
    steps_by_version = data[get_steps_by_version]
    largest_possible_version = data[get_largest_possible_version]
    return steps_by_version[largest_possible_version]
# Canonical ordering of the version-resolution pipeline: each step reads its
# predecessors' results via `data[<step>]`, so this order must match the
# data-dependencies between the steps above.
versioning_steps: list[FunctionType] = [
    get_requested_version,
    get_steps_by_version,
    get_largest_possible_version,
    get_steps_for_requested_version,
]
@pytest.mark.parametrize(
    "requested_version,expected_version",
    [
        ("3", "3"),
        ("4", "3"),
        ("5", "3"),
        ("6", "6"),
        ("7", "6"),
        ("8", "6"),
        ("9", "9"),
        ("1000", "9"),
        ("3.0", "3"),
        ("3.5", "3"),
        ("3.9", "3"),
        ("10000.1234", "9"),
    ],
)
def test_largest_possible_version(requested_version: str, expected_version: str):
    """The chosen version is the largest implemented version that does not
    exceed the requested one (fractional requests truncate downwards)."""
    handler_versions = {"3": "handler3", "6": "handler6", "9": "handler9"}
    chain_data = step_data(
        kwargs={
            get_requested_version: requested_version,
            get_steps_by_version: handler_versions,
        }
    )

    assert get_largest_possible_version(data=chain_data) == expected_version
@mock.patch("event.versioning.steps.API_ROOT_DIRNAME", "event/versioning/tests")
def test_get_versioned_steps():
    """Discovery maps each src/v*/steps.py under example_api to its `steps`
    object, keyed by the version number from the directory name."""
    from .example_api import index

    discovered = get_steps_by_version(
        data=step_data(init={"api_index_file_path": index.__file__})
    )

    expected = {
        "0": "v0_steps",
        "1": "v1_steps",
        "3": "v3_steps",
    }
    assert discovered == expected
step_chain.run( + init={"event": _event.model_dump(), "api_index_file_path": index.__file__} + ) + assert step_chain.result is expected_steps