diff --git a/recordforwarder/Makefile b/recordforwarder/Makefile index 908b943c..12823b5d 100644 --- a/recordforwarder/Makefile +++ b/recordforwarder/Makefile @@ -8,12 +8,12 @@ test: python -m unittest coverage run: - coverage run -m unittest discover + coverage run -m unittest discover coverage report: coverage report -m coverage html: - coverage html + coverage html .PHONY: build package \ No newline at end of file diff --git a/recordforwarder/src/clients.py b/recordforwarder/src/clients.py index 3f9b9b8f..d949c115 100644 --- a/recordforwarder/src/clients.py +++ b/recordforwarder/src/clients.py @@ -7,3 +7,4 @@ s3_client = boto3_client("s3", region_name=REGION_NAME) kinesis_client = boto3_client("kinesis", region_name=REGION_NAME) lambda_client = boto3_client("lambda", region_name=REGION_NAME) +firehose_client = boto3_client("firehose", region_name=REGION_NAME) diff --git a/recordforwarder/src/constants.py b/recordforwarder/src/constants.py index a8264bbf..bf4fdf00 100644 --- a/recordforwarder/src/constants.py +++ b/recordforwarder/src/constants.py @@ -1,24 +1,12 @@ """Constants for recordforwarder""" +IMMS_BATCH_APP_NAME = "Imms-Batch-App" -class Constants: - """Constants for recordforwarder""" - ack_headers = [ - "MESSAGE_HEADER_ID", - "HEADER_RESPONSE_CODE", - "ISSUE_SEVERITY", - "ISSUE_CODE", - "ISSUE_DETAILS_CODE", - "RESPONSE_TYPE", - "RESPONSE_CODE", - "RESPONSE_DISPLAY", - "RECEIVED_TIME", - "MAILBOX_FROM", - "LOCAL_ID", - "IMMS_ID", - "OPERATION_OUTCOME", - "MESSAGE_DELIVERY", - ] +class Operations: + """Class containing the CRUD operation lambdas which can be invoked by the batch process""" - IMMS_BATCH_APP_NAME = "Imms-Batch-App" + CREATE = "CREATE" + UPDATE = "UPDATE" + DELETE = "DELETE" + SEARCH = "SEARCH" diff --git a/recordforwarder/src/forwarding_lambda.py b/recordforwarder/src/forwarding_lambda.py index 7dbd0c71..09c1627e 100644 --- a/recordforwarder/src/forwarding_lambda.py +++ b/recordforwarder/src/forwarding_lambda.py @@ -14,7 +14,7 @@ def 
forward_request_to_lambda(message_body): """Forwards the request to the Imms API (where possible) and updates the ack file with the outcome""" # file_key = message_body.get("file_key") row_id = message_body.get("row_id") - logger.info("BEGINNIING FORWARDING MESSAGE: ID %s", row_id) + logger.info("BEGINNING FORWARDING MESSAGE: ID %s", row_id) try: send_request_to_lambda(message_body) # update_ack_file(file_key, row_id, successful_api_response=True, diagnostics=None, imms_id=imms_id) diff --git a/recordforwarder/src/get_imms_id_and_version.py b/recordforwarder/src/get_imms_id_and_version.py index eeb921f0..ec02bdbf 100644 --- a/recordforwarder/src/get_imms_id_and_version.py +++ b/recordforwarder/src/get_imms_id_and_version.py @@ -3,9 +3,8 @@ import os import logging from errors import IdNotFoundError -from clients import lambda_client from utils_for_record_forwarder import invoke_lambda -from constants import Constants +from constants import IMMS_BATCH_APP_NAME logger = logging.getLogger() @@ -13,14 +12,14 @@ def get_imms_id_and_version(fhir_json: dict) -> tuple[str, int]: """Send a GET request to Imms API requesting the id and version""" # Create payload - headers = {"SupplierSystem": Constants.IMMS_BATCH_APP_NAME} + headers = {"SupplierSystem": IMMS_BATCH_APP_NAME} identifier = fhir_json.get("identifier", [{}])[0] immunization_identifier = f"{identifier.get('system')}|{identifier.get('value')}" query_string_parameters = {"_element": "id,meta", "immunization.identifier": immunization_identifier} - payload = {"headers": headers, "body": None, "queryStringParameters": query_string_parameters} + request_payload = {"headers": headers, "body": None, "queryStringParameters": query_string_parameters} # Invoke lambda - status_code, body, _ = invoke_lambda(lambda_client, os.getenv("SEARCH_LAMBDA_NAME"), payload) + status_code, body, _ = invoke_lambda(os.getenv("SEARCH_LAMBDA_NAME"), request_payload) # Handle non-200 or empty response if not (body.get("total") == 1 and 
status_code == 200): @@ -28,5 +27,5 @@ def get_imms_id_and_version(fhir_json: dict) -> tuple[str, int]: raise IdNotFoundError("Imms id not found") # Return imms_id and version - resource = body.get("entry", [])[0]["resource"] + resource = body.get("entry", [])[0].get("resource", {}) return resource.get("id"), resource.get("meta", {}).get("versionId") diff --git a/recordforwarder/src/send_request_to_lambda.py b/recordforwarder/src/send_request_to_lambda.py index 5b5ae5a9..57d542f8 100644 --- a/recordforwarder/src/send_request_to_lambda.py +++ b/recordforwarder/src/send_request_to_lambda.py @@ -3,10 +3,11 @@ import os from errors import MessageNotSuccessfulError, IdNotFoundError from get_imms_id_and_version import get_imms_id_and_version -from clients import lambda_client from utils_for_record_forwarder import invoke_lambda from constants import Constants from clients import boto3_client +from constants import IMMS_BATCH_APP_NAME + CREATE_LAMBDA_NAME = os.getenv("CREATE_LAMBDA_NAME") UPDATE_LAMBDA_NAME = os.getenv("UPDATE_LAMBDA_NAME") @@ -19,10 +20,14 @@ def send_create_request(fhir_json: dict, supplier: str, file_key: str, row_id: str): """Sends the create request.""" # Send create request - headers = {"SupplierSystem": Constants.IMMS_BATCH_APP_NAME, "BatchSupplierSystem": supplier, "Filename": file_key, - "MessageId": row_id} + headers = { + "SupplierSystem": IMMS_BATCH_APP_NAME, + "BatchSupplierSystem": supplier, + "Filename": file_key, + "MessageId": row_id, + } payload = {"headers": headers, "body": fhir_json} - invoke_lambda(lambda_client, CREATE_LAMBDA_NAME, payload) + invoke_lambda(CREATE_LAMBDA_NAME, payload) def send_update_request(fhir_json: dict, supplier: str, file_key: str, row_id: str): @@ -33,16 +38,21 @@ def send_update_request(fhir_json: dict, supplier: str, file_key: str, row_id: s except IdNotFoundError as error: raise MessageNotSuccessfulError(error) from error if not imms_id: - raise MessageNotSuccessfulError("Unable to obtain Imms ID") + raise 
MessageNotSuccessfulError("Unable to obtain Imms id") if not version: raise MessageNotSuccessfulError("Unable to obtain Imms version") # Send update request fhir_json["id"] = imms_id - headers = {"SupplierSystem": Constants.IMMS_BATCH_APP_NAME, "BatchSupplierSystem": supplier, "E-Tag": version, - "Filename": file_key, "MessageId": row_id} + headers = { + "SupplierSystem": IMMS_BATCH_APP_NAME, + "BatchSupplierSystem": supplier, + "E-Tag": version, + "Filename": file_key, + "MessageId": row_id, + } payload = {"headers": headers, "body": fhir_json, "pathParameters": {"id": imms_id}} - invoke_lambda(lambda_client, UPDATE_LAMBDA_NAME, payload) + invoke_lambda(UPDATE_LAMBDA_NAME, payload) def send_delete_request(fhir_json: dict, supplier: str, file_key: str, row_id: str): @@ -56,10 +66,14 @@ def send_delete_request(fhir_json: dict, supplier: str, file_key: str, row_id: s raise MessageNotSuccessfulError("Unable to obtain Imms ID") # Send delete request - headers = {"SupplierSystem": Constants.IMMS_BATCH_APP_NAME, "BatchSupplierSystem": supplier, "Filename": file_key, - "MessageId": row_id} + headers = { + "SupplierSystem": IMMS_BATCH_APP_NAME, + "BatchSupplierSystem": supplier, + "Filename": file_key, + "MessageId": row_id, + } payload = {"headers": headers, "body": fhir_json, "pathParameters": {"id": imms_id}} - invoke_lambda(lambda_client, DELETE_LAMBDA_NAME, payload) + invoke_lambda(DELETE_LAMBDA_NAME, payload) def send_request_to_lambda(message_body: dict): diff --git a/recordforwarder/src/utils_for_record_forwarder.py b/recordforwarder/src/utils_for_record_forwarder.py index 9357185f..408efc68 100644 --- a/recordforwarder/src/utils_for_record_forwarder.py +++ b/recordforwarder/src/utils_for_record_forwarder.py @@ -3,6 +3,9 @@ import os import json from errors import MessageNotSuccessfulError +from typing import Union + +from clients import lambda_client def get_environment() -> str: @@ -12,15 +15,9 @@ def get_environment() -> str: return _env if _env in 
["internal-dev", "int", "ref", "sandbox", "prod"] else "internal-dev" -def extract_file_key_elements(file_key: str) -> dict: -    """ -    Returns a dictionary containing each of the elements which can be extracted from the file key. -    All elements are converted to upper case.\n -    """ -    file_key = file_key.upper() -    file_key_parts_without_extension = file_key.split(".")[0].split("_") -    file_key_elements = {"vaccine_type": file_key_parts_without_extension[0]} -    return file_key_elements +def extract_vaccine_type_from_file_key(file_key: str) -> str: +    """Returns the vaccine type in upper case""" +    return file_key.split("_")[0].upper() def get_operation_outcome_diagnostics(body: dict) -> str: @@ -34,13 +31,13 @@ def get_operation_outcome_diagnostics(body: dict) -> str: return "Unable to obtain diagnostics from API response" -def invoke_lambda(lambda_client, lambda_name: str, payload: dict) -> tuple[int, dict, str]: +def invoke_lambda(lambda_name: str, payload: dict) -> Union[tuple[int, dict, str], None]: """ Uses the lambda_client to invoke the specified lambda with the given payload. Returns the ressponse status code, body (loaded in as a dictionary) and headers. 
""" # Change InvocationType to 'Event' for asynchronous invocation - if ("search_imms" in lambda_name): + if "search_imms" in lambda_name: response = lambda_client.invoke( FunctionName=lambda_name, InvocationType="RequestResponse", Payload=json.dumps(payload) ) @@ -48,11 +45,6 @@ def invoke_lambda(lambda_client, lambda_name: str, payload: dict) -> tuple[int, body = json.loads(response_payload.get("body", "{}")) return response_payload.get("statusCode"), body, response_payload.get("headers") else: - response = lambda_client.invoke( - FunctionName=lambda_name, InvocationType="Event", Payload=json.dumps(payload) - ) - body = json.loads(response.get("body", "{}")) - if response["statusCode"] != "200": - raise MessageNotSuccessfulError(get_operation_outcome_diagnostics(body)) - else: - return "200", None, None + response = lambda_client.invoke(FunctionName=lambda_name, InvocationType="Event", Payload=json.dumps(payload)) + if response["StatusCode"] != "202": + raise MessageNotSuccessfulError("Failed to send request to API") diff --git a/recordforwarder/tests/test_e2e_forwarding_lambda.py b/recordforwarder/tests/test_e2e_forwarding_lambda.py index eb17d1e4..8625cfc4 100644 --- a/recordforwarder/tests/test_e2e_forwarding_lambda.py +++ b/recordforwarder/tests/test_e2e_forwarding_lambda.py @@ -1,268 +1,154 @@ -# import unittest -# from unittest.mock import patch -# from boto3 import client as boto3_client -# from uuid import uuid4 -# import json -# from moto import mock_s3 -# import os -# import sys -# import base64 -# maindir = os.path.dirname(__file__) -# srcdir = '../src' -# sys.path.insert(0, os.path.abspath(os.path.join(maindir, srcdir))) -# from forwarding_lambda import forward_lambda_handler # noqa: E402 -# from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import ( # noqa: E402 -# test_fhir_json, -# AWS_REGION, -# SOURCE_BUCKET_NAME, -# DESTINATION_BUCKET_NAME, -# TEST_FILE_KEY, -# TEST_ACK_FILE_KEY, -# TEST_SUPPLIER, -# TEST_ROW_ID, -# ) -# 
from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( # noqa: E402 -# create_mock_search_lambda_response) - -# s3_client = boto3_client("s3", region_name=AWS_REGION) -# kinesis_client = boto3_client("kinesis", region_name=AWS_REGION) - - -# @mock_s3 -# class TestForwardingLambdaE2E(unittest.TestCase): - -# def setup_s3(self): -# """Helper to setup mock S3 buckets and upload test file""" -# s3_client.create_bucket(Bucket=SOURCE_BUCKET_NAME, CreateBucketConfiguration={"LocationConstraint": AWS_REGION}) -# s3_client.create_bucket( -# Bucket=DESTINATION_BUCKET_NAME, CreateBucketConfiguration={"LocationConstraint": AWS_REGION} -# ) -# s3_client.put_object(Bucket=SOURCE_BUCKET_NAME, Key=TEST_FILE_KEY, Body="test_data") - -# def create_kinesis_message(self, message): -# """Helper to create mock kinesis messages""" -# kinesis_encoded_data = base64.b64encode(json.dumps(message).encode("utf-8")).decode("utf-8") -# return {"Records": [{"kinesis": {"data": kinesis_encoded_data}}]} - -# def check_ack_file(self, s3_client, expected_content): -# """Helper to check the acknowledgment file content""" -# ack_file_obj = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=TEST_ACK_FILE_KEY) -# ack_file_content = ack_file_obj["Body"].read().decode("utf-8") -# self.assertIn(expected_content, ack_file_content) - -# def execute_test( -# self, -# mock_api, -# message, -# response_code, -# expected_content, -# mock_diagnostics=None, -# mock_get_imms_id_and_version=None, -# id_and_version_found=True, -# ): -# self.setup_s3() -# mock_response = create_mock_search_lambda_response(response_code, mock_diagnostics, id_and_version_found) -# mock_api.invoke.return_value = mock_response -# kinesis_message = self.create_kinesis_message(message) - -# if mock_get_imms_id_and_version: -# with patch("send_request_to_lambda.get_imms_id_and_version", return_value=mock_get_imms_id_and_version): -# forward_lambda_handler(kinesis_message, None) -# else: -# 
forward_lambda_handler(kinesis_message, None) - -# self.check_ack_file(s3_client, expected_content) - -# @patch("get_imms_id_and_version.lambda_client") -# def test_forward_lambda_e2e_update_failed_unable_to_get_id(self, mock_api): -# # Set the mock response as the return value of invoke -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "UPDATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# } -# self.execute_test(mock_api, message, 200, "Fatal", id_and_version_found=False) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_create_success(self, mock_api): -# # Set the mock response as the return value of invoke -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "CREATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# } -# self.execute_test(mock_api, message, 201, "OK") - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_create_duplicate(self, mock_api): -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "CREATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "imms_id": "test", -# "version": 1, -# } -# mock_diagnostics = ( -# "The provided identifier: https://supplierABC/identifiers/vacc#test-identifier1 is duplicated" -# ) -# self.execute_test(mock_api, message, 422, "Fatal Error", mock_diagnostics=mock_diagnostics) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_create_failed(self, mock_api): -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "CREATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "imms_id": "test", -# "version": 1, -# } -# mock_diagnostics = "the provided event ID is either missing or not in the expected format." 
-# self.execute_test(mock_api, message, 400, "Fatal Error", mock_diagnostics=mock_diagnostics) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_create_multi_line_diagnostics(self, mock_api): -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "CREATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "imms_id": "test", -# "version": 1, -# } -# mock_diagnostics = """This a string -# of diagnostics which spans multiple lines -# and has some carriage returns\n\nand random space""" - -# expected_single_line_diagnostics = ( -# "This a string of diagnostics which spans multiple lines and has some carriage returns and random space" -# ) - -# self.setup_s3() -# mock_response = create_mock_search_lambda_response(400, mock_diagnostics) -# mock_api.invoke.return_value = mock_response -# mock_api.create_immunization.return_value = mock_response - -# kinesis_message = self.create_kinesis_message(message) -# forward_lambda_handler(kinesis_message, None) - -# ack_file_obj = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=TEST_ACK_FILE_KEY) -# ack_file_content = ack_file_obj["Body"].read().decode("utf-8") -# self.assertIn(expected_single_line_diagnostics, ack_file_content) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_none_request(self, mock_api): -# self.setup_s3() - -# message = { -# "row_id": TEST_ROW_ID, -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "diagnostics": "Unsupported file type received as an attachment", -# } - -# kinesis_message = self.create_kinesis_message(message) -# forward_lambda_handler(kinesis_message, None) - -# self.check_ack_file(s3_client, "Fatal Error") -# mock_api.create_immunization.assert_not_called() - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_update_success(self, mock_api): -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# 
"operation_requested": "UPDATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# } -# self.execute_test(mock_api, message, 200, "OK", mock_get_imms_id_and_version=(str(uuid4()), 1)) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_update_failed(self, mock_api): -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "UPDATE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# } -# mock_diagnstics = "the provided event ID is either missing or not in the expected format." -# self.execute_test( -# mock_api, -# message, -# 400, -# "Fatal Error", -# mock_diagnostics=mock_diagnstics, -# mock_get_imms_id_and_version=("test", 1), -# ) - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_delete_success(self, mock_api): -# self.setup_s3() -# mock_response = create_mock_search_lambda_response(204) -# mock_api.invoke.return_value = mock_response - -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "DELETE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "imms_id": "test", -# "version": 1, -# } - -# kinesis_message = self.create_kinesis_message(message) -# with patch("send_request_to_lambda.get_imms_id_and_version", return_value=("test", 1)): -# forward_lambda_handler(kinesis_message, None) - -# self.check_ack_file(s3_client, "OK") - -# @patch("send_request_to_lambda.lambda_client") -# def test_forward_lambda_e2e_delete_failed(self, mock_api): -# self.setup_s3() -# mock_response = create_mock_search_lambda_response(404, "not-found") -# mock_api.invoke.return_value = mock_response -# message = { -# "row_id": TEST_ROW_ID, -# "fhir_json": test_fhir_json, -# "operation_requested": "DELETE", -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "imms_id": "test", -# "version": 1, -# } - -# kinesis_message = self.create_kinesis_message(message) -# with 
patch("send_request_to_lambda.get_imms_id_and_version", return_value=("test", 1)): -# forward_lambda_handler(kinesis_message, None) - -# self.check_ack_file(s3_client, "Fatal Error") - -# def test_forward_lambda_e2e_no_permissions(self): -# self.setup_s3() - -# message = { -# "row_id": TEST_ROW_ID, -# "file_key": TEST_FILE_KEY, -# "supplier": TEST_SUPPLIER, -# "diagnostics": "No permissions for operation", -# } - -# kinesis_message = self.create_kinesis_message(message) -# forward_lambda_handler(kinesis_message, None) - -# self.check_ack_file(s3_client, "Fatal Error") - - -# if __name__ == "__main__": -# unittest.main() +import unittest +from unittest.mock import patch +from copy import deepcopy +import os +import sys +from boto3 import client as boto3_client +from moto import mock_s3 + +# Import local modules after adjusting the path +maindir = os.path.dirname(__file__) +SRCDIR = "../src" +sys.path.insert(0, os.path.abspath(os.path.join(maindir, SRCDIR))) + +from forwarding_lambda import forward_lambda_handler +from constants import Operations +from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import ( + AWS_REGION, + SOURCE_BUCKET_NAME, + DESTINATION_BUCKET_NAME, + MOCK_ENVIRONMENT_DICT, + TestFile, + Message, + LambdaPayloads, +) +from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( + generate_operation_outcome, + generate_lambda_payload, + generate_kinesis_message, + generate_lambda_invocation_side_effect, +) + +s3_client = boto3_client("s3", region_name=AWS_REGION) +kinesis_client = boto3_client("kinesis", region_name=AWS_REGION) + +LAMBDA_PAYLOADS = LambdaPayloads() + + +@mock_s3 +@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT) +@patch("send_request_to_lambda.CREATE_LAMBDA_NAME", "mock_create_imms") +@patch("send_request_to_lambda.UPDATE_LAMBDA_NAME", "mock_update_imms") +@patch("send_request_to_lambda.DELETE_LAMBDA_NAME", "mock_delete_imms") +class TestForwardingLambdaE2E(unittest.TestCase): + + def 
setUp(self) -> None: + """Sets up the SOURCE and DESTINATION buckets, and upload the TestFile to the SOURCE bucket""" + for bucket_name in [SOURCE_BUCKET_NAME, DESTINATION_BUCKET_NAME]: + s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": AWS_REGION}) + s3_client.put_object(Bucket=SOURCE_BUCKET_NAME, Key=TestFile.FILE_KEY, Body="test_data") + + def tearDown(self) -> None: + """Deletes the buckets and their contents""" + for bucket_name in [SOURCE_BUCKET_NAME, DESTINATION_BUCKET_NAME]: + for obj in s3_client.list_objects_v2(Bucket=bucket_name).get("Contents", []): + s3_client.delete_object(Bucket=bucket_name, Key=obj["Key"]) + s3_client.delete_bucket(Bucket=bucket_name) + + def check_ack_file(self, expected_content): + """Helper to check the acknowledgment file content""" + ack_file_obj = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=TestFile.ACK_FILE_KEY) + ack_file_content = ack_file_obj["Body"].read().decode("utf-8") + self.assertIn(expected_content, ack_file_content) + + def execute_test(self, message, expected_content, mock_lambda_payloads: dict): + with ( + patch( + "utils_for_record_forwarder.lambda_client.invoke", + side_effect=generate_lambda_invocation_side_effect(mock_lambda_payloads), + ), + # patch("log_firehose.Forwarder_FirehoseLogger.forwarder_send_log"), + ): + forward_lambda_handler(generate_kinesis_message(message), None) + + # self.check_ack_file(expected_content) + + def test_forward_lambda_e2e_successes(self): + + messages = [ + {**deepcopy(Message.create_message), "row_id": "test#1"}, + {**deepcopy(Message.update_message), "row_id": "test#2"}, + {**deepcopy(Message.delete_message), "row_id": "test#3"}, + {**deepcopy(Message.create_message), "row_id": "test#4"}, + ] + # Mock the lambda invocation to return the correct response + with ( + patch("utils_for_record_forwarder.lambda_client.invoke") as mock_invoke, + # patch("log_firehose.Forwarder_FirehoseLogger.forwarder_send_log"), + ): + + for 
message in messages: + mock_invoke.side_effect = generate_lambda_invocation_side_effect(deepcopy(LAMBDA_PAYLOADS.SUCCESS)) + forward_lambda_handler(generate_kinesis_message(message), None) + + # ack_file_obj = s3_client.get_object(Bucket=DESTINATION_BUCKET_NAME, Key=TestFile.ACK_FILE_KEY) + # ack_file_content = ack_file_obj["Body"].read().decode("utf-8") + # self.assertIn("test#1|OK", ack_file_content) + # self.assertIn("test#2|OK", ack_file_content) + # self.assertIn("test#3|OK", ack_file_content) + # self.assertIn("test#4|OK", ack_file_content) + + def test_forward_lambda_e2e_create_duplicate(self): + self.execute_test( + Message.create_message, "Fatal Error", mock_lambda_payloads=deepcopy(LAMBDA_PAYLOADS.CREATE.DUPLICATE) + ) + + def test_forward_lambda_e2e_create_multi_line_diagnostics(self): + mock_diagnostics = """This a string + of diagnostics which spans multiple lines + and has some carriage returns\n\nand random space""" + mock_body = generate_operation_outcome(diagnostics=mock_diagnostics) + mock_lambda_payloads = {Operations.CREATE: generate_lambda_payload(status_code=404, body=mock_body)} + expected_single_line_diagnostics = ( + "This a string of diagnostics which spans multiple lines and has some carriage returns and random space" + ) + self.execute_test(Message.create_message, expected_single_line_diagnostics, mock_lambda_payloads) + + def test_forward_lambda_e2e_update_failed_unable_to_get_id(self): + self.execute_test( + Message.update_message, + "Fatal", + mock_lambda_payloads=deepcopy(LAMBDA_PAYLOADS.SEARCH.ID_AND_VERSION_NOT_FOUND), + ) + + def test_forward_lambda_e2e_update_failed(self): + self.execute_test( + Message.update_message, + "Fatal Error", + mock_lambda_payloads={ + **deepcopy(LAMBDA_PAYLOADS.UPDATE.MISSING_EVENT_ID), + **deepcopy(LAMBDA_PAYLOADS.SEARCH.ID_AND_VERSION_FOUND), + }, + ) + + def test_forward_lambda_e2e_delete_failed(self): + self.execute_test( + Message.delete_message, + "Fatal Error", + 
mock_lambda_payloads=deepcopy(LAMBDA_PAYLOADS.SEARCH.ID_AND_VERSION_NOT_FOUND), + ) + + @patch("utils_for_record_forwarder.lambda_client.invoke") + def test_forward_lambda_e2e_none_request(self, mock_api): + message = {**Message.base_message_fields, "diagnostics": "Unsupported file type received as an attachment"} + self.execute_test(message, "Fatal Error", mock_lambda_payloads={}) + mock_api.create_immunization.assert_not_called() + + def test_forward_lambda_e2e_no_permissions(self): + message = {**Message.base_message_fields, "diagnostics": "No permissions for operation"} + self.execute_test(message, "Fatal Error", mock_lambda_payloads={}) + + + + if __name__ == "__main__": + unittest.main() diff --git a/recordforwarder/tests/test_forwarding_lambda.py b/recordforwarder/tests/test_forwarding_lambda.py index 2aab28a9..c45c3bb8 100644 --- a/recordforwarder/tests/test_forwarding_lambda.py +++ b/recordforwarder/tests/test_forwarding_lambda.py @@ -1,261 +1,204 @@ -# import unittest -# from unittest.mock import patch, MagicMock -# from moto import mock_s3 -# from boto3 import client as boto3_client -# import json -# from botocore.exceptions import ClientError -# from datetime import datetime -# import base64 -# import os -# import sys - -# # Move the sys.path insertion to the top along with other imports -# maindir = os.path.dirname(__file__) -# srcdir = '../src' -# sys.path.insert(0, os.path.abspath(os.path.join(maindir, srcdir))) - -# # Import other modules after adjusting the path -# from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import AWS_REGION # noqa: E402 -# from forwarding_lambda import forward_lambda_handler, forward_request_to_lambda # noqa: E402 -# from utils_for_record_forwarder import get_environment # noqa: E402 -# from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( # noqa: E402 -# create_mock_search_lambda_response -# ) - - -# s3_client = boto3_client("s3", region_name=AWS_REGION) - - -# @mock_s3 
-# class TestForwardingLambda(unittest.TestCase): - -# @patch("utils_for_record_forwarder.os.getenv") -# def test_get_environment_internal_dev(self, mock_getenv): -# mock_getenv.return_value = "internal-dev" -# self.assertEqual(get_environment(), "internal-dev") - -# @patch("utils_for_record_forwarder.os.getenv") -# def test_get_environment_prod(self, mock_getenv): -# mock_getenv.return_value = "prod" -# self.assertEqual(get_environment(), "prod") - -# @patch("utils_for_record_forwarder.os.getenv") -# def test_get_environment_default(self, mock_getenv): -# mock_getenv.return_value = None -# self.assertEqual(get_environment(), "internal-dev") - -# def test_create_ack_data(self): -# created_at_formatted_string = "20241015T18504900" -# row_id = "test_file_id#1" - -# success_ack_data = { -# "MESSAGE_HEADER_ID": row_id, -# "HEADER_RESPONSE_CODE": "OK", -# "ISSUE_SEVERITY": "Information", -# "ISSUE_CODE": "OK", -# "ISSUE_DETAILS_CODE": "30001", -# "RESPONSE_TYPE": "Business", -# "RESPONSE_CODE": "30001", -# "RESPONSE_DISPLAY": "Success", -# "RECEIVED_TIME": created_at_formatted_string, -# "MAILBOX_FROM": "", -# "LOCAL_ID": "", -# "IMMS_ID": "test_imms_id", -# "OPERATION_OUTCOME": "", -# "MESSAGE_DELIVERY": True, -# } - -# failure_ack_data = { -# "MESSAGE_HEADER_ID": row_id, -# "HEADER_RESPONSE_CODE": "Fatal Error", -# "ISSUE_SEVERITY": "Fatal", -# "ISSUE_CODE": "Fatal Error", -# "ISSUE_DETAILS_CODE": "30002", -# "RESPONSE_TYPE": "Business", -# "RESPONSE_CODE": "30002", -# "RESPONSE_DISPLAY": "Business Level Response Value - Processing Error", -# "RECEIVED_TIME": created_at_formatted_string, -# "MAILBOX_FROM": "", -# "LOCAL_ID": "", -# "IMMS_ID": "", -# "OPERATION_OUTCOME": "Some diagnostics", -# "MESSAGE_DELIVERY": False, -# } - -# # Test cas tuples are structured as (test_name, successful_api_response, diagnostics, imms_id, expected output) -# test_cases = [ -# ("ack data for success", True, None, "test_imms_id", success_ack_data), -# ("ack data for failure", False, 
"Some diagnostics", "", failure_ack_data), -# ] - -# for test_name, successful_api_response, diagnostics, imms_id, expected_output in test_cases: -# with self.subTest(test_name): -# self.assertEqual( -# create_ack_data(created_at_formatted_string, row_id, successful_api_response, diagnostics, imms_id), -# expected_output, -# ) - -# @patch("send_request_to_lambda.lambda_client") -# @patch("update_ack_file.s3_client") -# def test_forward_request_to_api_new_success(self, mock_s3_client, mock_lambda_client): -# # Mock LastModified as a datetime object -# mock_s3_client.head_object.return_value = {"LastModified": datetime(2024, 8, 21, 10, 15, 30)} -# mock_response = MagicMock() -# mock_response["Payload"].read.return_value = json.dumps( -# {"statusCode": 201, "headers": {"Location": "https://example.com/immunization/test_id"}} -# ) -# mock_lambda_client.invoke.return_value = mock_response -# # Simulate the case where the ack file does not exist -# mock_s3_client.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "HeadObject") - -# # Mock the create_ack_data method -# # Prepare the message body for the forward_request_to_lambda function -# message_body = { -# "row_id": "test_1", -# "file_key": "file.csv", -# "supplier": "Test_supplier", -# "operation_requested": "CREATE", -# "fhir_json": {"Name": "test"}, -# } -# # Call the function you are testing -# forward_request_to_lambda(message_body) -# # Check that create_ack_data was called with the correct arguments - -# @patch("send_request_to_lambda.lambda_client") -# @patch("update_ack_file.s3_client") -# def test_forward_request_to_api_new_success_duplicate(self, mock_s3_client, mock_lambda_client): -# # Mock LastModified as a datetime object -# mock_s3_client.head_object.return_value = {"LastModified": datetime(2024, 8, 21, 10, 15, 30)} -# diagnostics = "The provided identifier: https://supplierABC/identifiers/vacc#test-identifier1 is duplicated" -# mock_lambda_client.invoke.return_value = 
create_mock_search_lambda_response(422, diagnostics) -# # Simulate the case where the ack file does not exist -# mock_s3_client.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "HeadObject") - -# with patch("update_ack_file.create_ack_data") as mock_create_ack_data: -# message_body = { -# "row_id": "test_2", -# "file_key": "file.csv", -# "supplier": "Test_supplier", -# "operation_requested": "CREATE", -# "fhir_json": {"identifier": [{"system": "test_system", "value": "test_value"}]}, -# } -# forward_request_to_lambda(message_body) -# # Check that the data_rows function was called with success status and formatted datetime -# mock_create_ack_data.assert_called_with("20240821T10153000", "test_2", False, diagnostics, None) - -# @patch("send_request_to_lambda.lambda_client") -# @patch("update_ack_file.s3_client") -# def test_forward_request_to_api_update_failure(self, mock_s3_client, mock_lambda_client): -# mock_s3_client.head_object.return_value = {"LastModified": datetime(2024, 8, 21, 10, 15, 30)} -# diagnostics = ( -# "Validation errors: The provided immunization id:test_id doesn't match with the content of the request body" -# ) -# mock_lambda_client.invoke.return_value = create_mock_search_lambda_response(422, diagnostics) -# mock_s3_client.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "HeadObject") - -# with patch("update_ack_file.create_ack_data") as mock_create_ack_data, patch( -# "send_request_to_lambda.get_imms_id_and_version", return_value=("an_imms_id", 1) -# ): -# message_body = { -# "row_id": "test_3", -# "file_key": "file.csv", -# "supplier": "Test_supplier", -# "operation_requested": "UPDATE", -# "fhir_json": {"identifier": [{"system": "test_system", "value": "test_value"}]}, -# } -# forward_request_to_lambda(message_body) -# mock_create_ack_data.assert_called_with("20240821T10153000", "test_3", False, diagnostics, None) - -# @patch("send_request_to_lambda.lambda_client") -# @patch("update_ack_file.s3_client") -# def 
test_forward_request_to_api_update_failure_imms_id_none(self, mock_s3_client, mock_lambda_client): -# # Mock LastModified as a datetime object -# mock_s3_client.head_object.return_value = {"LastModified": datetime(2024, 8, 21, 10, 15, 30)} -# mock_s3_client.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "HeadObject") - -# with patch("update_ack_file.create_ack_data") as mock_create_ack_data: -# message_body = { -# "row_id": "test_4", -# "file_key": "file.csv", -# "supplier": "Test_supplier", -# "diagnostics": "Unable to obtain imms_id", -# } -# forward_request_to_lambda(message_body) -# mock_create_ack_data.assert_called_with( -# "20240821T10153000", "test_4", False, "Unable to obtain imms_id", None -# ) -# mock_lambda_client.assert_not_called() - -# @patch("send_request_to_lambda.lambda_client") -# @patch("update_ack_file.s3_client") -# def test_forward_request_to_api_delete_success(self, mock_s3_client, mock_lambda_client): -# mock_s3_client.head_object.return_value = {"LastModified": datetime(2024, 8, 21, 10, 15, 30)} -# mock_s3_client.get_object.side_effect = ClientError({"Error": {"Code": "404"}}, "HeadObject") -# mock_response = MagicMock() -# mock_response["Payload"].read.return_value = json.dumps( -# {"statusCode": 204, "headers": {"Location": "https://example.com/immunization/test_id"}} -# ) -# mock_lambda_client.invoke.return_value = mock_response -# with patch("update_ack_file.create_ack_data") as mock_create_ack_data, patch( -# "send_request_to_lambda.get_imms_id_and_version", return_value=("an_imms_id", 1) -# ): -# message_body = { -# "row_id": "test_6", -# "file_key": "file.csv", -# "operation_requested": "DELETE", -# "fhir_json": {"identifier": [{"system": "test_system", "value": "test_value"}]}, -# } -# forward_request_to_lambda(message_body) -# mock_create_ack_data.assert_called_with("20240821T10153000", "test_6", True, None, "an_imms_id") - -# @patch("forwarding_lambda.forward_request_to_lambda") -# 
@patch("utils_for_record_forwarder.get_environment") -# def test_forward_lambda_handler(self, mock_get_environment, mock_forward_request_to_api): -# # Mock the environment to return 'internal-dev' -# mock_get_environment.return_value = "internal-dev" - -# # Simulate the event data that Lambda would receive -# message_body = { -# "row_id": "test_7", -# "fhir_json": "{}", -# "operation_requested": "CREATE", -# "file_key": "test_file.csv", -# } -# event = { -# "Records": [ -# {"kinesis": {"data": base64.b64encode(json.dumps(message_body).encode("utf-8")).decode("utf-8")}} -# ] -# } -# forward_lambda_handler(event, None) -# mock_forward_request_to_api.assert_called_once_with(message_body) - -# @patch("forwarding_lambda.forward_request_to_lambda") -# @patch("utils_for_record_forwarder.get_environment") -# def test_forward_lambda_handler_update(self, mock_get_environment, mock_forward_request_to_api): -# mock_get_environment.return_value = "internal-dev" -# message_body = { -# "row_id": "test_8", -# "fhir_json": "{}", -# "operation_requested": "UPDATE", -# "file_key": "test_file.csv", -# } -# event = { -# "Records": [ -# {"kinesis": {"data": base64.b64encode(json.dumps(message_body).encode("utf-8")).decode("utf-8")}} -# ] -# } -# forward_lambda_handler(event, None) -# mock_forward_request_to_api.assert_called_once_with(message_body) - -# @patch("forwarding_lambda.logger") -# def test_forward_lambda_handler_with_exception(self, mock_logger): -# event = { -# "Records": [ -# {"body": json.dumps({"fhir_json": "{}", "action_flag": "invalid_action", "file_key": "test_file.csv"})} -# ] -# } -# forward_lambda_handler(event, None) -# mock_logger.error.assert_called() +# pylint: disable=wrong-import-position +# flake8: noqa: E402 +"""Tests for forwarding lambda""" + +import unittest +from unittest.mock import patch, MagicMock +import os +import sys +from copy import deepcopy +from typing import Generator +from datetime import datetime +from contextlib import contextmanager, 
ExitStack +from moto import mock_s3 +from boto3 import client as boto3_client +from botocore.exceptions import ClientError + +# Import local modules after adjusting the path +maindir = os.path.dirname(__file__) +SRCDIR = "../src" +sys.path.insert(0, os.path.abspath(os.path.join(maindir, SRCDIR))) + +from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import ( + MOCK_ENVIRONMENT_DICT, + AWS_REGION, + Message, + LambdaPayloads, + # Diagnostics, +) +from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( + generate_kinesis_message, + generate_lambda_invocation_side_effect, +) +from forwarding_lambda import forward_lambda_handler, forward_request_to_lambda + +# from update_ack_file import create_ack_data + + +s3_client = boto3_client("s3", region_name=AWS_REGION) + +LAMBDA_PAYLOADS = LambdaPayloads() + + +@mock_s3 +@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT) +@patch("send_request_to_lambda.CREATE_LAMBDA_NAME", "mock_create_imms") +@patch("send_request_to_lambda.UPDATE_LAMBDA_NAME", "mock_update_imms") +@patch("send_request_to_lambda.DELETE_LAMBDA_NAME", "mock_delete_imms") +class TestForwardingLambda(unittest.TestCase): + + @contextmanager + def common_contexts_for_forwarding_lambda_tests( + self, mock_lambda_payloads=None + ) -> Generator[MagicMock, None, None]: + """ + A context manager which applies common patching for the tests in the TestForwardingLambda class. + Yields mock_create_ack_data. 
+ """ + with ExitStack() as stack: + # stack.enter_context(patch("update_ack_file.s3_client")), # pylint: disable=expression-not-assigned + + # stack.enter_context( + # patch( + # "update_ack_file.s3_client.head_object", + # return_value={"LastModified": datetime(2024, 8, 21, 10, 15, 30)}, + # ) + # ) + + # # Simulate the case where the ack file does not exist + # stack.enter_context( + # patch( + # "update_ack_file.s3_client.get_object", + # side_effect=ClientError({"Error": {"Code": "404"}}, "HeadObject"), + # ), + # ) + + if mock_lambda_payloads: + # Mock lambda.invoke with a different payload for each different lambda + stack.enter_context( + patch( + "utils_for_record_forwarder.lambda_client.invoke", + side_effect=generate_lambda_invocation_side_effect(mock_lambda_payloads), + ) + ) + + # mock_create_ack_data = stack.enter_context(patch("update_ack_file.create_ack_data")) + + yield + + # def test_create_ack_data(self): + # created_at_formatted_string = "20241015T18504900" + # row_id = "test_file_id#1" + + # success_ack_data = { + # "MESSAGE_HEADER_ID": row_id, + # "HEADER_RESPONSE_CODE": "OK", + # "ISSUE_SEVERITY": "Information", + # "ISSUE_CODE": "OK", + # "ISSUE_DETAILS_CODE": "30001", + # "RESPONSE_TYPE": "Business", + # "RESPONSE_CODE": "30001", + # "RESPONSE_DISPLAY": "Success", + # "RECEIVED_TIME": created_at_formatted_string, + # "MAILBOX_FROM": "", + # "LOCAL_ID": "", + # "IMMS_ID": "test_imms_id", + # "OPERATION_OUTCOME": "", + # "MESSAGE_DELIVERY": True, + # } + + # failure_ack_data = { + # "MESSAGE_HEADER_ID": row_id, + # "HEADER_RESPONSE_CODE": "Fatal Error", + # "ISSUE_SEVERITY": "Fatal", + # "ISSUE_CODE": "Fatal Error", + # "ISSUE_DETAILS_CODE": "30002", + # "RESPONSE_TYPE": "Business", + # "RESPONSE_CODE": "30002", + # "RESPONSE_DISPLAY": "Business Level Response Value - Processing Error", + # "RECEIVED_TIME": created_at_formatted_string, + # "MAILBOX_FROM": "", + # "LOCAL_ID": "", + # "IMMS_ID": "", + # "OPERATION_OUTCOME": "Some diagnostics", 
+ # "MESSAGE_DELIVERY": False, + # } + + # # Test case tuples are structured as (test_name, successful_api_response, diagnostics, imms_id, expected output) + # test_cases = [ + # ("ack data for success", True, None, "test_imms_id", success_ack_data), + # ("ack data for failure", False, "Some diagnostics", "", failure_ack_data), + # ] + + # for test_name, successful_api_response, diagnostics, imms_id, expected_output in test_cases: + # with self.subTest(test_name): + # self.assertEqual( + # create_ack_data(created_at_formatted_string, row_id, successful_api_response, diagnostics, imms_id), + # expected_output, + # ) + + def test_forward_request_to_api_new_success(self): + with self.common_contexts_for_forwarding_lambda_tests(deepcopy(LAMBDA_PAYLOADS.SUCCESS)): + forward_request_to_lambda(deepcopy(Message.create_message)) + + # pylint: disable=no-member + # mock_create_ack_data.assert_called_with("20240821T10153000", Message.ROW_ID, True, None, "test_id") + + def test_forward_request_to_api_new_duplicate(self): + with self.common_contexts_for_forwarding_lambda_tests(deepcopy(LAMBDA_PAYLOADS.CREATE.DUPLICATE)): + forward_request_to_lambda(deepcopy(Message.create_message)) + + # pylint: disable=no-member + # mock_create_ack_data.assert_called_with("20240821T10153000", Message.ROW_ID, False, Diagnostics.DUPLICATE, None) + + def test_forward_request_to_api_update_failure(self): + mock_lambda_payloads = { + **deepcopy(LAMBDA_PAYLOADS.UPDATE.VALIDATION_ERROR), + **deepcopy(LAMBDA_PAYLOADS.SEARCH.ID_AND_VERSION_FOUND), + } + with self.common_contexts_for_forwarding_lambda_tests(mock_lambda_payloads): + forward_request_to_lambda(deepcopy(Message.update_message)) + + # pylint: disable=no-member + # mock_create_ack_data.assert_called_with( + # "20240821T10153000", Message.ROW_ID, False, Diagnostics.VALIDATION_ERROR, None + # ) + + def test_forward_request_to_api_update_failure_imms_id_none(self): + with ( + self.common_contexts_for_forwarding_lambda_tests(), + 
patch("utils_for_record_forwarder.lambda_client.invoke") as mock_lambda_client, + ): + forward_request_to_lambda(Message.diagnostics_message) + + # pylint: disable=no-member + # mock_create_ack_data.assert_called_with("20240821T10153000", Message.ROW_ID, False, Message.DIAGNOSTICS, None) + mock_lambda_client.assert_not_called() + + def test_forward_request_to_api_delete_success(self): + with self.common_contexts_for_forwarding_lambda_tests(deepcopy(LAMBDA_PAYLOADS.SUCCESS)): + forward_request_to_lambda(deepcopy(Message.delete_message)) + + # pylint: disable=no-member + # mock_create_ack_data.assert_called_with( + # "20240821T10153000", Message.ROW_ID, True, None, "277befd9-574e-47fe-a6ee-189858af3bb0" + # ) + + def test_forward_lambda_handler(self): + for message in [ + deepcopy(Message.create_message), + deepcopy(Message.update_message), + deepcopy(Message.delete_message), + ]: + with patch("forwarding_lambda.forward_request_to_lambda") as mock_forward_request_to_api: + forward_lambda_handler(generate_kinesis_message(message), None) + mock_forward_request_to_api.assert_called_once_with(message) + + def test_forward_lambda_handler_with_exception(self): + message_body = {**deepcopy(Message.create_message), "operation_request": "INVALID_OPERATION"} + with patch("forwarding_lambda.logger") as mock_logger: + forward_lambda_handler(generate_kinesis_message(message_body), None) + mock_logger.error.assert_called() # if __name__ == "__main__": diff --git a/recordforwarder/tests/test_get_imms_id_and_version.py b/recordforwarder/tests/test_get_imms_id_and_version.py index 4a20f4fb..00230ae6 100644 --- a/recordforwarder/tests/test_get_imms_id_and_version.py +++ b/recordforwarder/tests/test_get_imms_id_and_version.py @@ -1,43 +1,54 @@ # """Tests for get_imms_id_and_version""" -# import unittest -# from unittest.mock import patch -# from moto import mock_s3 -# from get_imms_id_and_version import get_imms_id_and_version -# from errors import IdNotFoundError -# from 
tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import create_mock_search_lambda_response - -# fhir_json_with_identifier_value_and_system = {"identifier": [{"value": "a_value", "system": "a_system"}]} - - -# @mock_s3 -# class TestGetImmsIdAndVersion(unittest.TestCase): -# """ -# Tests for get_imms_id_and_version. Note that these test mock the lambda invocation, so do not test the -# interaction with search lambda. -# """ - -# def test_success(self): -# """Test that imms_id and version are correctly identified from a successful search lambda response.""" -# with patch("clients.lambda_client.invoke", return_value=create_mock_search_lambda_response(200)): -# imms_id, version = get_imms_id_and_version(fhir_json_with_identifier_value_and_system) - -# self.assertEqual(imms_id, "277befd9-574e-47fe-a6ee-189858af3bb0") -# self.assertEqual(version, 2) - -# def test_failure_due_to_empty_search_lambda_return(self): -# """Test that an IdNotFoundError is raised for a successful search lambda response which contains no entries.""" -# with patch( -# "clients.lambda_client.invoke", -# return_value=create_mock_search_lambda_response(200, id_and_version_found=False), -# ): -# with self.assertRaises(IdNotFoundError): -# get_imms_id_and_version(fhir_json_with_identifier_value_and_system) - -# def test_failure_due_to_search_lambda_404(self): -# """Test that an IdNotFoundError is raised for an unsuccessful search lambda response.""" -# with patch( -# "clients.lambda_client.invoke", return_value=create_mock_search_lambda_response(404, "some diagnostics") -# ): -# with self.assertRaises(IdNotFoundError): -# get_imms_id_and_version(fhir_json_with_identifier_value_and_system) +import unittest +from unittest.mock import patch +from copy import deepcopy +from get_imms_id_and_version import get_imms_id_and_version +from errors import IdNotFoundError +from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( + generate_lambda_invocation_side_effect, 
+) +from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import ( + test_imms_fhir_json, + LambdaPayloads, + MOCK_ENVIRONMENT_DICT, +) + + +@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT) +class TestGetImmsIdAndVersion(unittest.TestCase): + """ + Tests for get_imms_id_and_version. + Note that these test mock the lambda invocation, therefore they do not test the interaction with search lambda. + """ + + def test_success(self): + """Test that imms_id and version are correctly identified from a successful search lambda response.""" + with patch( + "clients.lambda_client.invoke", + side_effect=generate_lambda_invocation_side_effect(deepcopy(LambdaPayloads.SEARCH.ID_AND_VERSION_FOUND)), + ): + imms_id, version = get_imms_id_and_version(test_imms_fhir_json) + + self.assertEqual(imms_id, "277befd9-574e-47fe-a6ee-189858af3bb0") + self.assertEqual(version, 2) + + def test_failure_due_to_empty_search_lambda_return(self): + """Test that an IdNotFoundError is raised for a successful search lambda response which contains no entries.""" + with patch( + "clients.lambda_client.invoke", + side_effect=generate_lambda_invocation_side_effect( + deepcopy(LambdaPayloads.SEARCH.ID_AND_VERSION_NOT_FOUND) + ), + ): + with self.assertRaises(IdNotFoundError): + get_imms_id_and_version(test_imms_fhir_json) + + def test_failure_due_to_search_lambda_404(self): + """Test that an IdNotFoundError is raised for an unsuccessful search lambda response.""" + with patch( + "clients.lambda_client.invoke", + side_effect=generate_lambda_invocation_side_effect(deepcopy(LambdaPayloads.SEARCH.FAILURE)), + ): + with self.assertRaises(IdNotFoundError): + get_imms_id_and_version(test_imms_fhir_json) diff --git a/recordforwarder/tests/test_log_structure.py b/recordforwarder/tests/test_log_structure.py index 0f5ddabb..e678363e 100644 --- a/recordforwarder/tests/test_log_structure.py +++ b/recordforwarder/tests/test_log_structure.py @@ -1,287 +1,122 @@ +# """Tests for Splunk logging""" + # 
import unittest # from unittest.mock import patch # import json +# from copy import deepcopy # from datetime import datetime +# from contextlib import contextmanager, ExitStack # from send_request_to_lambda import send_request_to_lambda -# from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import ( -# TEST_IMMS_ID, -# test_fixed_time_taken, -# ) +# from constants import Operations +# from tests.utils_for_recordfowarder_tests.values_for_recordforwarder_tests import Message, Diagnostics # from errors import MessageNotSuccessfulError +# FIXED_DATETIME = datetime(2024, 10, 30, 12, 0, 0) -# class Test_Splunk_logging(unittest.TestCase): -# def setUp(self): -# self.message_body_base = { -# "row_id": "6543219", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "EMIS", -# "operation_requested": "operation_requested", -# "fhir_json": {"resourceType": "Immunization"}, -# } -# self.fixed_datetime = datetime(2024, 10, 29, 12, 0, 0) +# class TestSplunkLogging(unittest.TestCase): +# """Tests for Splunk logging""" -# self.message_body_base_errors = { -# "row_id": "6543219", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "EMIS", -# "operation_requested": "UPDATE", -# "diagnostics": "Unable to obtain IMMS ID", -# } +# log_json_base = { +# "function_name": "send_request_to_lambda", +# "date_time": FIXED_DATETIME.strftime("%Y-%m-%d %H:%M:%S"), +# "supplier": "EMIS", +# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", +# "vaccine_type": "FLU", +# "message_id": "123456", +# "action_flag": "each test replaces this with the relevant action flag", +# } -# self.expected_values = { -# "function_name": "send_request_to_lambda", -# "date_time": self.fixed_datetime.strftime("%Y-%m-%d %H:%M:%S"), -# "status": "success", -# "supplier": "EMIS", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "vaccine_type": "FLU", -# "message_id": "6543219", -# "action_flag": "action_flag", 
-# "time_taken": 1.0, -# } +# expected_log_json_success = {**log_json_base, "status": "success", "time_taken": "1.0s"} -# # Expected splunk log values when there is an error -# self.expected_values_error = { -# "event": { -# "function_name": "send_request_to_lambda", -# "date_time": self.fixed_datetime.strftime("%Y-%m-%d %H:%M:%S"), -# "status": "Fail", -# "supplier": "EMIS", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "vaccine_type": "FLU", -# "message_id": "6543219", -# "action_flag": "UPDATE", -# "time_taken": "1.0s", -# "status_code": 400, -# "error": "Unable to obtain IMMS ID", -# } -# } +# expected_log_json_failure = {**log_json_base, "status": "Fail", "time_taken": "1.0s", "status_code": 400} -# def extract_log_json(self, log_entry): +# def extract_log_json(self, log: str) -> dict: # """Extracts JSON from log entry.""" +# log_entry = log.output[0] # json_start = log_entry.find("{") -# json_str = log_entry[json_start:] +# json_end = log_entry.find("}") +# json_str = log_entry[json_start : json_end + 1] # noqa: E203 # return json.loads(json_str) -# @patch("send_request_to_lambda.send_create_request") -# @patch("send_request_to_lambda.send_update_request") -# @patch("send_request_to_lambda.send_delete_request") -# @patch("log_structure.firehose_logger") -# @patch("time.time") -# @patch("log_structure.datetime") -# def test_splunk_logging_successful_rows( -# self, -# mock_datetime, -# mock_time, -# mock_firehose_logger, -# mock_send_delete_request, -# mock_send_update_request, -# mock_send_create_request, -# ): - -# # mocking datetime and time_taken as fixed values -# mock_datetime.now.return_value = self.fixed_datetime -# mock_time.side_effect = test_fixed_time_taken - -# # Mock return values for each operation -# mock_send_create_request.return_value = TEST_IMMS_ID -# mock_send_update_request.return_value = TEST_IMMS_ID -# mock_send_delete_request.return_value = TEST_IMMS_ID -# operations = [ -# {"operation_requested": "CREATE"}, -# 
{"operation_requested": "UPDATE"}, -# {"operation_requested": "DELETE"}, -# ] - -# for op in operations: -# with self.assertLogs(level="INFO") as log: -# message_body = self.message_body_base.copy() -# message_body["operation_requested"] = op["operation_requested"] - -# result = send_request_to_lambda(message_body) -# self.assertEqual(result, "imms_6543219") -# self.assertGreater(len(log.output), 0) - -# log_json = self.extract_log_json(log.output[0]) - -# expected_values = self.expected_values -# expected_values["action_flag"] = op["operation_requested"] - -# # Iterate over the expected values and assert each one -# for key, expected in expected_values.items(): -# self.assertEqual(log_json[key], expected) - -# self.assertIsInstance(log_json["time_taken"], float) - -# # Check firehose logging call -# mock_firehose_logger.forwarder_send_log.assert_called_once_with({"event": log_json}) -# mock_firehose_logger.forwarder_send_log.reset_mock() - -# @patch("log_structure.firehose_logger") -# @patch("log_structure.logger") -# @patch("time.time") -# @patch("log_structure.datetime") -# def test_splunk_logging_diagnostics_error(self, mock_datetime, mock_time, mock_logger, mock_firehose_logger): -# # Message body with diagnostics to trigger an error, mocking datetime and time_taken as fixed values -# mock_datetime.now.return_value = self.fixed_datetime -# mock_time.side_effect = test_fixed_time_taken -# message_body = self.message_body_base_errors - -# # Exception raised in send_request_to_lambda -# with self.assertRaises(MessageNotSuccessfulError) as context: +# def make_log_assertions(self, log, mock_firehose_logger, operation: str, expected_error=None): +# """Assert that the log_json is as expected, and that the firehose logger was called with the log_json""" +# # Extract log_json +# self.assertGreater(len(log.output), 0) +# log_json = self.extract_log_json(log) + +# # Prepare expected_log_json +# expected_log_json = ( +# deepcopy(self.expected_log_json_success) if not 
expected_error else +# deepcopy(self.expected_log_json_failure) +# ) +# expected_log_json["action_flag"] = operation.upper() +# expected_log_json.update({"error": expected_error} if expected_error else {}) + +# self.assertEqual(log_json, expected_log_json) + +# mock_firehose_logger.forwarder_send_log.assert_called_once_with({"event": log_json}) +# mock_firehose_logger.forwarder_send_log.reset_mock() + +# @contextmanager +# def common_contexts_for_splunk_logging_tests(self): +# """ +# A context manager which applies common patching for the tests in the TestSplunkLogging class. +# Yields mock_firehose_logger and logs (where logs is a list of the captured log entries). +# """ +# with ExitStack() as stack: +# stack.enter_context(patch("time.time", side_effect=(1000000.0, 1000001.0, 1000003.0)))# (start, end, ???) +# stack.enter_context(patch("log_structure.datetime")) +# stack.enter_context(patch("log_structure.datetime.now", return_value=FIXED_DATETIME)) +# mock_firehose_logger = stack.enter_context(patch("log_structure.firehose_logger")) +# logs = stack.enter_context(self.assertLogs(level="INFO")) +# yield mock_firehose_logger, logs + +# def test_splunk_logging_success(self): +# """Tests successful rows""" +# for operation in [Operations.CREATE, Operations.UPDATE, Operations.DELETE]: +# with self.subTest(operation): +# with ( +# self.common_contexts_for_splunk_logging_tests() as (mock_firehose_logger, logs), # noqa: E999 +# patch(f"send_request_to_lambda.send_{operation.lower()}_request", return_value=Message.IMMS_ID), +# ): +# message_body = {**Message.base_message_fields, "operation_requested": operation, "fhir_json": {}} +# result = send_request_to_lambda(message_body) + +# self.assertEqual(result, Message.IMMS_ID) +# self.make_log_assertions(logs, mock_firehose_logger, operation) + +# def test_splunk_logging_failure_during_processing(self): +# """Tests a row which failed processing (and therefore has diagnostics in the message recevied from kinesis)""" +# 
diagnostics = Diagnostics.INVALID_ACTION_FLAG +# operation = Operations.UPDATE +# with ( +# self.common_contexts_for_splunk_logging_tests() as (mock_firehose_logger, logs), # noqa: E999 +# self.assertRaises(MessageNotSuccessfulError) as context, +# ): +# message_body = {**Message.base_message_fields, "operation_requested": operation, +# "diagnostics": diagnostics} # send_request_to_lambda(message_body) -# # Ensure the exception message is as expected -# self.assertEqual(str(context.exception), "Unable to obtain IMMS ID") - -# log_data = mock_logger.exception.call_args[0][0] - -# self.assertIn("Unable to obtain IMMS ID", log_data) - -# firehose_log_data = self.expected_values_error -# mock_firehose_logger.forwarder_send_log.assert_called_once_with(firehose_log_data) - -# @patch("send_request_to_lambda.send_create_request") -# @patch("send_request_to_lambda.send_update_request") -# @patch("send_request_to_lambda.send_delete_request") -# @patch("send_request_to_lambda.forwarder_function_info") # Mock the decorator to simplify the test -# @patch("log_structure.logger") # Patch the logger to verify error logs -# def test_error_logging_in_send_request_to_lambda( -# self, -# mock_logger, -# mock_forwarder_function_info, -# mock_send_delete_request, -# mock_send_update_request, -# mock_send_create_request, -# ): - -# # Define message bodies for each operation to trigger errors -# create_message_body = { -# "operation_requested": "CREATE", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "TestSupplier", -# "fhir_json": {}, # Placeholder for any necessary data structure -# "row_id": "12345", -# } - -# update_message_body = { -# "operation_requested": "UPDATE", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "TestSupplier", -# "fhir_json": {}, # Placeholder for any necessary data structure -# "row_id": "12345", -# "imms_id": "67890", -# "version": "1", -# } - -# delete_message_body = { -# "operation_requested": 
"DELETE", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "TestSupplier", -# "fhir_json": {}, # Placeholder for any necessary data structure -# "imms_id": "67890", -# } - -# # Set up each mock function to raise MessageNotSuccessfulError with custom error messages -# mock_send_create_request.side_effect = MessageNotSuccessfulError("API Error: Unable to create resource") -# mock_send_update_request.side_effect = MessageNotSuccessfulError("API Error: Unable to update resource") -# mock_send_delete_request.side_effect = MessageNotSuccessfulError("API Error: Unable to delete resource") - -# # Test the CREATE operation -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(create_message_body) - -# # Assert the logger recorded the error message for CREATE -# mock_logger.exception.assert_called() # Check that the log was triggered -# self.assertIn("API Error: Unable to create resource", str(mock_logger.exception.call_args)) # Verify message - -# # Reset the mock logger for the next test case -# mock_logger.exception.reset_mock() - -# # Test the UPDATE operation -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(update_message_body) - -# # Assert the logger recorded the error message for UPDATE -# mock_logger.exception.assert_called() -# self.assertIn("API Error: Unable to update resource", str(mock_logger.exception.call_args)) - -# # Reset the mock logger for the next test case -# mock_logger.exception.reset_mock() - -# # Test the DELETE operation -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(delete_message_body) - -# # Assert the logger recorded the error message for DELETE -# mock_logger.exception.assert_called() -# self.assertIn("API Error: Unable to delete resource", str(mock_logger.exception.call_args)) - -# @patch("send_request_to_lambda.send_create_request") -# @patch("send_request_to_lambda.send_update_request") -# 
@patch("send_request_to_lambda.send_delete_request") -# @patch("log_structure.logger") # Patch the logger to verify error logs -# def test_error_logging_operation( -# self, -# mock_logger, -# mock_send_delete_request, -# mock_send_update_request, -# mock_send_create_request, -# ): - -# create_message_body = { -# "row_id": "555555", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "EMIS", -# "operation_requested": "CREATE", -# "fhir_json": {"resourceType": "Immunization"}, -# } - -# update_message_body = { -# "row_id": "7891011", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "EMIS", -# "operation_requested": "UPDATE", -# "fhir_json": {"resourceType": "Immunization"}, -# } - -# delete_message_body = { -# "row_id": "12131415", -# "file_key": "flu_Vaccinations_v5_8HK48_20210730T12000000.csv", -# "supplier": "EMIS", -# "operation_requested": "DELETE", -# "fhir_json": {"resourceType": "Immunization"}, -# } - -# # Set up each mock function to raise MessageNotSuccessfulError with custom error messages -# mock_send_create_request.side_effect = MessageNotSuccessfulError("API Error: Unable to create resource") -# mock_send_update_request.side_effect = MessageNotSuccessfulError("API Error: Unable to update resource") -# mock_send_delete_request.side_effect = MessageNotSuccessfulError("API Error: Unable to delete resource") - -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(create_message_body) - -# mock_logger.exception.assert_called() -# self.assertIn("API Error: Unable to create resource", str(mock_logger.exception.call_args)) -# mock_logger.exception.reset_mock() - -# # Test the UPDATE operation -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(update_message_body) - -# mock_logger.exception.assert_called() -# self.assertIn("API Error: Unable to update resource", str(mock_logger.exception.call_args)) -# mock_logger.exception.reset_mock() - -# # 
Test the DELETE operation -# with self.assertRaises(MessageNotSuccessfulError): -# send_request_to_lambda(delete_message_body) - -# mock_logger.exception.assert_called() -# self.assertIn("API Error: Unable to delete resource", str(mock_logger.exception.call_args)) - - -# if __name__ == "__main__": -# unittest.main() +# self.assertEqual(str(context.exception), diagnostics) +# self.make_log_assertions(logs, mock_firehose_logger, operation, expected_error=diagnostics) + +# def test_splunk_logging_failure_during_forwarding(self): +# """Tests rows which fail during forwarding""" + +# for operation in [Operations.CREATE, Operations.UPDATE, Operations.DELETE]: +# error_message = f"API Error: Unable to {operation.lower()} resource" +# with self.subTest(operation): +# with ( +# self.common_contexts_for_splunk_logging_tests() as (mock_firehose_logger, logs), # noqa: E999 +# self.assertRaises(MessageNotSuccessfulError) as context, +# patch( +# f"send_request_to_lambda.send_{operation.lower()}_request", +# side_effect=MessageNotSuccessfulError(error_message), +# ), +# ): +# message_body = {**Message.base_message_fields, "operation_requested": operation, "fhir_json": {}} +# send_request_to_lambda(message_body) + +# self.assertEqual(str(context.exception), error_message) +# self.make_log_assertions(logs, mock_firehose_logger, operation, expected_error=error_message) diff --git a/recordforwarder/tests/test_utils_for_recordforwarder.py b/recordforwarder/tests/test_utils_for_recordforwarder.py new file mode 100644 index 00000000..f63af20f --- /dev/null +++ b/recordforwarder/tests/test_utils_for_recordforwarder.py @@ -0,0 +1,34 @@ +"""Tests for utils_for_recordforwarder functions""" + +from unittest import TestCase +from unittest.mock import patch +from utils_for_record_forwarder import get_environment + +# from constants import ACK_HEADERS + +# from update_ack_file import create_ack_data + + +class TestUtilsForRecordForwarder(TestCase): + """Tests for utils_for_recordforwarder 
functions""" + + # def test_ack_headers_match_ack_data_keys(self): + # """Ensures that the ACK_HEADERS found in constants, match the headers given as keys in create_ack_data""" + # self.assertEqual(ACK_HEADERS, list(create_ack_data("TEST", "TEST", True).keys())) + + def test_get_environment(self): + "Tests that get_environment returns the correct environment" + # Each test case tuple has the structure (environment, expected_result) + test_cases = ( + ("internal-dev", "internal-dev"), + ("int", "int"), + ("ref", "ref"), + ("sandbox", "sandbox"), + ("prod", "prod"), + ("pr-22", "internal-dev"), + ) + + for environment, expected_result in test_cases: + with self.subTest(f"SubTest for environment: {environment}"): + with patch.dict("os.environ", {"ENVIRONMENT": environment}): + self.assertEqual(get_environment(), expected_result) diff --git a/recordforwarder/tests/utils_for_recordfowarder_tests/utils_for_recordforwarder_tests.py b/recordforwarder/tests/utils_for_recordfowarder_tests/utils_for_recordforwarder_tests.py index 5c48fa90..e3e5ff63 100644 --- a/recordforwarder/tests/utils_for_recordfowarder_tests/utils_for_recordforwarder_tests.py +++ b/recordforwarder/tests/utils_for_recordfowarder_tests/utils_for_recordforwarder_tests.py @@ -1,19 +1,29 @@ -from unittest.mock import MagicMock -import requests +"""Utils for recordfowarder tests""" + import json +import base64 +from io import StringIO +from typing import Union + + +def generate_kinesis_message(message: dict) -> str: + """Convert a dictionary to a kinesis message""" + kinesis_encoded_data = base64.b64encode(json.dumps(message).encode("utf-8")).decode("utf-8") + return {"Records": [{"kinesis": {"data": kinesis_encoded_data}}]} -def create_mock_operation_outcome(diagnostics: str) -> dict: +def generate_operation_outcome(diagnostics: str, code: str = "duplicate") -> dict: + """Generates an Operation Outcome, with the given diagnostics and code""" return { "resourceType": "OperationOutcome", - "id": 
"45b552ca-755a-473f-84df-c7e7767bd2ac", + "id": "an_imms_id", "meta": {"profile": ["https://simplifier.net/guide/UKCoreDevelopment2/ProfileUKCore-OperationOutcome"]}, "issue": [ { "severity": "error", - "code": "duplicate", + "code": code, "details": { - "coding": [{"system": "https://fhir.nhs.uk/Codesystem/http-error-codes", "code": "DUPLICATE"}] + "coding": [{"system": "https://fhir.nhs.uk/Codesystem/http-error-codes", "code": code.upper()}] }, "diagnostics": diagnostics, } @@ -21,46 +31,36 @@ def create_mock_operation_outcome(diagnostics: str) -> dict: } -response_body_id_and_version_not_found = { - "resourceType": "Bundle", - "type": "searchset", - "link": [ - { - "relation": "self", - "url": "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/Immunization?" - + "immunization.identifier=None&_elements=None", - } - ], - "entry": [], - "total": 0, -} +def generate_payload(status_code: int, headers: Union[dict, None] = None, body: dict = None) -> dict: + """ + Generates a payload with the given status code, headers and body + (body is converted to json string, and the key-value pair is omitted if there is no body). + """ + return {"statusCode": status_code, **({"body": json.dumps(body)} if body is not None else {}), "headers": headers} -response_body_id_and_version_found = { - "resourceType": "Bundle", - "type": "searchset", - "entry": [{"resource": {"id": "277befd9-574e-47fe-a6ee-189858af3bb0", "meta": {"versionId": 2}}}], - "total": 1, -} +def generate_lambda_payload( + status_code: int, headers: Union[dict, None] = None, body: dict = None, invocation_status_code: int = 202 +) -> dict: + """ + Generates a mocked lambda return value, with the given status code, headers and body. + The body key-value pair is omitted if there is no body argument is given. 
+ """ + return { + "Payload": StringIO(json.dumps(generate_payload(status_code, headers, body))), + "StatusCode": invocation_status_code, + } -def create_mock_search_lambda_response( - status_code: int, diagnostics: str = None, id_and_version_found: bool = True -) -> requests.Response: - """Creates a mock response for a request sent to the search lambda for imms_id and version.""" - body = ( - create_mock_operation_outcome(diagnostics) - if diagnostics - else response_body_id_and_version_found if id_and_version_found else response_body_id_and_version_not_found - ) +def generate_lambda_invocation_side_effect(mock_lambda_payloads): + """ + Takes a dictionary as input with key-value pairs in the format LAMBDA_TYPE: mock_response_payload, where + LAMBDA_TYPEs are CREATE, UPDATE, DELETE and SEARCH. + Returns a function which mocks the side effect of calling lambda_client.invoke on the relevant Imms FHIR API lambda. + """ - mock_response = MagicMock() - mock_response["Payload"].read.return_value = json.dumps( - { - "statusCode": status_code, - "headers": {"Location": "https://example.com/immunization/test_id"}, - **({"body": json.dumps(body)} if body is not None else {}), - } - ) + def lambda_invocation_side_effect(FunctionName, *_args, **_kwargs): # pylint: disable=invalid-name + lambda_type = FunctionName.split("_")[1].upper() # Tests mock FunctionNames as mock_lambdatype_lambda_name + return mock_lambda_payloads[lambda_type] - return mock_response + return lambda_invocation_side_effect diff --git a/recordforwarder/tests/utils_for_recordfowarder_tests/values_for_recordforwarder_tests.py b/recordforwarder/tests/utils_for_recordfowarder_tests/values_for_recordforwarder_tests.py index e29cb3f6..bb132c0d 100644 --- a/recordforwarder/tests/utils_for_recordfowarder_tests/values_for_recordforwarder_tests.py +++ b/recordforwarder/tests/utils_for_recordfowarder_tests/values_for_recordforwarder_tests.py @@ -1,46 +1,62 @@ """Values for use in recordforwarder tests""" +from 
constants import Operations +from tests.utils_for_recordfowarder_tests.utils_for_recordforwarder_tests import ( + generate_lambda_payload, + generate_operation_outcome, +) + +MOCK_ENVIRONMENT_DICT = { + "SOURCE_BUCKET_NAME": "immunisation-batch-internal-dev-data-sources", + "ACK_BUCKET_NAME": "immunisation-batch-internal-dev-data-destinations", + "ENVIRONMENT": "internal-dev", + "LOCAL_ACCOUNT_ID": "123456789012", + "SHORT_QUEUE_PREFIX": "imms-batch-internal-dev", + "CREATE_LAMBDA_NAME": "mock_create_imms_lambda_name", + "UPDATE_LAMBDA_NAME": "mock_update_imms_lambda_name", + "DELETE_LAMBDA_NAME": "mock_delete_imms_lambda_name", + "SEARCH_LAMBDA_NAME": "mock_search_imms_lambda_name", +} + SOURCE_BUCKET_NAME = "immunisation-batch-internal-dev-data-sources" DESTINATION_BUCKET_NAME = "immunisation-batch-internal-dev-data-destinations" -CONFIG_BUCKET_NAME = "immunisation-batch-internal-dev-configs" -STREAM_NAME = "imms-batch-internal-dev-processingdata-stream" AWS_REGION = "eu-west-2" -TEST_VACCINE_TYPE = "flu" -TEST_SUPPLIER = "EMIS" -TEST_ODS_CODE = "8HK48" -TEST_ROW_ID = "123456" -TEST_IMMS_ID = "imms_6543219" -TEST_FILE_KEY = f"{TEST_VACCINE_TYPE}_Vaccinations_v5_{TEST_ODS_CODE}_20210730T12000000.csv" -TEST_ACK_FILE_KEY = f"forwardedFile/{TEST_VACCINE_TYPE}_Vaccinations_v5_{TEST_ODS_CODE}_20210730T12000000_BusAck.csv" +class TestFile: + """Class containing a test file, its constituent variables and its corresponding ack file""" + + VACCINE_TYPE = "flu" + SUPPLIER = "EMIS" + ODS_CODE = "8HK48" + + FILE_KEY = f"{VACCINE_TYPE}_Vaccinations_v5_{ODS_CODE}_20210730T12000000.csv" + ACK_FILE_KEY = f"forwardedFile/{FILE_KEY.split('.')[0]}_BusAck.csv" + + +class Urls: + """Urls for use within FHIR Immunization Resource json""" + + SNOMED = "http://snomed.info/sct" + NHS_NUMBER = "https://fhir.nhs.uk/Id/nhs-number" + VACCINATION_PROCEDURE = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure" + RAVS = "https://www.ravs.england.nhs.uk/" + ODS = 
"https://fhir.nhs.uk/Id/ods-organization-code" + + +MOCK_IDENTIFIER_SYSTEM = Urls.RAVS +MOCK_IDENTIFIER_VALUE = "Vacc1" -MOCK_ENVIRONMENT_DICT = { - "ENVIRONMENT": "internal-dev", - "LOCAL_ACCOUNT_ID": "123456", - "ACK_BUCKET_NAME": DESTINATION_BUCKET_NAME, - "SHORT_QUEUE_PREFIX": "imms-batch-internal-dev", - "KINESIS_STREAM_ARN": f"arn:aws:kinesis:{AWS_REGION}:123456789012:stream/{STREAM_NAME}", -} -test_fhir_json = { +test_imms_fhir_json = { "resourceType": "Immunization", "contained": [ - { - "resourceType": "Practitioner", - "id": "Pract1", - "name": [{"family": "Doe", "given": ["Jane"]}], - }, + {"resourceType": "Practitioner", "id": "Pract1", "name": [{"family": "Doe", "given": ["Jane"]}]}, { "resourceType": "Patient", "id": "Pat1", - "identifier": [ - { - "system": "https://fhir.nhs.uk/Id/nhs-number", - "value": "1234567890", - } - ], + "identifier": [{"system": Urls.NHS_NUMBER, "value": "1234567890"}], "name": [{"family": "SMITH", "given": ["JOHN"]}], "gender": "male", "birthDate": "2000-01-01", @@ -49,11 +65,11 @@ ], "extension": [ { - "url": "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure", + "url": Urls.VACCINATION_PROCEDURE, "valueCodeableConcept": { "coding": [ { - "system": "http://snomed.info/sct", + "system": Urls.SNOMED, "code": "123456789", "display": "Administration of vaccine product containing only Dummy antigen (procedure)", } @@ -61,17 +77,12 @@ }, } ], - "identifier": [ - { - "system": "https://www.ravs.england.nhs.uk/", - "value": "0001_TEST_v1_RUN_1_ABCD-123_Dose_seq_01", - } - ], + "identifier": [{"system": MOCK_IDENTIFIER_SYSTEM, "value": MOCK_IDENTIFIER_VALUE}], "status": "completed", "vaccineCode": { "coding": [ { - "system": "http://snomed.info/sct", + "system": Urls.SNOMED, "code": "987654321", "display": "Dummy vaccine powder and solvent for solution (product)", } @@ -82,82 +93,36 @@ "recorded": "2024-01-01T00:00:00+00:00", "primarySource": True, "manufacturer": {"display": "Dummy Pharma"}, - 
"location": { - "type": "Location", - "identifier": { - "value": "ABCDE", - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - }, - }, + "location": {"identifier": {"value": "ABCDE", "system": Urls.ODS}}, "lotNumber": "DUMMYLOT", "expirationDate": "2024-12-31", "site": { "coding": [ - { - "system": "http://snomed.info/sct", - "code": "999999999", - "display": "Right upper arm structure (body structure)", - } + {"system": Urls.SNOMED, "code": "999999999", "display": "Right upper arm structure (body structure)"} ] }, "route": { - "coding": [ - { - "system": "http://snomed.info/sct", - "code": "111111111", - "display": "Subcutaneous route (qualifier value)", - } - ] - }, - "doseQuantity": { - "system": "http://snomed.info/sct", - "value": 0.5, - "unit": "Milliliter (qualifier value)", - "code": "123456789", + "coding": [{"system": Urls.SNOMED, "code": "111111111", "display": "Subcutaneous route (qualifier value)"}] }, + "doseQuantity": {"system": Urls.SNOMED, "value": 0.5, "unit": "Milliliter (qualifier value)", "code": "123456789"}, "performer": [ {"actor": {"reference": "#Pract1"}}, { "actor": { "type": "Organization", - "identifier": { - "system": "https://fhir.nhs.uk/Id/ods-organization-code", - "value": "DUMMYORG", - }, + "identifier": {"system": Urls.ODS, "value": "DUMMYORG"}, } }, ], - "reasonCode": [{"coding": [{"system": "http://snomed.info/sct", "code": "dummy"}]}], + "reasonCode": [{"coding": [{"system": Urls.SNOMED, "code": "dummy"}]}], "protocolApplied": [ { - "targetDisease": [ - { - "coding": [ - { - "system": "http://snomed.info/sct", - "code": "123456789", - "display": "Dummy disease caused by dummy virus", - } - ] - } - ], + "targetDisease": [{"coding": [{"system": Urls.SNOMED, "code": "123456789", "display": "Dummy disease"}]}], "doseNumberPositiveInt": 1, } ], } -test_fixed_time_taken = [ - 1000000.0, - 1000001.0, - 1000001.0, - 1000000.0, - 1000001.0, - 1000001.0, - 1000000.0, - 1000001.0, - 1000001.0, -] - class Diagnostics: 
"""Diagnostics messages""" @@ -168,3 +133,112 @@ class Diagnostics: UNABLE_TO_OBTAIN_IMMS_ID = "Unable to obtain imms event id" UNABLE_TO_OBTAIN_VERSION = "Unable to obtain current imms event version" INVALID_CONVERSION = "Unable to convert row to FHIR Immunization Resource JSON format" + DUPLICATE = f"The provided identifier: {MOCK_IDENTIFIER_SYSTEM}#{MOCK_IDENTIFIER_VALUE} is duplicated" + MISSING_EVENT_ID = "the provided event ID is either missing or not in the expected format." + VALIDATION_ERROR = ( + "Validation errors: The provided immunization id:test_id doesn't match with the content of the request body" + ) + + +class Message: + """Class containing example kinesis messages""" + + ROW_ID = "123456" + IMMS_ID = "277befd9-574e-47fe-a6ee-189858af3bb0" + DIAGNOSTICS = Diagnostics.MISSING_UNIQUE_ID + base_message_fields = {"row_id": ROW_ID, "file_key": TestFile.FILE_KEY, "supplier": TestFile.SUPPLIER} + create_message = {**base_message_fields, "fhir_json": test_imms_fhir_json, "operation_requested": Operations.CREATE} + update_message = {**base_message_fields, "fhir_json": test_imms_fhir_json, "operation_requested": Operations.UPDATE} + delete_message = {**base_message_fields, "fhir_json": test_imms_fhir_json, "operation_requested": Operations.DELETE} + diagnostics_message = {**base_message_fields, "diagnostics": DIAGNOSTICS} + + +lambda_success_headers = {"Location": "https://example.com/immunization/test_id"} + + +class SearchLambdaResponseBody: + """Examples of response body for get_imms_id_and_version""" + + id_and_version_not_found = { + "resourceType": "Bundle", + "type": "searchset", + "link": [ + { + "relation": "self", + "url": "https://internal-dev.api.service.nhs.uk/immunisation-fhir-api/Immunization?" 
+ + "immunization.identifier=None&_elements=None", + } + ], + "entry": [], + "total": 0, + } + + id_and_version_found = { + "resourceType": "Bundle", + "type": "searchset", + "entry": [{"resource": {"id": Message.IMMS_ID, "meta": {"versionId": 2}}}], + "total": 1, + } + + +class LambdaPayloads: + """ + Class containing dictionaries of mock lambda payloads, to be used as inputs for the + generate_lambda_invocation_side_effect function + """ + + class CREATE: + """LambdaPayloads for the CREATE lambda""" + + SUCCESS = {Operations.CREATE: generate_lambda_payload(status_code=200, headers=lambda_success_headers)} + + DUPLICATE = { + Operations.CREATE: generate_lambda_payload( + status_code=422, body=generate_operation_outcome(diagnostics=Diagnostics.DUPLICATE, code="duplicate") + ) + } + + class UPDATE: + """LambdaPayloads for the UPDATE lambda""" + + SUCCESS = {Operations.UPDATE: generate_lambda_payload(status_code=200)} + + MISSING_EVENT_ID = { + Operations.UPDATE: generate_lambda_payload( + 400, body=generate_operation_outcome(Diagnostics.MISSING_EVENT_ID) + ) + } + + VALIDATION_ERROR = { + Operations.UPDATE: generate_lambda_payload( + status_code=422, body=generate_operation_outcome(Diagnostics.VALIDATION_ERROR) + ) + } + + class DELETE: + """LambdaPayloads for the DELETE lambda""" + + SUCCESS = {Operations.DELETE: generate_lambda_payload(status_code=204)} + + class SEARCH: + """LambdaPayloads for the SEARCH lambda""" + + ID_AND_VERSION_FOUND = { + Operations.SEARCH: generate_lambda_payload( + status_code=200, body=SearchLambdaResponseBody.id_and_version_found + ) + } + + ID_AND_VERSION_NOT_FOUND = { + Operations.SEARCH: generate_lambda_payload( + status_code=200, body=SearchLambdaResponseBody.id_and_version_not_found + ) + } + + FAILURE = { + Operations.SEARCH: generate_lambda_payload( + status_code=404, body=generate_operation_outcome("some_diagnostics") + ) + } + + SUCCESS = {**CREATE.SUCCESS, **UPDATE.SUCCESS, **DELETE.SUCCESS, **SEARCH.ID_AND_VERSION_FOUND}