Skip to content

Commit

Permalink
Merge pull request #98 from NHSDigital/AMB-0008---Final-Changes
Browse files Browse the repository at this point in the history
AMB-0008- final changes
  • Loading branch information
Valswyn-NHS authored Nov 12, 2024
2 parents 65f8be2 + d216244 commit afca140
Show file tree
Hide file tree
Showing 36 changed files with 1,884 additions and 1,693 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/sonarcube.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
- name: Run unittest with filenameprocessor-coverage
run: |
pip install poetry moto==4.2.11 coverage redis botocore==1.35.49 simplejson
pip install poetry moto==4.2.11 coverage redis botocore==1.35.49 simplejson pandas
poetry run coverage run --source=filenameprocessor -m unittest discover -s filenameprocessor
poetry run coverage xml -o filenameprocessor-coverage.xml
Expand Down
24 changes: 17 additions & 7 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,21 +1,31 @@

.direnv
**/.terraform
.direnv/
node_modules/
bin/
dist/
build/
public/
newman/
.idea
.idea/
.DS_Store
.#*
**/*.iml

__pycache__/
.envrc
.idea/
.venv/

.env
.envrc
smoketest-report.xml
env
lambda_typescript/**/*.js
terraform/zips
.dir-locals.el
*.pyc
.python-version
**/.terraform/
sandbox/specification/
openapi.json

**/.vscode/**/*
!**/.vscode/settings.json.default

devtools/volume/
4 changes: 1 addition & 3 deletions azure/azure-build-pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,4 @@ extends:
template: ./templates/build-pipeline.yml
parameters:
service_name: ${{ variables.service_name }}
short_service_name: ${{ variables.short_service_name }}
post_lint:
- template: ./templates/build.yml
short_service_name: ${{ variables.short_service_name }}
6 changes: 0 additions & 6 deletions azure/templates/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,3 @@ steps:
displayName: Test ecs code for recordprocessor
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/recordprocessor"
- bash: |
BUILDKIT_PROGRESS=plain docker build --target test -t imms-batch-build -f Dockerfile .
displayName: Test lambda code for recordforwarder
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/recordforwarder"
37 changes: 0 additions & 37 deletions filenameprocessor/src/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,43 +8,6 @@ class Constants:

VALID_VERSIONS = ["V5"]

EXPECTED_CSV_HEADERS = [
"NHS_NUMBER",
"PERSON_FORENAME",
"PERSON_SURNAME",
"PERSON_DOB",
"PERSON_GENDER_CODE",
"PERSON_POSTCODE",
"DATE_AND_TIME",
"SITE_CODE",
"SITE_CODE_TYPE_URI",
"UNIQUE_ID",
"UNIQUE_ID_URI",
"ACTION_FLAG",
"PERFORMING_PROFESSIONAL_FORENAME",
"PERFORMING_PROFESSIONAL_SURNAME",
"RECORDED_DATE",
"PRIMARY_SOURCE",
"VACCINATION_PROCEDURE_CODE",
"VACCINATION_PROCEDURE_TERM",
"DOSE_SEQUENCE",
"VACCINE_PRODUCT_CODE",
"VACCINE_PRODUCT_TERM",
"VACCINE_MANUFACTURER",
"BATCH_NUMBER",
"EXPIRY_DATE",
"SITE_OF_VACCINATION_CODE",
"SITE_OF_VACCINATION_TERM",
"ROUTE_OF_VACCINATION_CODE",
"ROUTE_OF_VACCINATION_TERM",
"DOSE_AMOUNT",
"DOSE_UNIT_CODE",
"DOSE_UNIT_TERM",
"INDICATION_CODE",
"LOCATION_CODE",
"LOCATION_CODE_TYPE_URI",
]

# Mappings from ODS code to supplier name.
# NOTE: Any ODS code not found in this dictionary's keys is invalid for this service
ODS_TO_SUPPLIER_MAPPINGS = {
Expand Down
18 changes: 10 additions & 8 deletions filenameprocessor/src/file_name_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from uuid import uuid4
from initial_file_validation import initial_file_validation
from send_sqs_message import make_and_send_sqs_message
from make_and_upload_ack_file import make_and_upload_ack_file
from make_and_upload_ack_file import make_and_upload_the_ack_file
from s3_clients import s3_client
from elasticcache import upload_to_elasticache
from log_structure import function_info
Expand Down Expand Up @@ -42,13 +42,15 @@ def lambda_handler(event, context): # pylint: disable=unused-argument
# Process the file
if "data-sources" in bucket_name:
# Process file from batch_data_source_bucket with validation
validation_passed, permission = initial_file_validation(file_key, bucket_name)
validation_passed, permission = initial_file_validation(file_key)
message_delivered = (
make_and_send_sqs_message(file_key, message_id, permission) if validation_passed else False
)
make_and_upload_ack_file(
message_id, file_key, validation_passed, message_delivered, created_at_formatted_string
make_and_send_sqs_message(file_key, message_id, permission, created_at_formatted_string)
if validation_passed else False
)
if not validation_passed:
make_and_upload_the_ack_file(
message_id, file_key, message_delivered, created_at_formatted_string
)
return {
"statusCode": 200,
"body": json_dumps("Successfully sent to SQS queue"),
Expand All @@ -74,8 +76,8 @@ def lambda_handler(event, context): # pylint: disable=unused-argument
logging.error("Error processing file'%s': %s", file_key, str(error))
error_files.append(file_key)
if "data-sources" in bucket_name:
make_and_upload_ack_file(
message_id, file_key, validation_passed, message_delivered, created_at_formatted_string
make_and_upload_the_ack_file(
message_id, file_key, message_delivered, created_at_formatted_string
)
return {
"statusCode": 400,
Expand Down
57 changes: 2 additions & 55 deletions filenameprocessor/src/initial_file_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from datetime import datetime
from constants import Constants
from fetch_permissions import get_permissions_config_json_from_cache
from utils_for_filenameprocessor import extract_file_key_elements, get_csv_content_dict_reader
from utils_for_filenameprocessor import extract_file_key_elements

logger = logging.getLogger()

Expand All @@ -29,11 +29,6 @@ def is_valid_datetime(timestamp: str) -> bool:
return True


def validate_content_headers(csv_content_reader):
"""Returns a bool to indicate whether the given CSV headers match the 34 expected headers exactly"""
return csv_content_reader.fieldnames == Constants.EXPECTED_CSV_HEADERS


def get_supplier_permissions(supplier: str) -> list:
"""
Returns the permissions for the given supplier. Returns an empty list if the permissions config json could not
Expand All @@ -48,40 +43,7 @@ def validate_vaccine_type_permissions(supplier: str, vaccine_type: str):
return vaccine_type in " ".join(allowed_permissions)


def validate_action_flag_permissions(csv_content_dict_reader, supplier: str, vaccine_type: str) -> bool:
"""
Returns True if the supplier has permission to perform ANY of the requested actions for the given vaccine type,
else False.
"""
# Obtain the allowed permissions for the supplier
allowed_permissions_set = set(get_supplier_permissions(supplier))

# If the supplier has full permissions for the vaccine type return True
if f"{vaccine_type}_FULL" in allowed_permissions_set:
logger.info("%s has FULL permissions to create, update and delete", supplier)
return True

# Extract a list of all unique operation permissions requested in the csv file
operations_requested = set()
for row in csv_content_dict_reader:
action_flag = row.get("ACTION_FLAG", "").upper()
operations_requested.add("CREATE" if action_flag == "NEW" else action_flag)

# Check if any of the CSV permissions match the allowed permissions
operation_requests_set = {f"{vaccine_type}_{operation}" for operation in operations_requested}
if operation_requests_set.intersection(allowed_permissions_set):
logger.info(
"%s permissions %s matches one of the requested permissions required to %s",
supplier,
allowed_permissions_set,
operation_requests_set,
)
return True

return False


def initial_file_validation(file_key: str, bucket_name: str):
def initial_file_validation(file_key: str):
"""
Returns True if all elements of file key are valid, content headers are valid and the supplier has the
appropriate permissions. Else returns False.
Expand All @@ -108,24 +70,9 @@ def initial_file_validation(file_key: str, bucket_name: str):
logger.error("Initial file validation failed: invalid file key")
return False

# Obtain the file content
csv_content_dict_reader = get_csv_content_dict_reader(bucket_name=bucket_name, file_key=file_key)

# Validate the content headers
if not validate_content_headers(csv_content_dict_reader):
logger.error("Initial file validation failed: incorrect column headers")
return False

# Validate has permissions for the vaccine type
if not validate_vaccine_type_permissions(supplier, vaccine_type):
logger.error("Initial file validation failed: %s does not have permissions for %s", supplier, vaccine_type)
return False

# Validate has permission to perform at least one of the requested actions
if not validate_action_flag_permissions(csv_content_dict_reader, supplier, vaccine_type):
logger.info(
"Initial file validation failed: %s does not have permissions for any csv ACTION_FLAG operations", supplier
)
return False

return True, get_permissions_config_json_from_cache().get("all_permissions", {}).get(supplier, [])
23 changes: 11 additions & 12 deletions filenameprocessor/src/make_and_upload_ack_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,21 @@
from s3_clients import s3_client


def make_ack_data(
message_id: str, validation_passed: bool, message_delivered: bool, created_at_formatted_string
def make_the_ack_data(
message_id: str, message_delivered: bool, created_at_formatted_string: str
) -> dict:
"""Returns a dictionary of ack data based on the input values. Dictionary keys are the ack file headers,
dictionary values are the values for the ack file row"""
success_display = "Success"
failure_display = "Infrastructure Level Response Value - Processing Error"
return {
"MESSAGE_HEADER_ID": message_id,
"HEADER_RESPONSE_CODE": "Success" if (validation_passed and message_delivered) else "Failure",
"ISSUE_SEVERITY": "Information" if validation_passed else "Fatal",
"ISSUE_CODE": "OK" if validation_passed else "Fatal Error",
"ISSUE_DETAILS_CODE": "20013" if validation_passed else "10001",
"HEADER_RESPONSE_CODE": "Failure",
"ISSUE_SEVERITY": "Fatal",
"ISSUE_CODE": "Fatal Error",
"ISSUE_DETAILS_CODE": "10001",
"RESPONSE_TYPE": "Technical",
"RESPONSE_CODE": "20013" if (validation_passed and message_delivered) else "10002",
"RESPONSE_DISPLAY": success_display if (validation_passed and message_delivered) else failure_display,
"RESPONSE_CODE": "10002",
"RESPONSE_DISPLAY": failure_display,
"RECEIVED_TIME": created_at_formatted_string,
"MAILBOX_FROM": "", # TODO: Leave blank for DPS, add mailbox if from mesh mailbox
"LOCAL_ID": "", # TODO: Leave blank for DPS, add from ctl file if data picked up from MESH mailbox
Expand All @@ -47,9 +46,9 @@ def upload_ack_file(file_key: str, ack_data: dict) -> None:
s3_client.upload_fileobj(csv_bytes, ack_bucket_name, ack_filename)


def make_and_upload_ack_file(
message_id: str, file_key: str, validation_passed: bool, message_delivered: bool, created_at_formatted_string
def make_and_upload_the_ack_file(
message_id: str, file_key: str, message_delivered: bool, created_at_formatted_string: str
) -> None:
"""Creates the ack file and uploads it to the S3 ack bucket"""
ack_data = make_ack_data(message_id, validation_passed, message_delivered, created_at_formatted_string)
ack_data = make_the_ack_data(message_id, message_delivered, created_at_formatted_string)
upload_ack_file(file_key=file_key, ack_data=ack_data)
10 changes: 7 additions & 3 deletions filenameprocessor/src/send_sqs_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ def send_to_supplier_queue(message_body: dict) -> bool:
return True


def make_message_body_for_sqs(file_key: str, message_id: str, permission: str) -> dict:
def make_message_body_for_sqs(file_key: str, message_id: str, permission: str,
created_at_formatted_string: str) -> dict:
"""Returns the message body for the message which will be sent to SQS"""
file_key_elements = extract_file_key_elements(file_key)
return {
Expand All @@ -42,13 +43,16 @@ def make_message_body_for_sqs(file_key: str, message_id: str, permission: str) -
"timestamp": file_key_elements["timestamp"],
"filename": file_key,
"permission": permission,
"created_at_formatted_string": created_at_formatted_string
}


def make_and_send_sqs_message(file_key: str, message_id: str, permission: str) -> bool:
def make_and_send_sqs_message(file_key: str, message_id: str, permission: str,
created_at_formatted_string: str) -> bool:
"""
Attempts to send a message to the SQS queue.
    Returns a bool to indicate whether the message has been sent successfully.
"""
message_body = make_message_body_for_sqs(file_key=file_key, message_id=message_id, permission=permission)
message_body = make_message_body_for_sqs(file_key=file_key, message_id=message_id, permission=permission,
created_at_formatted_string=created_at_formatted_string)
return send_to_supplier_queue(message_body)
Loading

0 comments on commit afca140

Please sign in to comment.