From 8760354db7c2bba575a9b6a8a19c684b135a22f1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20Gr=C3=BCndner?=
Date: Fri, 22 Mar 2024 16:03:01 +0100
Subject: [PATCH 1/6] Add query to create list of installed plugins across sites

---
 .env.default                                |   3 +-
 .gitignore                                  |   4 +-
 docker/Dockerfile                           |   1 +
 docker/docker-compose.yml                   |   1 +
 docker/docker-entrypoint.sh                 |   5 +-
 src/execute-local.sh                        |   7 +-
 .../execute_collect_activity_definitions.py | 212 ++++++++++++++++++
 src/py/feasibility-monitoring.py            |  15 ++
 8 files changed, 243 insertions(+), 5 deletions(-)
 create mode 100644 src/py/execute_collect_activity_definitions.py

diff --git a/.env.default b/.env.default
index cef135a..0f3a671 100644
--- a/.env.default
+++ b/.env.default
@@ -15,7 +15,8 @@ HISTORY_TABLE_LEN=14
 EXECUTE_FEAS_TEST=false
 CONFLUENCE_PAGE_ID_FEAS=""
 EXECUTE_DSF_PING_TEST=false
+EXECUTE_ACTIVITY_DEF_COLLECTION=false
 CONFLUENCE_PAGE_ID_PING=""
 LOCAL_DSF_CERT_PATH=
 LOCAL_DSF_KEY_PATH=
-LOG_LEVEL=INFO
\ No newline at end of file
+LOG_LEVEL=INFO
diff --git a/.gitignore b/.gitignore
index 754df0e..82c8fe3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,9 @@
 feasibility-results.json
 dev-call.sh
 reports/*.json
+reports/*.csv
 certs/
 temp/
 __pycache__/
-config/config.yml
\ No newline at end of file
+config/config.yml
+venv
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 56b2d5f..609c65e 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -5,6 +5,7 @@ RUN mkdir -p /opt/reportclient
 COPY src/py/feasibility-monitoring.py /opt/reportclient/src/py/feasibility-monitoring.py
 COPY src/py/execute_ping_test.py /opt/reportclient/src/py/execute_ping_test.py
 COPY src/py/execute_feasibility_test.py /opt/reportclient/src/py/execute_feasibility_test.py
+COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_activity_definitions.py
 COPY config/input-queries.json /opt/reportclient/config/input-queries.json
 COPY config/history-query.json /opt/reportclient/config/history-query.json
 COPY config/config.yml /opt/reportclient/config/config.yml
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 86267a0..ce510ee 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -22,6 +22,7 @@ services:
       - EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
       - CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
       - LOG_LEVEL=${LOG_LEVEL:-"INFO"}
+      - EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
     volumes:
       - ${LOCAL_DSF_CERT_PATH:-./cert/dsf-cert.cer}:/opt/reportclient/certs/dsf-cert.cer
       - ${LOCAL_DSF_KEY_PATH:-./cert/dsf-key.key}:/opt/reportclient/certs/dsf-key.key
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
index f9bb151..0542c87 100644
--- a/docker/docker-entrypoint.sh
+++ b/docker/docker-entrypoint.sh
@@ -20,8 +20,11 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
 HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
+EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
 LOG_LEVEL=${LOG_LEVEL:-"INFO"}
+
+
 
 python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
     --client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \
     --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
@@ -29,4 +32,4 @@ python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key
     --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
     --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
     --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
-    --log_level "$LOG_LEVEL"
+    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level "$LOG_LEVEL"
diff --git a/src/execute-local.sh b/src/execute-local.sh
index 2ce5802..8091db1 100644
--- a/src/execute-local.sh
+++ b/src/execute-local.sh
@@ -20,11 +20,14 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
 HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=$CONFLUENCE_PAGE_ID_PING
+EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
+LOG_LEVEL=${LOG_LEVEL:-"INFO"}
 
-python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
+python3 src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
     --client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \
     --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
     --send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\
     --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
     --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
-    --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"
+    --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
+    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level $LOG_LEVEL
diff --git a/src/py/execute_collect_activity_definitions.py b/src/py/execute_collect_activity_definitions.py
new file mode 100644
index 0000000..99eb38b
--- /dev/null
+++ b/src/py/execute_collect_activity_definitions.py
@@ -0,0 +1,212 @@
+from prettytable import PrettyTable
+import requests
+import logging
+import csv
+
+process_name_to_plugin_map = {
+    "Report Autostart": "mii-process-report",
+    "Report Send": "mii-process-report",
+    "Report Receive": "mii-process-report",
+    "FeasibilityRequest": "mii-process-feasibility",
+    "ExecuteFeasibility": "mii-process-feasibility",
+    "dataSend": "mii-process-data-transfer",
+    "dataReceive": "mii-process-data-transfer",
+    "Ping": "dsf-process-ping-pong",
+    "Pong": "dsf-process-ping-pong",
+    "PingAutostart": "dsf-process-ping-pong",
+    "DownloadAllowList": "dsf-process-allowlist",
+    "UpdateAllowList": "dsf-process-allowlist",
+}
+
+
+def get_next_link(link_elem):
+    for elem in link_elem:
+        if elem["relation"] == "next":
+            return elem["url"]
+
+    return None
+
+
+def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path):
+
+    result_entry = []
+    next_link = get_next_link(resp.json()["link"])
+    if "entry" not in resp.json().keys():
"entry" not in resp.json().keys(): + return result_entry + if len(resp.json()["entry"]) > 0: + result_entry = result_entry + resp.json()["entry"] + + if next_link: + logging.debug(f"Getting next page {next_link}") + resp = requests.get(next_link, cert=(dsf_cert_path, dsf_key_path)) + if "entry" not in resp.json().keys(): + return result_entry + + return result_entry + page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path) + + return result_entry + + +def convert_process_info_to_process_name(process): + return f"{process['plugin']}|{process['name']}|{process['version']}" + + +def create_results_table(activity_information): + + process_list = [] + + for site_ident in activity_information.keys(): + site_info = activity_information[site_ident] + + for process in site_info["installedProcesses"]: + + process_name_version = convert_process_info_to_process_name(process) + + if process_name_version not in process_list: + process_list.append(process_name_version) + + process_list_overall = [0] * len(process_list) + site_process_lists = [] + + process_list = sorted(process_list) + + for site_ident in activity_information.keys(): + site_info = activity_information[site_ident] + site_process_list = [0] * len(process_list) + + for process_index in range(0, len(process_list)): + + process = process_list[process_index] + for installed_process in site_info["installedProcesses"]: + + process_name_version = convert_process_info_to_process_name(installed_process) + + if process == process_name_version: + site_process_list[process_index] = 1 + process_list_overall[process_index] = process_list_overall[process_index] + 1 + break + + site_process_lists.append([site_ident] + site_process_list) + + table = [] + + table.append(["Site Identifier"] + process_list) + table.append(["Overall"] + process_list_overall) + + for site_process_list in site_process_lists: + table.append(site_process_list) + + return table + + +def map_process_name_to_plugin(process_name): + + if process_name not in process_name_to_plugin_map: + return "unknown-process" + else: + return process_name_to_plugin_map[process_name] + + +def create_activity_list_and_add_to_org( + organization_list, organization, endpoint, activity_definitions +): + activity_definition_list = [] + + for activity_definition in activity_definitions: + + activity_definition = activity_definition["resource"] + + if "url" not in activity_definition: + logging.error(f"ERROR - broken activity defnition in endpoint {endpoint}") + continue + + activity_definition_list.append( + { + "url": activity_definition["url"], + "name": activity_definition["name"], + "plugin": map_process_name_to_plugin(activity_definition["name"]), + "version": activity_definition["version"], + "status": activity_definition["status"], + } + ) + + organization_list[organization["resource"]["identifier"][0]["value"]] = { + "endpoint": endpoint, + "installedProcesses": activity_definition_list, + } + + +def generate_activity_list_for_orgs( + organization_list, organizations, dsf_base_url, dsf_cert_path, dsf_key_path +): + for organization in organizations: + + org_ident = organization["resource"]["identifier"][0]["value"] + + if "endpoint" not in organization["resource"]: + continue + + endpoint = requests.get( + f'{dsf_base_url}/{organization["resource"]["endpoint"][0]["reference"]}?_format=json', + cert=(dsf_cert_path, dsf_key_path), + ).json()["address"] + + logging.info(f"Querying endpoint: {endpoint} for org: {org_ident}") + + try: + activity_def_req_res = requests.get( + 
f"{endpoint}/ActivityDefinition?_format=json", + cert=(dsf_cert_path, dsf_key_path), + timeout=10, + ) + activity_definitions = page_through_results_and_collect( + activity_def_req_res, dsf_cert_path, dsf_key_path + ) + + except requests.exceptions.RequestException: + logging.debug(f"Could not connect to endpoint {endpoint}") + organization_list[org_ident] = { + "endpoint": endpoint, + "installedProcesses": [], + "errors": [ + { + "code": "connection-error", + "display": "Could not connect to endpoint", + } + ], + } + + continue + + create_activity_list_and_add_to_org( + organization_list, organization, endpoint, activity_definitions + ) + + +def save_results_to_disk(table): + + with open("reports/dsf_installed_plugins.csv", "w", newline="") as csvfile: + writer = csv.writer(csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL) + + for row in table: + writer.writerow(row) + + +def execute_collect_activity_definitions(dsf_base_url, dsf_cert_path, dsf_key_path): + + organization_list = {} + organizations_req_res = requests.get( + f"{dsf_base_url}/Organization?_format=json", + cert=(dsf_cert_path, dsf_key_path), + timeout=20, + ) + organizations = page_through_results_and_collect( + organizations_req_res, dsf_cert_path, dsf_key_path + ) + + generate_activity_list_for_orgs( + organization_list, organizations, dsf_base_url, dsf_cert_path, dsf_key_path + ) + sorted_orgs_list = dict(sorted(organization_list.items())) + table = create_results_table(sorted_orgs_list) + save_results_to_disk(table) diff --git a/src/py/feasibility-monitoring.py b/src/py/feasibility-monitoring.py index cd03629..8fd2829 100644 --- a/src/py/feasibility-monitoring.py +++ b/src/py/feasibility-monitoring.py @@ -1,6 +1,7 @@ import argparse import execute_ping_test import execute_feasibility_test +import execute_collect_activity_definitions import logging import sys @@ -49,6 +50,7 @@ def str_to_bool(s): parser.add_argument('--execute_feas_test', help='', type=str_to_bool, default="false") parser.add_argument('--execute_history_test', help='', type=str_to_bool, default="false") parser.add_argument('--execute_ping_test', help='', type=str_to_bool, default="false") + parser.add_argument('--execute_collect_activity_definition', help='', type=str_to_bool, default="false") parser.add_argument('--log_level', help='', default="INFO") args = vars(parser.parse_args()) @@ -73,6 +75,7 @@ def str_to_bool(s): b_execute_feas_test = args["execute_feas_test"] b_execute_history_test = args["execute_history_test"] b_execute_ping_test = args["execute_ping_test"] + b_execute_activity_config_collection = args["execute_collect_activity_definition"] log_level = args["log_level"] logging.basicConfig(stream=sys.stdout, level=log_level) @@ -116,3 +119,15 @@ def str_to_bool(s): dsf_cert_path, dsf_key_path) logging.info("Finished executing ping test") + + if b_execute_activity_config_collection: + logging.info("### Executing the collection of activity definitions (installed processes at sites)") + execute_collect_activity_definitions.execute_collect_activity_definitions(dsf_base_url, + wait_result_secs_ping, + b_send_results_confluence, + confluence_api_base_url, + confluence_page_id_ping, + conf_user, conf_pw, + dsf_cert_path, + dsf_key_path) + logging.info("### Finished executing the collection of activity definitions") From 01d09983242d551359f805590039a9ca3859b98d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Julian=20Gr=C3=BCndner?= Date: Fri, 22 Mar 2024 16:16:29 +0100 Subject: [PATCH 2/6] fix call execute_collect_activity_definitions --- 
 src/py/feasibility-monitoring.py | 177 ++++++++++++++++++-------------
 1 file changed, 104 insertions(+), 73 deletions(-)

diff --git a/src/py/feasibility-monitoring.py b/src/py/feasibility-monitoring.py
index 8fd2829..f28632e 100644
--- a/src/py/feasibility-monitoring.py
+++ b/src/py/feasibility-monitoring.py
@@ -13,45 +13,70 @@ def str_to_bool(s):
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('--backend_base_url', help='base url of the feasibility backend',
-                        default="https://localhost/api/v2")
-    parser.add_argument('--backend_client_id', help='backend client id',
-                        nargs="?", default="feasibility-gui")
-    parser.add_argument('--dsf_base_url', help='base url of the feasibility backend',
-                        default="https://localhost/api/v2")
-    parser.add_argument('--dsf_cert_path', help='path to the dsf cert',
-                        nargs="?", default="certs/dsf-cert.cer")
-    parser.add_argument('--dsf_key_path', help='path to the dsf key',
-                        nargs="?", default="certs/dsf-key.key")
-    parser.add_argument('--backend_user', help='', nargs="?", default="test")
-    parser.add_argument('--client_secret', help='', nargs="?", default="test")
-    parser.add_argument('--keycloak_token_url', help='keycloak token url',
-                        default="https://localhost/auth/realms/feasibility/protocol/openid-connect/token")
-    parser.add_argument('--confluence_api_base_url', help='', nargs="?",
-                        default='https://myconfluence-rest-api-url')
-    parser.add_argument('--confluence_page_id_feas', help='', nargs="?",
-                        default='')
-    parser.add_argument('--confluence_page_id_hist', help='', nargs="?",
-                        default='')
-    parser.add_argument('--confluence_page_id_ping', help='', nargs="?",
-                        default='')
-    parser.add_argument('--send_results_confluence', help='', type=str_to_bool,
-                        default="false")
     parser.add_argument(
-        '--conf_user', help='username of confluence account', nargs="?", default='username')
+        "--backend_base_url",
+        help="base url of the feasibility backend",
+        default="https://localhost/api/v2",
+    )
     parser.add_argument(
-        '--conf_pw', help='password of confluence account', nargs="?", default='password')
+        "--backend_client_id", help="backend client id", nargs="?", default="feasibility-gui"
+    )
+    parser.add_argument(
+        "--dsf_base_url",
+        help="base url of the feasibility backend",
+        default="https://localhost/api/v2",
+    )
+    parser.add_argument(
+        "--dsf_cert_path", help="path to the dsf cert", nargs="?", default="certs/dsf-cert.cer"
+    )
+    parser.add_argument(
+        "--dsf_key_path", help="path to the dsf key", nargs="?", default="certs/dsf-key.key"
+    )
+    parser.add_argument("--backend_user", help="", nargs="?", default="test")
+    parser.add_argument("--client_secret", help="", nargs="?", default="test")
help="", nargs="?", default="test") + parser.add_argument( + "--keycloak_token_url", + help="keycloak token url", + default="https://localhost/auth/realms/feasibility/protocol/openid-connect/token", + ) + parser.add_argument( + "--confluence_api_base_url", help="", nargs="?", default="https://myconfluence-rest-api-url" + ) + parser.add_argument("--confluence_page_id_feas", help="", nargs="?", default="") + parser.add_argument("--confluence_page_id_hist", help="", nargs="?", default="") + parser.add_argument("--confluence_page_id_ping", help="", nargs="?", default="") + parser.add_argument("--send_results_confluence", help="", type=str_to_bool, default="false") + parser.add_argument( + "--conf_user", help="username of confluence account", nargs="?", default="username" + ) + parser.add_argument( + "--conf_pw", help="password of confluence account", nargs="?", default="password" + ) + parser.add_argument( + "--wait_result_secs_feas", + help="number of seconds to wait before results for a query are fetched", + nargs="?", + default="60", + ) + parser.add_argument( + "--wait_result_secs_ping", + help="number of seconds to wait before results for the ping task fetched", + nargs="?", + default="600", + ) + parser.add_argument( + "--history_table_len", + help="length of the history table that is sent to confluence", + nargs="?", + default="14", + ) + parser.add_argument("--execute_feas_test", help="", type=str_to_bool, default="false") + parser.add_argument("--execute_history_test", help="", type=str_to_bool, default="false") + parser.add_argument("--execute_ping_test", help="", type=str_to_bool, default="false") + parser.add_argument( + "--execute_collect_activity_definition", help="", type=str_to_bool, default="false" + ) + parser.add_argument("--log_level", help="", default="INFO") args = vars(parser.parse_args()) backend_base_url = args["backend_base_url"] @@ -79,55 +104,61 @@ def str_to_bool(s): log_level = args["log_level"] logging.basicConfig(stream=sys.stdout, level=log_level) - logging.info(f'Running Feasibility Monitoring with log level {log_level}') + logging.info(f"Running Feasibility Monitoring with log level {log_level}") if b_execute_history_test: logging.info("Executing history test") - execute_feasibility_test.execute_history_query(backend_base_url, - backend_client_id, - client_secret, - keycloak_token_url, - wait_result_secs_feas, - conf_user, conf_pw, - confluence_api_base_url, - confluence_page_id_hist, - b_send_results_confluence, - history_table_len) + execute_feasibility_test.execute_history_query( + backend_base_url, + backend_client_id, + client_secret, + keycloak_token_url, + wait_result_secs_feas, + conf_user, + conf_pw, + confluence_api_base_url, + confluence_page_id_hist, + b_send_results_confluence, + history_table_len, + ) logging.info("Finished executing history test") if b_execute_feas_test: logging.info("Executing feasibility test") - execute_feasibility_test.execute_feas_test_queries(backend_base_url, - backend_client_id, - client_secret, - keycloak_token_url, - conf_user, conf_pw, - confluence_api_base_url, - confluence_page_id_feas, - wait_result_secs_feas, - b_send_results_confluence) + execute_feasibility_test.execute_feas_test_queries( + backend_base_url, + backend_client_id, + client_secret, + keycloak_token_url, + conf_user, + conf_pw, + confluence_api_base_url, + confluence_page_id_feas, + wait_result_secs_feas, + b_send_results_confluence, + ) logging.info("Finished executing feasibility test") if b_execute_ping_test: logging.info("Executing ping test") 
-        execute_ping_test.execute_ping_task(dsf_base_url,
-                                            wait_result_secs_ping,
-                                            b_send_results_confluence,
-                                            confluence_api_base_url,
-                                            confluence_page_id_ping,
-                                            conf_user, conf_pw,
-                                            dsf_cert_path,
-                                            dsf_key_path)
+        execute_ping_test.execute_ping_task(
+            dsf_base_url,
+            wait_result_secs_ping,
+            b_send_results_confluence,
+            confluence_api_base_url,
+            confluence_page_id_ping,
+            conf_user,
+            conf_pw,
+            dsf_cert_path,
+            dsf_key_path,
+        )
         logging.info("Finished executing ping test")
 
     if b_execute_activity_config_collection:
         logging.info("### Executing the collection of activity definitions (installed processes at sites)")
-        execute_collect_activity_definitions.execute_collect_activity_definitions(dsf_base_url,
-                                                                                  wait_result_secs_ping,
-                                                                                  b_send_results_confluence,
-                                                                                  confluence_api_base_url,
-                                                                                  confluence_page_id_ping,
-                                                                                  conf_user, conf_pw,
-                                                                                  dsf_cert_path,
-                                                                                  dsf_key_path)
+
+        execute_collect_activity_definitions.execute_collect_activity_definitions(
+            dsf_base_url, dsf_cert_path, dsf_key_path
+        )
+
         logging.info("### Finished executing the collection of activity definitions")

From 940b754db6fb65d62c8fb0391680fd0399a35508 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20Gr=C3=BCndner?=
Date: Fri, 22 Mar 2024 16:21:23 +0100
Subject: [PATCH 3/6] check http response status

---
 src/py/execute_collect_activity_definitions.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/py/execute_collect_activity_definitions.py b/src/py/execute_collect_activity_definitions.py
index 99eb38b..ce03c76 100644
--- a/src/py/execute_collect_activity_definitions.py
+++ b/src/py/execute_collect_activity_definitions.py
@@ -30,6 +30,10 @@ def get_next_link(link_elem):
 def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path):
 
     result_entry = []
+
+    if resp.status_code != 200:
+        return result_entry
+
     next_link = get_next_link(resp.json()["link"])
     if "entry" not in resp.json().keys():
         return result_entry

From e386a09e0fb607cd396ba457bbd202220ede163e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20Gr=C3=BCndner?=
Date: Mon, 25 Mar 2024 14:52:21 +0100
Subject: [PATCH 4/6] add generate report overview and fix history table

---
 .env.default                              |  1 +
 docker/Dockerfile                         |  1 +
 docker/docker-compose.yml                 |  1 +
 docker/docker-entrypoint.sh               |  3 +-
 src/execute-local.sh                      |  3 +-
 src/py/execute_collect_report_overview.py | 85 +++++++++++++++++++++++
 src/py/execute_feasibility_test.py        |  3 +-
 src/py/feasibility-monitoring.py          | 14 ++++
 8 files changed, 107 insertions(+), 4 deletions(-)
 create mode 100644 src/py/execute_collect_report_overview.py

diff --git a/.env.default b/.env.default
index 0f3a671..d93a550 100644
--- a/.env.default
+++ b/.env.default
@@ -16,6 +16,7 @@ EXECUTE_FEAS_TEST=false
 CONFLUENCE_PAGE_ID_FEAS=""
 EXECUTE_DSF_PING_TEST=false
 EXECUTE_ACTIVITY_DEF_COLLECTION=false
+EXECUTE_COLLECT_REPORT_OVERVIEW=false
 CONFLUENCE_PAGE_ID_PING=""
 LOCAL_DSF_CERT_PATH=
 LOCAL_DSF_KEY_PATH=
 LOG_LEVEL=INFO
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 609c65e..4c37686 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -6,6 +6,7 @@ COPY src/py/feasibility-monitoring.py /opt/reportclient/src/py/feasibility-monit
 COPY src/py/execute_ping_test.py /opt/reportclient/src/py/execute_ping_test.py
 COPY src/py/execute_feasibility_test.py /opt/reportclient/src/py/execute_feasibility_test.py
 COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_activity_definitions.py
+COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_report_overview.py
 COPY config/input-queries.json /opt/reportclient/config/input-queries.json
 COPY config/history-query.json /opt/reportclient/config/history-query.json
 COPY config/config.yml /opt/reportclient/config/config.yml
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index ce510ee..b94e153 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -23,6 +23,7 @@ services:
       - CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
       - LOG_LEVEL=${LOG_LEVEL:-"INFO"}
       - EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
+      - EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
     volumes:
       - ${LOCAL_DSF_CERT_PATH:-./cert/dsf-cert.cer}:/opt/reportclient/certs/dsf-cert.cer
      - ${LOCAL_DSF_KEY_PATH:-./cert/dsf-key.key}:/opt/reportclient/certs/dsf-key.key
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
index 0542c87..ac84f24 100644
--- a/docker/docker-entrypoint.sh
+++ b/docker/docker-entrypoint.sh
@@ -21,6 +21,7 @@ HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
 EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
+EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
 LOG_LEVEL=${LOG_LEVEL:-"INFO"}
 
 
@@ -32,4 +33,4 @@ python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key
     --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
     --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
     --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
-    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level "$LOG_LEVEL"
+    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level "$LOG_LEVEL" --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW
diff --git a/src/execute-local.sh b/src/execute-local.sh
index 8091db1..e6312fc 100644
--- a/src/execute-local.sh
+++ b/src/execute-local.sh
@@ -21,6 +21,7 @@ HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=$CONFLUENCE_PAGE_ID_PING
 EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
+EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
 LOG_LEVEL=${LOG_LEVEL:-"INFO"}
 
 python3 src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
@@ -30,4 +31,4 @@ python3 src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_ke
     --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
     --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
     --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
-    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level $LOG_LEVEL
+    --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level $LOG_LEVEL --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW
diff --git a/src/py/execute_collect_report_overview.py b/src/py/execute_collect_report_overview.py
new file mode 100644
index 0000000..547c68e
--- /dev/null
+++ b/src/py/execute_collect_report_overview.py
@@ -0,0 +1,85 @@
+from prettytable import PrettyTable
+import requests
+import logging
+import csv
+import json
+
+
+def get_next_link(link_elem):
+    for elem in link_elem:
+        if elem["relation"] == "next":
+            return elem["url"]
+
+    return None
+
+
+def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path):
+
+    result_entry = []
+
+    if resp.status_code != 200:
+        return result_entry
+
+    next_link = get_next_link(resp.json()["link"])
+    if "entry" not in resp.json().keys():
+        return result_entry
+    if len(resp.json()["entry"]) > 0:
+        result_entry = result_entry + resp.json()["entry"]
+
+    if next_link:
+        logging.debug(f"Getting next page {next_link}")
+        resp = requests.get(next_link, cert=(dsf_cert_path, dsf_key_path))
+        if "entry" not in resp.json().keys():
+            return result_entry
+
+        return result_entry + page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path)
+
+    return result_entry
+
+
+def create_results_table(reports):
+
+    table = []
+
+    table.append(["Site Identifier", "Last Report"])
+
+    for site_ident in reports.keys():
+        table.append([site_ident, reports[site_ident]])
+
+    return table
+
+
+def save_results_to_disk(table):
+
+    with open("reports/last_kds_reports.csv", "w", newline="") as csvfile:
+        writer = csv.writer(csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL)
+
+        for row in table:
+            writer.writerow(row)
+
+
+def execute_collect_report_overview(dsf_base_url, dsf_cert_path, dsf_key_path):
+
+    report_list = {}
+    print(dsf_base_url)
+    print(dsf_cert_path)
+    print(dsf_key_path)
+    report_req_res = requests.get(
+        f"{dsf_base_url}/Bundle?_format=json&_profile=http://medizininformatik-initiative.de/fhir/Bundle/search-bundle-response-report|1.0",
+        cert=(dsf_cert_path, dsf_key_path),
+        timeout=20,
+    )
+
+    print(report_req_res.status_code)
+
+    reports = page_through_results_and_collect(
+        report_req_res, dsf_cert_path, dsf_key_path
+    )
+
+    for report in reports:
+        report_list[report['resource']['identifier']['value']] = report['resource']['meta']['lastUpdated'][0:10]
+
+    results_table = create_results_table(report_list)
+    save_results_to_disk(results_table)
+
+    print(json.dumps(report_list))
diff --git a/src/py/execute_feasibility_test.py b/src/py/execute_feasibility_test.py
index b09854a..4f3a84e 100644
--- a/src/py/execute_feasibility_test.py
+++ b/src/py/execute_feasibility_test.py
@@ -292,10 +292,9 @@ def execute_history_query(backend_base_url, backend_client_id, client_secret, ke
     history_query = load_json_file(HISTORY_QUERY_FILE)
     results = send_query_and_get_results(history_query, backend_base_url, backend_client_id, client_secret, keycloak_token_url, wait_result_secs_feas)
     converted_result = convert_results(results)
-
     update_and_save_history_report(history_report, converted_result)
-    history_table = convert_to_table(history_report["reports"][-int(history_table_len):], "date")
+    history_table = convert_to_table(history_report["reports"][0:int(history_table_len)], "date")
 
     logging.info(history_table)
     if send_results_confluence:
diff --git a/src/py/feasibility-monitoring.py b/src/py/feasibility-monitoring.py
index f28632e..4b6bb60 100644
--- a/src/py/feasibility-monitoring.py
+++ b/src/py/feasibility-monitoring.py
@@ -2,6 +2,7 @@ import argparse
 import execute_ping_test
 import execute_feasibility_test
 import execute_collect_activity_definitions
+import execute_collect_report_overview
 import logging
 import sys
 
@@ -76,6 +77,9 @@ def str_to_bool(s):
     parser.add_argument(
         "--execute_collect_activity_definition", help="", type=str_to_bool, default="false"
     )
+    parser.add_argument(
+        "--execute_collect_report_overview", help="", type=str_to_bool, default="false"
+    )
     parser.add_argument("--log_level", help="", default="INFO")
 
     args = vars(parser.parse_args())
@@ -101,6 +105,7 @@ def str_to_bool(s):
     b_execute_history_test = args["execute_history_test"]
     b_execute_ping_test = args["execute_ping_test"]
     b_execute_activity_config_collection = args["execute_collect_activity_definition"]
+    b_execute_collect_report_overview = args["execute_collect_report_overview"]
     log_level = args["log_level"]
 
     logging.basicConfig(stream=sys.stdout, level=log_level)
@@ -162,3 +167,12 @@ def str_to_bool(s):
         )
 
         logging.info("### Finished executing the collection of activity definitions")
+
+    if b_execute_collect_report_overview:
+        logging.info("### Executing the collection of report overview")
+
+        execute_collect_report_overview.execute_collect_report_overview(
+            dsf_base_url, dsf_cert_path, dsf_key_path
+        )
+
+        logging.info("### Finished executing the collection of report overview")

From 2df8b15a2fe1df8df625fe7ff70a4b38b8e1d1db Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20Gr=C3=BCndner?=
Date: Mon, 25 Mar 2024 15:27:34 +0100
Subject: [PATCH 5/6] fix discrepancy to develop

---
 src/py/execute_feasibility_test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/py/execute_feasibility_test.py b/src/py/execute_feasibility_test.py
index 4f3a84e..e77c434 100644
--- a/src/py/execute_feasibility_test.py
+++ b/src/py/execute_feasibility_test.py
@@ -294,7 +294,7 @@ def execute_history_query(backend_base_url, backend_client_id, client_secret, ke
     converted_result = convert_results(results)
     update_and_save_history_report(history_report, converted_result)
-    history_table = convert_to_table(history_report["reports"][0:int(history_table_len)], "date")
+    history_table = convert_to_table(history_report["reports"][:int(history_table_len)], "date")
 
     logging.info(history_table)
     if send_results_confluence:

From f2708c2145c15501671d4dd9f5b2fa6ee8511293 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20Gr=C3=BCndner?=
Date: Mon, 25 Mar 2024 17:26:37 +0100
Subject: [PATCH 6/6] fix docker build

---
 docker/Dockerfile                         | 2 +-
 src/py/execute_collect_report_overview.py | 3 ---
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index 4c37686..f3d9128 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -6,7 +6,7 @@ COPY src/py/feasibility-monitoring.py /opt/reportclient/src/py/feasibility-monit
 COPY src/py/execute_ping_test.py /opt/reportclient/src/py/execute_ping_test.py
 COPY src/py/execute_feasibility_test.py /opt/reportclient/src/py/execute_feasibility_test.py
 COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_activity_definitions.py
-COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_report_overview.py
+COPY src/py/execute_collect_report_overview.py /opt/reportclient/src/py/execute_collect_report_overview.py
 COPY config/input-queries.json /opt/reportclient/config/input-queries.json
 COPY config/history-query.json /opt/reportclient/config/history-query.json
 COPY config/config.yml /opt/reportclient/config/config.yml
diff --git a/src/py/execute_collect_report_overview.py b/src/py/execute_collect_report_overview.py
index 547c68e..ecf13e7 100644
--- a/src/py/execute_collect_report_overview.py
+++ b/src/py/execute_collect_report_overview.py
@@ -61,9 +61,6 @@ def save_results_to_disk(table):
 def execute_collect_report_overview(dsf_base_url, dsf_cert_path, dsf_key_path):
 
     report_list = {}
-    print(dsf_base_url)
-    print(dsf_cert_path)
-    print(dsf_key_path)
     report_req_res = requests.get(
         f"{dsf_base_url}/Bundle?_format=json&_profile=http://medizininformatik-initiative.de/fhir/Bundle/search-bundle-response-report|1.0",
         cert=(dsf_cert_path, dsf_key_path),