diff --git a/.env.default b/.env.default index cef135a..d93a550 100644 --- a/.env.default +++ b/.env.default @@ -15,7 +15,9 @@ HISTORY_TABLE_LEN=14 EXECUTE_FEAS_TEST=false CONFLUENCE_PAGE_ID_FEAS="" EXECUTE_DSF_PING_TEST=false +EXECUTE_ACTIVITY_DEF_COLLECTION=false +EXECUTE_COLLECT_REPORT_OVERVIEW=false CONFLUENCE_PAGE_ID_PING="" LOCAL_DSF_CERT_PATH= LOCAL_DSF_KEY_PATH= -LOG_LEVEL=INFO \ No newline at end of file +LOG_LEVEL=INFO diff --git a/.gitignore b/.gitignore index 754df0e..82c8fe3 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,9 @@ feasibility-results.json dev-call.sh reports/*.json +reports/*.csv certs/ temp/ __pycache__/ -config/config.yml \ No newline at end of file +config/config.yml +venv \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile index 56b2d5f..f3d9128 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -5,6 +5,8 @@ RUN mkdir -p /opt/reportclient COPY src/py/feasibility-monitoring.py /opt/reportclient/src/py/feasibility-monitoring.py COPY src/py/execute_ping_test.py /opt/reportclient/src/py/execute_ping_test.py COPY src/py/execute_feasibility_test.py /opt/reportclient/src/py/execute_feasibility_test.py +COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_activity_definitions.py +COPY src/py/execute_collect_report_overview.py /opt/reportclient/src/py/execute_collect_report_overview.py COPY config/input-queries.json /opt/reportclient/config/input-queries.json COPY config/history-query.json /opt/reportclient/config/history-query.json COPY config/config.yml /opt/reportclient/config/config.yml diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 86267a0..b94e153 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -22,6 +22,8 @@ services: - EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"} - CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""} - LOG_LEVEL=${LOG_LEVEL:-"INFO"} + - 
EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"} + - EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"} volumes: - ${LOCAL_DSF_CERT_PATH:-./cert/dsf-cert.cer}:/opt/reportclient/certs/dsf-cert.cer - ${LOCAL_DSF_KEY_PATH:-./cert/dsf-key.key}:/opt/reportclient/certs/dsf-key.key diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh index f9bb151..ac84f24 100644 --- a/docker/docker-entrypoint.sh +++ b/docker/docker-entrypoint.sh @@ -20,8 +20,12 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""} HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14} EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"} CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""} +EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"} +EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"} LOG_LEVEL=${LOG_LEVEL:-"INFO"} + + python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \ --client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \ --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\ @@ -29,4 +33,4 @@ python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\ --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\ --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\ - --log_level "$LOG_LEVEL" + --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level "$LOG_LEVEL" --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW diff --git 
a/src/execute-local.sh b/src/execute-local.sh index 2ce5802..e6312fc 100644 --- a/src/execute-local.sh +++ b/src/execute-local.sh @@ -20,11 +20,15 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""} HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14} EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"} CONFLUENCE_PAGE_ID_PING=$CONFLUENCE_PAGE_ID_PING +EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"} +EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"} +LOG_LEVEL=${LOG_LEVEL:-"INFO"} -python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \ +python3 src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \ --client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \ --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\ --send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\ --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\ --execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\ - --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING" + --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\ + --execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level $LOG_LEVEL --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW diff --git a/src/py/execute_collect_activity_definitions.py 
b/src/py/execute_collect_activity_definitions.py new file mode 100644 index 0000000..ce03c76 --- /dev/null +++ b/src/py/execute_collect_activity_definitions.py @@ -0,0 +1,216 @@ +from prettytable import PrettyTable +import requests +import logging +import csv + +process_name_to_plugin_map = { + "Report Autostart": "mii-process-report", + "Report Send": "mii-process-report", + "Report Receive": "mii-process-report", + "FeasibilityRequest": "mii-process-feasibility", + "ExecuteFeasibility": "mii-process-feasibility", + "dataSend": "mii-process-data-transfer", + "dataReceive": "mii-process-data-transfer", + "Ping": "dsf-process-ping-pong", + "Pong": "dsf-process-ping-pong", + "PingAutostart": "dsf-process-ping-pong", + "DownloadAllowList": "dsf-process-allowlist", + "UpdateAllowList": "dsf-process-allowlist", +} + + +def get_next_link(link_elem): + for elem in link_elem: + if elem["relation"] == "next": + return elem["url"] + + return None + + +def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path): + + result_entry = [] + + if resp.status_code != 200: + return result_entry + + next_link = get_next_link(resp.json()["link"]) + if "entry" not in resp.json().keys(): + return result_entry + if len(resp.json()["entry"]) > 0: + result_entry = result_entry + resp.json()["entry"] + + if next_link: + logging.debug(f"Getting next page {next_link}") + resp = requests.get(next_link, cert=(dsf_cert_path, dsf_key_path)) + if "entry" not in resp.json().keys(): + return result_entry + + return result_entry + page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path) + + return result_entry + + +def convert_process_info_to_process_name(process): + return f"{process['plugin']}|{process['name']}|{process['version']}" + + +def create_results_table(activity_information): + + process_list = [] + + for site_ident in activity_information.keys(): + site_info = activity_information[site_ident] + + for process in site_info["installedProcesses"]: + + 
process_name_version = convert_process_info_to_process_name(process) + + if process_name_version not in process_list: + process_list.append(process_name_version) + + process_list_overall = [0] * len(process_list) + site_process_lists = [] + + process_list = sorted(process_list) + + for site_ident in activity_information.keys(): + site_info = activity_information[site_ident] + site_process_list = [0] * len(process_list) + + for process_index in range(0, len(process_list)): + + process = process_list[process_index] + for installed_process in site_info["installedProcesses"]: + + process_name_version = convert_process_info_to_process_name(installed_process) + + if process == process_name_version: + site_process_list[process_index] = 1 + process_list_overall[process_index] = process_list_overall[process_index] + 1 + break + + site_process_lists.append([site_ident] + site_process_list) + + table = [] + + table.append(["Site Identifier"] + process_list) + table.append(["Overall"] + process_list_overall) + + for site_process_list in site_process_lists: + table.append(site_process_list) + + return table + + +def map_process_name_to_plugin(process_name): + + if process_name not in process_name_to_plugin_map: + return "unknown-process" + else: + return process_name_to_plugin_map[process_name] + + +def create_activity_list_and_add_to_org( + organization_list, organization, endpoint, activity_definitions +): + activity_definition_list = [] + + for activity_definition in activity_definitions: + + activity_definition = activity_definition["resource"] + + if "url" not in activity_definition: + logging.error(f"ERROR - broken activity definition in endpoint {endpoint}") + continue + + activity_definition_list.append( + { + "url": activity_definition["url"], + "name": activity_definition["name"], + "plugin": map_process_name_to_plugin(activity_definition["name"]), + "version": activity_definition["version"], + "status": activity_definition["status"], + } + ) + 
organization_list[organization["resource"]["identifier"][0]["value"]] = { + "endpoint": endpoint, + "installedProcesses": activity_definition_list, + } + + +def generate_activity_list_for_orgs( + organization_list, organizations, dsf_base_url, dsf_cert_path, dsf_key_path +): + for organization in organizations: + + org_ident = organization["resource"]["identifier"][0]["value"] + + if "endpoint" not in organization["resource"]: + continue + + endpoint = requests.get( + f'{dsf_base_url}/{organization["resource"]["endpoint"][0]["reference"]}?_format=json', + cert=(dsf_cert_path, dsf_key_path), + ).json()["address"] + + logging.info(f"Querying endpoint: {endpoint} for org: {org_ident}") + + try: + activity_def_req_res = requests.get( + f"{endpoint}/ActivityDefinition?_format=json", + cert=(dsf_cert_path, dsf_key_path), + timeout=10, + ) + activity_definitions = page_through_results_and_collect( + activity_def_req_res, dsf_cert_path, dsf_key_path + ) + + except requests.exceptions.RequestException: + logging.debug(f"Could not connect to endpoint {endpoint}") + organization_list[org_ident] = { + "endpoint": endpoint, + "installedProcesses": [], + "errors": [ + { + "code": "connection-error", + "display": "Could not connect to endpoint", + } + ], + } + + continue + + create_activity_list_and_add_to_org( + organization_list, organization, endpoint, activity_definitions + ) + + +def save_results_to_disk(table): + + with open("reports/dsf_installed_plugins.csv", "w", newline="") as csvfile: + writer = csv.writer(csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL) + + for row in table: + writer.writerow(row) + + +def execute_collect_activity_definitions(dsf_base_url, dsf_cert_path, dsf_key_path): + + organization_list = {} + organizations_req_res = requests.get( + f"{dsf_base_url}/Organization?_format=json", + cert=(dsf_cert_path, dsf_key_path), + timeout=20, + ) + organizations = page_through_results_and_collect( + organizations_req_res, dsf_cert_path, 
dsf_key_path + ) + + generate_activity_list_for_orgs( + organization_list, organizations, dsf_base_url, dsf_cert_path, dsf_key_path + ) + sorted_orgs_list = dict(sorted(organization_list.items())) + table = create_results_table(sorted_orgs_list) + save_results_to_disk(table) diff --git a/src/py/execute_collect_report_overview.py b/src/py/execute_collect_report_overview.py new file mode 100644 index 0000000..ecf13e7 --- /dev/null +++ b/src/py/execute_collect_report_overview.py @@ -0,0 +1,82 @@ +from prettytable import PrettyTable +import requests +import logging +import csv +import json + + +def get_next_link(link_elem): + for elem in link_elem: + if elem["relation"] == "next": + return elem["url"] + + return None + + +def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path): + + result_entry = [] + + if resp.status_code != 200: + return result_entry + + next_link = get_next_link(resp.json()["link"]) + if "entry" not in resp.json().keys(): + return result_entry + if len(resp.json()["entry"]) > 0: + result_entry = result_entry + resp.json()["entry"] + + if next_link: + logging.debug(f"Getting next page {next_link}") + resp = requests.get(next_link, cert=(dsf_cert_path, dsf_key_path)) + if "entry" not in resp.json().keys(): + return result_entry + + return result_entry + page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path) + + return result_entry + + +def create_results_table(reports): + + table = [] + + table.append(["Site Identifier", "Last Report"]) + + for site_ident in reports.keys(): + table.append([site_ident, reports[site_ident]]) + + return table + + +def save_results_to_disk(table): + + with open("reports/last_kds_reports.csv", "w", newline="") as csvfile: + writer = csv.writer(csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL) + + for row in table: + writer.writerow(row) + + +def execute_collect_report_overview(dsf_base_url, dsf_cert_path, dsf_key_path): + + report_list = {} + report_req_res = requests.get( + 
f"{dsf_base_url}/Bundle?_format=json&_profile=http://medizininformatik-initiative.de/fhir/Bundle/search-bundle-response-report|1.0", + cert=(dsf_cert_path, dsf_key_path), + timeout=20, + ) + + print(report_req_res.status_code) + + reports = page_through_results_and_collect( + report_req_res, dsf_cert_path, dsf_key_path + ) + + for report in reports: + report_list[report['resource']['identifier']['value']] = report['resource']['meta']['lastUpdated'][0:10] + + results_table = create_results_table(report_list) + save_results_to_disk(results_table) + + print(json.dumps(report_list)) diff --git a/src/py/execute_feasibility_test.py b/src/py/execute_feasibility_test.py index b09854a..e77c434 100644 --- a/src/py/execute_feasibility_test.py +++ b/src/py/execute_feasibility_test.py @@ -292,10 +292,9 @@ def execute_history_query(backend_base_url, backend_client_id, client_secret, ke history_query = load_json_file(HISTORY_QUERY_FILE) results = send_query_and_get_results(history_query, backend_base_url, backend_client_id, client_secret, keycloak_token_url, wait_result_secs_feas) converted_result = convert_results(results) - update_and_save_history_report(history_report, converted_result) - history_table = convert_to_table(history_report["reports"][-int(history_table_len):], "date") + history_table = convert_to_table(history_report["reports"][:int(history_table_len)], "date") logging.info(history_table) if send_results_confluence: diff --git a/src/py/feasibility-monitoring.py b/src/py/feasibility-monitoring.py index cd03629..4b6bb60 100644 --- a/src/py/feasibility-monitoring.py +++ b/src/py/feasibility-monitoring.py @@ -1,6 +1,8 @@ import argparse import execute_ping_test import execute_feasibility_test +import execute_collect_activity_definitions +import execute_collect_report_overview import logging import sys @@ -12,44 +14,73 @@ def str_to_bool(s): if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('--backend_base_url', help='base url of the 
feasibility backend', - default="https://localhost/api/v2") - parser.add_argument('--backend_client_id', help='backend client id', - nargs="?", default="feasibility-gui") - parser.add_argument('--dsf_base_url', help='base url of the feasibility backend', - default="https://localhost/api/v2") - parser.add_argument('--dsf_cert_path', help='path to the dsf cert', - nargs="?", default="certs/dsf-cert.cer") - parser.add_argument('--dsf_key_path', help='path to the dsf key', - nargs="?", default="certs/dsf-key.key") - parser.add_argument('--backend_user', help='', nargs="?", default="test") - parser.add_argument('--client_secret', help='', nargs="?", default="test") - parser.add_argument('--keycloak_token_url', help='keycloak token url', - default="https://localhost/auth/realms/feasibility/protocol/openid-connect/token") - parser.add_argument('--confluence_api_base_url', help='', nargs="?", - default='https://myconfluence-rest-api-url') - parser.add_argument('--confluence_page_id_feas', help='', nargs="?", - default='') - parser.add_argument('--confluence_page_id_hist', help='', nargs="?", - default='') - parser.add_argument('--confluence_page_id_ping', help='', nargs="?", - default='') - parser.add_argument('--send_results_confluence', help='', type=str_to_bool, - default="false") parser.add_argument( - '--conf_user', help='username of confluence account', nargs="?", default='username') + "--backend_base_url", + help="base url of the feasibility backend", + default="https://localhost/api/v2", + ) parser.add_argument( - '--conf_pw', help='password of confluence account', nargs="?", default='password') - parser.add_argument('--wait_result_secs_feas', - help='number of seconds to wait before results for a query are fetched', nargs="?", default='60') - parser.add_argument('--wait_result_secs_ping', - help='number of seconds to wait before results for the ping task fetched', nargs="?", default='600') - parser.add_argument('--history_table_len', - help='length of the history 
table that is sent to confluence', nargs="?", default='14') - parser.add_argument('--execute_feas_test', help='', type=str_to_bool, default="false") - parser.add_argument('--execute_history_test', help='', type=str_to_bool, default="false") - parser.add_argument('--execute_ping_test', help='', type=str_to_bool, default="false") - parser.add_argument('--log_level', help='', default="INFO") + "--backend_client_id", help="backend client id", nargs="?", default="feasibility-gui" + ) + parser.add_argument( + "--dsf_base_url", + help="base url of the dsf fhir server", + default="https://localhost/api/v2", + ) + parser.add_argument( + "--dsf_cert_path", help="path to the dsf cert", nargs="?", default="certs/dsf-cert.cer" + ) + parser.add_argument( + "--dsf_key_path", help="path to the dsf key", nargs="?", default="certs/dsf-key.key" + ) + parser.add_argument("--backend_user", help="", nargs="?", default="test") + parser.add_argument("--client_secret", help="", nargs="?", default="test") + parser.add_argument( + "--keycloak_token_url", + help="keycloak token url", + default="https://localhost/auth/realms/feasibility/protocol/openid-connect/token", + ) + parser.add_argument( + "--confluence_api_base_url", help="", nargs="?", default="https://myconfluence-rest-api-url" + ) + parser.add_argument("--confluence_page_id_feas", help="", nargs="?", default="") + parser.add_argument("--confluence_page_id_hist", help="", nargs="?", default="") + parser.add_argument("--confluence_page_id_ping", help="", nargs="?", default="") + parser.add_argument("--send_results_confluence", help="", type=str_to_bool, default="false") + parser.add_argument( + "--conf_user", help="username of confluence account", nargs="?", default="username" + ) + parser.add_argument( + "--conf_pw", help="password of confluence account", nargs="?", default="password" + ) + parser.add_argument( + "--wait_result_secs_feas", + help="number of seconds to wait before results for a query are fetched", + nargs="?", + 
default="60", + ) + parser.add_argument( + "--wait_result_secs_ping", + help="number of seconds to wait before results for the ping task fetched", + nargs="?", + default="600", + ) + parser.add_argument( + "--history_table_len", + help="length of the history table that is sent to confluence", + nargs="?", + default="14", + ) + parser.add_argument("--execute_feas_test", help="", type=str_to_bool, default="false") + parser.add_argument("--execute_history_test", help="", type=str_to_bool, default="false") + parser.add_argument("--execute_ping_test", help="", type=str_to_bool, default="false") + parser.add_argument( + "--execute_collect_activity_definition", help="", type=str_to_bool, default="false" + ) + parser.add_argument( + "--execute_collect_report_overview", help="", type=str_to_bool, default="false" + ) + parser.add_argument("--log_level", help="", default="INFO") args = vars(parser.parse_args()) backend_base_url = args["backend_base_url"] @@ -73,46 +104,75 @@ def str_to_bool(s): b_execute_feas_test = args["execute_feas_test"] b_execute_history_test = args["execute_history_test"] b_execute_ping_test = args["execute_ping_test"] + b_execute_activity_config_collection = args["execute_collect_activity_definition"] + b_execute_collect_report_overview = args["execute_collect_report_overview"] log_level = args["log_level"] logging.basicConfig(stream=sys.stdout, level=log_level) - logging.info(f'Running Feasibility Monitoring with log level {log_level}') + logging.info(f"Running Feasibility Monitoring with log level {log_level}") if b_execute_history_test: logging.info("Executing history test") - execute_feasibility_test.execute_history_query(backend_base_url, - backend_client_id, - client_secret, - keycloak_token_url, - wait_result_secs_feas, - conf_user, conf_pw, - confluence_api_base_url, - confluence_page_id_hist, - b_send_results_confluence, - history_table_len) + execute_feasibility_test.execute_history_query( + backend_base_url, + backend_client_id, + 
client_secret, + keycloak_token_url, + wait_result_secs_feas, + conf_user, + conf_pw, + confluence_api_base_url, + confluence_page_id_hist, + b_send_results_confluence, + history_table_len, + ) logging.info("Finished executing history test") if b_execute_feas_test: logging.info("Executing feasibility test") - execute_feasibility_test.execute_feas_test_queries(backend_base_url, - backend_client_id, - client_secret, - keycloak_token_url, - conf_user, conf_pw, - confluence_api_base_url, - confluence_page_id_feas, - wait_result_secs_feas, - b_send_results_confluence) + execute_feasibility_test.execute_feas_test_queries( + backend_base_url, + backend_client_id, + client_secret, + keycloak_token_url, + conf_user, + conf_pw, + confluence_api_base_url, + confluence_page_id_feas, + wait_result_secs_feas, + b_send_results_confluence, + ) logging.info("Finished executing feasibility test") if b_execute_ping_test: logging.info("Executing ping test") - execute_ping_test.execute_ping_task(dsf_base_url, - wait_result_secs_ping, - b_send_results_confluence, - confluence_api_base_url, - confluence_page_id_ping, - conf_user, conf_pw, - dsf_cert_path, - dsf_key_path) + execute_ping_test.execute_ping_task( + dsf_base_url, + wait_result_secs_ping, + b_send_results_confluence, + confluence_api_base_url, + confluence_page_id_ping, + conf_user, + conf_pw, + dsf_cert_path, + dsf_key_path, + ) logging.info("Finished executing ping test") + + if b_execute_activity_config_collection: + logging.info("### Executing the collection of activity definitions (installed processes at sites)") + + execute_collect_activity_definitions.execute_collect_activity_definitions( + dsf_base_url, dsf_cert_path, dsf_key_path + ) + + logging.info("### Finished executing the collection of activity definitions") + + if b_execute_collect_report_overview: + logging.info("### Executing the collection of report overview") + + execute_collect_report_overview.execute_collect_report_overview( + dsf_base_url, dsf_cert_path, 
dsf_key_path + ) + + logging.info("### Finished executing the collection of report overview")