Skip to content

Commit

Permalink
Merge pull request #25 from medizininformatik-initiative/add-activity…
Browse files Browse the repository at this point in the history
…-definition-query

Add activity definition query
  • Loading branch information
juliangruendner authored Aug 13, 2024
2 parents f8434cb + f2708c2 commit e8f5379
Show file tree
Hide file tree
Showing 10 changed files with 443 additions and 70 deletions.
4 changes: 3 additions & 1 deletion .env.default
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@ HISTORY_TABLE_LEN=14
EXECUTE_FEAS_TEST=false
CONFLUENCE_PAGE_ID_FEAS=""
EXECUTE_DSF_PING_TEST=false
EXECUTE_ACTIVITY_DEF_COLLECTION=false
EXECUTE_COLLECT_REPORT_OVERVIEW=false
CONFLUENCE_PAGE_ID_PING=""
LOCAL_DSF_CERT_PATH=
LOCAL_DSF_KEY_PATH=
LOG_LEVEL=INFO
LOG_LEVEL=INFO
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
feasibility-results.json
dev-call.sh
reports/*.json
reports/*.csv
certs/
temp/
__pycache__/
config/config.yml
config/config.yml
venv
2 changes: 2 additions & 0 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ RUN mkdir -p /opt/reportclient
COPY src/py/feasibility-monitoring.py /opt/reportclient/src/py/feasibility-monitoring.py
COPY src/py/execute_ping_test.py /opt/reportclient/src/py/execute_ping_test.py
COPY src/py/execute_feasibility_test.py /opt/reportclient/src/py/execute_feasibility_test.py
COPY src/py/execute_collect_activity_definitions.py /opt/reportclient/src/py/execute_collect_activity_definitions.py
COPY src/py/execute_collect_report_overview.py /opt/reportclient/src/py/execute_collect_report_overview.py
COPY config/input-queries.json /opt/reportclient/config/input-queries.json
COPY config/history-query.json /opt/reportclient/config/history-query.json
COPY config/config.yml /opt/reportclient/config/config.yml
Expand Down
2 changes: 2 additions & 0 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ services:
- EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
- CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
- LOG_LEVEL=${LOG_LEVEL:-"INFO"}
- EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
- EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
volumes:
- ${LOCAL_DSF_CERT_PATH:-./cert/dsf-cert.cer}:/opt/reportclient/certs/dsf-cert.cer
- ${LOCAL_DSF_KEY_PATH:-./cert/dsf-key.key}:/opt/reportclient/certs/dsf-key.key
Expand Down
6 changes: 5 additions & 1 deletion docker/docker-entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,17 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
LOG_LEVEL=${LOG_LEVEL:-"INFO"}



python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
--client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \
--confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
--send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\
--dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
--execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
--execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
--log_level "$LOG_LEVEL"
--execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level "$LOG_LEVEL" --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW
8 changes: 6 additions & 2 deletions src/execute-local.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,15 @@ CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
CONFLUENCE_PAGE_ID_PING=$CONFLUENCE_PAGE_ID_PING
EXECUTE_ACTIVITY_DEF_COLLECTION=${EXECUTE_ACTIVITY_DEF_COLLECTION:-"false"}
EXECUTE_COLLECT_REPORT_OVERVIEW=${EXECUTE_COLLECT_REPORT_OVERVIEW:-"false"}
LOG_LEVEL=${LOG_LEVEL:-"INFO"}

python src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
python3 src/py/feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $DSF_KEY_PATH --backend_base_url "$BACKEND_BASE_URL" --backend_client_id "$BACKEND_CLIENT_ID" \
--client_secret "$BACKEND_CLIENT_SECRET" --keycloak_token_url "$KEYCLOAK_TOKEN_URL" \
--confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
--send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\
--dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
--execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
--execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"
--execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"\
--execute_collect_activity_definition $EXECUTE_ACTIVITY_DEF_COLLECTION --log_level $LOG_LEVEL --execute_collect_report_overview $EXECUTE_COLLECT_REPORT_OVERVIEW
216 changes: 216 additions & 0 deletions src/py/execute_collect_activity_definitions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,216 @@
from prettytable import PrettyTable
import requests
import logging
import csv

# Maps the "name" of a DSF ActivityDefinition to the process plugin that
# provides it.  Used to group installed ActivityDefinitions by plugin in
# the results table; names not listed here are reported as
# "unknown-process" (see map_process_name_to_plugin below).
process_name_to_plugin_map = {
    "Report Autostart": "mii-process-report",
    "Report Send": "mii-process-report",
    "Report Receive": "mii-process-report",
    "FeasibilityRequest": "mii-process-feasibility",
    "ExecuteFeasibility": "mii-process-feasibility",
    "dataSend": "mii-process-data-transfer",
    "dataReceive": "mii-process-data-transfer",
    "Ping": "dsf-process-ping-pong",
    "Pong": "dsf-process-ping-pong",
    "PingAutostart": "dsf-process-ping-pong",
    "DownloadAllowList": "dsf-process-allowlist",
    "UpdateAllowList": "dsf-process-allowlist",
}


def get_next_link(link_elem):
    """Return the URL of the FHIR bundle link with relation "next", or None."""
    next_urls = (link["url"] for link in link_elem if link["relation"] == "next")
    return next(next_urls, None)


def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path):
    """Collect all entries from a paged FHIR search result.

    Follows the bundle's "next" links iteratively (the previous recursive
    variant could exhaust the recursion limit on servers returning many
    pages) and parses each response body exactly once.

    :param resp: requests.Response of the initial search request
    :param dsf_cert_path: path to the DSF client certificate
    :param dsf_key_path: path to the DSF client key
    :return: list of all bundle "entry" elements; [] for error responses
    """
    collected = []

    while True:
        if resp.status_code != 200:
            return collected

        # parse the body once per page instead of once per access
        bundle = resp.json()

        if "entry" not in bundle:
            return collected
        if bundle["entry"]:
            collected.extend(bundle["entry"])

        next_link = next(
            (link["url"] for link in bundle.get("link", []) if link["relation"] == "next"),
            None,
        )
        if not next_link:
            return collected

        logging.debug(f"Getting next page {next_link}")
        # timeout added: consistent with the other requests in this module
        resp = requests.get(
            next_link, cert=(dsf_cert_path, dsf_key_path), timeout=10
        )


def convert_process_info_to_process_name(process):
    """Build the unique "plugin|name|version" key for an installed process."""
    return "{}|{}|{}".format(process["plugin"], process["name"], process["version"])


def create_results_table(activity_information):
    """Build a table (list of rows) of installed processes per site.

    Row 1 is the header ("Site Identifier" plus the sorted
    "plugin|name|version" keys), row 2 the per-process count over all
    sites ("Overall"), followed by one row per site with 1/0 flags
    marking which processes are installed there.
    """
    def process_key(process):
        return "{}|{}|{}".format(process["plugin"], process["name"], process["version"])

    all_keys = set()
    for site_info in activity_information.values():
        for process in site_info["installedProcesses"]:
            all_keys.add(process_key(process))

    columns = sorted(all_keys)
    overall_counts = [0] * len(columns)
    site_rows = []

    for site_ident, site_info in activity_information.items():
        installed = {process_key(p) for p in site_info["installedProcesses"]}
        flags = []
        for idx, key in enumerate(columns):
            if key in installed:
                flags.append(1)
                overall_counts[idx] += 1
            else:
                flags.append(0)
        site_rows.append([site_ident] + flags)

    return [["Site Identifier"] + columns, ["Overall"] + overall_counts] + site_rows


def map_process_name_to_plugin(process_name):
    """Resolve an ActivityDefinition name to the plugin providing it.

    Falls back to "unknown-process" for names missing from
    process_name_to_plugin_map.
    """
    return process_name_to_plugin_map.get(process_name, "unknown-process")


def create_activity_list_and_add_to_org(
    organization_list, organization, endpoint, activity_definitions
):
    """Extract installed-process info from ActivityDefinitions and record it.

    Adds an entry keyed by the organization's first identifier value to
    organization_list, containing the queried endpoint and the list of
    installed processes.  Broken ActivityDefinitions (without "url") are
    logged and skipped.

    :param organization_list: dict mapping org identifier -> collected info (mutated)
    :param organization: FHIR bundle entry of the Organization resource
    :param endpoint: base URL of the organization's DSF endpoint
    :param activity_definitions: list of ActivityDefinition bundle entries
    """
    activity_definition_list = []

    for entry in activity_definitions:

        activity_definition = entry["resource"]

        if "url" not in activity_definition:
            # typo fixed in log message ("defnition" -> "definition")
            logging.error(f"ERROR - broken activity definition in endpoint {endpoint}")
            continue

        activity_definition_list.append(
            {
                "url": activity_definition["url"],
                "name": activity_definition["name"],
                "plugin": map_process_name_to_plugin(activity_definition["name"]),
                "version": activity_definition["version"],
                "status": activity_definition["status"],
            }
        )

    organization_list[organization["resource"]["identifier"][0]["value"]] = {
        "endpoint": endpoint,
        "installedProcesses": activity_definition_list,
    }


def generate_activity_list_for_orgs(
    organization_list, organizations, dsf_base_url, dsf_cert_path, dsf_key_path
):
    """Query every organization's DSF endpoint for its ActivityDefinitions.

    For each organization that has an endpoint reference, resolves the
    endpoint address via the local DSF FHIR server, fetches all
    ActivityDefinitions (paged) and records the installed processes in
    organization_list.  Endpoints that cannot be reached are recorded
    with a "connection-error" entry instead of aborting the run.

    :param organization_list: dict mapping org identifier -> info (mutated)
    :param organizations: list of Organization bundle entries
    :param dsf_base_url: base URL of the local DSF FHIR server
    :param dsf_cert_path: path to the DSF client certificate
    :param dsf_key_path: path to the DSF client key
    """
    for organization in organizations:

        org_ident = organization["resource"]["identifier"][0]["value"]

        if "endpoint" not in organization["resource"]:
            continue

        endpoint = requests.get(
            f'{dsf_base_url}/{organization["resource"]["endpoint"][0]["reference"]}?_format=json',
            cert=(dsf_cert_path, dsf_key_path),
            timeout=10,  # without a timeout a stuck server blocks the whole collection
        ).json()["address"]

        logging.info(f"Querying endpoint: {endpoint} for org: {org_ident}")

        try:
            activity_def_req_res = requests.get(
                f"{endpoint}/ActivityDefinition?_format=json",
                cert=(dsf_cert_path, dsf_key_path),
                timeout=10,
            )
            activity_definitions = page_through_results_and_collect(
                activity_def_req_res, dsf_cert_path, dsf_key_path
            )

        except requests.exceptions.RequestException:
            # warning (was debug): an unreachable endpoint should be visible
            # in the default INFO-level log, not only in the error entry
            logging.warning(f"Could not connect to endpoint {endpoint}")
            organization_list[org_ident] = {
                "endpoint": endpoint,
                "installedProcesses": [],
                "errors": [
                    {
                        "code": "connection-error",
                        "display": "Could not connect to endpoint",
                    }
                ],
            }

            continue

        create_activity_list_and_add_to_org(
            organization_list, organization, endpoint, activity_definitions
        )


def save_results_to_disk(table):
    """Write the installed-plugin table to reports/dsf_installed_plugins.csv."""
    with open("reports/dsf_installed_plugins.csv", "w", newline="") as csvfile:
        csv.writer(
            csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL
        ).writerows(table)


def execute_collect_activity_definitions(dsf_base_url, dsf_cert_path, dsf_key_path):
    """Collect the installed DSF process plugins of all organizations.

    Fetches all organizations from the local DSF FHIR server, queries each
    organization's endpoint for its ActivityDefinitions and writes the
    resulting overview table to reports/dsf_installed_plugins.csv.
    """
    initial_response = requests.get(
        f"{dsf_base_url}/Organization?_format=json",
        cert=(dsf_cert_path, dsf_key_path),
        timeout=20,
    )
    organizations = page_through_results_and_collect(
        initial_response, dsf_cert_path, dsf_key_path
    )

    collected_info = {}
    generate_activity_list_for_orgs(
        collected_info, organizations, dsf_base_url, dsf_cert_path, dsf_key_path
    )

    sorted_info = dict(sorted(collected_info.items()))
    save_results_to_disk(create_results_table(sorted_info))
82 changes: 82 additions & 0 deletions src/py/execute_collect_report_overview.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
from prettytable import PrettyTable
import requests
import logging
import csv
import json


def get_next_link(link_elem):
    """Return the URL of the bundle link whose relation is "next", or None."""
    candidates = (link["url"] for link in link_elem if link["relation"] == "next")
    return next(candidates, None)


def page_through_results_and_collect(resp, dsf_cert_path, dsf_key_path):
    """Collect all entries from a paged FHIR search result.

    Follows the bundle's "next" links iteratively (the previous recursive
    variant could exhaust the recursion limit on servers returning many
    pages) and parses each response body exactly once.

    :param resp: requests.Response of the initial search request
    :param dsf_cert_path: path to the DSF client certificate
    :param dsf_key_path: path to the DSF client key
    :return: list of all bundle "entry" elements; [] for error responses
    """
    collected = []

    while True:
        if resp.status_code != 200:
            return collected

        # parse the body once per page instead of once per access
        bundle = resp.json()

        if "entry" not in bundle:
            return collected
        if bundle["entry"]:
            collected.extend(bundle["entry"])

        next_link = next(
            (link["url"] for link in bundle.get("link", []) if link["relation"] == "next"),
            None,
        )
        if not next_link:
            return collected

        logging.debug(f"Getting next page {next_link}")
        # timeout added: consistent with the other requests in this module
        resp = requests.get(
            next_link, cert=(dsf_cert_path, dsf_key_path), timeout=10
        )


def create_results_table(reports):
    """Build a two-column table of site identifier and last report date."""
    header = ["Site Identifier", "Last Report"]
    rows = [[site, last_report] for site, last_report in reports.items()]
    return [header] + rows


def save_results_to_disk(table):
    """Write the last-report overview table to reports/last_kds_reports.csv."""
    with open("reports/last_kds_reports.csv", "w", newline="") as csvfile:
        csv.writer(
            csvfile, delimiter=";", quotechar='"', quoting=csv.QUOTE_MINIMAL
        ).writerows(table)


def execute_collect_report_overview(dsf_base_url, dsf_cert_path, dsf_key_path):
    """Collect the date of the last KDS report bundle per site.

    Searches the local DSF FHIR server for report response bundles, maps
    each site identifier to the date (YYYY-MM-DD) its bundle was last
    updated and writes the overview to reports/last_kds_reports.csv.

    :param dsf_base_url: base URL of the local DSF FHIR server
    :param dsf_cert_path: path to the DSF client certificate
    :param dsf_key_path: path to the DSF client key
    """
    report_list = {}
    report_req_res = requests.get(
        f"{dsf_base_url}/Bundle?_format=json&_profile=http://medizininformatik-initiative.de/fhir/Bundle/search-bundle-response-report|1.0",
        cert=(dsf_cert_path, dsf_key_path),
        timeout=20,
    )

    # was a bare print(): route diagnostics through logging
    logging.debug(f"Report bundle search returned status {report_req_res.status_code}")

    reports = page_through_results_and_collect(
        report_req_res, dsf_cert_path, dsf_key_path
    )

    for report in reports:
        # keep only the date part (YYYY-MM-DD) of the lastUpdated timestamp
        report_list[report['resource']['identifier']['value']] = report['resource']['meta']['lastUpdated'][0:10]

    results_table = create_results_table(report_list)
    save_results_to_disk(results_table)

    # was a bare print(): the collected overview is debug output, not a result
    logging.debug(json.dumps(report_list))
1 change: 0 additions & 1 deletion src/py/execute_feasibility_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,6 @@ def execute_history_query(backend_base_url, backend_client_id, client_secret, ke
history_query = load_json_file(HISTORY_QUERY_FILE)
results = send_query_and_get_results(history_query, backend_base_url, backend_client_id, client_secret, keycloak_token_url, wait_result_secs_feas)
converted_result = convert_results(results)

update_and_save_history_report(history_report, converted_result)

history_table = convert_to_table(history_report["reports"][:int(history_table_len)], "date")
Expand Down
Loading

0 comments on commit e8f5379

Please sign in to comment.