diff --git a/.env.default b/.env.default
index 333ccbc..729b9e3 100644
--- a/.env.default
+++ b/.env.default
@@ -11,6 +11,7 @@ SEND_TO_CONFLUENCE=false
 DSF_BASE_URL="https://dsf.base-url-here"
 EXECUTE_HISTORY_TEST=false
 CONFLUENCE_PAGE_ID_HIST=""
+HISTORY_TABLE_LEN=14
 EXECUTE_FEAS_TEST=false
 CONFLUENCE_PAGE_ID_FEAS=""
 EXECUTE_DSF_PING_TEST=false
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..dcbdeb4
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,42 @@
+name: Build
+
+on:
+  push:
+    branches:
+      - main
+      - develop
+  pull_request:
+    branches:
+      - main
+      - develop
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Prepare Version
+        id: prep
+        run: |
+          echo "mii=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT"
+
+      - name: Debug ls
+        run: ls
+
+      - name: Build and push
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: |
+            ghcr.io/${{ steps.prep.outputs.mii }}/feasibility-monitoring:test
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..825c32f
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1 @@
+# Changelog
diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md
new file mode 100644
index 0000000..083d629
--- /dev/null
+++ b/DEVELOPMENT.md
@@ -0,0 +1,13 @@
+# Development
+
+## Release Checklist
+
+* create a release branch called `release-v<version>` like `release-v0.1.1`
+* update the CHANGELOG based on the milestone
+* create a commit with the title `Release v<version>`
+* create a PR from the release branch into main
+* merge that PR
+* create and push a tag called `v<version>` like `v0.1.1` on main at the merge commit
+* create a new branch called `next-dev` on top of the release branch
+* merge the `next-dev` branch back into develop
+* create release notes on GitHub
diff --git a/Dockerfile b/Dockerfile
index ac32de5..0b055c7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM python:3.9
 
 RUN mkdir -p /opt/reportclient
-COPY ./docker-entrypoint.sh /opt/reportclient/docker-entrypoint.sh
+
 COPY ./feasibility-monitoring.py /opt/reportclient/feasibility-monitoring.py
 COPY ./execute_ping_test.py /opt/reportclient/execute_ping_test.py
 COPY ./execute_feasibility_test.py /opt/reportclient/execute_feasibility_test.py
@@ -10,19 +10,16 @@ COPY ./history-query.json /opt/reportclient/history-query.json
 COPY ./config.yml /opt/reportclient/config.yml
 COPY ./ping-task.xml /opt/reportclient/ping-task.xml
 COPY ./requirements.txt /tmp/requirements.txt
 
-RUN pip3 install -r /tmp/requirements.txt
-RUN useradd -r -s /bin/false 10001
+RUN pip3 install -r /tmp/requirements.txt
 
 WORKDIR /opt/reportclient
 RUN mkdir reports
-RUN chmod 777 reports
-RUN chown -R 10001:10001 /opt/reportclient
+RUN chown -R 1001:1001 /opt/reportclient
 
 COPY docker-entrypoint.sh /usr/local/bin/
 RUN chmod +x /usr/local/bin/docker-entrypoint.sh
-RUN mkdir certs
 
-USER 10001
+USER 1001
 
 ENTRYPOINT ["docker-entrypoint.sh"]
diff --git a/README.md b/README.md
index b3d619e..fb2f750 100644
--- a/README.md
+++ b/README.md
@@ -72,5 +72,6 @@ docker-compose up
 | CONFLUENCE_PAGE_ID_FEAS | | The Confluence page ID where the Feasibility test result will be uploaded. |
 | EXECUTE_HISTORY_TEST | false | Boolean: Whether to execute the Feasibility history test. |
 | CONFLUENCE_PAGE_ID_HIST | | The Confluence page ID where the Feasibility history test result will be uploaded. |
+| HISTORY_TABLE_LEN | 14 | The maximum number of history reports displayed in the Confluence table. |
 | EXECUTE_DSF_PING_TEST | false | Boolean: Whether to execute the DSF ping test. |
 | CONFLUENCE_PAGE_ID_PING | | The Confluence page ID where the ping test result will be uploaded. |
diff --git a/docker-compose.yml b/docker-compose.yml
index 775c5ec..a448b47 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -18,6 +18,7 @@ services:
       - CONFLUENCE_PAGE_ID_FEAS=${CONFLUENCE_PAGE_ID_FEAS:-""}
       - EXECUTE_HISTORY_TEST=${EXECUTE_HISTORY_TEST:-"false"}
       - CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
+      - HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
       - EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
       - CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:-""}
     volumes:
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index d4e40de..1f508a1 100644
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -17,6 +17,7 @@ EXECUTE_FEAS_TEST=${EXECUTE_FEAS_TEST:-"false"}
 CONFLUENCE_PAGE_ID_FEAS=${CONFLUENCE_PAGE_ID_FEAS:-""}
 EXECUTE_HISTORY_TEST=${EXECUTE_HISTORY_TEST:-"false"}
 CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
+HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=${CONFLUENCE_PAGE_ID_PING:""}
 
@@ -25,5 +26,5 @@ python feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $
 --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
 --send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\
 --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
---execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST\
+--execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
 --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"
diff --git a/execute-local.sh b/execute-local.sh
index c0a69f4..86dd7cd 100644
--- a/execute-local.sh
+++ b/execute-local.sh
@@ -15,6 +15,9 @@ SEND_TO_CONFLUENCE=${SEND_TO_CONFLUENCE:-"false"}
 DSF_BASE_URL=${DSF_BASE_URL:-""}
 EXECUTE_FEAS_TEST=${EXECUTE_FEAS_TEST:-"false"}
 CONFLUENCE_PAGE_ID_FEAS=$CONFLUENCE_PAGE_ID_FEAS
+EXECUTE_HISTORY_TEST=${EXECUTE_HISTORY_TEST:-"false"}
+CONFLUENCE_PAGE_ID_HIST=${CONFLUENCE_PAGE_ID_HIST:-""}
+HISTORY_TABLE_LEN=${HISTORY_TABLE_LEN:-14}
 EXECUTE_DSF_PING_TEST=${EXECUTE_DSF_PING_TEST:-"false"}
 CONFLUENCE_PAGE_ID_PING=$CONFLUENCE_PAGE_ID_PING
 
@@ -23,5 +26,5 @@ python feasibility-monitoring.py --dsf_cert_path $DSF_CERT_PATH --dsf_key_path $
 --confluence_api_base_url "$CONFLUENCE_API_BASE_URL" --conf_user "$CONF_USER" --conf_pw "$CONF_PW"\
 --send_results_confluence $SEND_TO_CONFLUENCE --wait_result_secs_feas "$WAIT_RESULT_SECS_FEAS"\
 --dsf_base_url $DSF_BASE_URL --execute_feas_test $EXECUTE_FEAS_TEST --confluence_page_id_feas $CONFLUENCE_PAGE_ID_FEAS\
---execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST\
+--execute_history_test $EXECUTE_HISTORY_TEST --confluence_page_id_hist $CONFLUENCE_PAGE_ID_HIST --history_table_len $HISTORY_TABLE_LEN\
 --execute_ping_test $EXECUTE_DSF_PING_TEST --confluence_page_id_ping $CONFLUENCE_PAGE_ID_PING --wait_result_secs_ping "$WAIT_RESULT_SECS_PING"
diff --git a/execute_feasibility_test.py b/execute_feasibility_test.py
index f6a23a1..c11dfc0 100644
--- a/execute_feasibility_test.py
+++ b/execute_feasibility_test.py
@@ -90,9 +90,9 @@ def convert_results(results):
         n_patients = result['numberOfPatients']
 
         if n_patients >= 20:
-            n_patients = "yes"
+            n_patients = "Yes"
         else:
-            n_patients = "no"
+            n_patients = "No"
 
         converted_results[result["siteName"]] = n_patients
 
@@ -117,7 +117,7 @@ def append_percentage_fulfilled_column(site_module_yes_no_map, modules, optional
     for site_url in DSF_SITE_IDS:
         site_yes_count = 0
         for column_name in modules:
-            if site_module_yes_no_map[column_name][site_url] == "yes" and column_name not in optional_modules:
+            if site_module_yes_no_map[column_name][site_url] == "Yes" and column_name not in optional_modules:
                 site_yes_count += 1
 
         non_optional_modules = (len(modules) - len(optional_modules))
@@ -143,9 +143,9 @@ def create_columns(query_reports, column_names, optional_modules, column_attribu
 
         for site_url, converted_query_result in query_report["result"].items():
 
-            if converted_query_result == "yes":
+            if converted_query_result == "Yes":
                 site_module_yes_no_map[column_name]["total_yes"] += 1
-            elif converted_query_result == "no":
+            elif converted_query_result == "No":
                 site_module_yes_no_map[column_name]["total_no"] += 1
             else:
                 site_module_yes_no_map[column_name]["total_na"] += 1
@@ -176,15 +176,6 @@ def get_column_keys(report_results, column_attribute):
     return column_keys, []
 
 
-def beautyfy_result(result):
-    if result == "yes":
-        return "Yes"
-    elif result == "no":
-        return "No"
-    else:
-        return result
-
-
 # column_attribute can be date or module
 def convert_to_table(report_results, column_attribute):
     row_keys = ["total_yes", "total_no", "total_na"] + list(DSF_SITE_IDS)
@@ -198,7 +189,7 @@ def convert_to_table(report_results, column_attribute):
     for column_key in column_keys:
         column = []
         for row_key in row_keys:
-            column.append(beautyfy_result(columns[column_key][row_key]))
+            column.append(columns[column_key][row_key])
 
         column_name = column_key + " (optional)" if column_key in optional_column_keys else column_key
         if column_name == "percentage_fulfilled":
@@ -211,32 +202,31 @@ def convert_to_table(report_results, column_attribute):
 def load_history_report():
     history_query = load_json_file(HISTORY_QUERY_FILE)
 
-    if os.path.isfile(HISTORY_REPORT_FILE):
-        with open(HISTORY_REPORT_FILE, "r") as f:
-            history_report = json.load(f)
-
-        if history_report["query"] != history_query:
-            raise ValueError("history report contains query that is not equal to the current history query")
-
-        return history_report
-    else:
+    if not os.path.isfile(HISTORY_REPORT_FILE):
         open(HISTORY_REPORT_FILE, "x")
         history_report = {"reports": [], "query": history_query}
         return history_report
 
+    with open(HISTORY_REPORT_FILE, "r") as f:
+        history_report = json.load(f)
+
+    if history_report["query"] != history_query:
+        raise ValueError("history report contains query that is not equal to the current history query")
+
+    history_report["reports"].sort(key=lambda x: datetime.strptime(x["date"], "%Y-%m-%d"))
+    return history_report
+
 
 def update_and_save_history_report(history_report, new_result):
     now = datetime.now().strftime("%Y-%m-%d")
-    new_history_result = {"date": now}
-    new_history_result["result"] = new_result
-
-    is_new_date = True
-    for i in range(len(history_report["reports"])):
-        if history_report["reports"][i]["date"] == now:
-            history_report["reports"][i] = new_history_result
-            is_new_date = False
-            break
-    if is_new_date:
+    new_history_result = {"date": now, "result": new_result}
+
+    reports = history_report["reports"]
+    most_recent_date = reports[len(reports)-1]["date"] if len(reports) > 0 else None
+
+    if most_recent_date == now:
+        reports[len(reports)-1] = new_history_result
+    else:
         history_report["reports"].append(new_history_result)
 
     with open(HISTORY_REPORT_FILE, "w") as f:
@@ -284,7 +274,7 @@ def send_query_and_get_results(query, backend_base_url, backend_client_id, clien
 
 
 def execute_history_query(backend_base_url, backend_client_id, client_secret, keycloak_token_url, wait_result_secs_feas, conf_user, conf_pw,
-                          confluence_api_base_url, confluence_page_id_hist, send_results_confluence):
+                          confluence_api_base_url, confluence_page_id_hist, send_results_confluence, history_table_len):
     load_constants()
     history_report = load_history_report()
     history_query = load_json_file(HISTORY_QUERY_FILE)
@@ -293,7 +283,7 @@ def execute_history_query(backend_base_url, backend_client_id, client_secret, ke
 
     update_and_save_history_report(history_report, converted_result)
 
-    history_table = convert_to_table(history_report["reports"], "date")
+    history_table = convert_to_table(history_report["reports"][-int(history_table_len):], "date")
     print(history_table)
 
     if send_results_confluence:
diff --git a/feasibility-monitoring.py b/feasibility-monitoring.py
index 95596bc..9444256 100644
--- a/feasibility-monitoring.py
+++ b/feasibility-monitoring.py
@@ -41,6 +41,8 @@ def str_to_bool(s):
                         help='number of seconds to wait before results for a query are fetched', nargs="?", default='60')
     parser.add_argument('--wait_result_secs_ping',
                         help='number of seconds to wait before results for the ping task fetched', nargs="?", default='600')
+    parser.add_argument('--history_table_len',
+                        help='length of the history table that is sent to confluence', nargs="?", default='14')
     parser.add_argument('--execute_feas_test', help='', type=str_to_bool, default="false")
     parser.add_argument('--execute_history_test', help='', type=str_to_bool, default="false")
     parser.add_argument('--execute_ping_test', help='', type=str_to_bool, default="false")
@@ -64,6 +66,7 @@ def str_to_bool(s):
     conf_pw = args["conf_pw"]
     wait_result_secs_feas = args["wait_result_secs_feas"]
     wait_result_secs_ping = args["wait_result_secs_ping"]
+    history_table_len = args["history_table_len"]
     b_execute_feas_test = args["execute_feas_test"]
     b_execute_history_test = args["execute_history_test"]
     b_execute_ping_test = args["execute_ping_test"]
@@ -77,7 +80,8 @@ def str_to_bool(s):
                                                 conf_user, conf_pw,
                                                 confluence_api_base_url,
                                                 confluence_page_id_hist,
-                                                b_send_results_confluence)
+                                                b_send_results_confluence,
+                                                history_table_len)
 
     if b_execute_feas_test:
         execute_feasibility_test.execute_feas_test_queries(backend_base_url, backend_client_id,