From 84ee39baf4503f5e20340d3efb63e5bd9546c111 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 24 Oct 2023 13:52:08 -0400 Subject: [PATCH 01/22] Attempt to fix bottleneck and resource contention --- .gitlab-ci.yml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 76ac2ae32..99939a015 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -14,14 +14,17 @@ stages: # before attempting to launch the CI pipelines setup-vms: stage: setup - tags: - - datafed-infrastructure - script: - - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-arangodb" - - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-core" - - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-globus1" - - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-globus2" - - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-client" + trigger: + project: 8f4/datafedci + branch: main + strategy: depend +# - datafed-infrastructure +# script: +# - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-arangodb" +# - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-core" +# - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-globus1" +# - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-globus2" +# - ./scripts/pipeline_setup.sh --compute-instance-name "ci-datafed-client" ################################################################################ # STAGE: build-deploy-base From 69d51b76dd115b52161ccf13f3ba5a39e4f4bb2e Mon Sep 17 00:00:00 2001 From: par-hermes Date: Tue, 24 Oct 2023 18:08:10 +0000 Subject: [PATCH 02/22] cpp-py-formatter --- common/tests/unit/test_SocketOptions.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/common/tests/unit/test_SocketOptions.cpp b/common/tests/unit/test_SocketOptions.cpp index ba76be426..b9040c5cb 100644 --- a/common/tests/unit/test_SocketOptions.cpp +++ b/common/tests/unit/test_SocketOptions.cpp @@ -36,7 +36,8 @@ BOOST_AUTO_TEST_CASE(testing_AddressSplitterINPROC) { BOOST_AUTO_TEST_CASE(testing_AddressSplitterNoPort) { // Still contains ':' - BOOST_CHECK_THROW(AddressSplitter splitter("inproc://www.datafed.com:"), TraceException); + BOOST_CHECK_THROW(AddressSplitter splitter("inproc://www.datafed.com:"), + TraceException); } BOOST_AUTO_TEST_CASE(testing_AddressSplitterNoPort2) { From 2408ec3c6e88b1d361f87035221c0f81c7a12247 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 24 Oct 2023 18:10:16 +0000 Subject: [PATCH 03/22] Add Changelog comment --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 154a55e8d..7bd5e43b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## PATCH Bug Fixes/ +1. [915] - Refactor CI to use pipelines Gitlab feature as opposed to custom + solution + 1. 
[911] - Add GitHub template # v2023.8.21.10.40 From c84e8803195da6cb613da527f292d64628ad98e3 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 24 Oct 2023 20:24:37 +0000 Subject: [PATCH 04/22] Fix missing dependency install --- scripts/install_core_dependencies.sh | 2 +- scripts/install_repo_dependencies.sh | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/install_core_dependencies.sh b/scripts/install_core_dependencies.sh index 6a38b0860..edd6f102c 100755 --- a/scripts/install_core_dependencies.sh +++ b/scripts/install_core_dependencies.sh @@ -18,7 +18,7 @@ rapidjson-dev libkrb5-dev git python3-pkg-resources python3-pip libssl-dev sudo apt-get install -y libzmq3-dev cd ~ -#install_cmake +install_cmake cd ~ # Install cmake 3.17 diff --git a/scripts/install_repo_dependencies.sh b/scripts/install_repo_dependencies.sh index 92fb83426..287ebebaf 100755 --- a/scripts/install_repo_dependencies.sh +++ b/scripts/install_repo_dependencies.sh @@ -20,13 +20,13 @@ sudo apt-get install -y libzmq3-dev python3-pip python3 -m pip install --upgrade pip python3 -m pip install setuptools -#install_cmake -#cd ~ -# -#install_protobuf -#cd ~ -# -#install_libsodium +install_cmake +cd ~ + +install_protobuf +cd ~ + +install_libsodium cd ~ install_libzmq From e418c71c36ce8766da4129dda6af2066f972c218 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 24 Oct 2023 21:46:22 -0400 Subject: [PATCH 05/22] Fix ci pipeline build folder --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 99939a015..cd29879df 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -94,6 +94,7 @@ build-gcs-base: - docker tag "$LATEST_IMAGE" "$GCS_TAG" - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN code.ornl.gov:4567 - docker push "$GCS_TAG" + - cd ../../../ - docker build -f repository/docker/Dockerfile.gcs-authz-base.ubuntu -t code.ornl.gov:4567/${IMAGE_TAG2} . - docker push code.ornl.gov:4567/${IMAGE_TAG2} - docker build -f repository/docker/Dockerfile.gcs-authz.ubuntu -t code.ornl.gov:4567/${IMAGE_TAG3} . From 60b38278ec3948d697852d771bc1edb0af39722e Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Sun, 26 Nov 2023 16:23:46 -0500 Subject: [PATCH 06/22] Attempt to make CI pipeline smarter by only triggering infrastructure setup when it is not already running. 
--- .gitlab-ci.yml | 33 +++++++++--- scripts/ci_pipeline_setup.sh | 99 ++++++++---------------------------- 2 files changed, 47 insertions(+), 85 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c9b11d9e7..ff9c54933 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,6 +1,7 @@ stages: - - setup + - ci-infrastructure-check + - build-infrastructure - clear-docker-cache - build-deploy-base - build-unit-test-deploy @@ -9,16 +10,36 @@ stages: - end-to-end-teardown ################################################################################ -# STAGE: setup +# STAGE: ci-infrastructure-check ################################################################################ -# Setup stage is designed to check that the infrastructure is up and running -# before attempting to launch the CI pipelines -setup-vms: - stage: setup +# ci-infrastructure-check stage is designed to check that the infrastructure is +# up and running before attempting to launch the CI pipelines + +check-ci-infrastructure: + stage: ci-infrastructure-check + tags: + - datafed-infrastructure + script: + - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-arangodb" + - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-core" + - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-globus2" + - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-client" + allow_failure: true + resource_group: infrastructure_build + only: + - merge_requests + +run-trigger-job: + stage: build-infrastructure + rules: + - when: on_failure trigger: project: 8f4/datafedci branch: main strategy: depend + resource_group: infrastructure_build + only: + - merge_requests ################################################################################ # STAGE: clear-docker-cache diff --git a/scripts/ci_pipeline_setup.sh b/scripts/ci_pipeline_setup.sh index 3ef912a0b..6084a77d1 100755 --- a/scripts/ci_pipeline_setup.sh +++ b/scripts/ci_pipeline_setup.sh @@ -5,10 +5,10 @@ set -eu Help() { echo "$(basename $0) Will determine if a Open Stack VM exists if not it will" - echo " triger a GitLab pipeline to create the VM. It requires that you " + echo " will exit with an error code. It requires that you " echo "provide the Open Stack VM ID" echo - echo "Syntax: $(basename $0) [-h|i|s|c|g|a|n]" + echo "Syntax: $(basename $0) [-h|i|s|c|a|n]" echo "options:" echo "-h, --help Print this help message" echo "-i, --app-credential-id The application credentials id for" @@ -23,12 +23,12 @@ Help() echo " to check id or name is required." echo "-n, --compute-instance-name The name of the instance we are trying" echo " to check id or name is required.." - echo "-g, --gitlab-trigger-token The GitLab token for restarting the CI" - echo " pipeline to generate the VMs." echo "-a, --gitlab-api-token The GitLab API token for checking the" echo " status of a pipeline." 
} +GITLAB_PROJECT_ID="10830" + OS_APP_ID=$(printenv OS_APP_ID || true) if [ -z "$OS_APP_ID" ] then @@ -47,14 +47,6 @@ else local_OS_APP_SECRET="$OS_APP_SECRET" fi -GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN=$(printenv GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN || true) -if [ -z "$GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" ] -then - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN="" -else - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN="$GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" -fi - GITLAB_DATAFEDCI_REPO_API_TOKEN=$(printenv GITLAB_DATAFEDCI_REPO_API_TOKEN || true) if [ -z "$GITLAB_DATAFEDCI_REPO_API_TOKEN" ] then @@ -69,7 +61,7 @@ COMPUTE_INSTANCE_NAME="" COMPUTE_NAME_PROVIDED="FALSE" COMPUTE_ID_PROVIDED="FALSE" -VALID_ARGS=$(getopt -o hi:s:c:g:a:n: --long 'help',app-credential-id:,app-credential-secret:,compute-instance-id:,gitlab-trigger-token:,gitlab-api-token:,compute-instance-name: -- "$@") +VALID_ARGS=$(getopt -o hi:s:c:a:n: --long 'help',app-credential-id:,app-credential-secret:,compute-instance-id:,gitlab-api-token:,compute-instance-name: -- "$@") if [[ $? -ne 0 ]]; then exit 1; fi @@ -98,10 +90,6 @@ while [ : ]; do COMPUTE_NAME_PROVIDED="TRUE" shift 2 ;; - -g | --gitlab-trigger-token) - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN=$2 - shift 2 - ;; -a | --gitlab-api-token) local_GITLAB_DATAFEDCI_REPO_API_TOKEN=$2 shift 2 @@ -136,16 +124,9 @@ then exit 1 fi -if [ -z "$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" ] -then - echo "The GitLab token for triggering the CI pipeline has not been defined it" - echo "is a required parameter." - exit 1 -fi - if [ -z "$local_GITLAB_DATAFEDCI_REPO_API_TOKEN" ] then - echo "The GitLab token for accessing the API of the datafedci repo is missing." + echo "The GitLab token for accessing the API of the DataFed ci repo is missing." echo "It is a required parameter." exit 1 fi @@ -173,7 +154,7 @@ then fi # Make sure jq is installed -jq_path=$(which jq) +jq_path=$(which jq || true) if [ -z "$jq_path" ] then echo "jq command not found exiting!" @@ -182,23 +163,27 @@ fi wait_for_running_infrastructure_pipelines_to_finish() { local GITLAB_REPO_API_TOKEN="$1" - local all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines?status=running" | jq '.[]') + local all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines?status=running" | jq '.[]') if [ -z "$all_other_pipelines" ] then echo "No other running infrastructure provisioning pipelines detected!" fi - + local count=0 while [ ! -z "$all_other_pipelines" ] do - echo "$count Other running infrastructure provisioning pipelines detected... waiting for them to complete." + echo "Attempt $count, Other running infrastructure provisioning pipelines detected... waiting for them to complete." 
+ echo + echo "Running Pipelines Are:" echo "$all_other_pipelines" | jq '.id' sleep 30s count=$(($count + 1)) - all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines?status=running" | jq '.[]') + all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines?status=running" | jq '.[]') done } +# Will search the open research cloud for instance that is running with the +# provided identity find_orc_instance_by_id() { local SANITIZED_TOKEN="$1" local SANITIZED_URL="$2" @@ -238,7 +223,6 @@ body=$(echo $data | sed 's/^.*{\"token/{\"token/' ) compute_url=$(echo "$body" | jq '.token.catalog[] | select(.name=="nova") |.endpoints[] | select(.interface=="public") | .url ') sanitize_compute_url=$(echo $compute_url | sed 's/\"//g') - header=$(echo "$data" | sed 's/{\"token.*//') subject_token=$(echo "$data" | grep "X-Subject-Token" | awk '{print $2}' ) @@ -271,42 +255,7 @@ pipeline_id="" if [ "$found_vm_id" == "FALSE" ] then echo "VM ID: $compute_id Name: $compute_name is Unhealthy, does not exist, triggering pipeline." - - #datafedci_repo_api_trigger_to_run_ci_pipeline - # Here we need to make a request to the code.ornl.gov at datafedci - gitlab_response=$(curl -s --retry 5 --request POST \ - --form token="$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" \ - --form ref="main" \ - "https://code.ornl.gov/api/v4/projects/10830/trigger/pipeline") - pipeline_id=$(echo "$gitlab_response" | jq '.id' ) - - MAX_COUNT=40 - count=0 - while [ "$found_vm_id" == "FALSE" ] - do - printf "$count Waiting for pipeline to start VM $compute_id..." - sleep 30s - # Run while loop and sleep until VM shows up or timeout is hit - compute_id="" - compute_name="" - found_vm_id="FALSE" - if [ "$COMPUTE_ID_PROVIDED" == "TRUE" ] - then - find_orc_instance_by_id "$sanitize_subject_token" "$sanitize_compute_url" "$COMPUTE_INSTANCE_ID" - fi - if [[ "$found_vm_id" == "FALSE" && "$COMPUTE_NAME_PROVIDED" == "TRUE" ]] - then - find_orc_instance_by_name "$sanitize_subject_token" "$sanitize_compute_url" "$COMPUTE_INSTANCE_NAME" - fi - - count=$(($count + 1)) - - if [ "$count" == "$MAX_COUNT" ] - then - echo "Exceeded time limit!" - exit 1 - fi - done + exit 1 fi ################################################################################ @@ -329,15 +278,7 @@ fi if [[ "$VM_IS_ACTIVE" == "FALSE" && -z "$pipeline_id" ]] then echo "VM ID: $compute_id Name: $compute_name is unhealthy triggering pipeline." - - #datafedci_repo_api_trigger_to_run_ci_pipeline - # Here we need to make a request to the code.ornl.gov at datafedci - gitlab_response=$(curl -s --retry 5 --request POST \ - --form token="$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" \ - --form ref="main" \ - "https://code.ornl.gov/api/v4/projects/10830/trigger/pipeline") - - pipeline_id=$(echo "$gitlab_response" | jq '.id' ) + exit 1 fi # If the pipeline is defined check the status of the VMs @@ -348,7 +289,7 @@ then while [ "$VM_IS_ACTIVE" == "FALSE" ] do - printf "$count Waiting for pipeline to start VM ... " + printf "Attempt $count, Waiting for pipeline to start VM ... 
" sleep 30s compute_instances=$(curl -s --retry 5 -H "X-Auth-Token: $sanitize_subject_token" "$sanitize_compute_url/servers/detail" | jq) INSTANCE_STATUS=$(echo "$compute_instances" | jq --arg compute_id "$compute_id" '.servers[] | select(.id==$compute_id) | .status ') @@ -389,9 +330,9 @@ then KEEP_RUNNING="TRUE" while [ "$KEEP_RUNNING" == "TRUE" ] do - pipeline_status=$(curl -s --header "PRIVATE-TOKEN: ${local_GITLAB_DATAFEDCI_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines/$pipeline_id" | jq .status | sed 's/\"//g') + pipeline_status=$(curl -s --header "PRIVATE-TOKEN: ${local_GITLAB_DATAFEDCI_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines/$pipeline_id" | jq .status | sed 's/\"//g') - printf "$count Waiting for triggered infrastructure provisioning pipeline: ${pipeline_id} to complete ... " + printf "Attempt $count, Waiting for triggered infrastructure provisioning pipeline: ${pipeline_id} to complete ... " if [ "$pipeline_status" == "failed" ] then echo "Infrastructure triggered pipeline has failed unable to execute CI. STATUS: $pipeline_status" From 2c80b48f8a3836bde3680ae081fe05929a754869 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Sun, 26 Nov 2023 16:34:30 -0500 Subject: [PATCH 07/22] Remove only statement --- .gitlab-ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ff9c54933..d45882531 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -26,8 +26,8 @@ check-ci-infrastructure: - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-client" allow_failure: true resource_group: infrastructure_build - only: - - merge_requests + # only: + # - merge_requests run-trigger-job: stage: build-infrastructure @@ -38,8 +38,8 @@ run-trigger-job: branch: main strategy: depend resource_group: infrastructure_build - only: - - merge_requests + #only: + # - merge_requests ################################################################################ # STAGE: clear-docker-cache From ec224743b9ef79cd4f00068681419392f01a603c Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Sun, 26 Nov 2023 16:38:52 -0500 Subject: [PATCH 08/22] remove needs dependency --- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d45882531..381138cc0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -208,7 +208,6 @@ build-foxx: DATAFED_DATABASE_PASSWORD: "${CI_DATAFED_DATABASE_PASSWORD}" DATAFED_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_ZEROMQ_SYSTEM_SECRET}" stage: build-unit-test-deploy - needs: ["setup-vms"] tags: - ci-datafed-arango rules: From ee616fa55cd9ffe731591e6730c033339338aefd Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Sun, 26 Nov 2023 22:14:29 -0500 Subject: [PATCH 09/22] Attempting to add back in python client --- .gitlab-ci.yml | 47 +++++++++++++++++++++++------------------------ 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 381138cc0..9ffdd848c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -437,28 +437,27 @@ end-to-end-arango-setup: - arangod --version - ./scripts/run_arango_service.sh - -#end-to-end-client-test: -# variables: -# GIT_STRATEGY: clone -# stage: end-to-end-test -# tags: -# - ci-datafed-client -# script: -# - ./scripts/generate_datafed.sh -# - env -# - cat ./config/datafed.sh -# - > -# cmake -S. 
-B build -# -DENABLE_FOXX_TESTS=OFF -# -DBUILD_CORE_SERVER=OFF -# -DBUILD_COMMON=OFF -# -DBUILD_WEB_SERVER=OFF -# -DBUILD_DOCS=OFF -# -DBUILD_PYTHON_CLIENT=ON -# -DBUILD_TESTS=ON -# -DENABLE_END_TO_END_TESTS=ON -# -DINSTALL_FOXX=OFF -# - cmake --build build -# - cmake --build build --target install +end-to-end-client-test: + variables: + GIT_STRATEGY: clone + stage: end-to-end-test + tags: + - ci-datafed-client + script: + - ./scripts/generate_datafed.sh + - env + - cat ./config/datafed.sh + - > + cmake -S. -B build + -DENABLE_FOXX_TESTS=OFF + -DBUILD_CORE_SERVER=OFF + -DBUILD_COMMON=OFF + -DBUILD_WEB_SERVER=OFF + -DBUILD_DOCS=OFF + -DBUILD_PYTHON_CLIENT=ON + -DBUILD_TESTS=ON + -DENABLE_END_TO_END_TESTS=ON + -DINSTALL_FOXX=OFF + - cmake --build build + - cmake --build build --target pydatafed From 15d13247274d888e78ded946420cc42393cf4f97 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Mon, 27 Nov 2023 09:35:46 -0500 Subject: [PATCH 10/22] Add more configuration flexibility --- .gitlab-ci.yml | 12 ++++++++++++ scripts/clear_db.sh | 16 +++++++++++++++- scripts/install_foxx.sh | 37 ++++++++++++++++++++++++++++--------- tests/end-to-end/setup.sh | 33 ++++++++++++++++++++------------- 4 files changed, 75 insertions(+), 23 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9ffdd848c..031feb03b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -440,6 +440,17 @@ end-to-end-arango-setup: end-to-end-client-test: variables: GIT_STRATEGY: clone + DATAFED_DATABASE_HOST: "${CI_DATAFED_DATABASE_HOST}" + DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET}" + DATAFED_DATABASE_PASSWORD: "${CI_DATAFED_DATABASE_PASSWORD}" + DATAFED_USER89_PASSWORD: "${CI_DATAFED_USER89_PASSWORD}" + DATAFED_USER89_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER89_GLOBUS_REFRESH_TOKEN}" + DATAFED_USER89_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER89_GLOBUS_ACCESS_TOKEN}" + DATAFED_USER89_GLOBUS_UUID: "${CI_DATAFED_USER89_GLOBUS_UUID}" + DATAFED_USER99_PASSWORD: "${CI_DATAFED_USER99_PASSWORD}" + DATAFED_USER99_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER99_GLOBUS_REFRESH_TOKEN}" + DATAFED_USER99_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER99_GLOBUS_ACCESS_TOKEN}" + DATAFED_USER99_GLOBUS_UUID: "${CI_DATAFED_USER99_GLOBUS_UUID}" stage: end-to-end-test tags: - ci-datafed-client @@ -460,4 +471,5 @@ end-to-end-client-test: -DINSTALL_FOXX=OFF - cmake --build build - cmake --build build --target pydatafed + - cmake --build build --target test diff --git a/scripts/clear_db.sh b/scripts/clear_db.sh index 696c66bcf..52187f956 100755 --- a/scripts/clear_db.sh +++ b/scripts/clear_db.sh @@ -22,8 +22,22 @@ else local_DATAFED_ZEROMQ_SYSTEM_SECRET=$(printenv DATAFED_ZEROMQ_SYSTEM_SECRET) fi +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST="localhost" +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + +if [ -z "${DATAFED_DATABASE_PORT}" ] +then + local_DATAFED_DATABASE_PORT="8529" +else + local_DATAFED_DATABASE_PORT=$(printenv DATAFED_DATABASE_PORT) +fi + # Delete database and API from arangodb if command -v arangosh &> /dev/null then - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._dropDatabase("sdms");' + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._dropDatabase("sdms");' fi diff --git 
a/scripts/install_foxx.sh b/scripts/install_foxx.sh index ea1b0d4ce..fd06fa67d 100755 --- a/scripts/install_foxx.sh +++ b/scripts/install_foxx.sh @@ -14,7 +14,7 @@ Help() { echo "$(basename $0) Will set up a configuration file for the core server" echo - echo "Syntax: $(basename $0) [-h|u|f|p|y]" + echo "Syntax: $(basename $0) [-h|u|f|p|i|y]" echo "options:" echo "-h, --help Print this help message." echo "-u, --database-user Database user, needed to log into the database." @@ -24,6 +24,10 @@ Help() echo " provided via the command line it can also be set" echo " using the enviromental variable" echo " DATAFED_DATABASE_PASSWORD." + echo "-i, --database-host The hostname or IP address of the " + echo " database, the env variable: " + echo " DATAFED_DATABASE_HOST can also be " + echo " used." echo "-y, --system-secret ZeroMQ system secret" echo echo "NOTE: Do not run this script with sudo!" @@ -99,6 +103,7 @@ semantic_version_compatible() { local_DATABASE_NAME="sdms" local_DATABASE_USER="root" +local_DATABASE_PORT="8529" if [ -z "${DATAFED_DATABASE_PASSWORD}" ] then @@ -121,7 +126,16 @@ else local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION) fi -VALID_ARGS=$(getopt -o hu:p:f:y: --long 'help',database-user:,database-password:,foxx-api-major-version:,zeromq-system-secret: -- "$@") + +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST="localhost" +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + + +VALID_ARGS=$(getopt -o hu:p:f:i:y: --long 'help',database-user:,database-password:,foxx-api-major-version:,database-host:,zeromq-system-secret: -- "$@") if [[ $? -ne 0 ]]; then exit 1; fi @@ -148,6 +162,11 @@ while [ : ]; do local_FOXX_MAJOR_API_VERSION=$2 shift 2 ;; + -i | --database-host) + echo "Processing 'database host' option. Input argument is '$2'" + local_DATAFED_DATABASE_HOST=$2 + shift 2 + ;; -y | --zeromq-system-secret) echo "Processing 'DataFed ZeroMQ system secret' option. 
Input argument is '$2'" local_DATAFED_ZEROMQ_SYSTEM_SECRET=$2 @@ -186,17 +205,17 @@ fi # We are now going to initialize the DataFed database in Arango, but only if sdms database does # not exist -output=$(curl --dump - --user $local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD http://localhost:8529/_api/database/user) +output=$(curl --dump - --user $local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}/_api/database/user) if [[ "$output" =~ .*"sdms".* ]]; then echo "SDMS already exists do nothing" else echo "Creating SDMS" - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js # Give time for the database to be created sleep 2 - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" fi # There are apparently 3 different ways to deploy Foxx microservices, @@ -239,16 +258,16 @@ echo "$local_DATAFED_DATABASE_PASSWORD" > "${PATH_TO_PASSWD_FILE}" { # try # Check if database foxx services have already been installed - existing_services=$(foxx list -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) + existing_services=$(foxx list --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) FOUND_API=$(echo "$existing_services" | grep "/api/${local_FOXX_MAJOR_API_VERSION}") if [ -z "${FOUND_API}" ] then - foxx install -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + foxx install --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database 
${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ else echo "DataFed Foxx Services have already been uploaded, replacing to ensure consisency" - foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + foxx replace --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx" fi diff --git a/tests/end-to-end/setup.sh b/tests/end-to-end/setup.sh index 548c89c51..f0dcbf9e1 100755 --- a/tests/end-to-end/setup.sh +++ b/tests/end-to-end/setup.sh @@ -117,31 +117,38 @@ ${PROJECT_ROOT}/scripts/clear_db.sh # Second install foxx ${PROJECT_ROOT}/scripts/install_foxx.sh + +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST=$(hostname -I | awk '{print $1}') +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + #curl -X GET http://127.0.0.1:8529/_db/sdms/ -IP=$(hostname -I | awk '{print $1}') -echo "IP is $IP" +echo "IP is ${local_DATAFED_DATABASE_HOST}" echo "USER89 GLobud ID $DATAFED_USER89_GLOBUS_UUID" echo "Refresh is ${DATAFED_USER89_REFRESH_TOKEN}" # Chreate user datafed89 who is admin -HTTP_CODE=$( curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) +HTTP_CODE=$( curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$( curl -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) + response=$( curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" exit 1 fi # Set globus tokens -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET 
"http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") + response=$(curl --fail-early -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" @@ -149,24 +156,24 @@ then fi # Create user datafed99 who is not admin -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") + response=$(curl --fail-early -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" exit 1 fi # Set globus tokens -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") + response=$(curl --fail-early -X GET 
"http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" @@ -177,7 +184,7 @@ exit 0 #source ${DATAFED_REPO_FORM_PATH} # Using the datafed89 client because it has admin rights to add the repo -#curl -X POST --header 'accept: application/json' --data-binary @- --dump - "http://${IP}:8529/_db/sdms/api/repo/create?client=u%2Fdatafed89" <<\ +#curl -X POST --header 'accept: application/json' --data-binary @- --dump - "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/create?client=u%2Fdatafed89" <<\ #EOF #{ # "id" : "$DATAFED_REPO_ID", @@ -196,7 +203,7 @@ exit 0 # ## Using the datafed89 client because it has the repo rights to create an allocation ## Creating an allocation for datafed89 -#curl -X GET "http://${IP}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed89&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" +#curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed89&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" # ## Creating an allocation for datafed99 -#curl -X GET "http://${IP}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed99&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" +#curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed99&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" From 42d96e6f5601bbe122614ad2f72bcdf3de9706de Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 07:31:15 -0500 Subject: [PATCH 11/22] Add customization of configuration during end to end testing --- .gitlab-ci.yml | 35 +++++++++++++++++++++++--- core/database/tests/test_foxx.sh | 12 +++++++-- scripts/clear_db.sh | 16 +++++++++++- scripts/install_client_dependencies.sh | 19 ++++++++++++++ scripts/install_foxx.sh | 31 +++++++++++++++++------ tests/end-to-end/setup.sh | 16 +++++++++--- 6 files changed, 111 insertions(+), 18 deletions(-) create mode 100755 scripts/install_client_dependencies.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 031feb03b..3d01f03a9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,8 +2,10 @@ stages: - ci-infrastructure-check - build-infrastructure + - signal - clear-docker-cache - build-deploy-base + - provision-client - build-unit-test-deploy - end-to-end-setup - end-to-end-test @@ -26,8 +28,6 @@ check-ci-infrastructure: - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-client" allow_failure: true resource_group: infrastructure_build - # only: - # - merge_requests run-trigger-job: stage: build-infrastructure @@ -38,8 +38,16 @@ run-trigger-job: branch: main strategy: depend resource_group: infrastructure_build - #only: - # - merge_requests + +################################################################################ +# STAGE: signal +################################################################################ +# Stage is used to separte the trigger job from the remaining jobs and to act +# as an anchor for setting up dependencies +signal: + stage: signal + script: + - echo "Starting Build" ################################################################################ # 
STAGE: clear-docker-cache @@ -47,6 +55,7 @@ run-trigger-job: # Used to clear out the cache on VMs where the images are being built clear-core-cache: stage: clear-docker-cache + needs: ["signal"] tags: - ci_1 script: @@ -56,6 +65,7 @@ clear-core-cache: clear-repo-cache: stage: clear-docker-cache + needs: ["signal"] tags: - ci_3 script: @@ -145,6 +155,20 @@ build-gcs-base: - docker build -f repository/docker/Dockerfile.gcs-authz.ubuntu -t code.ornl.gov:4567/${IMAGE_TAG3} . - docker push code.ornl.gov:4567/${IMAGE_TAG3} +################################################################################ +# STAGE: provision client +################################################################################ +provision-client: + needs: ["signal"] + variables: + IMAGE_TAG: "dlsw/datafed/repo-base" + GIT_STRATEGY: clone + stage: provision-client + tags: + - ci-datafed-client + script: + - ./scripts/install_client_dependencies.sh + ################################################################################ # STAGE: build ################################################################################ @@ -437,6 +461,9 @@ end-to-end-arango-setup: - arangod --version - ./scripts/run_arango_service.sh +################################################################################ +# Actual Testing +################################################################################ end-to-end-client-test: variables: GIT_STRATEGY: clone diff --git a/core/database/tests/test_foxx.sh b/core/database/tests/test_foxx.sh index 902da6d28..25b5267ca 100755 --- a/core/database/tests/test_foxx.sh +++ b/core/database/tests/test_foxx.sh @@ -125,10 +125,18 @@ export NVM_DIR="$HOME/.nvm" nvm use $NODE_VERSION +FOXX_PREFIX="" +{ + # Determine if exists globally first + which foxx +} || { + FOXX_PREFIX="~/bin/" +} + PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password if [ "$TEST_TO_RUN" == "all" ] then - foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec + ${FOXX_PREFIX}foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec else - foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec + ${FOXX_PREFIX}foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec fi diff --git a/scripts/clear_db.sh b/scripts/clear_db.sh index 52187f956..346c9c8de 100755 --- a/scripts/clear_db.sh +++ b/scripts/clear_db.sh @@ -39,5 +39,19 @@ fi # Delete database and API from arangodb if command -v arangosh &> /dev/null then - arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._dropDatabase("sdms");' + exists=$(arangosh --server.endpoint "http+tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \ + --server.usernam "$local_DATABASE_USER" \ + --server.password "$local_DATAFED_DATABASE_PASSWORD" \ + --javascript.execute "db._databases().includes('$local_DATABASE_NAME')") + + if [ "$exists" = "true" ]; then + arangosh --server.endpoint + "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \ + --server.password 
${local_DATAFED_DATABASE_PASSWORD} \ + --server.username ${local_DATABASE_USER} \ + --javascript.execute-string "db._dropDatabase('$local_DATABASE_NAME');" + else + echo "Database $local_DATABASE_NAME does not exist." + fi + fi diff --git a/scripts/install_client_dependencies.sh b/scripts/install_client_dependencies.sh new file mode 100755 index 000000000..343c01c3a --- /dev/null +++ b/scripts/install_client_dependencies.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# Exit on error +set -e + +SCRIPT=$(realpath "$0") +SOURCE=$(dirname "$SCRIPT") +PROJECT_ROOT=$(realpath ${SOURCE}/..) + +source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" + +# This script will install all of the dependencies needed by DataFed 1.0 +sudo apt-get update +sudo dpkg --configure -a +# Install cmake 3.17 + +install_protobuf +cd ~ + diff --git a/scripts/install_foxx.sh b/scripts/install_foxx.sh index fd06fa67d..d4e8ed0f4 100755 --- a/scripts/install_foxx.sh +++ b/scripts/install_foxx.sh @@ -141,7 +141,6 @@ if [[ $? -ne 0 ]]; then fi eval set -- "$VALID_ARGS" while [ : ]; do - echo "$1" case "$1" in -h | --help) Help @@ -211,11 +210,21 @@ if [[ "$output" =~ .*"sdms".* ]]; then echo "SDMS already exists do nothing" else echo "Creating SDMS" - arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js + arangosh --server.endpoint \ + "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password ${local_DATAFED_DATABASE_PASSWORD} \ + --server.username ${local_DATABASE_USER} \ + --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js # Give time for the database to be created sleep 2 - arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' - arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password ${local_DATAFED_DATABASE_PASSWORD} \ + --server.username ${local_DATABASE_USER} \ + --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password ${local_DATAFED_DATABASE_PASSWORD} \ + --server.username ${local_DATABASE_USER} \ + --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" fi # There are apparently 3 different ways to deploy Foxx 
microservices, @@ -251,6 +260,14 @@ else npm install --global foxx-cli --prefix ~/ fi +FOXX_PREFIX="" +{ + # Determine if exists globally first + which foxx +} || { + FOXX_PREFIX="~/bin/" +} + PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password echo "Path to PASSWRD file ${PATH_TO_PASSWD_FILE} passwd is $local_DATAFED_DATABASE_PASSWORD" @@ -258,16 +275,16 @@ echo "$local_DATAFED_DATABASE_PASSWORD" > "${PATH_TO_PASSWD_FILE}" { # try # Check if database foxx services have already been installed - existing_services=$(foxx list --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) + existing_services=$(${FOXX_PREFIX}foxx list --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) FOUND_API=$(echo "$existing_services" | grep "/api/${local_FOXX_MAJOR_API_VERSION}") if [ -z "${FOUND_API}" ] then - foxx install --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + ${FOXX_PREFIX}foxx install --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ else echo "DataFed Foxx Services have already been uploaded, replacing to ensure consisency" - foxx replace --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + ${FOXX_PREFIX}foxx replace --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx" fi diff --git a/tests/end-to-end/setup.sh b/tests/end-to-end/setup.sh index f0dcbf9e1..2e35dbef1 100755 --- a/tests/end-to-end/setup.sh +++ b/tests/end-to-end/setup.sh @@ -104,11 +104,19 @@ fi # Detect whether arangodb is running locally -ARANGODB_RUNNING=$(systemctl is-active --quiet arangodb3.service && echo "RUNNING") -if [ "$ARANGODB_RUNNING" != "RUNNING" ] +{ + ARANGODB_RUNNING=$(systemctl is-active --quiet arangodb3.service && echo "RUNNING") +} || { + echo "Arangodb service is not locally detected." 
+} + +if [ "${DATAFED_DATABASE_HOST}" == "localhost" ] || [ "${DATAFED_DATABASE_HOST}" == "127.0.0.1" ] then - echo "REQUIRED the arangodb service has not been detected to be running by systemctl" - exit 1 + if [ "$ARANGODB_RUNNING" != "RUNNING" ] + then + echo "REQUIRED the arangodb service has not been detected to be running by systemctl" + exit 1 + fi fi # First step is to clear the database From c810485846b65cb639bb7df867e3d6caf44ac14a Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 07:53:50 -0500 Subject: [PATCH 12/22] Add generalist runner tag --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3d01f03a9..ca72ab640 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -46,6 +46,8 @@ run-trigger-job: # as an anchor for setting up dependencies signal: stage: signal + tags: + - runner script: - echo "Starting Build" From 15c1fd5ffa7c98296591ffdc8a2b86afd404b67a Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 09:18:48 -0500 Subject: [PATCH 13/22] Fix typo --- scripts/clear_db.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/clear_db.sh b/scripts/clear_db.sh index 346c9c8de..fb9d9e130 100755 --- a/scripts/clear_db.sh +++ b/scripts/clear_db.sh @@ -40,7 +40,7 @@ fi if command -v arangosh &> /dev/null then exists=$(arangosh --server.endpoint "http+tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \ - --server.usernam "$local_DATABASE_USER" \ + --server.username "$local_DATABASE_USER" \ --server.password "$local_DATAFED_DATABASE_PASSWORD" \ --javascript.execute "db._databases().includes('$local_DATABASE_NAME')") From f069e5ae87e2a609c9a1c5173b842f1c99954cf7 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 10:00:15 -0500 Subject: [PATCH 14/22] Try to decouple containers from env in ci, remove sudo from python install fix env var --- .gitlab-ci.yml | 5 +++++ scripts/dependency_install_functions.sh | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ca72ab640..cdba4ed6a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -345,6 +345,7 @@ end-to-end-ws-setup: -v "${CI_DATAFED_WEB_CERT_PATH}:/datafed/install/keys/${CERT_FILE_NAME}" -v "${CI_DATAFED_WEB_KEY_PATH}:/datafed/install/keys/${KEY_FILE_NAME}" -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" + /dev/null & - ./scripts/container_run_test.sh -e -c "1" -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" # Note here we use network=host to run the docker container this is @@ -384,6 +385,7 @@ end-to-end-core-setup: -v "$CI_DATAFED_CORE_PUB_KEY":/source/install/keys/datafed-core-key.pub -v "$CI_DATAFED_CORE_PRIV_KEY":/source/install/keys/datafed-core-key.priv -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" + /dev/null & - ./scripts/container_run_test.sh -e -c "1" -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" @@ -418,6 +420,7 @@ end-to-end-repo-setup: -v "$CI_DATAFED_REPO_PUB_KEY":/source/install/keys/datafed-repo-key.pub -v "$CI_DATAFED_REPO_PRIV_KEY":/source/install/keys/datafed-repo-key.priv -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" + /dev/null & - ./scripts/container_run_test.sh -e -c "1" -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" end-to-end-gcs-authz-setup: @@ -451,6 +454,7 @@ end-to-end-gcs-authz-setup: -v "$CI_DATAFED_REPO_PUB_KEY":/source/install/keys/datafed-repo-key.pub -v "$CI_DATAFED_REPO_PRIV_KEY":/source/install/keys/datafed-repo-key.priv -t 
"code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" + /dev/null & - ./scripts/container_run_test.sh -e -c "1" -t "code.ornl.gov:4567/${IMAGE_TAG}${BRANCH_LOWER}" end-to-end-arango-setup: @@ -480,6 +484,7 @@ end-to-end-client-test: DATAFED_USER99_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER99_GLOBUS_REFRESH_TOKEN}" DATAFED_USER99_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER99_GLOBUS_ACCESS_TOKEN}" DATAFED_USER99_GLOBUS_UUID: "${CI_DATAFED_USER99_GLOBUS_UUID}" + DATAFED_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_ZEROMQ_SYSTEM_SECRET}" stage: end-to-end-test tags: - ci-datafed-client diff --git a/scripts/dependency_install_functions.sh b/scripts/dependency_install_functions.sh index ae19f75bc..cbf5636f1 100644 --- a/scripts/dependency_install_functions.sh +++ b/scripts/dependency_install_functions.sh @@ -30,7 +30,7 @@ install_protobuf() { cd python python3 setup.py build python3 setup.py test - sudo python3 setup.py install + python3 setup.py install --user cd ../../ } From 00011c54d942b512c1f82d83de731ccf61b419d2 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 11:33:23 -0500 Subject: [PATCH 15/22] Remove problematic file --- scripts/dependency_install_functions.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/dependency_install_functions.sh b/scripts/dependency_install_functions.sh index cbf5636f1..350db0c49 100644 --- a/scripts/dependency_install_functions.sh +++ b/scripts/dependency_install_functions.sh @@ -31,7 +31,13 @@ install_protobuf() { python3 setup.py build python3 setup.py test python3 setup.py install --user - cd ../../ + cd ../ + # Cleanup build file with root ownership + if [ -f build/install_manifest.txt ] + then + sudo rm build/install_manifest.txt + fi + cd ../ } install_libsodium() { From f7c8b01e8b0f8c3e323d004f707d2dfd6ec614d4 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Tue, 28 Nov 2023 16:38:14 -0500 Subject: [PATCH 16/22] Add python requirements to install --- python/datafed_pkg/requirements.txt | 5 +++++ python/datafed_pkg/setup.py | 11 ++++------- scripts/install_client_dependencies.sh | 3 ++- 3 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 python/datafed_pkg/requirements.txt diff --git a/python/datafed_pkg/requirements.txt b/python/datafed_pkg/requirements.txt new file mode 100644 index 000000000..9735f7929 --- /dev/null +++ b/python/datafed_pkg/requirements.txt @@ -0,0 +1,5 @@ +protobuf>=3,<=3.20 +pyzmq>=16 +wget>=3 +click>=7 +prompt_toolkit>=2 diff --git a/python/datafed_pkg/setup.py b/python/datafed_pkg/setup.py index 40c83d67c..3b25d3abf 100644 --- a/python/datafed_pkg/setup.py +++ b/python/datafed_pkg/setup.py @@ -9,6 +9,9 @@ long_description = f.read() +with open("requirements.txt", "r") as f: + install_requires = [line.strip() for line in f] + setuptools.setup( name="datafed", version=__version__, @@ -20,13 +23,7 @@ url="https://github.com/ORNL/DataFed", packages=setuptools.find_packages(), setup_requires=["setuptools"], - install_requires=[ - "protobuf>=3, <=3.20", - "pyzmq>=16", - "wget>=3", - "click>=7", - "prompt_toolkit>=2", - ], + install_requires=install_requires, entry_points={"console_scripts": ["datafed = datafed.CLI:run"]}, classifiers=[ "Programming Language :: Python :: 3", diff --git a/scripts/install_client_dependencies.sh b/scripts/install_client_dependencies.sh index 343c01c3a..c9cad0408 100755 --- a/scripts/install_client_dependencies.sh +++ b/scripts/install_client_dependencies.sh @@ -12,7 +12,8 @@ source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" # This 
script will install all of the dependencies needed by DataFed 1.0 sudo apt-get update sudo dpkg --configure -a -# Install cmake 3.17 + +python3 -m pip install -r ${PROJECT_ROOT}/python/datafed_pkg/requirements.txt install_protobuf cd ~ From 22f1116456e2a529d1127b3dfa4902389ea99f44 Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Wed, 29 Nov 2023 00:08:52 -0500 Subject: [PATCH 17/22] Turn off end-to-end testing for now --- .gitlab-ci.yml | 74 +++++++++++++++++++++++++------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index cdba4ed6a..c59c45caf 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -470,40 +470,40 @@ end-to-end-arango-setup: ################################################################################ # Actual Testing ################################################################################ -end-to-end-client-test: - variables: - GIT_STRATEGY: clone - DATAFED_DATABASE_HOST: "${CI_DATAFED_DATABASE_HOST}" - DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET}" - DATAFED_DATABASE_PASSWORD: "${CI_DATAFED_DATABASE_PASSWORD}" - DATAFED_USER89_PASSWORD: "${CI_DATAFED_USER89_PASSWORD}" - DATAFED_USER89_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER89_GLOBUS_REFRESH_TOKEN}" - DATAFED_USER89_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER89_GLOBUS_ACCESS_TOKEN}" - DATAFED_USER89_GLOBUS_UUID: "${CI_DATAFED_USER89_GLOBUS_UUID}" - DATAFED_USER99_PASSWORD: "${CI_DATAFED_USER99_PASSWORD}" - DATAFED_USER99_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER99_GLOBUS_REFRESH_TOKEN}" - DATAFED_USER99_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER99_GLOBUS_ACCESS_TOKEN}" - DATAFED_USER99_GLOBUS_UUID: "${CI_DATAFED_USER99_GLOBUS_UUID}" - DATAFED_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_ZEROMQ_SYSTEM_SECRET}" - stage: end-to-end-test - tags: - - ci-datafed-client - script: - - ./scripts/generate_datafed.sh - - env - - cat ./config/datafed.sh - - > - cmake -S. -B build - -DENABLE_FOXX_TESTS=OFF - -DBUILD_CORE_SERVER=OFF - -DBUILD_COMMON=OFF - -DBUILD_WEB_SERVER=OFF - -DBUILD_DOCS=OFF - -DBUILD_PYTHON_CLIENT=ON - -DBUILD_TESTS=ON - -DENABLE_END_TO_END_TESTS=ON - -DINSTALL_FOXX=OFF - - cmake --build build - - cmake --build build --target pydatafed - - cmake --build build --target test - +#end-to-end-client-test: +# variables: +# GIT_STRATEGY: clone +# DATAFED_DATABASE_HOST: "${CI_DATAFED_DATABASE_HOST}" +# DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_DATABASE_ZEROMQ_SYSTEM_SECRET}" +# DATAFED_DATABASE_PASSWORD: "${CI_DATAFED_DATABASE_PASSWORD}" +# DATAFED_USER89_PASSWORD: "${CI_DATAFED_USER89_PASSWORD}" +# DATAFED_USER89_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER89_GLOBUS_REFRESH_TOKEN}" +# DATAFED_USER89_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER89_GLOBUS_ACCESS_TOKEN}" +# DATAFED_USER89_GLOBUS_UUID: "${CI_DATAFED_USER89_GLOBUS_UUID}" +# DATAFED_USER99_PASSWORD: "${CI_DATAFED_USER99_PASSWORD}" +# DATAFED_USER99_GLOBUS_REFRESH_TOKEN: "${CI_DATAFED_USER99_GLOBUS_REFRESH_TOKEN}" +# DATAFED_USER99_GLOBUS_ACCESS_TOKEN: "${CI_DATAFED_USER99_GLOBUS_ACCESS_TOKEN}" +# DATAFED_USER99_GLOBUS_UUID: "${CI_DATAFED_USER99_GLOBUS_UUID}" +# DATAFED_ZEROMQ_SYSTEM_SECRET: "${CI_DATAFED_ZEROMQ_SYSTEM_SECRET}" +# stage: end-to-end-test +# tags: +# - ci-datafed-client +# script: +# - ./scripts/generate_datafed.sh +# - env +# - cat ./config/datafed.sh +# - > +# cmake -S. 
+#      -DENABLE_FOXX_TESTS=OFF
+#      -DBUILD_CORE_SERVER=OFF
+#      -DBUILD_COMMON=OFF
+#      -DBUILD_WEB_SERVER=OFF
+#      -DBUILD_DOCS=OFF
+#      -DBUILD_PYTHON_CLIENT=ON
+#      -DBUILD_TESTS=ON
+#      -DENABLE_END_TO_END_TESTS=ON
+#      -DINSTALL_FOXX=OFF
+#    - cmake --build build
+#    - cmake --build build --target pydatafed
+#    - cmake --build build --target test
+#

From 1eb1f6dbcc103a73401be1e9faf14d4fc595b4c9 Mon Sep 17 00:00:00 2001
From: "Brown, Joshua"
Date: Wed, 29 Nov 2023 08:14:04 -0500
Subject: [PATCH 18/22] Fix codacy complaints

---
 core/database/tests/test_foxx.sh | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/core/database/tests/test_foxx.sh b/core/database/tests/test_foxx.sh
index 25b5267ca..3dbc5b4da 100755
--- a/core/database/tests/test_foxx.sh
+++ b/core/database/tests/test_foxx.sh
@@ -136,7 +136,13 @@ FOXX_PREFIX=""
 PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password
 if [ "$TEST_TO_RUN" == "all" ]
 then
-  ${FOXX_PREFIX}foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec
+  "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \
+    -p "${PATH_TO_PASSWD_FILE}" \
+    --database "${local_DATABASE_NAME}" \
+    /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec
 else
-  ${FOXX_PREFIX}foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec
+  "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \
+    -p "${PATH_TO_PASSWD_FILE}" \
+    --database "${local_DATABASE_NAME}" \
+    /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec
 fi

From 317816acc3d7b9187baa6f05d1e3c2aa79be92b6 Mon Sep 17 00:00:00 2001
From: "Brown, Joshua"
Date: Wed, 29 Nov 2023 08:33:01 -0500
Subject: [PATCH 19/22] Attempt to fix codacy issues

---
 scripts/clear_db.sh                    |  4 +-
 scripts/install_client_dependencies.sh |  4 +-
 scripts/install_foxx.sh                | 53 +++++++++++++++++---------
 3 files changed, 39 insertions(+), 22 deletions(-)

diff --git a/scripts/clear_db.sh b/scripts/clear_db.sh
index fb9d9e130..b4a5a20b4 100755
--- a/scripts/clear_db.sh
+++ b/scripts/clear_db.sh
@@ -47,8 +47,8 @@ then
   if [ "$exists" = "true" ]; then
     arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \
-      --server.password ${local_DATAFED_DATABASE_PASSWORD} \
-      --server.username ${local_DATABASE_USER} \
+      --server.password "${local_DATAFED_DATABASE_PASSWORD}" \
+      --server.username "${local_DATABASE_USER}" \
       --javascript.execute-string "db._dropDatabase('$local_DATABASE_NAME');"
   else
     echo "Database $local_DATABASE_NAME does not exist."
diff --git a/scripts/install_client_dependencies.sh b/scripts/install_client_dependencies.sh
index c9cad0408..574e0457c 100755
--- a/scripts/install_client_dependencies.sh
+++ b/scripts/install_client_dependencies.sh
@@ -5,7 +5,7 @@ set -e
 
 SCRIPT=$(realpath "$0")
 SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
+PROJECT_ROOT=$(realpath "${SOURCE}/..")
 
 source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
@@ -13,7 +13,7 @@ source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
 sudo apt-get update
 sudo dpkg --configure -a
 
-python3 -m pip install -r ${PROJECT_ROOT}/python/datafed_pkg/requirements.txt
+python3 -m pip install -r "${PROJECT_ROOT}/python/datafed_pkg/requirements.txt"
 
 install_protobuf
 cd ~
diff --git a/scripts/install_foxx.sh b/scripts/install_foxx.sh
index d4e8ed0f4..84023f9e7 100755
--- a/scripts/install_foxx.sh
+++ b/scripts/install_foxx.sh
@@ -204,7 +204,9 @@ fi
 # We are now going to initialize the DataFed database in Arango, but only if sdms database does
 # not exist
-output=$(curl --dump - --user $local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}/_api/database/user)
+output=$(curl --dump - \
+  --user "$local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD" \
+  "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}/_api/database/user")
 
 if [[ "$output" =~ .*"sdms".* ]]; then
   echo "SDMS already exists do nothing"
@@ -212,18 +214,18 @@ else
   echo "Creating SDMS"
   arangosh --server.endpoint \
     "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \
-    --server.password ${local_DATAFED_DATABASE_PASSWORD} \
-    --server.username ${local_DATABASE_USER} \
-    --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js
+    --server.password "${local_DATAFED_DATABASE_PASSWORD}" \
+    --server.username "${local_DATABASE_USER}" \
+    --javascript.execute "${PROJECT_ROOT}/core/database/foxx/db_create.js"
   # Give time for the database to be created
   sleep 2
   arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \
-    --server.password ${local_DATAFED_DATABASE_PASSWORD} \
-    --server.username ${local_DATABASE_USER} \
+    --server.password "${local_DATAFED_DATABASE_PASSWORD}" \
+    --server.username "${local_DATABASE_USER}" \
     --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});'
   arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \
-    --server.password ${local_DATAFED_DATABASE_PASSWORD} \
-    --server.username ${local_DATABASE_USER} \
+    --server.password "${local_DATAFED_DATABASE_PASSWORD}" \
+    --server.username "${local_DATABASE_USER}" \
     --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );"
 fi
@@ -235,7 +237,7 @@ fi
 # The web deployment requires manual interaction, and I could not figure out the
 # syntax for the REST http endpoints with curl so we are going to try the node module
 actual_version=$(node --version)
-semantic_version_compatible $actual_version $DATAFED_NODE_VERSION
+semantic_version_compatible "$actual_version" "$DATAFED_NODE_VERSION"
 compatible=$?
 if [ "$compatible" -eq "0" ]
 then
@@ -249,11 +251,11 @@ then
   export NVM_DIR="$HOME/.nvm"
   [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm
"$NVM_DIR/nvm.sh" # This loads nvm - nvm install $DATAFED_NODE_VERSION - nvm use $DATAFED_NODE_VERSION + nvm install "$DATAFED_NODE_VERSION" + nvm use "$DATAFED_NODE_VERSION" # Install foxx service node module - $NVM_DIR/nvm-exec npm install --global foxx-cli --prefix ~/ + "$NVM_DIR/nvm-exec" npm install --global foxx-cli --prefix ~/ else # We are assuming that if the correct version of node is installed then the # correct version of npm is also installed @@ -268,30 +270,45 @@ FOXX_PREFIX="" FOXX_PREFIX="~/bin/" } -PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password +PATH_TO_PASSWD_FILE="${SOURCE}/database_temp.password" echo "Path to PASSWRD file ${PATH_TO_PASSWD_FILE} passwd is $local_DATAFED_DATABASE_PASSWORD" echo "$local_DATAFED_DATABASE_PASSWORD" > "${PATH_TO_PASSWD_FILE}" { # try # Check if database foxx services have already been installed - existing_services=$(${FOXX_PREFIX}foxx list --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) + existing_services=$("${FOXX_PREFIX}foxx" list \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -a -u "$local_DATABASE_USER" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "$local_DATABASE_NAME") FOUND_API=$(echo "$existing_services" | grep "/api/${local_FOXX_MAJOR_API_VERSION}") if [ -z "${FOUND_API}" ] then - ${FOXX_PREFIX}foxx install --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + "${FOXX_PREFIX}foxx" install \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" \ + "${PROJECT_ROOT}/core/database/foxx/" else echo "DataFed Foxx Services have already been uploaded, replacing to ensure consisency" - ${FOXX_PREFIX}foxx replace --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + "${FOXX_PREFIX}foxx" replace \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" "${PROJECT_ROOT}/core/database/foxx/" echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx" fi - rm ${PATH_TO_PASSWD_FILE} + rm "${PATH_TO_PASSWD_FILE}" } || { # catch - rm ${PATH_TO_PASSWD_FILE} + rm "${PATH_TO_PASSWD_FILE}" } From 582cd95b67969e59d791ee953043d3b63c61ee5f Mon Sep 17 00:00:00 2001 From: "Brown, Joshua" Date: Wed, 29 Nov 2023 11:03:55 -0500 Subject: [PATCH 20/22] Remove allow failure flag --- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c59c45caf..73b38048f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -26,7 +26,6 @@ check-ci-infrastructure: - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-core" - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-globus2" - ./scripts/ci_pipeline_setup.sh --compute-instance-name "ci-datafed-client" - 
   resource_group: infrastructure_build
 
 run-trigger-job:

From 801855f9318929bf58dc271e25a26e2abfb97554 Mon Sep 17 00:00:00 2001
From: "Brown, Joshua"
Date: Wed, 29 Nov 2023 11:23:44 -0500
Subject: [PATCH 21/22] Get CI working

---
 .gitlab-ci.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 73b38048f..575b2f154 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -49,6 +49,12 @@ signal:
     - runner
   script:
     - echo "Starting Build"
+  rules:
+    - exists:
+        - check-ci-infrastrucure
+    - exists:
+        - run-trigger-job
+    - when: on_success
 
 ################################################################################
 # STAGE: clear-docker-cache

From 0db9ce0239c99a2723375a59c0d9188ffb321e7c Mon Sep 17 00:00:00 2001
From: "Brown, Joshua"
Date: Wed, 29 Nov 2023 16:41:59 -0500
Subject: [PATCH 22/22] Fix codacy issues

---
 core/database/tests/test_foxx.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/database/tests/test_foxx.sh b/core/database/tests/test_foxx.sh
index 3dbc5b4da..5dbb28e67 100755
--- a/core/database/tests/test_foxx.sh
+++ b/core/database/tests/test_foxx.sh
@@ -139,10 +139,10 @@ then
   "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \
     -p "${PATH_TO_PASSWD_FILE}" \
     --database "${local_DATABASE_NAME}" \
-    /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec
+    "/api/${local_FOXX_MAJOR_API_VERSION}" --reporter spec
 else
   "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \
     -p "${PATH_TO_PASSWD_FILE}" \
     --database "${local_DATABASE_NAME}" \
-    /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec
+    "/api/${local_FOXX_MAJOR_API_VERSION}" "$TEST_TO_RUN" --reporter spec
 fi