Merge remote-tracking branch 'origin/devel' into t-ramz-dev-container
Anthony Ramirez committed Jan 24, 2025
2 parents 444f5dd + 45eb9bf commit e016142
Showing 56 changed files with 4,662 additions and 879 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/javascript-lint.yml
@@ -19,7 +19,7 @@ jobs:
- name: Install ESLint
run: |
npm init -y
npm install eslint@latest @babel/eslint-parser@latest eslint-define-config globals --save
npm install eslint@latest @babel/eslint-parser@latest eslint-define-config globals eslint-plugin-jsdoc --save
npx eslint "**/*.js"
# Step 4: Report status
2 changes: 1 addition & 1 deletion .github/workflows/unit-tests.yml
@@ -2,7 +2,7 @@ name: Unit-Testing
on: push
jobs:
unit-test:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
if: ${{ always() }}
steps:
- uses: actions/checkout@v2
4 changes: 4 additions & 0 deletions .gitlab-ci.yml
@@ -22,6 +22,7 @@ include:
- local: .gitlab/stage_provision_client.yml
- local: .gitlab/stage_image_check.yml
- local: .gitlab/stage_build.yml
- local: .gitlab/stage_test.yml
- local: .gitlab/stage_unit.yml
- local: .gitlab/end_to_end.yml

@@ -35,10 +36,12 @@ stages:
- provision-client
- image-check
- build
- test # Required for gitlab sast templates
- unit
- end-to-end-setup-arango
- end-to-end-setup
- end-to-end-test
- end-to-end-test-finish-signal
- log
- deploy-pypi-package

@@ -52,3 +55,4 @@ variables:
HARBOR_USER: 'robot$$datafed+harbor_datafed_gitlab_ci_registry_2'
REGISTRY: 'camden.ornl.gov'
DATAFED_DEPENDENCIES_INSTALL_PATH: "/shared/install"
DOCKER_TLS_CERTDIR: "" # Required for running docker in docker
2 changes: 2 additions & 0 deletions .gitlab/build/build_foxx_image.yml
@@ -22,6 +22,7 @@ build-foxx:
- docker/**/*
- scripts/**/*
- core/database/**/*
- core/CMakeLists.txt
- common/proto/**/*
- .gitlab-ci.yml
- CMakeLists.txt
@@ -43,6 +44,7 @@ retag-image:
- docker/**/*
- scripts/**/*
- core/database/**/*
- core/CMakeLists.txt
- common/proto/**/*
- .gitlab-ci.yml
- CMakeLists.txt
19 changes: 16 additions & 3 deletions .gitlab/common.yml
@@ -287,9 +287,22 @@
stage: log
script:
- BRANCH_LOWER=$(echo "$CI_COMMIT_REF_NAME" | tr '[:upper:]' '[:lower:]')
- docker logs $(docker ps -a --filter "ancestor=${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}" --format "{{.Names}}")
rules:
- when: always
- FULL_IMAGE_NAME="${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}"
- echo "Full image name is $FULL_IMAGE_NAME"
- ANCESTOR_CONTAINERS=$(docker ps -a --filter "ancestor=${FULL_IMAGE_NAME}" --format "{{.Names}}")
- echo "${ANCESTOR_CONTAINERS}"
# It is possible to return more than one container with the same ancestor,
# for instance if two different branches have the exact same build but
# different names, or if one image was built on top of another and they
# share the same base.
- MATCHING_CONTAINERS=$( echo "$ANCESTOR_CONTAINERS" | grep "${COMPONENT}-${BRANCH_LOWER}" || echo "")
- if [ -z "$MATCHING_CONTAINERS" ]; then
echo "No matching containers found for image ${FULL_IMAGE_NAME} and component ${COMPONENT}-${BRANCH_LOWER}";
else
echo "Getting log output for the following."
echo "$MATCHING_CONTAINERS"
docker logs $MATCHING_CONTAINERS;
fi

# In the case that we are not needing to actually rebuild the image we want to
# retag the image that already exists in harbor with the current commit, this
29 changes: 24 additions & 5 deletions .gitlab/end_to_end.yml
@@ -280,7 +280,7 @@ end-to-end-repo-setup:

# Requires setting up Globus Connect Server, requires firewall exceptions on
# the machine running this.
# Note we need the certificates to be available on the gcs-authz container
# Note we need the certificates to be available on the gcs container
# if it is meant to be run on the same machine as the metadata services,
# because the Apache web server can then route traffic appropriately; if
# run separately from the metadata services it should not be needed.
@@ -312,13 +312,13 @@ end-to-end-gcs-authz-setup:
- chown gitlab-runner "$HOST_LOG_FILE_PATH"
- ./scripts/generate_datafed.sh
- docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"
- ./scripts/container_stop.sh -n "gcs-authz" -p
- ./scripts/container_stop.sh -n "${COMPONENT}" -p
- random_string=$(bash -c "cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w "10" | head -n 1")
- cat $CI_DATAFED_CORE_PUB_KEY > /shared/datafed-repo-key.pub
- cat $CI_DATAFED_CORE_PRIV_KEY > /shared/datafed-repo-key.priv
- echo "#!/bin/bash" > run_globus.sh
- echo "docker run -d \\" >> run_globus.sh
- echo "--name \"gcs-authz-${BRANCH_LOWER}-${CI_COMMIT_SHORT_SHA}-${random_string}\" \\" >> run_globus.sh
- echo "--name \"${COMPONENT}-${BRANCH_LOWER}-${CI_COMMIT_SHORT_SHA}-${random_string}\" \\" >> run_globus.sh
- echo "--network host \\" >> run_globus.sh
- echo "-e DATAFED_GLOBUS_APP_SECRET=\"$CI_DATAFED_GLOBUS_APP_SECRET\" \\" >> run_globus.sh
- echo "-e DATAFED_GLOBUS_APP_ID=\"$CI_DATAFED_GLOBUS_APP_ID\" \\" >> run_globus.sh
@@ -443,16 +443,32 @@ end_to_end_web_test:
-DINSTALL_FOXX=OFF
- cmake --build build --target end_to_end_web_tests

################################################################################
# Signal end of end-to-end tests
################################################################################
end-to-end-signal:
stage: end-to-end-test-finish-signal
tags:
- datafed-infrastructure
script:
- echo "Signal end of end to end tests"
when: always
################################################################################
# Log output
################################################################################
# We don't want to run the log jobs if the infrastructure was never stood up
# because the jobs will hang indefinitely

end_to_end_error_discovery_arango:
needs: ["check-ci-infrastructure", "end-to-end-arango-setup", "end-to-end-signal"]
stage: log
tags:
- ci-datafed-arango
script:
- sudo journalctl --no-pager -u arangodb3.service
rules:
- when: always

end_to_end_error_discovery_gcs:
needs: ["check-ci-infrastructure", "end-to-end-gcs-authz-setup", "end-to-end-signal"]
extends: .error_logs_client_end_to_end
variables:
PROJECT: "datafed"
@@ -462,6 +478,7 @@
- docker

end_to_end_error_discovery_repo:
needs: ["check-ci-infrastructure", "end-to-end-repo-setup", "end-to-end-signal"]
extends: .error_logs_client_end_to_end
variables:
PROJECT: "datafed"
@@ -471,6 +488,7 @@
- docker

end_to_end_error_discovery_core:
needs: ["check-ci-infrastructure", "end-to-end-core-setup", "end-to-end-signal"]
extends: .error_logs_client_end_to_end
variables:
PROJECT: "datafed"
@@ -480,6 +498,7 @@
- docker

end_to_end_error_discovery_web:
needs: ["check-ci-infrastructure", "end-to-end-ws-setup", "end-to-end-signal"]
extends: .error_logs_client_end_to_end
variables:
PROJECT: "datafed"
34 changes: 34 additions & 0 deletions .gitlab/stage_test.yml
@@ -0,0 +1,34 @@
---

# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#customization
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
semgrep-sast:
tags:
- docker-exec
variables:
SECURE_LOG_LEVEL: "debug"
rules:
- if: $CI_PIPELINE_SOURCE == "push"

secret_detection:
stage: test
tags:
- docker-exec
rules:
- if: $CI_PIPELINE_SOURCE == "push"

gemnasium-dependency_scanning:
stage: test
tags:
- docker-exec
rules:
- if: $CI_PIPELINE_SOURCE == "push"

include:
- template: Security/Secret-Detection.gitlab-ci.yml
- template: Security/SAST.gitlab-ci.yml
- template: Security/Dependency-Scanning.gitlab-ci.yml
4 changes: 2 additions & 2 deletions .gitlab/stage_unit.yml
@@ -94,11 +94,11 @@ run-authz-unit-job:
- chown gitlab-runner "$HOST_LOG_FILE_PATH"
- ./scripts/generate_datafed.sh
- docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"
- ./scripts/container_stop.sh -n "gcs-authz" -p
- ./scripts/container_stop.sh -n "${COMPONENT}" -p
- random_string=$(bash -c "cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w "10" | head -n 1")
- echo "#!/bin/bash" > run_globus.sh
- echo "docker run \\" >> run_globus.sh
- echo "--name \"gcs-authz-${BRANCH_LOWER}-${CI_COMMIT_SHORT_SHA}-${random_string}\" \\" >> run_globus.sh
- echo "--name \"${COMPONENT}-${BRANCH_LOWER}-${CI_COMMIT_SHORT_SHA}-${random_string}\" \\" >> run_globus.sh
- echo "--network host \\" >> run_globus.sh
- echo "-e DATAFED_GLOBUS_APP_SECRET=\"$CI_DATAFED_GLOBUS_APP_SECRET\" \\" >> run_globus.sh
- echo "-e DATAFED_GLOBUS_APP_ID=\"$CI_DATAFED_GLOBUS_APP_ID\" \\" >> run_globus.sh
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@
2. [918] - Add admin script for making a user an admin
3. [1009] - Add collections to database for holding Globus tokens.
4. [1002] - Add backoff for task worker when database fails to prevent log overflow.
5. [1109] - Add support for Globus token association to Globus collection and user.

## PATCH Bug fixes/Technical Debt/Documentation
1. [984] - Fixes {server_default} from showing up in path.
@@ -27,6 +28,7 @@
16. [1149] - Docker container GCS Collection Mount Bug Fix
17. [1168] - Add authz unit testing to the CI
18. [1200] - Add JavaScript linter (eslint) and (prettier) formatter for JavaScript
19. [1180] - Refactor of authz foxx module, split into objects and added unit tests

# v2024.6.17.10.40

2 changes: 2 additions & 0 deletions common/proto/common/SDMS.proto
@@ -92,6 +92,8 @@ enum AccessTokenType {
GLOBUS = 2;
GLOBUS_AUTH = 3;
GLOBUS_TRANSFER = 4;
GLOBUS_DEFAULT = 5;
ACCESS_SENTINEL = 255;
}

// ============================ Data Structures
2 changes: 1 addition & 1 deletion common/proto/common/SDMS_Auth.proto
@@ -115,7 +115,7 @@ message UserSetAccessTokenRequest
required string access = 1; // Access token
required uint32 expires_in = 2; // Access expiration time in seconds
required string refresh = 3; // Refresh token (never expires)
optional AccessTokenType type = 4;
optional AccessTokenType type = 4 [default = GLOBUS_DEFAULT];
optional string other = 5; // Other information relevant to the set access token
}

14 changes: 14 additions & 0 deletions core/database/CMakeLists.txt
@@ -13,11 +13,25 @@ configure_file(
if( ENABLE_FOXX_TESTS )
add_test(NAME foxx_setup COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_setup.sh")
add_test(NAME foxx_teardown COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_teardown.sh")
add_test(NAME foxx_authz COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_authz")
add_test(NAME foxx_record COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_record")
add_test(NAME foxx_repo COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_repo")
add_test(NAME foxx_path COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_path")
add_test(NAME foxx_db_fixtures COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_fixture_setup.sh")
add_test(NAME foxx_version COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_version")
add_test(NAME foxx_support COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_support")
add_test(NAME foxx_user_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_user_router")
add_test(NAME foxx_authz_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_authz_router")

set_tests_properties(foxx_setup PROPERTIES FIXTURES_SETUP Foxx)
set_tests_properties(foxx_teardown PROPERTIES FIXTURES_CLEANUP Foxx)
set_tests_properties(foxx_db_fixtures PROPERTIES FIXTURES_SETUP FoxxDBFixtures FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_version PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_support PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_authz PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_authz_router PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_record PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_repo PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_path PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_user_router PROPERTIES FIXTURES_REQUIRED "Foxx;FoxxDBFixtures")
endif()
44 changes: 44 additions & 0 deletions core/database/README.md
@@ -0,0 +1,44 @@
# WARNING - Adding Tests

Note: the granularity of the CMake tests depends on how they are defined in the
CMakeLists.txt file. Tests are specified in CMake by passing a string that is
matched against the Chai test cases in the "it()" sections of the unit tests.
Any test case that matches the pattern will run when that CMake test is triggered.

For example, the following CMakeLists.txt line:

```
add_test(NAME foxx_record COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_record")
```

This passes "unit_record" as the pattern to the test_foxx.sh script. In turn,
the test_foxx.sh script calls foxx test with "unit_record". Tests are not
matched based on the name of the test file; they are matched based on the test
case descriptions.

For example, below is part of a test file containing cases that would be matched
against the "unit_record" pattern.

```
describe('Record Class', () => {
it('unit_record: isPathConsistent should return false paths are inconsistent in new and old alloc.', () => {
:
:
});
it('unit_record: isPathConsistent a different test case.', () => {
:
:
});
});
```

Notice that 'unit_record' is explicitly mentioned in the test cases; in the above excerpt, both tests will run. If ctest were called directly, we could run all unit_record tests with the following:

```
ctest -R foxx_record
```
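
Building on the pattern above, below is a minimal sketch of how a new test group could be wired into core/database/CMakeLists.txt. The foxx_example test name and the "unit_example" pattern are hypothetical placeholders, not tests that exist in the repository; the fixture properties simply mirror the existing foxx_* entries.

```
# Hypothetical example: register a new Foxx test group named foxx_example.
# The -t pattern "unit_example" must match the prefix used in the Chai it()
# descriptions, e.g. it('unit_example: does something', ...).
add_test(NAME foxx_example COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_example")

# Run only after the Foxx service is deployed and, if the tests need seeded
# data, after the database fixtures have been loaded.
set_tests_properties(foxx_example PROPERTIES FIXTURES_REQUIRED "Foxx;FoxxDBFixtures")
```

With that registered, ctest -R foxx_example would run only the Chai test cases whose descriptions start with "unit_example".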