From 796adf347f509038304deaf80fb74b4f8f341a79 Mon Sep 17 00:00:00 2001
From: Marcelo R Costa
Date: Thu, 17 Jun 2021 10:16:36 -0500
Subject: [PATCH] Fix(prod-test): Also scan s3 buckets in access check script (#633)

* initial commit to start fix
* fix(prod-testing): Also scan s3 buckets in access check automation
* remove Unexpected token 'const'
* utilize proper loop for asynchronicity
* tweak instructions and make sure this test is not executed in the conventional ci pipeline
* remove trailing spaces
---
 run-tests.sh                             |  3 +
 scripts/list-all-test-suites-for-ci.py   |  3 +-
 .../checkAllProjectsBucketAccessTest.js} | 70 ++++++++++---------
 3 files changed, 43 insertions(+), 33 deletions(-)
 rename suites/{google/checkAllProjectsGoogleBucketAccessTest.js => prod/checkAllProjectsBucketAccessTest.js} (67%)

diff --git a/run-tests.sh b/run-tests.sh
index fc389062a..b96611346 100755
--- a/run-tests.sh
+++ b/run-tests.sh
@@ -257,6 +257,9 @@ donot '@batch'
 # Do not run dataguids.org test for regular PRs
 donot '@dataguids'
 
+# Do not run prjsBucketAccess (for prod-execution only)
+donot '@prjsBucketAccess'
+
 # For dataguids.org PRs, skip all fence-related bootstrapping oprations
 # as the environment does not have fence
 if [ "$testedEnv" == "dataguids.org" ]; then
diff --git a/scripts/list-all-test-suites-for-ci.py b/scripts/list-all-test-suites-for-ci.py
index dec30284c..1b54095be 100644
--- a/scripts/list-all-test-suites-for-ci.py
+++ b/scripts/list-all-test-suites-for-ci.py
@@ -25,7 +25,8 @@
     'test-regressions-queryPerformanceTest', # legacy (disabled test)
     'test-regressions-submissionPerformanceTest', # legacy (disabled test)
     'test-dream-challenge-DCgen3clientTest', # legacy (disabled test)
-    'test-dream-challenge-synapaseLoginTest' # legacy (disabled test)
+    'test-dream-challenge-synapaseLoginTest', # legacy (disabled test)
+    'test-prod-checkAllProjectsBucketAccessTest.js' # prod test
 ]
 
 def collect_test_suites_from_codeceptjs_dryrun():
diff --git a/suites/google/checkAllProjectsGoogleBucketAccessTest.js b/suites/prod/checkAllProjectsBucketAccessTest.js
similarity index 67%
rename from suites/google/checkAllProjectsGoogleBucketAccessTest.js
rename to suites/prod/checkAllProjectsBucketAccessTest.js
index 96bc0a4ef..872d338cb 100644
--- a/suites/google/checkAllProjectsGoogleBucketAccessTest.js
+++ b/suites/prod/checkAllProjectsBucketAccessTest.js
@@ -1,11 +1,11 @@
 /*
-  Google Bucket Access test (PXP-7076)
+  Bucket Access test for ALL projects and protocols (PXP-7076)
   This automation should be utilized on every release to run a PreSigned URL request
   against an indexd record from each Project (acl / authz).
 
   How to run:
   $ export GEN3_SKIP_PROJ_SETUP=true RUNNING_LOCAL=true
-  $ npm test -- suites/google/checkAllProjectsGoogleBucketAccessTest.js
+  $ npm test -- suites/prod/checkAllProjectsBucketAccessTest.js
 
   The full list of indexd records can be filtered through the INDEXD_FILTER var.
   It supports the following 3 options:
@@ -14,14 +14,14 @@
   - all (default)
   e.g.,
   $ export INDEXD_FILTER=acl GEN3_SKIP_PROJ_SETUP=true RUNNING_LOCAL=true
-  $ npm test -- suites/google/checkAllProjectsGoogleBucketAccessTest.js
+  $ npm test -- suites/prod/checkAllProjectsBucketAccessTest.js
 
   Note: The DEBUG level is enabled by default in the logger configuration.
   To tail the logs and focus only on successful or Failed checks just run the command below:
-  $ tail -f all-projects-google-bucket-access-check.log | grep -E "Successfully|Failed"
+  $ tail -f all-projects-bucket-access-check.log | grep -E "Successfully|Failed"
 */
 
-Feature('CheckAllProjectsGoogleBucketAccess');
+Feature('CheckAllProjectsBucketAccess');
 
 const bar = require('cli-progress');
 const axios = require('axios');
@@ -37,7 +37,7 @@ log4js.configure({
   appenders: {
     accessCheck: { // eslint-disable-line no-undef
       type: 'file',
-      filename: 'all-projects-google-bucket-access-check.log',
+      filename: 'all-projects-bucket-access-check.log',
     },
     console: { type: 'console' },
   },
@@ -64,31 +64,37 @@ async function checkAccess(I, a, indexdQueryParam) {
     ].did;
     logger.info(`picking one indexd record with GUID ${aGUID} from ${indexdQueryParam} ${a}`);
     // shoot pre signed url against fence with the GUID
-    const preSignedURLResp = await I.sendGetRequest(
-      `https://${I.cache.environment}/user/data/download/${aGUID}?protocol=gs`,
-      getAccessTokenHeader(I.cache.ACCESS_TOKEN),
-    );
-    // TODO: 401 Token Expired check to prompt the user for a new access token
-    // if we expect this check to take longer than 20 minutes
-    logger.debug(`preSignedURLResp: ${JSON.stringify(preSignedURLResp.data)}`);
-
-    let httpHeadCheck = {};
-    try {
-      httpHeadCheck = await axios.head(preSignedURLResp.data.url);
-      // logger.debug(`http HEAD Check for ${aGUID}: ${httpHeadCheck}`);
-    } catch (error) {
-      // Error 😨
-      if (error.response) {
-        logger.error(`Failed to obtain a successful access check against GUID ${aGUID} from ${indexdQueryParam} ${a} - Status Code: ${error.response.status}`);
-        logger.error(`Details: ${error.response.data}`);
-      } else {
-        logger.error(`Failed to process HTTP HEAD request for GUID ${aGUID}`);
+
+    const listOfProtocols = ['s3', 'gs'];
+
+    for (const protocol of listOfProtocols) {
+      logger.debug(`checking guid ${aGUID} with protocol ${protocol}...`);
+      const preSignedURLResp = await I.sendGetRequest(
+        `https://${I.cache.environment}/user/data/download/${aGUID}?protocol=${protocol}`,
+        getAccessTokenHeader(I.cache.ACCESS_TOKEN),
+      );
+      // TODO: 401 Token Expired check to prompt the user for a new access token
+      // if we expect this check to take longer than 20 minutes
+      logger.debug(`preSignedURLResp: ${JSON.stringify(preSignedURLResp.data)}`);
+
+      let httpHeadCheck = {};
+      try {
+        httpHeadCheck = await axios.head(preSignedURLResp.data.url);
+        // logger.debug(`http HEAD Check for ${aGUID}: ${httpHeadCheck}`);
+      } catch (error) {
+        // Error 😨
+        if (error.response) {
+          logger.error(`Failed to obtain a successful access check against GUID ${aGUID} from ${indexdQueryParam} ${a} - Status Code: ${error.response.status}`);
+          logger.error(`Details: ${error.response.data}`);
+        } else {
+          logger.error(`Failed to process HTTP HEAD request for GUID ${aGUID}`);
+        }
+      }
+      if (httpHeadCheck.status === 200) {
+        logger.debug(`Successfully verified the access for ${indexdQueryParam} ${a}!`);
+      } else if (httpHeadCheck.status === 400) {
+        logger.debug(`http.status: ${httpHeadCheck.status} - Successfully verified the access for ${indexdQueryParam} ${a} with message .!`);
       }
-    }
-    if (httpHeadCheck.status === 200) {
-      logger.debug(`Successfully verified the access for ${indexdQueryParam} ${a}!`);
-    } else if (httpHeadCheck.status === 400) {
-      logger.debug(`http.status: ${httpHeadCheck.status} - Successfully verified the access for ${indexdQueryParam} ${a} with message .!`);
     }
   } else {
     logger.warn(`Zero indexd records found for ${indexdQueryParam} ${a}`);
@@ -105,7 +111,7 @@ BeforeSuite(async ({ I }) => {
   I.cache = {};
 });
 
-Scenario('Fetch list of projects (acl/authz) from environment @manual @prjsGoogleBucketAccess', async ({ I }) => {
+Scenario('Fetch list of projects (acl/authz) from environment @manual @prjsBucketAccess', async ({ I }) => {
   if (process.env.RUNNING_IN_PROD_TIER === 'true') {
     I.cache.ACCESS_TOKEN = process.env.ACCESS_TOKEN;
   } else {
@@ -141,7 +147,7 @@ Scenario('Fetch list of projects (acl/authz) from environment @manual @prjsGoogl
   progressBar.start(I.cache.numOfChecks, 0);
 });
 
-Scenario('Run PreSigned URL checks against an indexd record from each project @manual @prjsGoogleBucketAccess', async ({ I }) => {
+Scenario('Run PreSigned URL checks against an indexd record from each project @manual @prjsBucketAccess', async ({ I }) => {
   if (!I.cache.ACCESS_TOKEN) I.cache.ACCESS_TOKEN = await requestUserInput('Please provide your ACCESS_TOKEN: ');
 
   if (I.cache.acls) {
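
A note on the "utilize proper loop for asynchronicity" bullet in the commit message: the rewritten checkAccess body awaits the fence presigned-URL request and the axios HEAD check inside a for...of loop over ['s3', 'gs'] rather than in an array callback. The sketch below is not part of the patch; checkProtocol and runChecks are hypothetical stand-ins for the per-protocol logic in checkAccess. It only illustrates the underlying point: Array.prototype.forEach does not await an async callback, so the caller would return before the checks complete, while for...of keeps each await inside the calling async function and runs the protocols sequentially.

// Hypothetical sketch (not from the patch): sequential awaits with for...of.
// checkProtocol stands in for the presigned-URL request plus HTTP HEAD check.
async function checkProtocol(protocol) {
  await new Promise((resolve) => setTimeout(resolve, 100)); // simulate network latency
  console.log(`finished check for protocol ${protocol}`);
}

async function runChecks() {
  const listOfProtocols = ['s3', 'gs'];

  // forEach would fire the async callbacks without awaiting them, so
  // runChecks() would resolve before any check finishes:
  // listOfProtocols.forEach(async (protocol) => { await checkProtocol(protocol); });

  // for...of awaits each check in turn, so results and failures are
  // reported one protocol at a time, as the test intends.
  for (const protocol of listOfProtocols) {
    await checkProtocol(protocol);
  }
}

runChecks();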