From ee510effbeff304b5921c1db88f250de315d2e14 Mon Sep 17 00:00:00 2001
From: Nitish Sharma <50713102+rednitish@users.noreply.github.com>
Date: Tue, 6 Jul 2021 11:36:15 +0530
Subject: [PATCH] CCS-4453 - Improvement on existing code (#616)

* CCS-4453 - Improvement on existing code

Added support for passing the environment as an argument so that
testers/devs can run the script against other environments.
Added printing of usage instructions for a better understanding of the script.

* Proxy support added (passed to the requests calls) to remove 403 Forbidden errors.
  More validation to restrict the environment argument to qa/stage/prod/dev.
---
 scripts/populateDrupalCache.py | 61 ++++++++++++++++++++++++++++------
 1 file changed, 50 insertions(+), 11 deletions(-)

diff --git a/scripts/populateDrupalCache.py b/scripts/populateDrupalCache.py
index f391d7557..c6c6b11d9 100755
--- a/scripts/populateDrupalCache.py
+++ b/scripts/populateDrupalCache.py
@@ -2,16 +2,39 @@
 import requests
 from xml.dom import minidom
+import sys
 
-sitemaps = [
-    'https://access.redhat.com/sitemap/assembly.sitemap.xml',
-    'https://access.redhat.com/sitemap/module.sitemap.xml'
-]
+env = dict({
+    "prod" : [
+        'https://access.redhat.com/sitemap/assembly.sitemap.xml',
+        'https://access.redhat.com/sitemap/module.sitemap.xml'
+    ],
+    "qa" : [
+        'https://pantheon.corp.qa.redhat.com/api/sitemap/assembly.sitemap.xml',
+        'https://pantheon.corp.qa.redhat.com/api/sitemap/module.sitemap.xml'
+    ],
+    "stage" : [
+        'https://pantheon.corp.stage.redhat.com/api/sitemap/assembly.sitemap.xml',
+        'https://pantheon.corp.stage.redhat.com/api/sitemap/module.sitemap.xml'
+    ],
+    "dev" : [
+        'https://pantheon.corp.dev.redhat.com/api/sitemap/assembly.sitemap.xml',
+        'https://pantheon.corp.dev.redhat.com/api/sitemap/module.sitemap.xml'
+    ]})
 
 
-def gather_urls():
+proxies = {}
+
+def gather_urls(user_input):
     urls = []
+    sitemaps = env.get(user_input)
+    if sitemaps is None:
+        print_usage()
+        sys.exit(0)
+    if user_input != "prod":
+        proxies["http"] = "http://squid.corp.redhat.com:3128"
+        proxies["https"] = "https://squid.corp.redhat.com:3128"
     for sitemap in sitemaps:
-        r = requests.get(sitemap)
+        r = requests.get(sitemap, proxies=proxies)
         if r.status_code != 200:
             print('Error, status code for ' + sitemap + ' was ' + r.status_code)
         else:
@@ -27,8 +50,8 @@ def perform_requests(urls):
     failure = 0
     for url in urls:
         try:
-            rr = requests.get(url)
-            print(str(rr.status_code) + ': ' + rr.url)
+            rr = requests.get(url, proxies=proxies)
+            print(str(rr.status_code) + ': ' + url)
             success += 1
         except requests.exceptions.RequestException as e:
             print(e)
@@ -36,8 +59,8 @@ def perform_requests(urls):
     return [success, failure]
 
 
-def main():
-    urls = gather_urls()
+def main(user_input):
+    urls = gather_urls(user_input)
     q = perform_requests(urls)
     successes = q[0]
     failures = q[1]
@@ -45,5 +68,21 @@ def main():
     print('\nMade ' + str(successes) + ' requests and raised ' + str(failures) + ' errors.')
 
 
+def print_usage():
+    print(50 * "*")
+    print(50 * "*")
+    print("\nMissing argument : Expected dev / qa / stage / prod\n")
+    print("Note : Purpose of script is parse cache on customer portal \n "
+          "and request the url, to check, if it is live \n")
+    print(50 * "*")
+    print(50 * "*")
+
+
 if __name__ == '__main__':
-    main()
+    try:
+        args = sys.argv[1]
+        main(args)
+    except IndexError:
+        print_usage()
+        sys.exit(0)
+
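
With this change the script takes the target environment as its only positional argument; a missing or unrecognised value prints the usage banner and exits. A rough way to exercise the patch, assuming it is run from the repository root with python3 and the requests package available:

    ./scripts/populateDrupalCache.py prod    # public access.redhat.com sitemaps, no proxy entries added
    ./scripts/populateDrupalCache.py qa      # pantheon.corp.qa.redhat.com sitemaps via squid.corp.redhat.com:3128
    ./scripts/populateDrupalCache.py         # no argument: prints the usage banner and exits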