Skip to content

Commit

Permalink
CCS-4453 - Improvement on existing code (#616)
Browse files Browse the repository at this point in the history
* CCS-4453 - Improvement on existing code
 Added support for providing the environment as an argument, so testers/devs can run the script against other environments.

Added printed usage instructions for a better understanding of the script.

* Added proxy configuration for non-prod requests, to remove 403 Forbidden errors.
More validation to restrict arguments within qa/stage/prod/dev
  • Loading branch information
rednitish authored Jul 6, 2021
1 parent 60d8c93 commit ee510ef
Showing 1 changed file with 50 additions and 11 deletions.
61 changes: 50 additions & 11 deletions scripts/populateDrupalCache.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,39 @@

import requests
from xml.dom import minidom
import sys

sitemaps = [
'https://access.redhat.com/sitemap/assembly.sitemap.xml',
'https://access.redhat.com/sitemap/module.sitemap.xml'
]
# Sitemap URLs per deployment environment.
# The prod sitemaps are served publicly; the qa/stage/dev ones live on the
# corporate network and are fetched through the squid proxy that
# gather_urls() installs into `proxies` for any non-prod environment.
# NOTE: a plain dict literal is used here — the original `dict({...})`
# wrapper was redundant.
env = {
    "prod": [
        'https://access.redhat.com/sitemap/assembly.sitemap.xml',
        'https://access.redhat.com/sitemap/module.sitemap.xml'
    ],
    "qa": [
        'https://pantheon.corp.qa.redhat.com/api/sitemap/assembly.sitemap.xml',
        'https://pantheon.corp.qa.redhat.com/api/sitemap/module.sitemap.xml'
    ],
    "stage": [
        'https://pantheon.corp.stage.redhat.com/api/sitemap/assembly.sitemap.xml',
        'https://pantheon.corp.stage.redhat.com/api/sitemap/module.sitemap.xml'
    ],
    "dev": [
        'https://pantheon.corp.dev.redhat.com/api/sitemap/assembly.sitemap.xml',
        'https://pantheon.corp.dev.redhat.com/api/sitemap/module.sitemap.xml'
    ],
}

def gather_urls():
# Shared requests proxy mapping. Left empty for prod; for any other
# environment gather_urls() fills in the corporate squid proxy so the
# internal qa/stage/dev hosts are reachable (and 403s are avoided).
proxies = {}

def gather_urls(user_input):
urls = []
sitemaps = env.get(user_input)
if sitemaps is None:
print_usage()
sys.exit(0)
if user_input != "prod":
proxies["http"] = "http://squid.corp.redhat.com:3128"
proxies["https"] = "https://squid.corp.redhat.com:3128"
for sitemap in sitemaps:
r = requests.get(sitemap)
r = requests.get(sitemap, proxies=proxies)
if r.status_code != 200:
print('Error, status code for ' + sitemap + ' was ' + r.status_code)
else:
Expand All @@ -27,23 +50,39 @@ def perform_requests(urls):
failure = 0
for url in urls:
try:
rr = requests.get(url)
print(str(rr.status_code) + ': ' + rr.url)
rr = requests.get(url, proxies=proxies)
print(str(rr.status_code) + ': ' + url)
success += 1
except requests.exceptions.RequestException as e:
print(e)
failure += 1
return [success, failure]


def main(user_input):
    """Fetch every URL listed in the chosen environment's sitemaps and
    print a summary of how many requests succeeded and how many raised
    errors.

    user_input: environment key — "dev", "qa", "stage" or "prod".
    """
    sitemap_urls = gather_urls(user_input)
    successes, failures = perform_requests(sitemap_urls)

    print('\nMade ' + str(successes) + ' requests and raised ' + str(failures) + ' errors.')


def print_usage():
    """Print a banner explaining the expected command-line argument."""
    border = 50 * "*"
    print(border)
    print(border)
    print("\nMissing argument : Expected dev / qa / stage / prod\n")
    print("Note : Purpose of script is parse cache on customer portal \n "
          "and request the url, to check, if it is live \n")
    print(border)
    print(border)


if __name__ == '__main__':
    # Validate argv up front instead of wrapping main() in try/except
    # IndexError: the broad try also swallowed any IndexError raised
    # *inside* main(), mis-reporting it as a missing argument.
    if len(sys.argv) < 2:
        print_usage()
        # Exit non-zero so shell callers can detect the usage error
        # (the old code exited 0, signalling success).
        sys.exit(1)
    main(sys.argv[1])

0 comments on commit ee510ef

Please sign in to comment.