diff --git a/scripts/logfetch/entrypoint.py b/scripts/logfetch/entrypoint.py
index 57c7564f78..f6b47f7085 100644
--- a/scripts/logfetch/entrypoint.py
+++ b/scripts/logfetch/entrypoint.py
@@ -3,6 +3,7 @@
 import sys
 import os
 import pkg_resources
+from datetime import datetime
 from termcolor import colored
 from fake_section_head import FakeSectionHead
 from live_logs import download_live_logs
@@ -56,6 +57,22 @@ def check_dest(args):
   if not os.path.exists(args.dest):
     os.makedirs(args.dest)
 
+def check_args(args):
+  if args.deployId and not args.requestId:
+    exit("Must specify request-id (-r) when specifying deploy-id")
+  elif not args.requestId and not args.deployId and not args.taskId:
+    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+
+def convert_to_days(argument):
+  try:
+    val = int(argument)
+  except:
+    try:
+      val = (datetime.now() - datetime.strptime(argument, "%m-%d-%Y")).days
+    except:
+      exit('Start/End days value must be either a number of days or a date in format "mm-dd-yyyy"')
+  return val
+
 def fetch():
   conf_parser = argparse.ArgumentParser(version=VERSION, description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False)
   conf_parser.add_argument("-f", "--conf-folder", dest='conf_folder', help="specify a folder for config files to live")
@@ -91,17 +108,16 @@ def fetch():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--log-type", dest="logtype", help="Logfile type to downlaod (ie 'access.log'), can be a glob (ie *.log)")
   parser.add_argument("-g", "--grep", dest="grep", help="Regex to grep for (normal grep syntax) or a full grep command")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days) if args.start_days else args.start_days
+  args.end_days = convert_to_days(args.end_days) if args.end_days else args.end_days
 
   args.dest = os.path.expanduser(args.dest)
 
@@ -142,16 +158,15 @@ def cat():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--logtype", dest="logtype", help="Logfile type to downlaod (ie 'access.log'), can be a glob (ie *.log)")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify requestId (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days) if args.start_days else args.start_days
+  args.end_days = convert_to_days(args.end_days) if args.end_days else args.end_days
 
   args.dest = os.path.expanduser(args.dest)
 
@@ -188,12 +203,9 @@ def tail():
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
-  elif not args.logfile:
+  if not args.logfile:
     exit("Must specify logfile to tail (-l)")
+  check_args(args)
 
   args.dest = os.path.expanduser(args.dest)
 
diff --git a/scripts/logfetch/s3_logs.py b/scripts/logfetch/s3_logs.py
index c348d14a89..dbc229bda7 100644
--- a/scripts/logfetch/s3_logs.py
+++ b/scripts/logfetch/s3_logs.py
@@ -45,12 +45,11 @@ def logs_for_all_requests(args):
     for task in tasks:
       s3_logs = get_json_response(s3_task_logs_uri(args, task))
       logs = logs + s3_logs if s3_logs else logs
-    if not logs:
-      sys.stderr.write(colored('No tasks found in time range, searching s3 history...\n', 'magenta'))
-      for request in logfetch_base.all_requests(args):
-        s3_logs = get_json_response(s3_request_logs_uri(args, request))
-        logs = logs + s3_logs if s3_logs else logs
-    return logs
+    sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
+    for request in logfetch_base.all_requests(args):
+      s3_logs = get_json_response(s3_request_logs_uri(args, request))
+      logs = logs + s3_logs if s3_logs else logs
+    return [dict(t) for t in set([tuple(l.items()) for l in logs])]
 
 def time_from_filename(filename):
   time_string = re.search('(\d{13})', filename).group(1)
diff --git a/scripts/setup.py b/scripts/setup.py
index 6290834bb0..8d3fe269c4 100644
--- a/scripts/setup.py
+++ b/scripts/setup.py
@@ -10,7 +10,7 @@
 setup(
     name='singularity-logfetch',
-    version='0.11.0',
+    version='0.12.0',
     description='Singularity log fetching and searching',
     author="HubSpot",
     author_email='singularity-users@googlegroups.com',