take date as input, always search s3 history
ssalinas committed Feb 25, 2015
1 parent 280f353 commit d66f0d1
Showing 3 changed files with 35 additions and 24 deletions.
46 changes: 29 additions & 17 deletions scripts/logfetch/entrypoint.py
@@ -3,6 +3,7 @@
 import sys
 import os
 import pkg_resources
+from datetime import datetime
 from termcolor import colored
 from fake_section_head import FakeSectionHead
 from live_logs import download_live_logs
@@ -56,6 +57,22 @@ def check_dest(args):
   if not os.path.exists(args.dest):
     os.makedirs(args.dest)
 
+def check_args(args):
+  if args.deployId and not args.requestId:
+    exit("Must specify request-id (-r) when specifying deploy-id")
+  elif not args.requestId and not args.deployId and not args.taskId:
+    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+
+def convert_to_days(argument):
+  try:
+    val = int(argument)
+  except:
+    try:
+      val = (datetime.now() - datetime.strptime(argument, "%m-%d-%Y")).days
+    except:
+      exit('Start/End days value must be either a number of days or a date in format "mm-dd-yyyy"')
+  return val
+
 def fetch():
   conf_parser = argparse.ArgumentParser(version=VERSION, description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False)
   conf_parser.add_argument("-f", "--conf-folder", dest='conf_folder', help="specify a folder for config files to live")
@@ -91,17 +108,16 @@ def fetch():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--log-type", dest="logtype", help="Logfile type to download (ie 'access.log'), can be a glob (ie *.log)")
   parser.add_argument("-g", "--grep", dest="grep", help="Regex to grep for (normal grep syntax) or a full grep command")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days) if args.start_days else args.start_days
+  args.end_days = convert_to_days(args.end_days) if args.end_days else args.end_days
 
   args.dest = os.path.expanduser(args.dest)

@@ -142,16 +158,15 @@ def cat():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--logtype", dest="logtype", help="Logfile type to download (ie 'access.log'), can be a glob (ie *.log)")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify requestId (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days) if args.start_days else args.start_days
+  args.end_days = convert_to_days(args.end_days) if args.end_days else args.end_days
 
   args.dest = os.path.expanduser(args.dest)

@@ -188,12 +203,9 @@ def tail():
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
-  elif not args.logfile:
+  if not args.logfile:
     exit("Must specify logfile to tail (-l)")
+  check_args(args)
 
   args.dest = os.path.expanduser(args.dest)

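With this change, -s and -e accept either an integer day count or a calendar date. A minimal standalone sketch of how the new convert_to_days helper resolves the two forms (the example values are illustrative, and the day count produced for a date depends on when it runs):

  from datetime import datetime

  def convert_to_days(argument):
    try:
      val = int(argument)                # plain day count, e.g. "7"
    except:
      try:
        # otherwise treat the argument as an mm-dd-yyyy date and count days since then
        val = (datetime.now() - datetime.strptime(argument, "%m-%d-%Y")).days
      except:
        exit('Start/End days value must be either a number of days or a date in format "mm-dd-yyyy"')
    return val

  print(convert_to_days('7'))           # 7
  print(convert_to_days('02-18-2015'))  # 7, if run on Feb 25, 2015 (this commit's date)
  # convert_to_days('2015-02-18')       # exits: only "mm-dd-yyyy" is accepted
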
11 changes: 5 additions & 6 deletions scripts/logfetch/s3_logs.py
@@ -45,12 +45,11 @@ def logs_for_all_requests(args):
     for task in tasks:
       s3_logs = get_json_response(s3_task_logs_uri(args, task))
       logs = logs + s3_logs if s3_logs else logs
-    if not logs:
-      sys.stderr.write(colored('No tasks found in time range, searching s3 history...\n', 'magenta'))
-      for request in logfetch_base.all_requests(args):
-        s3_logs = get_json_response(s3_request_logs_uri(args, request))
-        logs = logs + s3_logs if s3_logs else logs
-    return logs
+    sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
+    for request in logfetch_base.all_requests(args):
+      s3_logs = get_json_response(s3_request_logs_uri(args, request))
+      logs = logs + s3_logs if s3_logs else logs
+    return [dict(t) for t in set([tuple(l.items()) for l in logs])]
 
 def time_from_filename(filename):
   time_string = re.search('(\d{13})', filename).group(1)
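
Because the request-level S3 search now always runs instead of only when the task-level search comes back empty, the same log entry can appear twice; the new return line deduplicates by turning each dict into a hashable tuple of its items. A small sketch with made-up entries (the 'key' and 'bucket' fields are illustrative, not the actual S3 log schema):

  logs = [{'key': 'log-a', 'bucket': 'b1'},
          {'key': 'log-a', 'bucket': 'b1'},  # duplicate surfaced by the request-level search
          {'key': 'log-b', 'bucket': 'b1'}]
  deduped = [dict(t) for t in set([tuple(l.items()) for l in logs])]
  print(len(deduped))  # 2; note that set() does not preserve the original order
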
2 changes: 1 addition & 1 deletion scripts/setup.py
@@ -10,7 +10,7 @@
 
 setup(
   name='singularity-logfetch',
-  version='0.11.0',
+  version='0.12.0',
   description='Singularity log fetching and searching',
   author="HubSpot",
   author_email='[email protected]',
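
The version bump means these changes ship with the next release. Assuming the package is published to PyPI under the name declared in setup.py, an existing install would pick it up with:

  pip install --upgrade singularity-logfetch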
