Merge pull request #459 from HubSpot/logfetch_history_search
Logfetch v12 (WIP)
ssalinas committed Feb 25, 2015
2 parents d3bd662 + 019b4a2 · commit ca20613
Showing 3 changed files with 46 additions and 24 deletions.
scripts/logfetch/entrypoint.py (33 additions & 19 deletions)
@@ -3,6 +3,7 @@
 import sys
 import os
 import pkg_resources
+from datetime import datetime
 from termcolor import colored
 from fake_section_head import FakeSectionHead
 from live_logs import download_live_logs
@@ -56,6 +57,22 @@ def check_dest(args):
   if not os.path.exists(args.dest):
     os.makedirs(args.dest)
 
+def check_args(args):
+  if args.deployId and not args.requestId:
+    exit("Must specify request-id (-r) when specifying deploy-id")
+  elif not args.requestId and not args.deployId and not args.taskId:
+    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+
+def convert_to_days(argument):
+  try:
+    val = int(argument)
+  except:
+    try:
+      val = (datetime.now() - datetime.strptime(argument, "%m-%d-%Y")).days
+    except:
+      exit('Start/End days value must be either a number of days or a date in format "mm-dd-yyyy"')
+  return val
+
 def fetch():
   conf_parser = argparse.ArgumentParser(version=VERSION, description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False)
   conf_parser.add_argument("-f", "--conf-folder", dest='conf_folder', help="specify a folder for config files to live")
@@ -70,7 +87,8 @@ def fetch():
     "chunk_size" : DEFAULT_CHUNK_SIZE,
     "dest" : DEFAULT_DEST,
     "task_count" : DEFAULT_TASK_COUNT,
-    "start_days" : DEFAULT_DAYS
+    "start_days" : DEFAULT_DAYS,
+    "end_days" : 0 #today
   }
 
   try:
@@ -91,17 +109,16 @@ def fetch():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--log-type", dest="logtype", help="Logfile type to downlaod (ie 'access.log'), can be a glob (ie *.log)")
   parser.add_argument("-g", "--grep", dest="grep", help="Regex to grep for (normal grep syntax) or a full grep command")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days)
+  args.end_days = convert_to_days(args.end_days)
 
   args.dest = os.path.expanduser(args.dest)
 
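In fetch(), the inline validation is now delegated to check_args, and both day arguments are normalized through convert_to_days before any fetching happens. So a hypothetical invocation like `logfetch -r my-request -s 02-20-2015 -e 1` (request id invented for illustration) would search from Feb 20 up to one day ago.
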
@@ -121,7 +138,8 @@ def cat():
     "chunk_size" : DEFAULT_CHUNK_SIZE,
     "dest" : DEFAULT_DEST,
     "task_count" : DEFAULT_TASK_COUNT,
-    "start_days" : DEFAULT_DAYS
+    "start_days" : DEFAULT_DAYS,
+    "end_days" : 0 #today
   }
 
   try:
@@ -142,16 +160,15 @@ def cat():
   parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
   parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
   parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
-  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this many days", type=int)
-  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no new than this many days (defaults to None/today)", type=int)
+  parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format 'mm-dd-yyyy'")
+  parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format 'mm-dd-yyyy' (defaults to None/today)")
   parser.add_argument("-l", "--logtype", dest="logtype", help="Logfile type to downlaod (ie 'access.log'), can be a glob (ie *.log)")
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify requestId (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
+  check_args(args)
+  args.start_days = convert_to_days(args.start_days)
+  args.end_days = convert_to_days(args.end_days)
 
   args.dest = os.path.expanduser(args.dest)
 
@@ -188,12 +205,9 @@ def tail():
 
   args = parser.parse_args(remaining_argv)
 
-  if args.deployId and not args.requestId:
-    exit("Must specify request-id (-r) when specifying deploy-id")
-  elif not args.requestId and not args.deployId and not args.taskId:
-    exit('Must specify one of\n -t task-id\n -r request-id and -d deploy-id\n -r request-id')
-  elif not args.logfile:
+  if not args.logfile:
     exit("Must specify logfile to tail (-l)")
+  check_args(args)
 
   args.dest = os.path.expanduser(args.dest)
 
scripts/logfetch/s3_logs.py (12 additions & 4 deletions)
@@ -9,6 +9,7 @@
 
 TASK_FORMAT = '/task/{0}'
 S3LOGS_URI_FORMAT = '{0}/logs{1}'
+REQUEST_FORMAT = '/request/{0}'
 
 def download_s3_logs(args):
   sys.stderr.write(colored('Checking for S3 log files', 'cyan') + '\n')
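
REQUEST_FORMAT mirrors the existing TASK_FORMAT, and both are interpolated into S3LOGS_URI_FORMAT the same way. A sketch of the resulting endpoints, assuming logfetch_base.base_uri(args) yields the configured base and using made-up ids:

  base = 'http://localhost:8080/singularity/v1'  # e.g. from --singularity-uri-base
  S3LOGS_URI_FORMAT.format(base, TASK_FORMAT.format('my-task-id'))
  # -> 'http://localhost:8080/singularity/v1/logs/task/my-task-id'
  S3LOGS_URI_FORMAT.format(base, REQUEST_FORMAT.format('my-request-id'))
  # -> 'http://localhost:8080/singularity/v1/logs/request/my-request-id'
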
@@ -37,19 +38,26 @@ def already_downloaded(dest, filename):
 
 def logs_for_all_requests(args):
   if args.taskId:
-    return get_json_response(singularity_s3logs_uri(args, args.taskId))
+    return get_json_response(s3_task_logs_uri(args, args.taskId))
   else:
     tasks = logfetch_base.tasks_for_requests(args)
     logs = []
     for task in tasks:
-      s3_logs = get_json_response(singularity_s3logs_uri(args, task))
+      s3_logs = get_json_response(s3_task_logs_uri(args, task))
       logs = logs + s3_logs if s3_logs else logs
-    return logs
+    sys.stderr.write(colored('Also searching s3 history...\n', 'magenta'))
+    for request in logfetch_base.all_requests(args):
+      s3_logs = get_json_response(s3_request_logs_uri(args, request))
+      logs = logs + s3_logs if s3_logs else logs
+    return [dict(t) for t in set([tuple(l.items()) for l in logs])]
 
 def time_from_filename(filename):
   time_string = re.search('(\d{13})', filename).group(1)
   return int(time_string[0:-3])
 
-def singularity_s3logs_uri(args, idString):
+def s3_task_logs_uri(args, idString):
   return S3LOGS_URI_FORMAT.format(logfetch_base.base_uri(args), TASK_FORMAT.format(idString))
+
+def s3_request_logs_uri(args, idString):
+  return S3LOGS_URI_FORMAT.format(logfetch_base.base_uri(args), REQUEST_FORMAT.format(idString))
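
The rewritten return line dedupes the combined task and request results, since the same log file can come back from both lookups. Each dict is flattened to a hashable tuple of items, deduped through a set, and rebuilt with dict(); a minimal sketch with invented entries (the idiom holds as long as the dicts' values stay hashable):

  logs = [{'key': 'a/1.gz'}, {'key': 'b/2.gz'}, {'key': 'a/1.gz'}]
  [dict(t) for t in set([tuple(l.items()) for l in logs])]
  # -> [{'key': 'b/2.gz'}, {'key': 'a/1.gz'}]  (duplicate dropped; order not preserved)
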

scripts/setup.py (1 addition & 1 deletion)
@@ -10,7 +10,7 @@
 
 setup(
   name='singularity-logfetch',
-  version='0.11.0',
+  version='0.12.0',
   description='Singularity log fetching and searching',
   author="HubSpot",
   author_email='[email protected]',
