Merge pull request #660 from HubSpot/logfetch_upgrade
Logfetch upgrade
ssalinas committed Aug 19, 2015
2 parents 88addd8 + 584ef2e commit 6bd8546
Showing 6 changed files with 56 additions and 43 deletions.
28 changes: 15 additions & 13 deletions scripts/README.md
@@ -26,21 +26,23 @@ Two commands exist for downloading logs.
 |:---:|:---------|:-----:|
 |-f , --conf-folder|Folder to look for configuration files|`~/.logfetch`|
 |-c , --conf-file|Configuration file to use (path relative to conf_folder)|default|
-|-t , --task-id|Task Id to fetch logs for|
-|-r , --request-id|Request Id to fetch logs for|
-|-tc, --task-count|Number of recent tasks (belonging to a request) to fetch live logs for (on machine, not s3)|1|
+|-t , --task-id|Task Id to fetch logs for||
+|-r , --request-id|Request Id to fetch logs for||
+|-T, --task-count|Max number of recent tasks (belonging to a request) to fetch live logs for (on machine, not s3)|20|
 |-d , --deploy-id|Deploy Id to fetch logs for (must also specify requestId when using this option)|
 |-o, --dest|Destination folder for download output|`~/.logfetch_cache`|
-|-n --num-parallel-fetches|Max number of log fetches to make at once|5|
-|-cs, --chunk-size|Chunk size for writing responses to file system|8192|
+|-n --num-parallel-fetches|Max number of log fetches to make at once|10|
+|-C, --chunk-size|Chunk size for writing responses to file system|8192|
 |-u, --singularity-uri-base|Base url for singularity (e.g. `localhost:8080/singularity/v2/api`)|Must be set!|
-|-s , --start-days|Search for logs no older than this; can be an integer number of days or a date in format "%Y-%m-%d %H:%M:%S" or "%Y-%m-%d" (leaving off h-m-s is inclusive for the current day, from 00:00:00)|7 days ago|
-|-e , --end-days|Search for logs no newer than this; can be an integer number of days or a date in format "%Y-%m-%d %H:%M:%S" or "%Y-%m-%d" (leaving off h-m-s is inclusive for the current day, to 23:59:59)|None (now)|
+|-s , --start|Search for logs no older than this; can be an integer number of days or a date in format "%Y-%m-%d %H:%M:%S" or "%Y-%m-%d" (leaving off h-m-s is inclusive for the current day, from 00:00:00)|7 days ago|
+|-e , --end|Search for logs no newer than this; can be an integer number of days or a date in format "%Y-%m-%d %H:%M:%S" or "%Y-%m-%d" (leaving off h-m-s is inclusive for the current day, to 23:59:59)|None (now)|
 |-z , --local-zone|Specify times for `-s` and `-e` in your local time zone; if this is not set, times are assumed to be in UTC|unset/false|
 |-p, --file-pattern|Should match the executor.s3.uploader.pattern setting; determines if we can match on file name for s3 logs|`%requestId/%Y/%m/%taskId_%index-%s-%filename`|
-|-nn, --no-name-fetch-off|If a logtype matcher is specified but the s3 log pattern does not include file name, don't download any s3 files|None (fetch all)|
-|-g, --grep|Grep string for searching log files (only for `logfetch`)|
+|-N, --no-name-fetch-off|If a logtype matcher is specified but the s3 log pattern does not include file name, don't download any s3 files|None (fetch all)|
+|-g, --grep|Grep string for searching log files (only for `logfetch`)||
 |-l, --logtype|Glob matcher for type of log file to download|None (match all)|
+|-S, --skip-s3|Don't search/download s3 logs|false|
+|-L, --skip-live|Don't search/download live logs|false|
 |-V, --verbose|More verbose output||
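For example (the request id here is illustrative), `logfetch -r my-request -T 5 -s 3 -g ERROR` would grep the logs of the five most recent tasks of `my-request` from the last three days; adding `-S` skips the s3 search and `-L` skips the live-log search.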

## Grep and Log Files
@@ -99,11 +101,11 @@ You can also provide the `-g` option which will provide the grep string to the s
 |:---:|:---------|:-----:|
 |-f , --conf-folder|Folder to look for configuration files|`~/.logfetch`|
 |-c , --conf-file|Configuration file to use (path relative to conf_folder)|default|
-|-t , --task-id|Task Id to fetch logs for|
-|-r , --request-id|Request Id to fetch logs for|
-|-d , --deploy-id|Deploy Id to fetch logs for (must also specify requestId when using this option)|
+|-t , --task-id|Task Id to fetch logs for||
+|-r , --request-id|Request Id to fetch logs for||
+|-d , --deploy-id|Deploy Id to fetch logs for (must also specify requestId when using this option)||
 |-u, --singularity-uri-base|Base url for singularity (e.g. `localhost:8080/singularity/v2/api`)|Must be set!|
-|-g, --grep|Grep string or full command for searching output|
+|-g, --grep|Grep string or full command for searching output||
 |-l, --logfile|Log file path to tail (e.g. logs/access.log)|Must be set!|
 |-v, --verbose|Extra output about the task id associated with logs in the output|False|
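For example (again with an illustrative request id), `logtail -r my-request -l logs/access.log -g ERROR` would tail `logs/access.log` for each matching task, filtering the stream through grep for `ERROR`.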

1 change: 1 addition & 0 deletions scripts/logfetch/cat.py
@@ -4,6 +4,7 @@

 def cat_files(args, all_logs):
     if all_logs:
+        all_logs.sort()
         for log in all_logs:
             sys.stderr.write(colored(log, 'cyan') + '\n')
             command = 'cat {0}'.format(log)
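The one-line addition sorts the collected log paths before they are catted, so repeated runs print files in a stable, name-grouped order rather than whatever order the downloads happened to finish in.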
54 changes: 31 additions & 23 deletions scripts/logfetch/entrypoint.py
@@ -20,7 +20,7 @@
 DEFAULT_PARALLEL_FETCHES = 10
 DEFAULT_CHUNK_SIZE = 8192
 DEFAULT_DEST = os.path.expanduser('~/.logfetch_cache')
-DEFAULT_TASK_COUNT = 10
+DEFAULT_TASK_COUNT = 20
 DEFAULT_DAYS = 7
 DEFAULT_S3_PATTERN = '%requestId/%Y/%m/%taskId_%index-%s-%filename'

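The default s3 pattern above mirrors the uploader's key layout. As a rough illustration of how the placeholders map onto concrete s3 keys (the helper name and wildcard handling here are assumptions, not logfetch's actual code):

```python
# Hypothetical sketch: expand the s3 uploader pattern into a search prefix.
# %Y/%m come from the upload date; task-specific pieces are filled in and
# the remaining placeholders are left as wildcards for matching.
def expand_s3_pattern(pattern, request_id, task_id, year, month):
    return (pattern
            .replace('%requestId', request_id)
            .replace('%taskId', task_id)
            .replace('%Y', year)
            .replace('%m', month)
            .replace('%index', '*')
            .replace('%s', '*')
            .replace('%filename', '*'))

print(expand_s3_pattern('%requestId/%Y/%m/%taskId_%index-%s-%filename',
                        'my-request', 'my-task', '2015', '08'))
# my-request/2015/08/my-task_*-*-*
```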
@@ -38,8 +38,10 @@ def fetch_logs(args):
     try:
         check_dest(args)
         all_logs = []
-        all_logs += download_s3_logs(args)
-        all_logs += download_live_logs(args)
+        if not args.skip_s3:
+            all_logs += download_s3_logs(args)
+        if not args.skip_live:
+            all_logs += download_live_logs(args)
         grep_files(args, all_logs)
     except KeyboardInterrupt:
         exit('Stopping logfetch...', 'magenta')
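Note that the two new guards are independent: passing both `-S` and `-L` would skip both sources and leave nothing to grep, so the flags are presumably intended to be used one at a time.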
@@ -48,8 +50,10 @@ def cat_logs(args):
     try:
         check_dest(args)
         all_logs = []
-        all_logs += download_s3_logs(args)
-        all_logs += download_live_logs(args)
+        if not args.skip_s3:
+            all_logs += download_s3_logs(args)
+        if not args.skip_live:
+            all_logs += download_live_logs(args)
         cat_files(args, all_logs)
     except KeyboardInterrupt:
         exit('Stopping logcat...', 'magenta')
@@ -98,9 +102,9 @@ def fetch():
         "chunk_size" : DEFAULT_CHUNK_SIZE,
         "dest" : DEFAULT_DEST,
         "task_count" : DEFAULT_TASK_COUNT,
-        "start_days" : datetime.strptime('{0} 00:00:00'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S") - timedelta(days=DEFAULT_DAYS),
+        "start" : datetime.strptime('{0} 00:00:00'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S") - timedelta(days=DEFAULT_DAYS),
         "file_pattern" : DEFAULT_S3_PATTERN,
-        "end_days" : datetime.strptime('{0} 23:59:59'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S")
+        "end" : datetime.strptime('{0} 23:59:59'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S")
     }

     try:
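The `start`/`end` defaults above are built by round-tripping today's date through `strftime`/`strptime`. A more direct equivalent, shown for clarity only (not part of this commit):

```python
from datetime import datetime, timedelta

# Midnight seven days ago through 23:59:59 today, matching the defaults above.
midnight_today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
start = midnight_today - timedelta(days=7)
end = midnight_today.replace(hour=23, minute=59, second=59)
```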
@@ -115,26 +119,28 @@
     parser.set_defaults(**defaults)
     parser.add_argument("-t", "--task-id", dest="taskId", help="TaskId of task to fetch logs for")
     parser.add_argument("-r", "--request-id", dest="requestId", help="RequestId of request to fetch logs for (can be a glob)")
-    parser.add_argument("-tc", "--task-count", dest="task_count", help="Number of recent tasks per request to fetch logs from")
+    parser.add_argument("-T", "--task-count", dest="task_count", help="Number of recent tasks per request to fetch logs from", type=int)
     parser.add_argument("-d", "--deploy-id", dest="deployId", help="DeployId of task to fetch logs for (can be a glob)")
     parser.add_argument("-o", "--dest", dest="dest", help="Destination directory")
     parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
-    parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
+    parser.add_argument("-C", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
     parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (e.g. http://localhost:8080/singularity/v1)")
-    parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d'")
-    parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d' (defaults to None/now)")
+    parser.add_argument("-s", "--start", dest="start", help="Search for logs no older than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d'")
+    parser.add_argument("-e", "--end", dest="end", help="Search for logs no newer than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d' (defaults to None/now)")
     parser.add_argument("-l", "--log-type", dest="logtype", help="Logfile type to download (e.g. 'access.log'), can be a glob (e.g. *.log)")
     parser.add_argument("-p", "--file-pattern", dest="file_pattern", help="S3 uploader file pattern")
-    parser.add_argument("-nn", "--no-name-fetch-off", dest="no_name_fetch_off", help="If a logtype matcher is specified, but the s3 log pattern does not include file name, don't download any s3 files", action="store_true")
+    parser.add_argument("-N", "--no-name-fetch-off", dest="no_name_fetch_off", help="If a logtype matcher is specified, but the s3 log pattern does not include file name, don't download any s3 files", action="store_true")
     parser.add_argument("-g", "--grep", dest="grep", help="Regex to grep for (normal grep syntax) or a full grep command")
     parser.add_argument("-z", "--local-zone", dest="zone", help="If specified, input times in the local time zone and convert to UTC; if not specified, inputs are assumed to be UTC", action="store_true")
+    parser.add_argument("-S", "--skip-s3", dest="skip_s3", help="Don't download/search s3 logs", action='store_true')
+    parser.add_argument("-L", "--skip-live", dest="skip_live", help="Don't download/search live logs", action='store_true')
     parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')

     args = parser.parse_args(remaining_argv)

     check_args(args)
-    args.start_days = convert_to_date(args, args.start_days)
-    args.end_days = convert_to_date(args, args.end_days)
+    args.start = convert_to_date(args, args.start)
+    args.end = convert_to_date(args, args.end)

     args.dest = os.path.expanduser(args.dest)
     try:
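`convert_to_date` itself is not part of this diff. A hypothetical sketch of the conversion it performs on the `-s`/`-e` values (names and details assumed; the real helper also applies the `-z` local-zone conversion carried on `args`, omitted here):

```python
from datetime import datetime, timedelta

def convert_to_date_sketch(args, value):
    # Already a datetime (the programmatic default) or unset: pass through.
    if value is None or isinstance(value, datetime):
        return value
    # A bare integer means "this many days ago".
    try:
        return datetime.utcnow() - timedelta(days=int(value))
    except ValueError:
        pass
    # Otherwise accept the two documented date formats.
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError('Unrecognized start/end value: {0}'.format(value))
```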
@@ -160,9 +166,9 @@ def cat():
         "chunk_size" : DEFAULT_CHUNK_SIZE,
         "dest" : DEFAULT_DEST,
         "task_count" : DEFAULT_TASK_COUNT,
-        "start_days" : datetime.strptime('{0} 00:00:00'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S") - timedelta(days=DEFAULT_DAYS),
+        "start" : datetime.strptime('{0} 00:00:00'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S") - timedelta(days=DEFAULT_DAYS),
         "file_pattern" : DEFAULT_S3_PATTERN,
-        "end_days" : datetime.strptime('{0} 23:59:59'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S")
+        "end" : datetime.strptime('{0} 23:59:59'.format(datetime.now().strftime("%Y-%m-%d")), "%Y-%m-%d %H:%M:%S")
     }

     try:
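Both entry points layer configuration the same way: built-in constants are merged with any conf-file values into `defaults`, `parser.set_defaults(**defaults)` applies them, and explicit command-line flags win. A minimal standalone demonstration of that precedence (illustration only):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-T', '--task-count', dest='task_count', type=int)
parser.set_defaults(task_count=20)                 # built-in or conf-file value
print(parser.parse_args([]).task_count)            # 20 -> default applies
print(parser.parse_args(['-T', '5']).task_count)   # 5  -> CLI flag wins
```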
@@ -177,25 +183,27 @@ def cat():
     parser.set_defaults(**defaults)
     parser.add_argument("-t", "--task-id", dest="taskId", help="TaskId of task to fetch logs for")
     parser.add_argument("-r", "--request-id", dest="requestId", help="RequestId of request to fetch logs for (can be a glob)")
-    parser.add_argument("-tc", "--task-count", dest="taskCount", help="Number of recent tasks per request to fetch logs from")
+    parser.add_argument("-T", "--task-count", dest="taskCount", help="Number of recent tasks per request to fetch logs from", type=int)
     parser.add_argument("-d", "--deploy-id", dest="deployId", help="DeployId of tasks to fetch logs for (can be a glob)")
     parser.add_argument("-o", "--dest", dest="dest", help="Destination directory")
     parser.add_argument("-n", "--num-parallel-fetches", dest="num_parallel_fetches", help="Number of fetches to make at once", type=int)
-    parser.add_argument("-cs", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
+    parser.add_argument("-C", "--chunk-size", dest="chunk_size", help="Chunk size for writing from response to filesystem", type=int)
     parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (e.g. http://localhost:8080/singularity/v1)")
-    parser.add_argument("-s", "--start-days", dest="start_days", help="Search for logs no older than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d'")
-    parser.add_argument("-e", "--end-days", dest="end_days", help="Search for logs no newer than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d' (defaults to None/now)")
+    parser.add_argument("-s", "--start", dest="start", help="Search for logs no older than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d'")
+    parser.add_argument("-e", "--end", dest="end", help="Search for logs no newer than this, can be an integer number of days or date in format '%%Y-%%m-%%d %%H:%%M:%%S' or '%%Y-%%m-%%d' (defaults to None/now)")
     parser.add_argument("-l", "--logtype", dest="logtype", help="Logfile type to download (e.g. 'access.log'), can be a glob (e.g. *.log)")
     parser.add_argument("-p", "--file-pattern", dest="file_pattern", help="S3 uploader file pattern")
-    parser.add_argument("-nn", "--no-name-fetch-off", dest="no_name_fetch_off", help="If a logtype matcher is specified, but the s3 log pattern does not include file name, don't download any s3 files", action="store_true")
+    parser.add_argument("-N", "--no-name-fetch-off", dest="no_name_fetch_off", help="If a logtype matcher is specified, but the s3 log pattern does not include file name, don't download any s3 files", action="store_true")
     parser.add_argument("-z", "--local-zone", dest="zone", help="If specified, input times in the local time zone and convert to UTC; if not specified, inputs are assumed to be UTC", action="store_true")
+    parser.add_argument("-S", "--skip-s3", dest="skip_s3", help="Don't download/search s3 logs", action='store_true')
+    parser.add_argument("-L", "--skip-live", dest="skip_live", help="Don't download/search live logs", action='store_true')
     parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')

     args = parser.parse_args(remaining_argv)

     check_args(args)
-    args.start_days = convert_to_date(args, args.start_days)
-    args.end_days = convert_to_date(args, args.end_days)
+    args.start = convert_to_date(args, args.start)
+    args.end = convert_to_date(args, args.end)

     args.dest = os.path.expanduser(args.dest)
     try:
1 change: 1 addition & 0 deletions scripts/logfetch/grep.py
@@ -8,6 +8,7 @@

 def grep_files(args, all_logs):
     if args.grep:
         if all_logs:
+            all_logs.sort()
             for log in all_logs:
                 command = grep_command(args, log)
                 output = os.popen(command).read()
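`grep_command` is not shown in this diff; per the README, `-g` accepts either a plain pattern or a full command, so a hypothetical sketch of the dispatch (helper name and heuristic assumed, not logfetch's actual code) might look like:

```python
def grep_command_sketch(args, log):
    # If the user already supplied a full command (e.g. "grep -v DEBUG"),
    # use it verbatim; otherwise wrap the plain pattern in a default grep.
    first = args.grep.split()[0] if args.grep else ''
    if first in ('grep', 'egrep', 'fgrep', 'zgrep'):
        return '{0} {1}'.format(args.grep, log)
    return "grep '{0}' {1}".format(args.grep, log)
```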
8 changes: 4 additions & 4 deletions scripts/logfetch/logfetch_base.py
@@ -65,7 +65,7 @@ def log_matches(inputString, pattern):
 def all_tasks_for_request(args, request):
     uri = '{0}{1}'.format(base_uri(args), ACTIVE_TASKS_FORMAT.format(request))
     active_tasks = get_json_response(uri, args)
-    if hasattr(args, 'start_days'):
+    if hasattr(args, 'start'):
         uri = '{0}{1}'.format(base_uri(args), REQUEST_TASKS_FORMAT.format(request))
         historical_tasks = get_json_response(uri, args)
         if len(historical_tasks) == 0:
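The `hasattr(args, 'start')` guard presumably distinguishes the entry points that define a date window (`logfetch`/`logcat`) from `logtail`, which has no `-s`/`-e` flags, so only the former pay for the extra historical-task lookup.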
@@ -88,7 +88,7 @@ def all_requests(args):

 def is_in_date_range(args, timestamp):
     timestamp_datetime = datetime.utcfromtimestamp(timestamp)
-    if args.end_days:
-        return False if (timestamp_datetime < args.start_days or timestamp_datetime > args.end_days) else True
+    if args.end:
+        return False if (timestamp_datetime < args.start or timestamp_datetime > args.end) else True
     else:
-        return False if timedelta.days < args.start_days else True
+        return False if timedelta.days < args.start else True
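The `else` branch above looks suspect even after the rename: `timedelta.days` references a class attribute rather than the log's age. A hedged sketch of the comparison the function presumably intends (not code from this commit):

```python
from datetime import datetime

def is_in_date_range_sketch(args, timestamp):
    ts = datetime.utcfromtimestamp(timestamp)
    if args.end:
        return args.start <= ts <= args.end
    # No end bound: anything newer than the start bound is in range.
    return ts >= args.start
```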
7 changes: 4 additions & 3 deletions scripts/setup.py
@@ -1,16 +1,17 @@
 from setuptools import setup, find_packages

 requirements = [
-    'argparse==1.2.2',
+    'argparse==1.3.0',
     'ConfigParser==3.5.0b2',
     'grequests==0.2.0',
-    'requests==2.5.0',
+    'gevent==1.0.2',
+    'requests==2.7.0',
     'termcolor==1.1.0'
 ]

 setup(
     name='singularity-logfetch',
-    version='0.17.2',
+    version='0.19.0',
     description='Singularity log fetching and searching',
     author="HubSpot",
     author_email='[email protected]',
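Pinning `gevent` explicitly alongside `grequests` (which is built on gevent) presumably locks the async dependency to a known-good version rather than whatever pip would resolve transitively.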
