Skip to content

Commit

Permalink
Merge pull request #911 from HubSpot/logfetch_no_unzip
Browse files Browse the repository at this point in the history
Logfetch no unzip
  • Loading branch information
ssalinas committed Feb 19, 2016
2 parents a24826a + 39067a7 commit af7dd3d
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 6 deletions.
1 change: 1 addition & 0 deletions scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ Two commands exist for downloading logs.
|-z , --local-zone|Specify times for `-s` and `-e` in your local time zone. If this is not set, times are assumed to be in UTC|unset/false|
|-p, --file-pattern|Should match the executor.s3.uploader.pattern setting, determines if we can match on file name for s3 logs|`%requestId/%Y/%m/%taskId_%index-%s-%filename`|
|-N, --no-name-fetch-off|If a logtype matcher is specified, but the s3 log pattern does not include file name, don't download any s3 files| None (fetch all)|
|-D, --download-only|Only download logs in their current state, don't unzip or grep||
|-g, --grep|Grep string for searching log files (Only for `logfetch`)||
|-l, --logtype|Glob matcher for type of log file to download| None (match all)|
|-S, --skip-s3|Don't search/download s3 logs|false|
Expand Down
8 changes: 6 additions & 2 deletions scripts/logfetch/entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ def fetch_logs(args):
all_logs += download_s3_logs(args)
if not args.skip_live:
all_logs += download_live_logs(args)
grep_files(args, all_logs)
if not args.download_only:
grep_files(args, all_logs)
except KeyboardInterrupt:
exit('Stopping logfetch...', 'magenta')

Expand All @@ -64,7 +65,8 @@ def cat_logs(args):
all_logs += download_s3_logs(args)
if not args.skip_live:
all_logs += download_live_logs(args)
cat_files(args, all_logs)
if not args.download_only:
cat_files(args, all_logs)
except KeyboardInterrupt:
exit('Stopping logcat...', 'magenta')

Expand Down Expand Up @@ -148,6 +150,7 @@ def fetch():
parser.add_argument("--search", dest="search", help="run logsearch on the local cache of downloaded files", action='store_true')
parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')
parser.add_argument("-D" ,"--download-only", dest="download_only", help="Only download files, don't unzip or grep", action='store_true')

args = parser.parse_args(remaining_argv)

Expand Down Expand Up @@ -277,6 +280,7 @@ def cat():
parser.add_argument("-U", "--use-cache", dest="use_cache", help="Use cache for live logs, don't re-download them", action='store_true')
parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')
parser.add_argument("-D" ,"--download-only", dest="download_only", help="Only download files, don't unzip or grep", action='store_true')

args = parser.parse_args(remaining_argv)

Expand Down
2 changes: 1 addition & 1 deletion scripts/logfetch/live_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def download_live_logs(args):
sys.stderr.write(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'))
callbacks.goal = len(async_requests)
grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
if zipped_files:
if zipped_files and not args.download_only:
if not args.silent:
sys.stderr.write(colored('\nUnpacking {0} log(s)\n'.format(len(zipped_files)), 'cyan'))
all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
Expand Down
5 changes: 3 additions & 2 deletions scripts/logfetch/s3_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,13 @@ def download_s3_logs(args):
sys.stderr.write(colored('Starting {0} S3 Downloads with {1} parallel fetches\n'.format(len(async_requests), args.num_parallel_fetches), 'cyan'))
callbacks.goal = len(async_requests)
grequests.map(async_requests, stream=True, size=args.num_parallel_fetches)
if not args.silent:
if not args.silent and not args.download_only:
sys.stderr.write(colored('\nUnpacking {0} S3 log(s)\n'.format(len(async_requests)), 'cyan'))
else:
if not args.silent:
sys.stderr.write(colored('No S3 logs to download\n', 'cyan'))
all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
if not args.download_only:
all_logs = all_logs + logfetch_base.unpack_logs(args, zipped_files)
if not args.silent:
sys.stderr.write(colored('All S3 logs up to date\n', 'cyan'))
return all_logs
Expand Down
2 changes: 1 addition & 1 deletion scripts/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

setup(
name='singularity-logfetch',
version='0.22.1',
version='0.23.0',
description='Singularity log fetching and searching',
author="HubSpot",
author_email='[email protected]',
Expand Down

0 comments on commit af7dd3d

Please sign in to comment.