Skip to content

Commit

Permalink
Merge pull request #1053 from HubSpot/logfetch_tail_dups
Browse files Browse the repository at this point in the history
Logfetch tailing grep and duplicate logs fix
  • Loading branch information
ssalinas committed May 20, 2016
2 parents 698c8f9 + 0675c44 commit 83c4c4d
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 23 deletions.
3 changes: 1 addition & 2 deletions scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ For example, to tail the `service.log` file for all tasks for a request named `M

- The path for the log file is relative to the base path for that task's sandbox. For example, to tail a file in `(sandbox path)/logs/access.log`, the argument to -l would be `logs/access.log`

You can also provide the `-g` option which will provide the grep string to the singularity API and search the results. This can be a string to match on or a full grep command as above.
As of `0.25.0` the grep option is no longer supported in `logtail`. It is more efficient/usable, and therefore recommended, to pipe output to grep for this type of functionality.

##Options
|Flags|Description|Default|
Expand All @@ -109,7 +109,6 @@ You can also provide the `-g` option which will provide the grep string to the s
|-r , --request-id|Request Id to fetch logs for||
|-d , --deploy-id|Deploy Id to fetch logs for (Must also specify requestId when using this option)||
|-u, --singularity-uri-base|Base url for singularity (e.g. `localhost:8080/singularity/v2/api`)|Must be set!|
|-g, --grep|Grep string or full command for searching output||
|-l, --logfile|Log file path to tail (ie logs/access.log)|Must be set!|
|-V, --verbose|Extra output about the task id associated with logs in the output|False|
|--silent|No output except for log content, overrides -V|false|
Expand Down
1 change: 0 additions & 1 deletion scripts/logfetch/entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,6 @@ def tail():
parser.add_argument("-r", "--request-id", dest="requestId", help="RequestId of request to fetch logs for (can be a glob)")
parser.add_argument("-d", "--deploy-id", dest="deployId", help="DeployId of tasks to fetch logs for (can be a glob)")
parser.add_argument("-u", "--singularity-uri-base", dest="singularity_uri_base", help="The base for singularity (eg. http://localhost:8080/singularity/v1)")
parser.add_argument("-g", "--grep", dest="grep", help="String to grep for")
parser.add_argument("-l", "--logfile", dest="logfile", help="Logfile path/name to tail (ie 'logs/access.log')")
parser.add_argument("-V", "--verbose", dest="verbose", help="more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')
Expand Down
8 changes: 7 additions & 1 deletion scripts/logfetch/s3_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,13 @@ def logs_for_all_requests(args):
for request in logfetch_base.all_requests(args):
s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
logs = logs + s3_logs if s3_logs else logs
return [dict(t) for t in set(tuple(l.items()) for l in logs)] # remove any duplicates
found_logs = []
keys = []
for log in logs:
if not log['key'] in keys:
found_logs.append(log)
keys.append(log['key'])
return found_logs

def s3_task_logs_uri(args, idString):
    """Return the Singularity S3-logs API URI for a single task id.

    The task id is first wrapped in the task query format, then substituted
    into the base S3-logs URI alongside the Singularity base URI from args.
    """
    task_query = TASK_FORMAT.format(idString)
    base = logfetch_base.base_uri(args)
    return S3LOGS_URI_FORMAT.format(base, task_query)
Expand Down
19 changes: 1 addition & 18 deletions scripts/logfetch/tail.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,28 +104,11 @@ def fetch_new_log_data(self, uri, path, offset, args, task):
response = requests.get(uri, params=params, headers=args.headers).json()
prefix = '({0}) =>\n'.format(task) if args.verbose else ''
if len(response['data'].encode('utf-8')) > 0:
if args.grep:
filename = '{0}/.grep{1}'.format(args.dest, self.Task)
self.create_grep_file(args, filename, response['data'])
output = os.popen(grep_command(args, filename)).read()
sys.stdout.write('{0}{1}'.format(colored(prefix, 'cyan'), output))
self.remove_grep_file(filename)
else:
sys.stdout.write('{0}{1}'.format(colored(prefix, 'cyan'), response['data'].encode('utf-8')))
sys.stdout.write('{0}{1}'.format(colored(prefix, 'cyan'), response['data'].encode('utf-8')))
return offset + len(response['data'].encode('utf-8'))
else:
return offset

def create_grep_file(self, args, filename, content):
    """Write the fetched log content to a scratch file so grep can run over it.

    Content is UTF-8 encoded and the file is opened in binary mode; the
    context manager guarantees the handle is closed even on write errors.
    """
    with open(filename, 'wb') as scratch:
        scratch.write(content.encode('utf-8'))


def remove_grep_file(self, grep_file):
    """Delete the temporary grep scratch file; silently no-op if it is absent."""
    file_present = os.path.isfile(grep_file)
    if file_present:
        os.remove(grep_file)

def show_available_files(self, args, task):
sys.stderr.write(colored('Available files (-l arguments):\n', 'cyan'))
try:
Expand Down
2 changes: 1 addition & 1 deletion scripts/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

setup(
name='singularity-logfetch',
version='0.24.3',
version='0.25.0',
description='Singularity log fetching and searching',
author="HubSpot",
author_email='[email protected]',
Expand Down

0 comments on commit 83c4c4d

Please sign in to comment.