Merge pull request #22 from liamg/liamg-sizes
add option to hide responses of a certain content-length
liamg authored Feb 25, 2021
2 parents 6add9c2 + da1ced9 commit 3140c90
Showing 3 changed files with 27 additions and 1 deletion.
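
In practical terms, the new flag filters noisy results by response size. Assuming the usual scout invocation (the subcommand and flag are defined in cmd/scout/url.go below), a run such as

    scout url --hide-lengths 1234,5678 https://example.com

(or the -l shorthand) should drop any result whose response reports a Content-Length of 1234 or 5678 bytes. The target URL and lengths here are illustrative, not from the diff.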
5 changes: 4 additions & 1 deletion cmd/scout/url.go
@@ -22,6 +22,7 @@ var filename string
 var headers []string
 var extensions = []string{"php", "htm", "html", "txt"}
 var enableSpidering bool
+var ignoredLengths []int
 
 var urlCmd = &cobra.Command{
 	Use: "url [url]",
@@ -80,6 +81,7 @@ var urlCmd = &cobra.Command{
 
 		options := []scan.URLOption{
 			scan.WithPositiveStatusCodes(intStatusCodes),
+			scan.WithNegativeLengths(ignoredLengths),
 			scan.WithTargetURL(*parsedURL),
 			scan.WithResultChan(resultChan),
 			scan.WithBusyChan(busyChan),
@@ -124,7 +126,7 @@ var urlCmd = &cobra.Command{
 
 		go func() {
 			for result := range resultChan {
-				importantOutputChan <- tml.Sprintf("<blue>[</blue><yellow>%d</yellow><blue>]</blue> %s\n", result.StatusCode, result.URL.String())
+				importantOutputChan <- tml.Sprintf("<blue>[</blue><yellow>%d</yellow><blue>]</blue> <blue>[</blue><yellow>%d</yellow><blue>]</blue> %s\n", result.StatusCode, result.Size, result.URL.String())
 			}
 			close(waitChan)
 		}()
@@ -197,6 +199,7 @@ func init() {
 	urlCmd.Flags().StringSliceVarP(&extensions, "extensions", "x", extensions, "File extensions to detect.")
 	urlCmd.Flags().StringSliceVarP(&headers, "header", "H", headers, "Extra header to send with requests (can be specified multiple times).")
 	urlCmd.Flags().BoolVarP(&enableSpidering, "spider", "s", enableSpidering, "Spider links within page content")
+	urlCmd.Flags().IntSliceVarP(&ignoredLengths, "hide-lengths", "l", ignoredLengths, "Hide results with these content lengths")
 
 	rootCmd.AddCommand(urlCmd)
 }
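
For reference, the reworked output line above prints the response size in a second bracketed field after the status code, so a hit should now render roughly as

    [200] [5120] https://example.com/admin

with the tml colour markup omitted; the status code, size, and URL shown are illustrative.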
8 changes: 8 additions & 0 deletions pkg/scan/url_options.go
@@ -31,6 +31,13 @@ func WithPositiveStatusCodes(codes []int) URLOption {
 	}
 }
 
+// WithNegativeLengths provides lengths which should be ignored
+func WithNegativeLengths(lengths []int) URLOption {
+	return func(s *URLScanner) {
+		s.negativeLengths = lengths
+	}
+}
+
 func WithTimeout(timeout time.Duration) URLOption {
 	return func(s *URLScanner) {
 		s.timeout = timeout
@@ -104,4 +111,5 @@ func WithMethod(method string) URLOption {
 type URLResult struct {
 	URL        url.URL
 	StatusCode int
+	Size       int
 }
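
For anyone wiring this up from Go rather than the CLI, the new option follows the same functional-options pattern as the existing ones. The sketch below assumes the import path github.com/liamg/scout/pkg/scan and a constructor named scan.NewURLScanner that applies the options; neither appears in this diff, so treat both as assumptions.

    package main

    import (
    	"net/url"

    	"github.com/liamg/scout/pkg/scan" // assumed import path
    )

    func main() {
    	target, err := url.Parse("https://example.com") // illustrative target
    	if err != nil {
    		panic(err)
    	}

    	// NewURLScanner is assumed here to apply the URLOption values from this diff.
    	scanner := scan.NewURLScanner(
    		scan.WithTargetURL(*target),
    		scan.WithPositiveStatusCodes([]int{200, 301, 302, 403}),
    		// Responses whose Content-Length is exactly 1234 or 5678 bytes
    		// are skipped and never produce a URLResult.
    		scan.WithNegativeLengths([]int{1234, 5678}),
    	)
    	_ = scanner
    }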
15 changes: 15 additions & 0 deletions pkg/scan/url_scanner.go
@@ -8,6 +8,7 @@ import (
 	"io/ioutil"
 	"net/http"
 	"net/url"
+	"strconv"
 	"strings"
 	"sync"
 	"sync/atomic"
@@ -43,6 +44,7 @@ type URLScanner struct {
 	jobsLoaded      int32
 	proxy           *url.URL
 	method          string
+	negativeLengths []int
 }
 
 type URLJob struct {
@@ -334,9 +336,22 @@ func (scanner *URLScanner) checkURL(job URLJob) *URLResult {
 			_, _ = io.Copy(ioutil.Discard, resp.Body)
 		}
 
+		var size int
+		contentLength := resp.Header.Get("Content-Length")
+		if contentLength != "" {
+			size, _ = strconv.Atoi(contentLength)
+		}
+
+		for _, length := range scanner.negativeLengths {
+			if length == size {
+				return nil
+			}
+		}
+
 		result = &URLResult{
 			StatusCode: code,
 			URL:        *parsedURL,
+			Size:       size,
 		}
 
 		break
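
The size check itself is small enough to read in isolation. The sketch below is a self-contained rendering of the same logic under illustrative names: the size comes from the Content-Length header, defaulting to 0 when the header is missing or unparsable (the strconv.Atoi error is discarded), and any response matching a configured length is dropped.

    package main

    import (
    	"fmt"
    	"net/http"
    	"strconv"
    )

    // shouldHide mirrors the filtering added to checkURL: parse Content-Length,
    // falling back to 0 when absent or non-numeric, then compare against the
    // negative lengths. The function name is illustrative, not from the diff.
    func shouldHide(resp *http.Response, negativeLengths []int) bool {
    	var size int
    	if cl := resp.Header.Get("Content-Length"); cl != "" {
    		size, _ = strconv.Atoi(cl)
    	}
    	for _, length := range negativeLengths {
    		if length == size {
    			return true
    		}
    	}
    	return false
    }

    func main() {
    	resp, err := http.Get("https://example.com") // illustrative request
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()

    	fmt.Println(shouldHide(resp, []int{1234}))
    }

One side effect worth noting: because a missing or malformed Content-Length leaves the size at 0, passing 0 to --hide-lengths also hides responses that do not declare a length.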
