Add crowdsec reload + cscli metrics minor improvements (#79)
buixor authored Jun 19, 2020
1 parent bb60d29 commit 5446857
Showing 80 changed files with 541 additions and 4,395 deletions.
2 changes: 1 addition & 1 deletion cmd/crowdsec-cli/dashboard.go
@@ -199,7 +199,7 @@ func extractMetabaseDB(buf *bytes.Reader) error {

func resetMetabasePassword(newpassword string) error {

httpctx := sling.New().Base(metabaseURI).Set("User-Agent", fmt.Sprintf("CrowdWatch/%s", cwversion.VersionStr()))
httpctx := sling.New().Base(metabaseURI).Set("User-Agent", fmt.Sprintf("Crowdsec/%s", cwversion.VersionStr()))

log.Printf("Waiting for metabase API to be up (can take up to a minute)")
for {
122 changes: 61 additions & 61 deletions cmd/crowdsec-cli/metrics.go
@@ -5,6 +5,7 @@ import (
"fmt"
"net/http"
"os"
"sort"
"strconv"
"strings"
"time"
@@ -18,6 +19,39 @@ import (
"github.com/spf13/cobra"
)

func metricsToTable(table *tablewriter.Table, stats map[string]map[string]int, keys []string) error {

var sortedKeys []string

if table == nil {
return fmt.Errorf("nil table")
}
//sort keys to keep consistent order when printing
sortedKeys = []string{}
for akey := range stats {
sortedKeys = append(sortedKeys, akey)
}
sort.Strings(sortedKeys)
//
for _, alabel := range sortedKeys {
astats, ok := stats[alabel]
if !ok {
continue
}
row := []string{}
row = append(row, alabel) //name
for _, sl := range keys {
if v, ok := astats[sl]; ok && v != 0 {
row = append(row, fmt.Sprintf("%d", v))
} else {
row = append(row, "-")
}
}
table.Append(row)
}
return nil
}

/*This is a complete rip from prom2json*/
func ShowPrometheus(url string) {
mfChan := make(chan *dto.MetricFamily, 1024)
@@ -55,11 +89,11 @@ func ShowPrometheus(url string) {
metric := m.(prom2json.Metric)
name, ok := metric.Labels["name"]
if !ok {
log.Debugf("no name in Metric")
log.Debugf("no name in Metric %v", metric.Labels)
}
source, ok := metric.Labels["source"]
if !ok {
log.Debugf("no source in Metric")
log.Debugf("no source in Metric %v", metric.Labels)
}
value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32)
@@ -74,6 +108,11 @@ func ShowPrometheus(url string) {
buckets_stats[name] = make(map[string]int)
}
buckets_stats[name]["instanciation"] += ival
case "cs_bucket_count":
if _, ok := buckets_stats[name]; !ok {
buckets_stats[name] = make(map[string]int)
}
buckets_stats[name]["curr_count"] += ival
case "cs_bucket_overflow":
if _, ok := buckets_stats[name]; !ok {
buckets_stats[name] = make(map[string]int)
@@ -126,72 +165,33 @@ func ShowPrometheus(url string) {
}
}
if config.output == "human" {
atable := tablewriter.NewWriter(os.Stdout)
atable.SetHeader([]string{"Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket"})
for alabel, astats := range acquis_stats {

if alabel == "" {
continue
}
row := []string{}
row = append(row, alabel) //name
for _, sl := range []string{"reads", "parsed", "unparsed", "pour"} {
if v, ok := astats[sl]; ok {
row = append(row, fmt.Sprintf("%d", v))
} else {
row = append(row, "-")
}
}
atable.Append(row)
acquisTable := tablewriter.NewWriter(os.Stdout)
acquisTable.SetHeader([]string{"Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket"})
keys := []string{"reads", "parsed", "unparsed", "pour"}
if err := metricsToTable(acquisTable, acquis_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}
btable := tablewriter.NewWriter(os.Stdout)
btable.SetHeader([]string{"Bucket", "Overflows", "Instanciated", "Poured", "Expired"})
for blabel, bstats := range buckets_stats {
if blabel == "" {
continue
}
row := []string{}
row = append(row, blabel) //name
for _, sl := range []string{"overflow", "instanciation", "pour", "underflow"} {
if v, ok := bstats[sl]; ok {
row = append(row, fmt.Sprintf("%d", v))
} else {
row = append(row, "-")
}
}
btable.Append(row)
bucketsTable := tablewriter.NewWriter(os.Stdout)
bucketsTable.SetHeader([]string{"Bucket", "Current Count", "Overflows", "Instanciated", "Poured", "Expired"})
keys = []string{"curr_count", "overflow", "instanciation", "pour", "underflow"}
if err := metricsToTable(bucketsTable, buckets_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}
ptable := tablewriter.NewWriter(os.Stdout)
ptable.SetHeader([]string{"Parsers", "Hits", "Parsed", "Unparsed"})
for plabel, pstats := range parsers_stats {
if plabel == "" {
continue
}
row := []string{}
row = append(row, plabel) //name
hits := 0
parsed := 0
for _, sl := range []string{"hits", "parsed"} {
if v, ok := pstats[sl]; ok {
row = append(row, fmt.Sprintf("%d", v))
if sl == "hits" {
hits = v
} else if sl == "parsed" {
parsed = v
}
} else {
row = append(row, "-")
}
}
row = append(row, fmt.Sprintf("%d", hits-parsed))
ptable.Append(row)

parsersTable := tablewriter.NewWriter(os.Stdout)
parsersTable.SetHeader([]string{"Parsers", "Hits", "Parsed", "Unparsed"})
keys = []string{"hits", "parsed", "unparsed"}
if err := metricsToTable(parsersTable, parsers_stats, keys); err != nil {
log.Warningf("while collecting acquis stats : %s", err)
}

log.Printf("Buckets Metrics:")
btable.Render() // Send output
bucketsTable.Render()
log.Printf("Acquisition Metrics:")
atable.Render() // Send output
acquisTable.Render()
log.Printf("Parser Metrics:")
ptable.Render() // Send output
parsersTable.Render()
} else if config.output == "json" {
for _, val := range []map[string]map[string]int{acquis_stats, parsers_stats, buckets_stats} {
x, err := json.MarshalIndent(val, "", " ")
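Not part of the commit: a minimal standalone sketch of how the new metricsToTable helper behaves, using the same olekukonko/tablewriter calls that appear in the diff. The per-source labels and counter values below are invented for illustration, and the helper body simply mirrors the one added to cmd/crowdsec-cli/metrics.go above.

package main

import (
	"fmt"
	"os"
	"sort"

	"github.com/olekukonko/tablewriter"
)

// metricsToTable mirrors the helper added by this commit: one row per label,
// sorted for a stable order, with "-" for missing or zero counters.
func metricsToTable(table *tablewriter.Table, stats map[string]map[string]int, keys []string) error {
	if table == nil {
		return fmt.Errorf("nil table")
	}
	sortedKeys := []string{}
	for akey := range stats {
		sortedKeys = append(sortedKeys, akey)
	}
	sort.Strings(sortedKeys)
	for _, alabel := range sortedKeys {
		astats, ok := stats[alabel]
		if !ok {
			continue
		}
		row := []string{alabel}
		for _, sl := range keys {
			if v, ok := astats[sl]; ok && v != 0 {
				row = append(row, fmt.Sprintf("%d", v))
			} else {
				row = append(row, "-")
			}
		}
		table.Append(row)
	}
	return nil
}

func main() {
	// Invented per-source counters, in the shape cscli metrics builds from
	// the prometheus endpoint (map[label]map[counter]int).
	stats := map[string]map[string]int{
		"/var/log/nginx/access.log": {"reads": 1234, "parsed": 1200, "pour": 1200},
		"/var/log/auth.log":         {"reads": 420, "parsed": 400, "unparsed": 20, "pour": 400},
	}

	t := tablewriter.NewWriter(os.Stdout)
	t.SetHeader([]string{"Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket"})
	if err := metricsToTable(t, stats, []string{"reads", "parsed", "unparsed", "pour"}); err != nil {
		panic(err)
	}
	t.Render()
}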
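For the json output mode kept in the context lines above, each of the three stats maps is dumped with json.MarshalIndent. A small sketch under the same map shape, with an invented bucket name and counter values:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Same shape as acquis_stats / parsers_stats / buckets_stats in metrics.go;
	// the bucket label and numbers here are illustrative only.
	bucketsStats := map[string]map[string]int{
		"crowdsecurity/ssh-bf": {"curr_count": 3, "overflow": 1, "instanciation": 12, "pour": 42},
	}
	for _, val := range []map[string]map[string]int{bucketsStats} {
		x, err := json.MarshalIndent(val, "", " ")
		if err != nil {
			log.Fatalf("failed to marshal metrics: %s", err)
		}
		fmt.Printf("%s\n", string(x))
	}
}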
