Skip to content

Commit

Permalink
Fix lint issues
Browse files Browse the repository at this point in the history
  • Loading branch information
jaimeyh committed Sep 3, 2024
1 parent 9486c39 commit 0123c35
Show file tree
Hide file tree
Showing 23 changed files with 50 additions and 104 deletions.
4 changes: 2 additions & 2 deletions collector/cluster_health.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -256,7 +256,7 @@ func (c *ClusterHealth) fetchAndDecodeClusterHealth() (clusterHealthResponse, er
return chr, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
c.jsonParseFailures.Inc()
return chr, err
Expand Down
2 changes: 1 addition & 1 deletion collector/cluster_health_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ func TestClusterHealth(t *testing.T) {
"5.4.2": `{"cluster_name":"elasticsearch","status":"yellow","timed_out":false,"number_of_nodes":1,"number_of_data_nodes":1,"active_primary_shards":5,"active_shards":5,"relocating_shards":0,"initializing_shards":0,"unassigned_shards":5,"delayed_unassigned_shards":0,"number_of_pending_tasks":0,"number_of_in_flight_fetch":0,"task_max_waiting_in_queue_millis":12,"active_shards_percent_as_number":50.0}`,
}
for ver, out := range tcs {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
fmt.Fprintln(w, out)
}))
defer ts.Close()
Expand Down
6 changes: 3 additions & 3 deletions collector/cluster_info.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"context"
"encoding/json"
"io/ioutil"
"io"
"net/http"
"net/url"

Expand Down Expand Up @@ -77,13 +77,13 @@ type VersionInfo struct {
LuceneVersion semver.Version `json:"lucene_version"`
}

func (c *ClusterInfoCollector) Update(ctx context.Context, ch chan<- prometheus.Metric) error {
func (c *ClusterInfoCollector) Update(_ context.Context, ch chan<- prometheus.Metric) error {
resp, err := c.hc.Get(c.u.String())
if err != nil {
return err
}
defer resp.Body.Close()
b, err := ioutil.ReadAll(resp.Body)
b, err := io.ReadAll(resp.Body)
if err != nil {
return err
}
Expand Down
4 changes: 2 additions & 2 deletions collector/cluster_settings.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -100,7 +100,7 @@ func (cs *ClusterSettings) getAndParseURL(u *url.URL, data interface{}) error {
return fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
cs.jsonParseFailures.Inc()
return err
Expand Down
4 changes: 2 additions & 2 deletions collector/cluster_settings_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ func TestClusterSettingsStats(t *testing.T) {
f, _ := os.Open(filename)
defer f.Close()
for hn, handler := range map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
"plain": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
io.Copy(w, f)
}),
} {
Expand Down Expand Up @@ -69,7 +69,7 @@ func TestClusterMaxShardsPerNode(t *testing.T) {
f, _ := os.Open(filename)
defer f.Close()
for hn, handler := range map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
"plain": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
io.Copy(w, f)
}),
} {
Expand Down
4 changes: 2 additions & 2 deletions collector/collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -202,8 +202,8 @@ func execute(ctx context.Context, name string, c Collector, ch chan<- prometheus
// A new action function is needed for each collector flag because the ParseContext
// does not contain information about which flag called the action.
// See: https://github.com/alecthomas/kingpin/issues/294
func collectorFlagAction(collector string) func(ctx *kingpin.ParseContext) error {
return func(ctx *kingpin.ParseContext) error {
func collectorFlagAction(collector string) func(_ *kingpin.ParseContext) error {
return func(_ *kingpin.ParseContext) error {
forcedCollectors[collector] = true
return nil
}
Expand Down
13 changes: 7 additions & 6 deletions collector/indices.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,17 @@ package collector
import (
"encoding/json"
"fmt"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus-community/elasticsearch_exporter/pkg/clusterinfo"
"github.com/prometheus/client_golang/prometheus"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
"sort"
"strconv"

"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus-community/elasticsearch_exporter/pkg/clusterinfo"
"github.com/prometheus/client_golang/prometheus"
)

type labels struct {
Expand Down Expand Up @@ -1183,7 +1184,7 @@ func (i *Indices) queryURL(u *url.URL) ([]byte, error) {
return []byte{}, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
return []byte{}, err
}
Expand Down
4 changes: 2 additions & 2 deletions collector/indices_mappings.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -132,7 +132,7 @@ func (im *IndicesMappings) getAndParseURL(u *url.URL) (*IndicesMappingsResponse,
return nil, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

body, err := ioutil.ReadAll(res.Body)
body, err := io.ReadAll(res.Body)
if err != nil {
_ = level.Warn(im.logger).Log("msg", "failed to read response body", "err", err)
return nil, err
Expand Down
2 changes: 1 addition & 1 deletion collector/indices_mappings_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ func TestMapping(t *testing.T) {
}
for ver, out := range tcs {
for hn, handler := range map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
"plain": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
fmt.Fprintln(w, out)
}),
} {
Expand Down
4 changes: 2 additions & 2 deletions collector/indices_settings.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -123,7 +123,7 @@ func (cs *IndicesSettings) getAndParseURL(u *url.URL, data interface{}) error {
return fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
cs.jsonParseFailures.Inc()
return err
Expand Down
2 changes: 1 addition & 1 deletion collector/indices_settings_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ func TestIndicesSettings(t *testing.T) {
}
for ver, out := range tcs {
for hn, handler := range map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
"plain": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
fmt.Fprintln(w, out)
}),
} {
Expand Down
6 changes: 3 additions & 3 deletions collector/indices_test.go

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions collector/nodes.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -70,7 +70,7 @@ func createRoleMetric(role string) *nodeMetric {
"Node roles",
defaultRoleLabels, prometheus.Labels{"role": role},
),
Value: func(node NodeStatsNodeResponse) float64 {
Value: func(_ NodeStatsNodeResponse) float64 {
return 1.0
},
Labels: func(cluster string, node NodeStatsNodeResponse) []string {
Expand Down Expand Up @@ -1829,7 +1829,7 @@ func (c *Nodes) fetchAndDecodeNodeStats() (nodeStatsResponse, error) {
return nsr, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
c.jsonParseFailures.Inc()
return nsr, err
Expand Down
10 changes: 5 additions & 5 deletions collector/nodes_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@ package collector
import (
"encoding/base64"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"os"
"strings"
"testing"

Expand All @@ -29,18 +29,18 @@ import (
func TestNodesStats(t *testing.T) {
for _, ver := range testElasticsearchVersions {
filename := fmt.Sprintf("../fixtures/nodestats/%s.json", ver)
data, _ := ioutil.ReadFile(filename)
data, _ := os.ReadFile(filename)

handlers := map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
"plain": http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
if _, err := w.Write(data); err != nil {
t.Fatalf("failed write: %s", err)
}
}),
"basicauth": &basicAuth{
User: "elastic",
Pass: "changeme",
Next: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
Next: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
if _, err := w.Write(data); err != nil {
t.Fatalf("failed write: %s", err)
}
Expand Down Expand Up @@ -138,7 +138,7 @@ type basicAuth struct {
Next http.Handler
}

func (h *basicAuth) checkAuth(w http.ResponseWriter, r *http.Request) bool {
func (h *basicAuth) checkAuth(_ http.ResponseWriter, r *http.Request) bool {
s := strings.SplitN(r.Header.Get("Authorization"), " ", 2)
if len(s) != 2 {
return false
Expand Down
6 changes: 3 additions & 3 deletions collector/slm.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -287,7 +287,7 @@ func (s *SLM) fetchAndDecodeSLMStats() (SLMStatsResponse, error) {
return ssr, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
s.jsonParseFailures.Inc()
return ssr, err
Expand Down Expand Up @@ -326,7 +326,7 @@ func (s *SLM) fetchAndDecodeSLMStatus() (SLMStatusResponse, error) {
return ssr, fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
s.jsonParseFailures.Inc()
return ssr, err
Expand Down
2 changes: 1 addition & 1 deletion collector/slm_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ func TestSLM(t *testing.T) {
"7.15.0": `{"retention_runs":9,"retention_failed":0,"retention_timed_out":0,"retention_deletion_time":"1.2m","retention_deletion_time_millis":72491,"total_snapshots_taken":103,"total_snapshots_failed":2,"total_snapshots_deleted":20,"total_snapshot_deletion_failures":0,"policy_stats":[{"policy":"everything","snapshots_taken":50,"snapshots_failed":2,"snapshots_deleted":20,"snapshot_deletion_failures":0}]}`,
}
for ver, out := range tcs {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
fmt.Fprintln(w, out)
}))
defer ts.Close()
Expand Down
4 changes: 2 additions & 2 deletions collector/snapshots.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package collector
import (
"encoding/json"
"fmt"
"io/ioutil"
"io"
"net/http"
"net/url"
"path"
Expand Down Expand Up @@ -250,7 +250,7 @@ func (s *Snapshots) getAndParseURL(u *url.URL, data interface{}) error {
return fmt.Errorf("HTTP Request failed with code %d", res.StatusCode)
}

bts, err := ioutil.ReadAll(res.Body)
bts, err := io.ReadAll(res.Body)
if err != nil {
s.jsonParseFailures.Inc()
return err
Expand Down
16 changes: 1 addition & 15 deletions go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -10,32 +10,18 @@ require (
github.com/go-kit/log v0.2.1
github.com/imdario/mergo v0.3.15
github.com/prometheus/client_golang v1.19.1
github.com/prometheus/common v0.53.0 // indirect
github.com/prometheus/common v0.53.0
gopkg.in/alecthomas/kingpin.v2 v2.2.6
)

require (
github.com/Masterminds/semver/v3 v3.2.1 // indirect
github.com/alecthomas/gometalinter v3.0.0+incompatible // indirect
github.com/alecthomas/kingpin/v2 v2.4.0 // indirect
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/go-logfmt/logfmt v0.5.1 // indirect
github.com/google/go-github/v25 v25.1.3 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/nicksnyder/go-i18n v1.10.3 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/prometheus/client_model v0.6.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
github.com/prometheus/promu v0.17.0 // indirect
github.com/stretchr/testify v1.8.2 // indirect
github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
golang.org/x/oauth2 v0.20.0 // indirect
golang.org/x/sys v0.18.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/alecthomas/kingpin.v3-unstable v3.0.0-20191105091915-95d230a53780 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)
Loading

0 comments on commit 0123c35

Please sign in to comment.