feat: Dynatrace scaler #5685

Merged
merged 30 commits on Jul 30, 2024
Commits
30 commits
7c81b45
Add first scaler version
cyrilico Apr 11, 2024
93e3270
small refactor for response validation
cyrilico Apr 11, 2024
5a77a72
Add 'from' property, rename host/token
cyrilico Apr 11, 2024
0456bc7
Add parsing tests
cyrilico Apr 11, 2024
402e2e8
update changelog
cyrilico Apr 11, 2024
efca4d7
Merge branch 'main' into dynatrace_scaler
cyrilico Apr 11, 2024
2583560
Update CHANGELOG.md
cyrilico Apr 11, 2024
f7661de
Update values type to float64
cyrilico Apr 11, 2024
3b4ca9d
Remove unnecessary conversion
cyrilico Apr 11, 2024
4e3d039
e2e tests
cyrilico Apr 23, 2024
7cf6d2b
Merge branch 'main' into dynatrace_scaler
cyrilico Apr 28, 2024
911b0df
Apply suggestions from code review
cyrilico Apr 28, 2024
698d6be
Update dynatrace_test.go
cyrilico Apr 28, 2024
622266e
Fix bad templating for e2e tests
cyrilico Apr 28, 2024
e0d42eb
Revert unnecessary (?) template variable change
cyrilico Apr 28, 2024
607805e
Apply suggestions from code review
JorTurFer Apr 28, 2024
cde08d3
Update tests/scalers/dynatrace/dynatrace_test.go
JorTurFer Apr 28, 2024
4ffad46
Do not allow token to be passed in scaledobject trigger
cyrilico Apr 29, 2024
f27579c
Remove bad secret, tweak dynakube test config
cyrilico Apr 29, 2024
c2f9467
Rename property in response parsing
cyrilico Apr 29, 2024
eb0b04a
Update tests/scalers/dynatrace/dynatrace_test.go
JorTurFer Apr 29, 2024
de19f06
Merge branch 'main' into dynatrace_scaler
cyrilico May 20, 2024
fb5ce7b
use new operator secret, update template variable naming
cyrilico May 27, 2024
6e03f9b
forgotten correct variable definition
cyrilico May 27, 2024
3505f51
Merge branch 'main' into dynatrace_scaler
cyrilico Jul 20, 2024
b40f11f
try default value in query for e2e tests
cyrilico Jul 21, 2024
fd857af
fix missing closing parenthesis, bad indenting
cyrilico Jul 21, 2024
d6971dc
Update e2e test to use custom metrics
JorTurFer Jul 25, 2024
54428f7
Close the body to fix static checks
JorTurFer Jul 25, 2024
e92d83f
use declarative scaler config
cyrilico Jul 30, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -60,6 +60,7 @@ To learn more about active deprecations, we recommend checking [GitHub Discussio

- TODO ([#XXX](https://github.com/kedacore/keda/issues/XXX))
- **General**: Add --ca-dir flag to KEDA operator to specify directories with CA certificates for scalers to authenticate TLS connections (defaults to /custom/ca) ([#5860](https://github.com/kedacore/keda/issues/5860))
- **General**: Add Dynatrace Scaler ([#5685](https://github.com/kedacore/keda/pull/5685))
- **General**: Declarative parsing of scaler config ([#5037](https://github.com/kedacore/keda/issues/5037)|[#5797](https://github.com/kedacore/keda/issues/5797))
- **General**: Introduce new Splunk Scaler ([#5904](https://github.com/kedacore/keda/issues/5904))
- **General**: Provide CloudEvents around the management of ScaledObjects resources ([#3522](https://github.com/kedacore/keda/issues/3522))
242 changes: 242 additions & 0 deletions pkg/scalers/dynatrace_scaler.go
@@ -0,0 +1,242 @@
package scalers

import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
neturl "net/url"
"strconv"
"strings"

"github.com/go-logr/logr"
v2 "k8s.io/api/autoscaling/v2"
"k8s.io/metrics/pkg/apis/external_metrics"

"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
kedautil "github.com/kedacore/keda/v2/pkg/util"
)

const (
dynatraceMetricDataPointsAPI = "api/v2/metrics/query"
)

type dynatraceScaler struct {
metricType v2.MetricTargetType
metadata *dynatraceMetadata
httpClient *http.Client
logger logr.Logger
}

type dynatraceMetadata struct {
host string
token string
metricSelector string
fromTimestamp string
threshold float64
activationThreshold float64
triggerIndex int
}

// Model of relevant part of Dynatrace's Metric Data Points API Response
// as per https://docs.dynatrace.com/docs/dynatrace-api/environment-api/metric-v2/get-data-points#definition--MetricData
type dynatraceResponse struct {
Result []struct {
Data []struct {
Values []float64 `json:"values"`
} `json:"data"`
} `json:"result"`
}

func NewDynatraceScaler(config *scalersconfig.ScalerConfig) (Scaler, error) {
metricType, err := GetMetricTargetType(config)
if err != nil {
return nil, fmt.Errorf("error getting scaler metric type: %w", err)
}

logger := InitializeLogger(config, "dynatrace_scaler")

meta, err := parseDynatraceMetadata(config, logger)
if err != nil {
return nil, fmt.Errorf("error parsing dynatrace metadata: %w", err)
}

httpClient := kedautil.CreateHTTPClient(config.GlobalHTTPTimeout, false)

logMsg := fmt.Sprintf("Initializing Dynatrace Scaler (Host: %s)", meta.host)

logger.Info(logMsg)

return &dynatraceScaler{
metricType: metricType,
metadata: meta,
httpClient: httpClient,
logger: logger}, nil
}

func parseDynatraceMetadata(config *scalersconfig.ScalerConfig, logger logr.Logger) (*dynatraceMetadata, error) {
meta := dynatraceMetadata{}
var err error

host, err := GetFromAuthOrMeta(config, "host")
if err != nil {
return nil, err
}
meta.host = host

if val, ok := config.AuthParams["token"]; ok && val != "" {
meta.token = val
} else {
return nil, fmt.Errorf("no token given in trigger auth")
}

if val, ok := config.TriggerMetadata["metricSelector"]; ok && val != "" {
meta.metricSelector = val
} else {
return nil, fmt.Errorf("no metricSelector given")
}

if val, ok := config.TriggerMetadata["from"]; ok && val != "" {
meta.fromTimestamp = val
} else {
logger.Info("no 'from' timestamp provided, using default value (last 2 hours)")
meta.fromTimestamp = "now-2h"
}

if val, ok := config.TriggerMetadata["threshold"]; ok && val != "" {
t, err := strconv.ParseFloat(val, 64)
if err != nil {
return nil, fmt.Errorf("error parsing threshold")
}
meta.threshold = t
} else {
if config.AsMetricSource {
meta.threshold = 0
} else {
return nil, fmt.Errorf("missing threshold value")
}
}

meta.activationThreshold = 0
if val, ok := config.TriggerMetadata["activationThreshold"]; ok {
activationThreshold, err := strconv.ParseFloat(val, 64)
if err != nil {
return nil, fmt.Errorf("queryValue parsing error %w", err)
}
meta.activationThreshold = activationThreshold
}

meta.triggerIndex = config.TriggerIndex
return &meta, nil
}

func (s *dynatraceScaler) Close(context.Context) error {
if s.httpClient != nil {
s.httpClient.CloseIdleConnections()
}
return nil
}

// Validate that response object contains the minimum expected structure
// as per https://docs.dynatrace.com/docs/dynatrace-api/environment-api/metric-v2/get-data-points#definition--MetricData
func validateDynatraceResponse(response *dynatraceResponse) error {
if len(response.Result) == 0 {
return errors.New("dynatrace response does not contain any results")
}
if len(response.Result[0].Data) == 0 {
return errors.New("dynatrace response does not contain any metric series")
}
if len(response.Result[0].Data[0].Values) == 0 {
return errors.New("dynatrace response does not contain any values for the metric series")
}
return nil
}

func (s *dynatraceScaler) GetMetricValue(ctx context.Context) (float64, error) {
/*
* Build request
*/
var req *http.Request
var err error

// Append host information to appropriate API endpoint
// Trailing slashes are removed from provided host information to avoid double slashes in the URL
dynatraceAPIURL := fmt.Sprintf("%s/%s", strings.TrimRight(s.metadata.host, "/"), dynatraceMetricDataPointsAPI)

// Add query parameters to the URL
url, _ := neturl.Parse(dynatraceAPIURL)
queryString := url.Query()
queryString.Set("metricSelector", s.metadata.metricSelector)
queryString.Set("from", s.metadata.fromTimestamp)
url.RawQuery = queryString.Encode()

req, err = http.NewRequestWithContext(ctx, "GET", url.String(), nil)
if err != nil {
return 0, err
}

// Authentication header as per https://docs.dynatrace.com/docs/dynatrace-api/basics/dynatrace-api-authentication#authenticate
req.Header.Add("Authorization", fmt.Sprintf("Api-Token %s", s.metadata.token))

/*
* Execute request
*/
r, err := s.httpClient.Do(req)
if err != nil {
return 0, err
}
defer r.Body.Close()

if r.StatusCode != http.StatusOK {
msg := fmt.Sprintf("%s: api returned %d", r.Request.URL.Path, r.StatusCode)
return 0, errors.New(msg)
}

/*
* Parse response
*/
b, err := io.ReadAll(r.Body)
if err != nil {
return 0, err
}
var dynatraceResponse *dynatraceResponse
err = json.Unmarshal(b, &dynatraceResponse)
if err != nil {
return -1, fmt.Errorf("unable to parse Dynatrace Metric Data Points API response: %w", err)
}

err = validateDynatraceResponse(dynatraceResponse)
if err != nil {
return 0, err
}

return dynatraceResponse.Result[0].Data[0].Values[0], nil
}

func (s *dynatraceScaler) GetMetricsAndActivity(ctx context.Context, metricName string) ([]external_metrics.ExternalMetricValue, bool, error) {
val, err := s.GetMetricValue(ctx)

if err != nil {
s.logger.Error(err, "error executing Dynatrace query")
return []external_metrics.ExternalMetricValue{}, false, err
}

metric := GenerateMetricInMili(metricName, val)

return []external_metrics.ExternalMetricValue{metric}, val > s.metadata.activationThreshold, nil
}

func (s *dynatraceScaler) GetMetricSpecForScaling(context.Context) []v2.MetricSpec {
externalMetric := &v2.ExternalMetricSource{
Metric: v2.MetricIdentifier{
Name: GenerateMetricNameWithIndex(s.metadata.triggerIndex, kedautil.NormalizeString("dynatrace")),
},
Target: GetMetricTargetMili(s.metricType, s.metadata.threshold),
}
metricSpec := v2.MetricSpec{
External: externalMetric, Type: externalMetricType,
}
return []v2.MetricSpec{metricSpec}
}
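
For reference, a minimal standalone sketch (not part of this PR) of the response shape the scaler expects from the Metric Data Points API, and of the single value it ultimately reports: Result[0].Data[0].Values[0]. The JSON payload and the metricsResponse type name are illustrative only; the struct simply mirrors dynatraceResponse above.

package main

import (
    "encoding/json"
    "fmt"
)

// Mirrors the dynatraceResponse struct defined in dynatrace_scaler.go above.
type metricsResponse struct {
    Result []struct {
        Data []struct {
            Values []float64 `json:"values"`
        } `json:"data"`
    } `json:"result"`
}

func main() {
    // Hypothetical payload in the shape documented for the Metric Data Points API.
    payload := []byte(`{"result":[{"data":[{"values":[42.5, 40.0]}]}]}`)

    var resp metricsResponse
    if err := json.Unmarshal(payload, &resp); err != nil {
        panic(err)
    }

    // The scaler validates that result, data, and values are all non-empty,
    // then reports the first value of the first series of the first result.
    fmt.Println(resp.Result[0].Data[0].Values[0]) // 42.5
}

A payload missing results, series, or values would fail validateDynatraceResponse before any value is read.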
76 changes: 76 additions & 0 deletions pkg/scalers/dynatrace_scaler_test.go
@@ -0,0 +1,76 @@
package scalers

import (
"context"
"fmt"
"testing"

"github.com/go-logr/logr"

"github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
)

type dynatraceMetadataTestData struct {
metadata map[string]string
authParams map[string]string
errorCase bool
}

type dynatraceMetricIdentifier struct {
metadataTestData *dynatraceMetadataTestData
triggerIndex int
name string
}

var testDynatraceMetadata = []dynatraceMetadataTestData{
{map[string]string{}, map[string]string{}, true},
// all properly formed
{map[string]string{"threshold": "100", "from": "now-3d", "metricSelector": "MyCustomEvent:filter(eq(\"someProperty\",\"someValue\")):count:splitBy(\"dt.entity.process_group\"):fold"}, map[string]string{"host": "http://dummy:1234", "token": "dummy"}, false},
// malformed threshold
{map[string]string{"threshold": "abc", "from": "now-3d", "metricSelector": "MyCustomEvent:filter(eq(\"someProperty\",\"someValue\")):count:splitBy(\"dt.entity.process_group\"):fold"}, map[string]string{"host": "http://dummy:1234", "token": "dummy"}, true},
// malformed activationThreshold
{map[string]string{"activationThreshold": "abc", "threshold": "100", "from": "now-3d", "metricSelector": "MyCustomEvent:filter(eq(\"someProperty\",\"someValue\")):count:splitBy(\"dt.entity.process_group\"):fold"}, map[string]string{"host": "http://dummy:1234", "token": "dummy"}, true},
// missing threshold
{map[string]string{"metricSelector": "MyCustomEvent:filter(eq(\"someProperty\",\"someValue\")):count:splitBy(\"dt.entity.process_group\"):fold"}, map[string]string{"host": "http://dummy:1234", "token": "dummy"}, true},
// missing metricSelector
{map[string]string{"threshold": "100"}, map[string]string{"host": "http://dummy:1234", "token": "dummy"}, true},
// token passed via trigger metadata instead of auth params (not allowed)
{map[string]string{"token": "foo", "threshold": "100", "from": "now-3d", "metricSelector": "MyCustomEvent:filter(eq(\"someProperty\",\"someValue\")):count:splitBy(\"dt.entity.process_group\"):fold"}, map[string]string{"host": "http://dummy:1234"}, true},
}

var dynatraceMetricIdentifiers = []dynatraceMetricIdentifier{
{&testDynatraceMetadata[1], 0, "s0-dynatrace"},
{&testDynatraceMetadata[1], 1, "s1-dynatrace"},
}

func TestDynatraceParseMetadata(t *testing.T) {
for _, testData := range testDynatraceMetadata {
_, err := parseDynatraceMetadata(&scalersconfig.ScalerConfig{TriggerMetadata: testData.metadata, AuthParams: testData.authParams}, logr.Discard())
if err != nil && !testData.errorCase {
fmt.Printf("X: %s", testData.metadata)
t.Error("Expected success but got error", err)
}
if testData.errorCase && err == nil {
fmt.Printf("X: %s", testData.metadata)
t.Error("Expected error but got success")
}
}
}

func TestDynatraceGetMetricSpecForScaling(t *testing.T) {
for _, testData := range dynatraceMetricIdentifiers {
meta, err := parseDynatraceMetadata(&scalersconfig.ScalerConfig{TriggerMetadata: testData.metadataTestData.metadata, AuthParams: testData.metadataTestData.authParams, TriggerIndex: testData.triggerIndex}, logr.Discard())
if err != nil {
t.Fatal("Could not parse metadata:", err)
}
mockDynatraceScaler := dynatraceScaler{
metadata: meta,
httpClient: nil,
}

metricSpec := mockDynatraceScaler.GetMetricSpecForScaling(context.Background())
metricName := metricSpec[0].External.Metric.Name
if metricName != testData.name {
t.Error("Wrong External metric source name:", metricName)
}
}
}
2 changes: 2 additions & 0 deletions pkg/scaling/scalers_builder.go
@@ -162,6 +162,8 @@ func buildScaler(ctx context.Context, client client.Client, triggerType string,
return scalers.NewCronScaler(config)
case "datadog":
return scalers.NewDatadogScaler(ctx, config)
case "dynatrace":
return scalers.NewDynatraceScaler(config)
case "elasticsearch":
return scalers.NewElasticsearchScaler(config)
case "etcd":
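
For illustration only, a hypothetical construction of the ScalerConfig that the "dynatrace" case above hands to NewDynatraceScaler. The metadata and auth keys match those read by parseDynatraceMetadata; all concrete values (tenant URL, token, selector, timeout) are made up, and GlobalHTTPTimeout is assumed to be the time.Duration passed to kedautil.CreateHTTPClient.

package main

import (
    "context"
    "time"

    "github.com/kedacore/keda/v2/pkg/scalers"
    "github.com/kedacore/keda/v2/pkg/scalers/scalersconfig"
)

func main() {
    // Keys mirror parseDynatraceMetadata: host/token via auth params,
    // metricSelector/from/threshold/activationThreshold via trigger metadata.
    config := &scalersconfig.ScalerConfig{
        TriggerMetadata: map[string]string{
            "metricSelector":      `MyCustomEvent:count:splitBy("dt.entity.process_group"):fold`,
            "from":                "now-2h",
            "threshold":           "100",
            "activationThreshold": "10",
        },
        AuthParams: map[string]string{
            "host":  "https://dummy.live.dynatrace.com", // hypothetical tenant
            "token": "dt0c01.dummy",                     // hypothetical token
        },
        GlobalHTTPTimeout: 10 * time.Second,
    }

    scaler, err := scalers.NewDynatraceScaler(config)
    if err != nil {
        panic(err)
    }
    defer scaler.Close(context.Background())

    // GetMetricSpecForScaling returns the external metric spec consumed by the HPA;
    // GetMetricsAndActivity would query the configured tenant at runtime.
    _ = scaler.GetMetricSpecForScaling(context.Background())
}

With a zero trigger index, this configuration would expose an external metric named s0-dynatrace (as in the parse tests above) with a target of 100.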