From 0c7b7cba734137cdc503bd7437b8b20c324e9376 Mon Sep 17 00:00:00 2001 From: kmssap Date: Fri, 4 Nov 2022 16:58:14 +0100 Subject: [PATCH] Support remaining OpenTelemetry Metrics proto spec features (#1335) * Bump OTEL proto version Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Support OTEL ScopeMetrics Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Add support for OTEL schemaUrl Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Add exemplars to metrics plugin Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Add metrics flags Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Add support for Exponential Histogram Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Add config switch for histogram bucket calculation Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Refactor Otel Metrics Proto Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Change config property to snake_case Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Fix JavaDoc Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Remove Clock from tests Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Change config parameters - Introduce allowed max scale - Invert histogram calculation params Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Address review comments - Remove unused import, breaking Checkstyle - Change Exponential Histogram filter - Add lenient to some Mockito calls - Clarify metrics processor documentation Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Fix OtelMetricsRawProcessorConfigTest Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Change ExponentialHistogram Bucket Calculation - Precompute all possible bucket bounds - Consider negative offset Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Fix e2e otel dependency coordinates Signed-off-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter * Fix dependency coordinate for otel Signed-off-by: Kai Sternad Signed-off-by: Kai Sternad Co-authored-by: Kai Sternad Co-authored-by: Tomas Longo Co-authored-by: Karsten Schnitter --- build-resources.gradle | 2 +- data-prepper-api/build.gradle | 2 + .../model/metric/DefaultExemplar.java | 58 +++ .../dataprepper/model/metric/Exemplar.java | 57 +++ .../model/metric/ExponentialHistogram.java | 102 +++++ .../dataprepper/model/metric/Histogram.java | 17 +- .../metric/JacksonExponentialHistogram.java | 272 ++++++++++++ .../model/metric/JacksonHistogram.java | 34 ++ .../model/metric/JacksonMetric.java | 54 ++- .../dataprepper/model/metric/Metric.java | 28 +- .../JacksonExponentialHistogramTest.java | 257 +++++++++++ .../model/metric/JacksonGaugeTest.java | 58 ++- .../model/metric/JacksonHistogramTest.java | 54 ++- .../model/metric/JacksonSumTest.java | 12 +- .../model/metric/JacksonSummaryTest.java | 12 +- .../testjson/exponentialHistogram.json | 61 +++ .../src/test/resources/testjson/gauge.json | 42 ++ .../test/resources/testjson/histogram.json | 48 +++ .../otel-metrics-raw-processor/README.md 
| 101 +++++ .../otel-metrics-raw-processor/build.gradle | 3 +- .../otelmetrics/OTelMetricsProtoHelper.java | 125 +++++- .../otelmetrics/OTelMetricsRawProcessor.java | 243 ++++++++--- .../OtelMetricsRawProcessorConfig.java | 27 ++ ...MetricsPluginExponentialHistogramTest.java | 197 +++++++++ .../otelmetrics/MetricsPluginGaugeTest.java | 189 ++++++++- .../MetricsPluginHistogramTest.java | 95 +++-- .../otelmetrics/MetricsPluginSumTest.java | 8 +- .../otelmetrics/MetricsPluginSummaryTest.java | 8 +- .../OTelMetricsProtoHelperTest.java | 220 +++++++++- .../OtelMetricsRawProcessorConfigTest.java | 25 ++ .../otel-metrics-source/build.gradle | 2 +- .../otel-proto-common/build.gradle | 4 +- .../plugins/otel/codec/OTelProtoCodec.java | 368 ++++++++++++++++ .../otel/codec/OTelProtoCodecTest.java | 400 +++++++++++++++++- .../otel-trace-raw-prepper/build.gradle | 37 ++ .../otel-trace-raw-processor/build.gradle | 2 +- .../otel-trace-source/build.gradle | 2 +- .../peer-forwarder/build.gradle | 45 ++ .../service-map-stateful/build.gradle | 2 +- e2e-test/peerforwarder/build.gradle | 2 +- e2e-test/trace/build.gradle | 2 +- 41 files changed, 3113 insertions(+), 164 deletions(-) create mode 100644 data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/DefaultExemplar.java create mode 100644 data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Exemplar.java create mode 100644 data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/ExponentialHistogram.java create mode 100644 data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogram.java create mode 100644 data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogramTest.java create mode 100644 data-prepper-api/src/test/resources/testjson/exponentialHistogram.json create mode 100644 data-prepper-api/src/test/resources/testjson/gauge.json create mode 100644 data-prepper-api/src/test/resources/testjson/histogram.json create mode 100644 data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java create mode 100644 data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginExponentialHistogramTest.java create mode 100644 data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfigTest.java create mode 100644 data-prepper-plugins/otel-trace-raw-prepper/build.gradle create mode 100644 data-prepper-plugins/peer-forwarder/build.gradle diff --git a/build-resources.gradle b/build-resources.gradle index 45af3142e4..13418aa8d3 100644 --- a/build-resources.gradle +++ b/build-resources.gradle @@ -7,7 +7,7 @@ ext.versionMap = [ junitJupiter : '5.8.2', mockito : '3.11.2', - opentelemetryProto : '1.7.1-alpha', + opentelemetryProto : '0.16.0-alpha', opensearchVersion : '1.3.5', armeria: '1.19.0', armeriaGrpc: '1.19.0', diff --git a/data-prepper-api/build.gradle b/data-prepper-api/build.gradle index 13699268dd..b4328dd340 100644 --- a/data-prepper-api/build.gradle +++ b/data-prepper-api/build.gradle @@ -10,6 +10,8 @@ dependencies { testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' implementation "org.apache.commons:commons-lang3:3.12.0" testImplementation project(':data-prepper-test-common') + testImplementation 'org.skyscreamer:jsonassert:1.5.0' + testImplementation 
'commons-io:commons-io:2.11.0' } jacocoTestCoverageVerification { diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/DefaultExemplar.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/DefaultExemplar.java new file mode 100644 index 0000000000..34e87518ad --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/DefaultExemplar.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.metric; + +import java.util.Map; + +/** + * The default implementation of {@link Exemplar} + * + * @since 1.4 + */ +public class DefaultExemplar implements Exemplar { + + private String time; + private Double value; + private Map attributes; + private String spanId; + private String traceId; + + // required for serialization + DefaultExemplar() {} + + public DefaultExemplar(String time, Double value, String spanId, String traceId, Map attributes) { + this.time = time; + this.value = value; + this.spanId = spanId; + this.traceId = traceId; + this.attributes = attributes; + } + + @Override + public String getTime() { + return time; + } + + @Override + public Double getValue() { + return value; + } + + @Override + public Map getAttributes() { + return attributes; + } + + @Override + public String getSpanId() { + return spanId; + } + + @Override + public String getTraceId() { + return traceId; + } +} diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Exemplar.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Exemplar.java new file mode 100644 index 0000000000..68da4f5db5 --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Exemplar.java @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.metric; + +import java.util.Map; + +/** + * A representation of an exemplar which is a sample input measurement. + * It may contain the span and trace id of a metric event. + * + * @since 1.4 + */ +public interface Exemplar { + + /** + * Gets the string encoded value of time_unix_nano + * @return the time value + * @since 1.4 + */ + String getTime(); + + + /** + * Gets the value for the exemplar + * @return the value + * @since 1.4 + */ + Double getValue(); + + /** + * Gets a collection of key-value pairs related to the exemplar. + * + * @return A map of attributes + * @since 1.4 + */ + Map getAttributes(); + + /** + * Gets the span id of this exemplar. + * + * @return the span id + * @since 1.4 + */ + String getSpanId(); + + /** + * Gets the trace id of this exemplar. 
+ * + * @return the trace id + * @since 1.4 + */ + String getTraceId(); + +} diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/ExponentialHistogram.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/ExponentialHistogram.java new file mode 100644 index 0000000000..81166771f5 --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/ExponentialHistogram.java @@ -0,0 +1,102 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.metric; + +import java.util.List; + +public interface ExponentialHistogram extends Metric { + + /** + * Gets the sum for the histogram + * + * @return the sum of the values in the population + * @since 1.4 + */ + Double getSum(); + + + /** + * Gets the count of the histogram + * @return the count, must be equal to the sum of the "count" fields in buckets + * @since 1.4 + */ + Long getCount(); + + /** + * Gets the aggregation temporality for the histogram + * + * @return the aggregation temporality + * @since 1.4 + */ + String getAggregationTemporality(); + + /** + * Gets the positive range of exponential buckets + * + * @return the buckets + * @since 1.4 + */ + List getPositiveBuckets(); + + /** + * Gets the negative range of exponential buckets + * + * @return the buckets + * @since 1.4 + */ + List getNegativeBuckets(); + + + /** + * Gets the negative range of exponential bucket counts + * + * @return the bucket counts + * @since 1.4 + */ + List getNegative(); + + + /** + * Gets the positive range of exponential bucket counts + * + * @return the bucket counts + * @since 1.4 + */ + List getPositive(); + + /** + * Gets the zero count of events + * + * @return the zero count + * @since 1.4 + */ + Long getZeroCount(); + + /** + * Gets the scale for the histogram + * + * @return the scale + * @since 1.4 + */ + Integer getScale(); + + /** + * Gets the offset for negative buckets + * + * @return the offset + * @since 1.4 + */ + Integer getNegativeOffset(); + + + /** + * Gets the offset for positive buckets + * + * @return the offset + * @since 1.4 + */ + Integer getPositiveOffset(); +} \ No newline at end of file diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Histogram.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Histogram.java index a71f5338e6..1bb20a455a 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Histogram.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Histogram.java @@ -55,11 +55,26 @@ public interface Histogram extends Metric { String getAggregationTemporality(); /** - * Gets the actual buckets for a histogram + * Gets the computed buckets for a histogram * * @return the buckets * @since 1.4 */ List getBuckets(); + /** + * Gets the bucket counts for a histogram + * + * @return the bucket counts + * @since 1.4 + */ + List getBucketCountsList(); + + /** + * Gets the explicit bounds list for a histogram + * + * @return the bounds + * @since 1.4 + */ + List getExplicitBoundsList(); } diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogram.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogram.java new file mode 100644 index 0000000000..8efdde3419 --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogram.java @@
-0,0 +1,272 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.metric; + +import org.opensearch.dataprepper.model.event.EventType; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; + +import static com.google.common.base.Preconditions.checkArgument; + +/** + * A Jackson implementation for {@link Histogram}. + * + * @since 1.4 + */ +public class JacksonExponentialHistogram extends JacksonMetric implements ExponentialHistogram { + + private static final String SUM_KEY = "sum"; + private static final String COUNT_KEY = "count"; + private static final String SCALE_KEY = "scale"; + private static final String AGGREGATION_TEMPORALITY_KEY = "aggregationTemporality"; + private static final String ZERO_COUNT_KEY = "zeroCount"; + private static final String POSITIVE_BUCKETS_KEY = "positiveBuckets"; + private static final String NEGATIVE_BUCKETS_KEY = "negativeBuckets"; + private static final String NEGATIVE_KEY = "negative"; + private static final String POSITIVE_KEY = "positive"; + private static final String NEGATIVE_OFFSET_KEY = "negativeOffset"; + private static final String POSITIVE_OFFSET_KEY = "positiveOffset"; + + private static final List REQUIRED_KEYS = new ArrayList<>(); + private static final List REQUIRED_NON_EMPTY_KEYS = Arrays.asList(NAME_KEY, KIND_KEY, TIME_KEY); + private static final List REQUIRED_NON_NULL_KEYS = Collections.singletonList(SUM_KEY); + + + protected JacksonExponentialHistogram(JacksonExponentialHistogram.Builder builder) { + super(builder); + + checkArgument(this.getMetadata().getEventType().equals(EventType.METRIC.toString()), "eventType must be of type Metric"); + } + + public static JacksonExponentialHistogram.Builder builder() { + return new JacksonExponentialHistogram.Builder(); + } + + @Override + public Double getSum() { + return this.get(SUM_KEY, Double.class); + } + + @Override + public Long getCount() { + return this.get(COUNT_KEY, Long.class); + } + + @Override + public String getAggregationTemporality() { + return this.get(AGGREGATION_TEMPORALITY_KEY, String.class); + } + + @Override + public List getNegativeBuckets() { + return this.getList(NEGATIVE_BUCKETS_KEY, DefaultBucket.class); + } + + @Override + public List getPositiveBuckets() { + return this.getList(POSITIVE_BUCKETS_KEY, DefaultBucket.class); + } + + @Override + public List getNegative() { + return this.getList(NEGATIVE_KEY, Long.class); + } + + @Override + public List getPositive() { + return this.getList(POSITIVE_KEY, Long.class); + } + + @Override + public Long getZeroCount() { + return this.get(ZERO_COUNT_KEY, Long.class); + } + + @Override + public Integer getScale() { + return this.get(SCALE_KEY, Integer.class); + } + + @Override + public Integer getNegativeOffset() { + return this.get(NEGATIVE_OFFSET_KEY, Integer.class); + } + + @Override + public Integer getPositiveOffset() { + return this.get(POSITIVE_OFFSET_KEY, Integer.class); + } + + /** + * Builder for creating JacksonExponentialHistogram + * + * @since 1.4 + */ + public static class Builder extends JacksonMetric.Builder { + + @Override + public JacksonExponentialHistogram.Builder getThis() { + return this; + } + + /** + * Sets the sum of the histogram + * + * @param sum the sum of the histogram + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withSum(double sum) { + data.put(SUM_KEY, sum); + return this; + } + + /** + * 
Sets the count of the histogram. Must be equal to the sum of the "count" fields in buckets + * + * @param count the number of values in the population + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withCount(long count) { + data.put(COUNT_KEY, count); + return this; + } + + /** + * Sets the scale of the histogram. The scale determines the resolution of the exponential buckets + * + * @param scale the scale factor + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withScale(int scale) { + data.put(SCALE_KEY, scale); + return this; + } + + /** + * Sets the count of values that are either exactly zero or within the region considered zero by the + * instrumentation at the tolerated level of precision + * + * @param zeroCount the zero count + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withZeroCount(long zeroCount) { + data.put(ZERO_COUNT_KEY, zeroCount); + return this; + } + + /** + * Sets the aggregation temporality for this histogram + * + * @param aggregationTemporality the aggregation temporality + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withAggregationTemporality(String aggregationTemporality) { + data.put(AGGREGATION_TEMPORALITY_KEY, aggregationTemporality); + return this; + } + + /** + * Sets the positive range of calculated exponential buckets + * + * @param exponentialBuckets a list of buckets + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withPositiveBuckets(List exponentialBuckets) { + data.put(POSITIVE_BUCKETS_KEY, exponentialBuckets); + return this; + } + + /** + * Sets the negative range of exponential buckets + * + * @param exponentialBuckets a list of buckets + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withNegativeBuckets(List exponentialBuckets) { + data.put(NEGATIVE_BUCKETS_KEY, exponentialBuckets); + return this; + } + + /** + * Sets the positive range of exponential bucket counts + * + * @param bucketCountsList positive bucket value counts + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withPositive(List bucketCountsList) { + data.put(POSITIVE_KEY, bucketCountsList); + return this; + } + + /** + * Sets the negative range of exponential bucket counts + * + * @param bucketCountsList negative bucket value counts + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withNegative(List bucketCountsList) { + data.put(NEGATIVE_KEY, bucketCountsList); + return this; + } + + /** + * Sets the offset for the positive buckets + * + * @param offset the offset + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withPositiveOffset(int offset) { + data.put(POSITIVE_OFFSET_KEY, offset); + return this; + } + + /** + * Sets the offset for the negative buckets + * + * @param offset the offset + * @return the builder + * @since 1.4 + */ + public JacksonExponentialHistogram.Builder withNegativeOffset(int offset) { + data.put(NEGATIVE_OFFSET_KEY, offset); + return this; + } + + /** + * Returns a newly created {@link JacksonExponentialHistogram} + * + * @return a JacksonExponentialHistogram + * @since 1.4 + */ + public JacksonExponentialHistogram build() { + this.withData(data); + this.withEventKind(KIND.EXPONENTIAL_HISTOGRAM.toString()); + this.withEventType(EventType.METRIC.toString()); + checkAndSetDefaultValues(); +
new ParameterValidator().validate(REQUIRED_KEYS, REQUIRED_NON_EMPTY_KEYS, REQUIRED_NON_NULL_KEYS, data); + + return new JacksonExponentialHistogram(this); + } + + private void checkAndSetDefaultValues() { + data.computeIfAbsent(ATTRIBUTES_KEY, k -> new HashMap<>()); + } + } +} diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonHistogram.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonHistogram.java index 3ccc810fbf..065c7bc8b6 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonHistogram.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonHistogram.java @@ -28,6 +28,8 @@ public class JacksonHistogram extends JacksonMetric implements Histogram { private static final String BUCKET_COUNTS_KEY = "bucketCounts"; private static final String EXPLICIT_BOUNDS_COUNT_KEY = "explicitBoundsCount"; private static final String BUCKETS_KEY = "buckets"; + private static final String BUCKET_COUNTS_LIST_KEY = "bucketCountsList"; + private static final String EXPLICIT_BOUNDS_KEY = "explicitBounds"; private static final List REQUIRED_KEYS = new ArrayList<>(); private static final List REQUIRED_NON_EMPTY_KEYS = Arrays.asList(NAME_KEY, KIND_KEY, TIME_KEY); @@ -74,6 +76,16 @@ public List getBuckets() { return this.getList(BUCKETS_KEY, DefaultBucket.class); } + @Override + public List getBucketCountsList() { + return this.getList(BUCKET_COUNTS_LIST_KEY, Long.class); + } + + @Override + public List getExplicitBoundsList() { + return this.getList(EXPLICIT_BOUNDS_KEY, Double.class); + } + /** * Builder for creating JacksonHistogram * @@ -152,6 +164,28 @@ public JacksonHistogram.Builder withBuckets(List buckets) { return this; } + /** + * Sets the buckets counts for this histogram + * @param bucketCountsList the list with the individual counts + * @return the builder + * @since 1.4 + */ + public JacksonHistogram.Builder withBucketCountsList(List bucketCountsList) { + data.put(BUCKET_COUNTS_LIST_KEY, bucketCountsList); + return this; + } + + /** + * Sets the buckets bounds for this histogram + * @param explicitBoundsList the list with the individual bucket bounds + * @return the builder + * @since 1.4 + */ + public JacksonHistogram.Builder withExplicitBoundsList(List explicitBoundsList) { + data.put(EXPLICIT_BOUNDS_KEY, explicitBoundsList); + return this; + } + /** * Returns a newly created {@link JacksonHistogram} * @return a JacksonHistogram diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonMetric.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonMetric.java index 775d4e7976..146a0bf34e 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonMetric.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/JacksonMetric.java @@ -11,6 +11,7 @@ import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; /** @@ -28,6 +29,9 @@ public abstract class JacksonMetric extends JacksonEvent implements Metric { protected static final String KIND_KEY = "kind"; protected static final String UNIT_KEY = "unit"; protected static final String ATTRIBUTES_KEY = "attributes"; + protected static final String SCHEMA_URL_KEY = "schemaUrl"; + protected static final String EXEMPLARS_KEY = "exemplars"; + protected static final String FLAGS_KEY = "flags"; protected JacksonMetric(Builder builder) { super(builder); @@ -90,6 
+94,21 @@ public Map getAttributes() { return this.get(ATTRIBUTES_KEY, Map.class); } + @Override + public String getSchemaUrl() { + return this.get(SCHEMA_URL_KEY, String.class); + } + + @Override + public List getExemplars() { + return this.getList(EXEMPLARS_KEY, DefaultExemplar.class); + } + + @Override + public Integer getFlags() { + return this.get(FLAGS_KEY, Integer.class); + } + /** * Builder for creating {@link JacksonMetric} * @@ -160,7 +179,7 @@ public T withDescription(final String description) { /** * Sets the start time of the gauge - * @param startTime + * @param startTime the start time * @return the builder * @since 1.4 */ @@ -190,5 +209,38 @@ public T withServiceName(final String serviceName) { data.put(SERVICE_NAME_KEY, serviceName); return getThis(); } + + /** + * Sets the schema url of the metric event + * @param schemaUrl sets the url of the schema + * @return the builder + * @since 1.4 + */ + public T withSchemaUrl(final String schemaUrl) { + data.put(SCHEMA_URL_KEY, schemaUrl); + return getThis(); + } + + /** + * Sets the exemplars that are associated with this metric event + * @param exemplars sets the exemplars for this metric + * @return the builder + * @since 1.4 + */ + public T withExemplars(final List exemplars) { + data.put(EXEMPLARS_KEY, exemplars); + return getThis(); + } + + /** + * Sets the flags that are associated with this metric event + * @param flags sets the flags for this metric + * @return the builder + * @since 1.4 + */ + public T withFlags(final Integer flags) { + data.put(FLAGS_KEY, flags); + return getThis(); + } } } diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Metric.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Metric.java index f4a885148c..0af6723278 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Metric.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/metric/Metric.java @@ -7,6 +7,7 @@ import org.opensearch.dataprepper.model.event.Event; +import java.util.List; import java.util.Map; /** @@ -23,7 +24,7 @@ public interface Metric extends Event { * @see * The OpenTelemetry Data Model Spec */ - enum KIND {GAUGE, HISTOGRAM, SUM, SUMMARY} + enum KIND {GAUGE, HISTOGRAM, EXPONENTIAL_HISTOGRAM, SUM, SUMMARY} /** * Gets the serviceName of this metric. @@ -49,7 +50,6 @@ enum KIND {GAUGE, HISTOGRAM, SUM, SUMMARY} */ String getDescription(); - /** * Gets the unit in which the metric value is reported. * @@ -90,4 +90,28 @@ enum KIND {GAUGE, HISTOGRAM, SUM, SUMMARY} */ Map getAttributes(); + /** + * Gets the schema url of this metric. + * + * @return the schemaUrl + * @since 1.4 + */ + String getSchemaUrl(); + + /** + * Gets the associated exemplars for this metric event. + * + * @return the exemplars + * @since 1.4 + */ + List getExemplars(); + + + /** + * Gets the associated flags for this metric event. 
+ * + * @return the flags encoded as Integer + * @since 1.4 + */ + Integer getFlags(); } diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogramTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogramTest.java new file mode 100644 index 0000000000..8cd843aaa0 --- /dev/null +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonExponentialHistogramTest.java @@ -0,0 +1,257 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.metric; + +import com.google.common.collect.ImmutableMap; +import io.micrometer.core.instrument.util.IOUtils; +import org.json.JSONException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.model.event.TestObject; +import org.skyscreamer.jsonassert.JSONAssert; + +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class JacksonExponentialHistogramTest { + + private static final Long TEST_KEY1_TIME = TimeUnit.MILLISECONDS.toNanos(ZonedDateTime.of( + LocalDateTime.of(2020, 5, 24, 14, 0, 0), + ZoneOffset.UTC).toInstant().toEpochMilli()); + + private static final String TEST_KEY2 = UUID.randomUUID().toString(); + + private static final Map TEST_ATTRIBUTES = ImmutableMap.of( + "key1", TEST_KEY1_TIME, + "key2", TEST_KEY2); + private static final String TEST_SERVICE_NAME = "service"; + private static final String TEST_NAME = "name"; + private static final String TEST_DESCRIPTION = "description"; + private static final String TEST_UNIT_NAME = "unit"; + private static final String TEST_START_TIME = UUID.randomUUID().toString(); + private static final String TEST_TIME = UUID.randomUUID().toString(); + private static final String TEST_EVENT_KIND = Metric.KIND.EXPONENTIAL_HISTOGRAM.name(); + private static final Double TEST_SUM = 1D; + private static final List TEST_POSITIVE_BUCKETS = Arrays.asList( + new DefaultBucket(0.0, 5.0, 2L), + new DefaultBucket(5.0, 10.0, 5L) + ); + + private static final List TEST_NEGATIVE_BUCKETS = Arrays.asList( + new DefaultBucket(0.0, 5.0, 2L), + new DefaultBucket(5.0, 10.0, 5L) + ); + private static final List TEST_NEGATIVE = Arrays.asList(1L, 2L, 3L); + private static final List TEST_POSITIVE = Arrays.asList(4L, 5L); + private static final Long TEST_COUNT = 2L; + private static final String TEST_AGGREGATION_TEMPORALITY = "AGGREGATIONTEMPORALITY"; + private static final String TEST_SCHEMA_URL = "schema"; + private static final Integer TEST_SCALE = -3; + private static final Long TEST_ZERO_COUNT = 1L; + private static final Integer TEST_NEGATIVE_OFFSET = 2; + private static final Integer TEST_POSITIVE_OFFSET = 5; + + private JacksonExponentialHistogram histogram; + + private JacksonExponentialHistogram.Builder builder; + + @BeforeEach + public void setup() { + builder = JacksonExponentialHistogram.builder() + 
.withAttributes(TEST_ATTRIBUTES) + .withName(TEST_NAME) + .withDescription(TEST_DESCRIPTION) + .withEventKind(TEST_EVENT_KIND) + .withStartTime(TEST_START_TIME) + .withTime(TEST_TIME) + .withUnit(TEST_UNIT_NAME) + .withServiceName(TEST_SERVICE_NAME) + .withSum(TEST_SUM) + .withCount(TEST_COUNT) + .withNegativeBuckets(TEST_NEGATIVE_BUCKETS) + .withPositiveBuckets(TEST_POSITIVE_BUCKETS) + .withAggregationTemporality(TEST_AGGREGATION_TEMPORALITY) + .withSchemaUrl(TEST_SCHEMA_URL) + .withScale(TEST_SCALE) + .withZeroCount(TEST_ZERO_COUNT) + .withPositiveOffset(TEST_POSITIVE_OFFSET) + .withNegativeOffset(TEST_NEGATIVE_OFFSET) + .withNegative(TEST_NEGATIVE) + .withPositive(TEST_POSITIVE); + + histogram = builder.build(); + } + + @Test + public void testGetAttributes() { + final Map attributes = histogram.getAttributes(); + TEST_ATTRIBUTES.keySet().forEach(key -> { + assertThat(attributes, hasKey(key)); + assertThat(attributes.get(key), is(equalTo(TEST_ATTRIBUTES.get(key)))); + } + ); + } + + @Test + public void testGetName() { + final String name = histogram.getName(); + assertThat(name, is(equalTo(TEST_NAME))); + } + + @Test + public void testGetDescription() { + final String description = histogram.getDescription(); + assertThat(description, is(equalTo(TEST_DESCRIPTION))); + } + + @Test + public void testGetKind() { + final String kind = histogram.getKind(); + assertThat(kind, is(equalTo(TEST_EVENT_KIND))); + } + + @Test + public void testGetSum() { + final Double sum = histogram.getSum(); + assertThat(sum, is(equalTo(TEST_SUM))); + } + + @Test + public void testGetCount() { + final Long count = histogram.getCount(); + assertThat(count, is(equalTo(TEST_COUNT))); + } + + @Test + public void testGetServiceName() { + final String name = histogram.getServiceName(); + assertThat(name, is(equalTo(TEST_SERVICE_NAME))); + } + + @Test + public void testGetScale() { + Integer scale = histogram.getScale(); + assertThat(scale, is(equalTo(TEST_SCALE))); + } + + @Test + public void testZeroCount() { + Long zeroCount = histogram.getZeroCount(); + assertThat(zeroCount, is(equalTo(TEST_ZERO_COUNT))); + } + + @Test + public void testGetNegativeBuckets() { + final List buckets = histogram.getNegativeBuckets(); + assertThat(buckets.size(), is(equalTo(2))); + Bucket firstBucket = buckets.get(0); + Bucket secondBucket = buckets.get(1); + + assertThat(firstBucket.getMin(), is(equalTo(0.0))); + assertThat(firstBucket.getMax(), is(equalTo(5.0))); + assertThat(firstBucket.getCount(), is(equalTo(2L))); + + assertThat(secondBucket.getMin(), is(equalTo(5.0))); + assertThat(secondBucket.getMax(), is(equalTo(10.0))); + assertThat(secondBucket.getCount(), is(equalTo(5L))); + } + + @Test + public void testGetPositiveBuckets() { + final List buckets = histogram.getPositiveBuckets(); + assertThat(buckets.size(), is(equalTo(2))); + Bucket firstBucket = buckets.get(0); + Bucket secondBucket = buckets.get(1); + + assertThat(firstBucket.getMin(), is(equalTo(0.0))); + assertThat(firstBucket.getMax(), is(equalTo(5.0))); + assertThat(firstBucket.getCount(), is(equalTo(2L))); + + assertThat(secondBucket.getMin(), is(equalTo(5.0))); + assertThat(secondBucket.getMax(), is(equalTo(10.0))); + assertThat(secondBucket.getCount(), is(equalTo(5L))); + } + + @Test + public void testGetNegative() { + List negativeBucketCounts = histogram.getNegative(); + assertThat(negativeBucketCounts.size(), is(equalTo(3))); + assertEquals(negativeBucketCounts, TEST_NEGATIVE); + } + + @Test + public void testGetPositive() { + List negativeBucketCounts = 
histogram.getPositive(); + assertThat(negativeBucketCounts.size(), is(equalTo(2))); + assertEquals(negativeBucketCounts, TEST_POSITIVE); + } + + @Test + public void testGetPositiveOffset() { + Integer positiveOffset = histogram.getPositiveOffset(); + assertThat(positiveOffset, is(TEST_POSITIVE_OFFSET)); + } + + @Test + public void testGetNegativeOffset() { + Integer negativeOffset = histogram.getNegativeOffset(); + assertThat(negativeOffset, is(TEST_NEGATIVE_OFFSET)); + } + + @Test + public void testGetAggregationTemporality() { + final String aggregationTemporality = histogram.getAggregationTemporality(); + assertThat(aggregationTemporality, is(equalTo(TEST_AGGREGATION_TEMPORALITY))); + } + + @Test + public void testBuilder_missingNonNullParameters_throwsNullPointerException() { + final JacksonSum.Builder builder = JacksonSum.builder(); + builder.withValue(null); + assertThrows(NullPointerException.class, builder::build); + } + + @Test + public void testBuilder_withEmptyTime_throwsIllegalArgumentException() { + builder.withTime(""); + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + public void testGetAttributes_withNull_mustBeEmpty() { + builder.withAttributes(null); + JacksonExponentialHistogram histogram = builder.build(); + histogram.toJsonString(); + assertThat(histogram.getAttributes(), is(anEmptyMap())); + } + + @Test + public void testHistogramToJsonString() throws JSONException { + histogram.put("foo", "bar"); + final String value = UUID.randomUUID().toString(); + histogram.put("testObject", new TestObject(value)); + histogram.put("list", Arrays.asList(1, 4, 5)); + final String result = histogram.toJsonString(); + + String file = IOUtils.toString(this.getClass().getResourceAsStream("/testjson/exponentialHistogram.json")); + String expected = String.format(file, TEST_START_TIME, TEST_TIME, value, TEST_KEY1_TIME, TEST_KEY2); + JSONAssert.assertEquals(expected, result, false); + } +} diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonGaugeTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonGaugeTest.java index cedb55edba..9521dc2160 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonGaugeTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonGaugeTest.java @@ -7,12 +7,15 @@ import org.opensearch.dataprepper.model.event.TestObject; import com.google.common.collect.ImmutableMap; -import org.hamcrest.CoreMatchers; +import io.micrometer.core.instrument.util.IOUtils; +import org.json.JSONException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; import java.util.Arrays; import java.util.Date; +import java.util.List; import java.util.Map; import java.util.UUID; @@ -33,10 +36,17 @@ class JacksonGaugeTest { private static final String TEST_NAME = "name"; private static final String TEST_DESCRIPTION = "description"; private static final String TEST_UNIT_NAME = "unit"; - private static final String TEST_START_TIME = UUID.randomUUID().toString(); - private static final String TEST_TIME = UUID.randomUUID().toString(); + private static final String TEST_START_TIME = "2022-01-01T00:00:00Z"; + private static final String TEST_TIME = "2022-01-02T00:00:00Z"; private static final String TEST_EVENT_KIND = Metric.KIND.GAUGE.name(); private static final Double TEST_VALUE = 1D; + private static final String TEST_SCHEMA_URL = "schema"; + private 
static final Integer TEST_FLAGS = 1; + + private static final List TEST_EXEMPLARS = Arrays.asList( + new DefaultExemplar("1970-01-01T00:00:00Z", 2.0, "xsdf", "abcd", Map.of("test", "value")), + new DefaultExemplar("1971-01-01T00:00:00Z", 5.0, "xsdt", "asdf", Map.of("test", "value")) + ); private JacksonGauge gauge; @@ -53,7 +63,10 @@ public void setup() { .withTime(TEST_TIME) .withUnit(TEST_UNIT_NAME) .withValue(TEST_VALUE) - .withServiceName(TEST_SERVICE_NAME); + .withServiceName(TEST_SERVICE_NAME) + .withExemplars(TEST_EXEMPLARS) + .withSchemaUrl(TEST_SCHEMA_URL) + .withFlags(TEST_FLAGS); gauge = builder.build(); @@ -117,6 +130,24 @@ public void testGetValue() { assertThat(value, is(equalTo(TEST_VALUE))); } + @Test + public void testGetExemplars() { + List exemplars = gauge.getExemplars(); + assertThat(exemplars.size(), is(equalTo(2))); + Exemplar e1 = exemplars.get(0); + Exemplar e2 = exemplars.get(1); + + assertThat(e1.getTime(), equalTo("1970-01-01T00:00:00Z")); + assertThat(e1.getValue(), equalTo(2.0)); + assertThat(e1.getSpanId(), equalTo("xsdf")); + assertThat(e1.getTraceId(), equalTo("abcd")); + + assertThat(e2.getTime(), equalTo("1971-01-01T00:00:00Z")); + assertThat(e2.getValue(), equalTo(5.0)); + assertThat(e2.getSpanId(), equalTo("xsdt")); + assertThat(e2.getTraceId(), equalTo("asdf")); + } + @Test public void testBuilder_missingNonNullParameters_throwsNullPointerException() { final JacksonGauge.Builder builder = JacksonGauge.builder(); @@ -131,12 +162,27 @@ public void testBuilder_withEmptyTime_throwsIllegalArgumentException() { } @Test - public void testGaugeToJsonString() { + public void testGaugeToJsonString() throws JSONException { gauge.put("foo", "bar"); final String value = UUID.randomUUID().toString(); gauge.put("testObject", new TestObject(value)); gauge.put("list", Arrays.asList(1, 4, 5)); final String result = gauge.toJsonString(); - assertThat(result, CoreMatchers.is(CoreMatchers.equalTo(String.format("{\"unit\":\"unit\",\"kind\":\"GAUGE\",\"name\":\"name\",\"description\":\"description\",\"startTime\":\"%s\",\"time\":\"%s\",\"serviceName\":\"service\",\"value\":1.0,\"foo\":\"bar\",\"testObject\":{\"field1\":\"%s\"},\"list\":[1,4,5],\"key1\":%s,\"key2\":\"%s\"}", TEST_START_TIME, TEST_TIME, value, TEST_TIME_KEY1, TEST_KEY2)))); + + String file = IOUtils.toString(this.getClass().getResourceAsStream("/testjson/gauge.json")); + String expected = String.format(file, value, TEST_TIME_KEY1, TEST_KEY2); + JSONAssert.assertEquals(expected, result, false); + } + + @Test + public void testGetSchemaUrl() { + final String url = gauge.getSchemaUrl(); + assertThat(url, is(equalTo(TEST_SCHEMA_URL))); + } + + @Test + public void testGetFlags() { + final Integer flags = gauge.getFlags(); + assertThat(flags, is(equalTo(TEST_FLAGS))); } } \ No newline at end of file diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonHistogramTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonHistogramTest.java index 75ca77d322..318d3a2389 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonHistogramTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonHistogramTest.java @@ -6,8 +6,12 @@ package org.opensearch.dataprepper.model.metric; import com.google.common.collect.ImmutableMap; +import io.micrometer.core.instrument.util.IOUtils; +import org.json.JSONException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; 
+import org.opensearch.dataprepper.model.event.TestObject; +import org.skyscreamer.jsonassert.JSONAssert; import java.util.Arrays; import java.util.Date; @@ -20,13 +24,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; class JacksonHistogramTest { + private static final Long TEST_KEY1_TIME = new Date().getTime(); + private static final String TEST_KEY2 = UUID.randomUUID().toString(); private static final Map TEST_ATTRIBUTES = ImmutableMap.of( - "key1", new Date().getTime(), - "key2", UUID.randomUUID().toString()); + "key1", TEST_KEY1_TIME, + "key2", TEST_KEY2); private static final String TEST_SERVICE_NAME = "service"; private static final String TEST_NAME = "name"; private static final String TEST_DESCRIPTION = "description"; @@ -39,10 +46,14 @@ class JacksonHistogramTest { new DefaultBucket(0.0, 5.0, 2L), new DefaultBucket(5.0, 10.0, 5L) ); + + private static final List TEST_BUCKET_COUNTS_LIST = Arrays.asList(1L, 2L, 3L); + private static final List TEST_EXPLICIT_BOUNDS_LIST = Arrays.asList(5D, 10D, 100D); private static final Integer TEST_BUCKETS_COUNT = 2; private static final Long TEST_COUNT = 2L; private static final Integer TEST_EXPLICIT_BOUNDS_COUNT = 2; private static final String TEST_AGGREGATION_TEMPORALITY = "AGGREGATIONTEMPORALITY"; + private static final String TEST_SCHEMA_URL = "schema"; private JacksonHistogram histogram; @@ -64,7 +75,10 @@ public void setup() { .withBucketCount(TEST_BUCKETS_COUNT) .withBuckets(TEST_BUCKETS) .withExplicitBoundsCount(TEST_EXPLICIT_BOUNDS_COUNT) - .withAggregationTemporality(TEST_AGGREGATION_TEMPORALITY); + .withAggregationTemporality(TEST_AGGREGATION_TEMPORALITY) + .withSchemaUrl(TEST_SCHEMA_URL) + .withExplicitBoundsList(TEST_EXPLICIT_BOUNDS_LIST) + .withBucketCountsList(TEST_BUCKET_COUNTS_LIST); histogram = builder.build(); } @@ -133,9 +147,21 @@ public void testGetBuckets() { } @Test - public void testGetBucketCount() { - final Integer bucketCount = histogram.getBucketCount(); - assertThat(bucketCount, is(equalTo(TEST_BUCKETS_COUNT))); + public void testGetBucketCountsList() { + List counts = histogram.getBucketCountsList(); + assertEquals(counts, TEST_BUCKET_COUNTS_LIST); + } + + @Test + public void testGetExplicitBoundsList() { + List bounds = histogram.getExplicitBoundsList(); + assertEquals(bounds, TEST_EXPLICIT_BOUNDS_LIST); + } + + @Test + public void testGetBucketCounts() { + Integer count = histogram.getBucketCount(); + assertEquals(count, TEST_BUCKETS_COUNT); } @Test @@ -168,6 +194,20 @@ public void testGetAttributes_withNull_mustBeEmpty() { builder.withAttributes(null); JacksonHistogram histogram = builder.build(); histogram.toJsonString(); - assertThat(histogram.getAttributes(),is(anEmptyMap())); + assertThat(histogram.getAttributes(), is(anEmptyMap())); } + + @Test + public void testHistogramToJsonString() throws JSONException { + histogram.put("foo", "bar"); + final String value = UUID.randomUUID().toString(); + histogram.put("testObject", new TestObject(value)); + histogram.put("list", Arrays.asList(1, 4, 5)); + final String result = histogram.toJsonString(); + + String file = IOUtils.toString(this.getClass().getResourceAsStream("/testjson/histogram.json")); + String expected = String.format(file, TEST_START_TIME, TEST_TIME, value, TEST_KEY1_TIME, TEST_KEY2); + JSONAssert.assertEquals(expected, result, false); + } + } \ 
No newline at end of file diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSumTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSumTest.java index fdc9c6536b..a166a0c042 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSumTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSumTest.java @@ -34,6 +34,7 @@ public class JacksonSumTest { private static final String TEST_EVENT_KIND = Metric.KIND.SUM.name(); private static final boolean TEST_IS_MONOTONIC = true; private static final Double TEST_VALUE = 1D; + private static final String TEST_SCHEMA_URL = "schema"; private JacksonSum sum; @@ -52,7 +53,8 @@ public void setup() { .withUnit(TEST_UNIT_NAME) .withIsMonotonic(TEST_IS_MONOTONIC) .withValue(TEST_VALUE) - .withServiceName(TEST_SERVICE_NAME); + .withServiceName(TEST_SERVICE_NAME) + .withSchemaUrl(TEST_SCHEMA_URL); sum = builder.build(); @@ -124,7 +126,6 @@ public void testGetMonotonic() { assertThat(monotonic, is(equalTo(TEST_IS_MONOTONIC))); } - @Test public void testGetValue() { final Double value = sum.getValue(); @@ -143,4 +144,11 @@ public void testBuilder_withEmptyTime_throwsIllegalArgumentException() { builder.withTime(""); assertThrows(IllegalArgumentException.class, builder::build); } + + @Test + public void testGetSchemaUrl() { + final String url = sum.getSchemaUrl(); + assertThat(url, is(equalTo(TEST_SCHEMA_URL))); + } + } \ No newline at end of file diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSummaryTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSummaryTest.java index e5d1e04dce..4f94fc86d2 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSummaryTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/metric/JacksonSummaryTest.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.model.metric; import com.google.common.collect.ImmutableMap; +import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -41,6 +42,7 @@ public class JacksonSummaryTest { ); private static final Integer TEST_QUANTILES_COUNT = 2; private static final Long TEST_COUNT = 2L; + private static final String TEST_SCHEMA_URL = "schema"; private JacksonSummary summary; @@ -60,7 +62,8 @@ public void setup() { .withSum(TEST_SUM) .withQuantiles(TEST_QUANTILES) .withCount(TEST_COUNT) - .withQuantilesValueCount(TEST_QUANTILES_COUNT); + .withQuantilesValueCount(TEST_QUANTILES_COUNT) + .withSchemaUrl(TEST_SCHEMA_URL); summary = builder.build(); } @@ -149,4 +152,11 @@ public void testBuilder_withEmptyTime_throwsIllegalArgumentException() { builder.withTime(""); assertThrows(IllegalArgumentException.class, builder::build); } + + @Test + public void testGetSchemaUrl() { + final String url = summary.getSchemaUrl(); + assertThat(url, Matchers.is(Matchers.equalTo(TEST_SCHEMA_URL))); + } + } diff --git a/data-prepper-api/src/test/resources/testjson/exponentialHistogram.json b/data-prepper-api/src/test/resources/testjson/exponentialHistogram.json new file mode 100644 index 0000000000..e20933cfa1 --- /dev/null +++ b/data-prepper-api/src/test/resources/testjson/exponentialHistogram.json @@ -0,0 +1,61 @@ +{ + "kind": "EXPONENTIAL_HISTOGRAM", + "count": 2, + "description": "description", + "scale": -3, + "sum": 1.0, + "positiveOffset": 5, + "positive": [ + 4, + 5 + ], 
+ "serviceName": "service", + "negativeBuckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ], + "schemaUrl": "schema", + "zeroCount": 1, + "unit": "unit", + "aggregationTemporality": "AGGREGATIONTEMPORALITY", + "negative": [ + 1, + 2, + 3 + ], + "name": "name", + "startTime": "%s", + "time": "%s", + "positiveBuckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ], + "negativeOffset": 2, + "foo": "bar", + "testObject": { + "field1": "%s" + }, + "list": [ + 1, + 4, + 5 + ], + "key1": %s, + "key2": "%s" +} \ No newline at end of file diff --git a/data-prepper-api/src/test/resources/testjson/gauge.json b/data-prepper-api/src/test/resources/testjson/gauge.json new file mode 100644 index 0000000000..4a71dc3569 --- /dev/null +++ b/data-prepper-api/src/test/resources/testjson/gauge.json @@ -0,0 +1,42 @@ +{ + "unit": "unit", + "exemplars": [ + { + "time": "1970-01-01T00:00:00Z", + "value": 2.0, + "attributes": { + "test": "value" + }, + "spanId": "xsdf", + "traceId": "abcd" + }, + { + "time": "1971-01-01T00:00:00Z", + "value": 5.0, + "attributes": { + "test": "value" + }, + "spanId": "xsdt", + "traceId": "asdf" + } + ], + "kind": "GAUGE", + "name": "name", + "description": "description", + "startTime": "2022-01-01T00:00:00Z", + "time": "2022-01-02T00:00:00Z", + "serviceName": "service", + "value": 1.0, + "schemaUrl": "schema", + "foo": "bar", + "testObject": { + "field1": "%s" + }, + "list": [ + 1, + 4, + 5 + ], + "key1": %s, + "key2": "%s" +} \ No newline at end of file diff --git a/data-prepper-api/src/test/resources/testjson/histogram.json b/data-prepper-api/src/test/resources/testjson/histogram.json new file mode 100644 index 0000000000..fd61c2480c --- /dev/null +++ b/data-prepper-api/src/test/resources/testjson/histogram.json @@ -0,0 +1,48 @@ +{ + "kind": "HISTOGRAM", + "buckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ], + "count": 2, + "bucketCountsList": [ + 1, + 2, + 3 + ], + "description": "description", + "sum": 1.0, + "serviceName": "service", + "schemaUrl": "schema", + "unit": "unit", + "aggregationTemporality": "AGGREGATIONTEMPORALITY", + "bucketCounts": 2, + "name": "name", + "startTime": "%s", + "explicitBoundsCount": 2, + "time": "%s", + "explicitBounds": [ + 5.0, + 10.0, + 100.0 + ], + "foo": "bar", + "testObject": { + "field1": "%s" + }, + "list": [ + 1, + 4, + 5 + ], + "key1": %s, + "key2": "%s" +} \ No newline at end of file diff --git a/data-prepper-plugins/otel-metrics-raw-processor/README.md b/data-prepper-plugins/otel-metrics-raw-processor/README.md index 3f5be314d9..9d8b067524 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/README.md +++ b/data-prepper-plugins/otel-metrics-raw-processor/README.md @@ -9,6 +9,107 @@ processor: - otel_metrics_raw_processor ``` +## Configurations +It is possible to create explicit representations of histogram buckets and their boundaries. 
This feature can be controlled with the following parameters: + +```yaml + processor: + - otel_metrics_raw_processor: + calculate_histogram_buckets: true + calculate_exponential_histogram_buckets: true + exponential_histogram_max_allowed_scale: 10 +``` + +There are three possible parameters: `calculate_histogram_buckets`, `calculate_exponential_histogram_buckets` and `exponential_histogram_max_allowed_scale`. +If `calculate_histogram_buckets` and `calculate_exponential_histogram_buckets` are not provided, they default to `false`. +If `exponential_histogram_max_allowed_scale` is not provided, it defaults to 10. + +If `calculate_histogram_buckets` is set to `true`, the following JSON will be added to every histogram JSON: + +```json + "buckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ] +``` + +Each array element describes one bucket. Each bucket contains its lower boundary, upper boundary, and value count. +This is an explicit form of the denser OpenTelemetry representation that is already part of the JSON output created by this plugin: + +```json + "explicitBounds": [ + 5.0, + 10.0 + ], + "bucketCountsList": [ + 2, + 5 + ] +``` + + +If `calculate_exponential_histogram_buckets` is set to `true`, the following JSON will be added to every exponential histogram JSON: +```json + + "negativeBuckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ], +... + "positiveBuckets": [ + { + "min": 0.0, + "max": 5.0, + "count": 2 + }, + { + "min": 5.0, + "max": 10.0, + "count": 5 + } + ], +``` + +Again, this is a more explicit form of the dense OpenTelemetry representation, which consists of negative and positive bucket counts along with +a scale parameter and offsets: +```json + "negative": [ + 1, + 2, + 3 + ], + "positive": [ + 1, + 2, + 3 + ], + "scale" : -3, + "negativeOffset" : 0, + "positiveOffset" : 1 +``` + +The `exponential_histogram_max_allowed_scale` parameter defines the maximum allowed scale for the exponential histogram. Increasing this parameter will increase potential +memory consumption. See [the spec](https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/metrics/v1/metrics.proto) for more information on exponential histograms and their computational complexity. + +All exponential histograms with a scale above the configured parameter (by default, 10) will be discarded and logged at error level. +**Note**: the absolute scale value is used for comparison, so a scale of -11 is treated the same as 11; it exceeds the configured value of 10 and the histogram is therefore discarded. + ## Metrics This plugin uses all common metrics in [AbstractProcessor](https://github.com/opensearch-project/data-prepper/blob/main/data-prepper-api/src/main/java/com/amazon/dataprepper/model/processor/AbstractProcessor.java), and does not currently introduce custom metrics.
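The scale discussion in the README above is easier to follow with the bucket-boundary rule from the OpenTelemetry data model: for scale `s`, the base is `2^(2^-s)`, and the bucket at index `k` covers the interval `(base^k, base^(k+1)]`. The following self-contained sketch illustrates that relationship; it is not part of this patch, and the class and method names are hypothetical:

```java
// Illustrative sketch of exponential histogram bucket boundaries as defined
// by the OpenTelemetry data model. Hypothetical names; not code from this patch.
public class ExponentialBucketBoundsSketch {

    // Lower boundary of bucket index k at the given scale:
    // base = 2^(2^-scale), so boundary = base^k = 2^(k * 2^-scale).
    static double boundary(int scale, int k) {
        return Math.pow(2.0, k * Math.pow(2.0, -scale));
    }

    public static void main(String[] args) {
        int scale = 3;   // example scale; higher scale means finer buckets
        int offset = 0;  // example positive-bucket offset
        for (int i = 0; i < 4; i++) {
            int k = offset + i;
            System.out.printf("bucket %d: (%f, %f]%n",
                    i, boundary(scale, k), boundary(scale, k + 1));
        }
    }
}
```

Since each doubling of the value range is split into `2^scale` buckets, a scale of 10 already means 1024 buckets per power of two, which is why raising `exponential_histogram_max_allowed_scale` increases the memory needed for precomputed bucket bounds.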
diff --git a/data-prepper-plugins/otel-metrics-raw-processor/build.gradle b/data-prepper-plugins/otel-metrics-raw-processor/build.gradle index 2926526ee4..7a66ff1521 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/build.gradle +++ b/data-prepper-plugins/otel-metrics-raw-processor/build.gradle @@ -10,10 +10,11 @@ plugins { dependencies { implementation project(':data-prepper-api') implementation project(':data-prepper-plugins:common') + implementation project(':data-prepper-plugins:otel-proto-common') implementation 'commons-codec:commons-codec:1.15' implementation 'org.apache.commons:commons-lang3:3.12.0' testImplementation project(':data-prepper-api').sourceSets.test.output - implementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" implementation "com.google.protobuf:protobuf-java-util:${versionMap.protobufJavaUtil}" implementation "com.linecorp.armeria:armeria:${versionMap.armeria}" implementation "com.linecorp.armeria:armeria-grpc:${versionMap.armeriaGrpc}" diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelper.java b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelper.java index c5310032e8..a925bb4730 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelper.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelper.java @@ -9,13 +9,18 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.InstrumentationLibrary; +import io.opentelemetry.proto.common.v1.InstrumentationScope; import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; import io.opentelemetry.proto.metrics.v1.NumberDataPoint; import io.opentelemetry.proto.metrics.v1.SummaryDataPoint; import io.opentelemetry.proto.resource.v1.Resource; +import org.apache.commons.codec.binary.Hex; import org.opensearch.dataprepper.model.metric.Bucket; import org.opensearch.dataprepper.model.metric.DefaultBucket; +import org.opensearch.dataprepper.model.metric.DefaultExemplar; import org.opensearch.dataprepper.model.metric.DefaultQuantile; +import org.opensearch.dataprepper.model.metric.Exemplar; import org.opensearch.dataprepper.model.metric.Quantile; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,6 +32,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; @@ -35,11 +41,13 @@ public final class OTelMetricsProtoHelper { private static final Logger LOG = LoggerFactory.getLogger(OTelMetricsProtoHelper.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final String SERVICE_NAME = "service.name"; - private static final String SPAN_ATTRIBUTES = "metric.attributes"; + private static final String METRIC_ATTRIBUTES = "metric.attributes"; static final String RESOURCE_ATTRIBUTES = "resource.attributes"; + static final String EXEMPLAR_ATTRIBUTES = "exemplar.attributes"; static final String 
INSTRUMENTATION_LIBRARY_NAME = "instrumentationLibrary.name"; static final String INSTRUMENTATION_LIBRARY_VERSION = "instrumentationLibrary.version"; - + static final String INSTRUMENTATION_SCOPE_NAME = "instrumentationScope.name"; + static final String INSTRUMENTATION_SCOPE_VERSION = "instrumentationScope.version"; /** * To make it ES friendly we will replace '.' in keys with '@' in all the Keys in {@link io.opentelemetry.proto.common.v1.KeyValue} @@ -49,15 +57,17 @@ public final class OTelMetricsProtoHelper { public static final Function REPLACE_DOT_WITH_AT = i -> i.replace(DOT, AT); /** - * Span and Resource attributes are essential for kibana so they should not be nested. SO we will prefix them with "span.attributes" - * and "resource.attributes". + * Span and Resource attributes are essential for kibana so they should not be nested. SO we will prefix them with "metric.attributes" + * and "resource.attributes" and "exemplar.attributes". */ - public static final Function PREFIX_AND_SPAN_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> SPAN_ATTRIBUTES + DOT + i.replace(DOT, AT); + public static final Function PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> METRIC_ATTRIBUTES + DOT + i.replace(DOT, AT); public static final Function PREFIX_AND_RESOURCE_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> RESOURCE_ATTRIBUTES + DOT + i.replace(DOT, AT); + public static final Function PREFIX_AND_EXEMPLAR_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> EXEMPLAR_ATTRIBUTES + DOT + i.replace(DOT, AT); private OTelMetricsProtoHelper() { } + private static final Map EXPONENTIAL_BUCKET_BOUNDS = new ConcurrentHashMap<>(); /** * Converts an {@link AnyValue} into its appropriate data type @@ -110,7 +120,7 @@ public static Object convertAnyValue(final AnyValue value) { */ public static Map convertKeysOfDataPointAttributes(final NumberDataPoint numberDataPoint) { return numberDataPoint.getAttributesList().stream() - .collect(Collectors.toMap(i -> PREFIX_AND_SPAN_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); + .collect(Collectors.toMap(i -> PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); } /** @@ -123,9 +133,23 @@ public static Map convertKeysOfDataPointAttributes(final NumberD */ public static Map unpackKeyValueList(List attributesList) { return attributesList.stream() - .collect(Collectors.toMap(i -> PREFIX_AND_SPAN_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); + .collect(Collectors.toMap(i -> PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); } + /** + * Unpacks the List of {@link KeyValue} object into a Map. + *

+ * Converts the keys into an os friendly format and casts the underlying data into its actual type? + * + * @param attributesList The list of {@link KeyValue} objects to process + * @return A Map containing unpacked {@link KeyValue} data + */ + public static Map unpackExemplarValueList(List attributesList) { + return attributesList.stream() + .collect(Collectors.toMap(i -> PREFIX_AND_EXEMPLAR_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); + } + + /** * Extracts a value from the passed {@link NumberDataPoint} into a double representation * @@ -144,6 +168,24 @@ public static Double getValueAsDouble(final NumberDataPoint ndp) { } } + /** + * Extracts a value from the passed {@link io.opentelemetry.proto.metrics.v1.Exemplar} into a double representation + * + * @param exemplar The {@link io.opentelemetry.proto.metrics.v1.Exemplar} which's data should be turned into a double value + * @return A double representing the numerical value of the passed {@link io.opentelemetry.proto.metrics.v1.Exemplar}. + * Null if the numerical data point is not present + */ + public static Double getExemplarValueAsDouble(final io.opentelemetry.proto.metrics.v1.Exemplar exemplar) { + io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase valueCase = exemplar.getValueCase(); + if (io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase.AS_DOUBLE == valueCase) { + return exemplar.getAsDouble(); + } else if (io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase.AS_INT == valueCase) { + return (double) exemplar.getAsInt(); + } else { + return null; + } + } + public static Map getResourceAttributes(final Resource resource) { return resource.getAttributesList().stream() .collect(Collectors.toMap(i -> PREFIX_AND_RESOURCE_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))); @@ -165,6 +207,23 @@ public static Map getInstrumentationLibraryAttributes(final Inst return instrumentationAttr; } + /** + * Extracts the name and version of the used instrumentation scope used + * + * @return A map, containing information about the instrumentation scope + */ + public static Map getInstrumentationScopeAttributes(final InstrumentationScope instrumentationScope) { + final Map instrumentationScopeAttr = new HashMap<>(); + if (!instrumentationScope.getName().isEmpty()) { + instrumentationScopeAttr.put(INSTRUMENTATION_SCOPE_NAME, instrumentationScope.getName()); + } + if (!instrumentationScope.getVersion().isEmpty()) { + instrumentationScopeAttr.put(INSTRUMENTATION_SCOPE_VERSION, instrumentationScope.getVersion()); + } + return instrumentationScopeAttr; + } + + public static String convertUnixNanosToISO8601(final long unixNano) { return Instant.ofEpochSecond(0L, unixNano).toString(); } @@ -250,4 +309,56 @@ public static List createBuckets(List bucketCountsList, List convertExemplars(List exemplarsList) { + return exemplarsList.stream().map(exemplar -> + new DefaultExemplar(convertUnixNanosToISO8601(exemplar.getTimeUnixNano()), + getExemplarValueAsDouble(exemplar), + Hex.encodeHexString(exemplar.getSpanId().toByteArray()), + Hex.encodeHexString(exemplar.getTraceId().toByteArray()), + unpackExemplarValueList(exemplar.getFilteredAttributesList()))) + .collect(Collectors.toList()); + } + + + static double[] calculateBoundaries(int scale) { + int len = 1 << Math.abs(scale); + double[] boundaries = new double[len + 1]; + for (int i = 0; i <= len ; i++) { + boundaries[i] = scale >=0 ? 
+ Math.pow(2., i / (double) len) : + Math.pow(2., Math.pow(2., i)); + } + return boundaries; + } + + /** + * Maps a List of {@link io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets} to an + * internal representation for Data Prepper. + * See data model + * + * @param buckets the list of buckets + * @param scale the scale of the exponential histogram + * @return a mapped list of Buckets + */ + public static List createExponentialBuckets(ExponentialHistogramDataPoint.Buckets buckets, int scale) { + double[] bucketBounds = EXPONENTIAL_BUCKET_BOUNDS.computeIfAbsent(scale, integer -> calculateBoundaries(scale)); + List mappedBuckets = new ArrayList<>(); + int offset = buckets.getOffset(); + List bucketsList = buckets.getBucketCountsList(); + for (int i = 0; i < bucketsList.size(); i++) { + Long value = bucketsList.get(i); + double lowerBound = bucketBounds[offset + i]; + double upperBound = bucketBounds[offset + i + 1]; + mappedBuckets.add(new DefaultBucket(lowerBound, upperBound, value)); + } + return mappedBuckets; + } } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsRawProcessor.java b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsRawProcessor.java index 3eceb5e013..a44978d46b 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsRawProcessor.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsRawProcessor.java @@ -8,21 +8,22 @@ import io.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest; import io.opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics; import io.opentelemetry.proto.metrics.v1.ResourceMetrics; +import io.opentelemetry.proto.metrics.v1.ScopeMetrics; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; +import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; import org.opensearch.dataprepper.model.configuration.PluginSetting; -import org.opensearch.dataprepper.model.metric.Gauge; -import org.opensearch.dataprepper.model.metric.Histogram; +import org.opensearch.dataprepper.model.metric.JacksonExponentialHistogram; import org.opensearch.dataprepper.model.metric.JacksonGauge; import org.opensearch.dataprepper.model.metric.JacksonHistogram; import org.opensearch.dataprepper.model.metric.JacksonSum; import org.opensearch.dataprepper.model.metric.JacksonSummary; import org.opensearch.dataprepper.model.metric.Metric; -import org.opensearch.dataprepper.model.metric.Sum; -import org.opensearch.dataprepper.model.metric.Summary; import org.opensearch.dataprepper.model.processor.AbstractProcessor; import org.opensearch.dataprepper.model.processor.Processor; import org.opensearch.dataprepper.model.record.Record; - +import org.opensearch.dataprepper.plugins.otel.codec.OTelProtoCodec; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -31,142 +32,248 @@ import java.util.stream.Collectors; -@DataPrepperPlugin(name = "otel_metrics_raw_processor", pluginType = Processor.class) +@DataPrepperPlugin(name = "otel_metrics_raw_processor", pluginType = Processor.class, pluginConfigurationType = OtelMetricsRawProcessorConfig.class) public class OTelMetricsRawProcessor 
extends AbstractProcessor, Record> { - public OTelMetricsRawProcessor(PluginSetting pluginSetting) { + private static final Logger LOG = LoggerFactory.getLogger(OTelMetricsRawProcessor.class); + + private final OtelMetricsRawProcessorConfig otelMetricsRawProcessorConfig; + + @DataPrepperPluginConstructor + public OTelMetricsRawProcessor(PluginSetting pluginSetting, final OtelMetricsRawProcessorConfig otelMetricsRawProcessorConfig) { super(pluginSetting); + this.otelMetricsRawProcessorConfig = otelMetricsRawProcessorConfig; } @Override public Collection> doExecute(Collection> records) { Collection> recordsOut = new ArrayList<>(); - for (Record ets : records) { for (ResourceMetrics rs : ets.getData().getResourceMetricsList()) { - final Map resourceAttributes = OTelMetricsProtoHelper.getResourceAttributes(rs.getResource()); - final String serviceName = OTelMetricsProtoHelper.getServiceName(rs.getResource()).orElse(null); + final String schemaUrl = rs.getSchemaUrl(); + final Map resourceAttributes = OTelProtoCodec.getResourceAttributes(rs.getResource()); + final String serviceName = OTelProtoCodec.getServiceName(rs.getResource()).orElse(null); for (InstrumentationLibraryMetrics is : rs.getInstrumentationLibraryMetricsList()) { - final Map ils = OTelMetricsProtoHelper.getInstrumentationLibraryAttributes(is.getInstrumentationLibrary()); - for (io.opentelemetry.proto.metrics.v1.Metric metric : is.getMetricsList()) { - if (metric.hasGauge()) { - recordsOut.addAll(mapGauge(metric, serviceName, ils, resourceAttributes)); - } else - if (metric.hasSum()) { - recordsOut.addAll(mapSum(metric, serviceName, ils, resourceAttributes)); - } else - if (metric.hasSummary()) { - recordsOut.addAll(mapSummary(metric, serviceName, ils, resourceAttributes)); - } else - if (metric.hasHistogram()) { - recordsOut.addAll(mapHistogram(metric, serviceName, ils, resourceAttributes)); - } - } + final Map ils = OTelProtoCodec.getInstrumentationLibraryAttributes(is.getInstrumentationLibrary()); + recordsOut.addAll(processMetricsList(is.getMetricsList(), serviceName, ils, resourceAttributes, schemaUrl)); } + + for (ScopeMetrics sm : rs.getScopeMetricsList()) { + final Map ils = OTelProtoCodec.getInstrumentationScopeAttributes(sm.getScope()); + recordsOut.addAll(processMetricsList(sm.getMetricsList(), serviceName, ils, resourceAttributes, schemaUrl)); + } + + } + } + return recordsOut; + } + + private List> processMetricsList(final List metricsList, + final String serviceName, + final Map ils, + final Map resourceAttributes, + final String schemaUrl) { + List> recordsOut = new ArrayList<>(); + for (io.opentelemetry.proto.metrics.v1.Metric metric : metricsList) { + if (metric.hasGauge()) { + recordsOut.addAll(mapGauge(metric, serviceName, ils, resourceAttributes, schemaUrl)); + } else if (metric.hasSum()) { + recordsOut.addAll(mapSum(metric, serviceName, ils, resourceAttributes, schemaUrl)); + } else if (metric.hasSummary()) { + recordsOut.addAll(mapSummary(metric, serviceName, ils, resourceAttributes, schemaUrl)); + } else if (metric.hasHistogram()) { + recordsOut.addAll(mapHistogram(metric, serviceName, ils, resourceAttributes, schemaUrl)); + } else if (metric.hasExponentialHistogram()) { + recordsOut.addAll(mapExponentialHistogram(metric, serviceName, ils, resourceAttributes, schemaUrl)); } } return recordsOut; } - private List> mapGauge(io.opentelemetry.proto.metrics.v1.Metric metric, String serviceName, final Map ils, final Map resourceAttributes) { + private List> mapGauge(io.opentelemetry.proto.metrics.v1.Metric metric, 
+ String serviceName, + final Map ils, + final Map resourceAttributes, + final String schemaUrl) { return metric.getGauge().getDataPointsList().stream() - .map(dp -> (Gauge) JacksonGauge.builder() + .map(dp -> JacksonGauge.builder() .withUnit(metric.getUnit()) .withName(metric.getName()) .withDescription(metric.getDescription()) - .withStartTime(OTelMetricsProtoHelper.getStartTimeISO8601(dp)) - .withTime(OTelMetricsProtoHelper.getTimeISO8601(dp)) + .withStartTime(OTelProtoCodec.getStartTimeISO8601(dp)) + .withTime(OTelProtoCodec.getTimeISO8601(dp)) .withServiceName(serviceName) - .withValue(OTelMetricsProtoHelper.getValueAsDouble(dp)) - .withAttributes(OTelMetricsProtoHelper.mergeAllAttributes( + .withValue(OTelProtoCodec.getValueAsDouble(dp)) + .withAttributes(OTelProtoCodec.mergeAllAttributes( Arrays.asList( - OTelMetricsProtoHelper.convertKeysOfDataPointAttributes(dp), + OTelProtoCodec.convertKeysOfDataPointAttributes(dp), resourceAttributes, ils ) )) + .withSchemaUrl(schemaUrl) + .withExemplars(OTelProtoCodec.convertExemplars(dp.getExemplarsList())) + .withFlags(dp.getFlags()) .build()) .map(Record::new) .collect(Collectors.toList()); } - private List> mapSum(io.opentelemetry.proto.metrics.v1.Metric metric, String serviceName, final Map ils, final Map resourceAttributes) { + private List> mapSum(final io.opentelemetry.proto.metrics.v1.Metric metric, + final String serviceName, + final Map ils, + final Map resourceAttributes, + final String schemaUrl) { return metric.getSum().getDataPointsList().stream() - .map(dp -> (Sum) JacksonSum.builder() + .map(dp -> JacksonSum.builder() .withUnit(metric.getUnit()) .withName(metric.getName()) .withDescription(metric.getDescription()) - .withStartTime(OTelMetricsProtoHelper.getStartTimeISO8601(dp)) - .withTime(OTelMetricsProtoHelper.getTimeISO8601(dp)) + .withStartTime(OTelProtoCodec.getStartTimeISO8601(dp)) + .withTime(OTelProtoCodec.getTimeISO8601(dp)) .withServiceName(serviceName) .withIsMonotonic(metric.getSum().getIsMonotonic()) - .withValue(OTelMetricsProtoHelper.getValueAsDouble(dp)) + .withValue(OTelProtoCodec.getValueAsDouble(dp)) .withAggregationTemporality(metric.getSum().getAggregationTemporality().toString()) - .withAttributes(OTelMetricsProtoHelper.mergeAllAttributes( + .withAttributes(OTelProtoCodec.mergeAllAttributes( Arrays.asList( - OTelMetricsProtoHelper.convertKeysOfDataPointAttributes(dp), + OTelProtoCodec.convertKeysOfDataPointAttributes(dp), resourceAttributes, ils ) )) + .withSchemaUrl(schemaUrl) + .withExemplars(OTelProtoCodec.convertExemplars(dp.getExemplarsList())) + .withFlags(dp.getFlags()) .build()) .map(Record::new) .collect(Collectors.toList()); } - private List> mapSummary(io.opentelemetry.proto.metrics.v1.Metric metric, String serviceName, final Map ils, final Map resourceAttributes) { + private List> mapSummary(final io.opentelemetry.proto.metrics.v1.Metric metric, + final String serviceName, + final Map ils, + final Map resourceAttributes, + final String schemaUrl) { return metric.getSummary().getDataPointsList().stream() - .map(dp -> (Summary) JacksonSummary.builder() + .map(dp -> JacksonSummary.builder() .withUnit(metric.getUnit()) .withName(metric.getName()) .withDescription(metric.getDescription()) - .withStartTime(OTelMetricsProtoHelper.convertUnixNanosToISO8601(dp.getStartTimeUnixNano())) - .withTime(OTelMetricsProtoHelper.convertUnixNanosToISO8601(dp.getTimeUnixNano())) + .withStartTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getStartTimeUnixNano())) + 
.withTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getTimeUnixNano())) .withServiceName(serviceName) .withCount(dp.getCount()) .withSum(dp.getSum()) - .withQuantiles(OTelMetricsProtoHelper.getQuantileValues(dp.getQuantileValuesList())) + .withQuantiles(OTelProtoCodec.getQuantileValues(dp.getQuantileValuesList())) .withQuantilesValueCount(dp.getQuantileValuesCount()) - .withAttributes(OTelMetricsProtoHelper.mergeAllAttributes( + .withAttributes(OTelProtoCodec.mergeAllAttributes( Arrays.asList( - OTelMetricsProtoHelper.unpackKeyValueList(dp.getAttributesList()), + OTelProtoCodec.unpackKeyValueList(dp.getAttributesList()), resourceAttributes, ils ) )) + .withSchemaUrl(schemaUrl) + .withFlags(dp.getFlags()) .build()) .map(Record::new) .collect(Collectors.toList()); } - private List> mapHistogram(io.opentelemetry.proto.metrics.v1.Metric metric, String serviceName, final Map ils, final Map resourceAttributes) { + private List> mapHistogram(final io.opentelemetry.proto.metrics.v1.Metric metric, + final String serviceName, + final Map ils, + final Map resourceAttributes, + final String schemaUrl) { return metric.getHistogram().getDataPointsList().stream() - .map(dp -> (Histogram) JacksonHistogram.builder() - .withUnit(metric.getUnit()) - .withName(metric.getName()) - .withDescription(metric.getDescription()) - .withStartTime(OTelMetricsProtoHelper.convertUnixNanosToISO8601(dp.getStartTimeUnixNano())) - .withTime(OTelMetricsProtoHelper.convertUnixNanosToISO8601(dp.getTimeUnixNano())) - .withServiceName(serviceName) - .withSum(dp.getSum()) - .withCount(dp.getCount()) - .withBucketCount(dp.getBucketCountsCount()) - .withExplicitBoundsCount(dp.getExplicitBoundsCount()) - .withAggregationTemporality(metric.getHistogram().getAggregationTemporality().toString()) - .withBuckets(OTelMetricsProtoHelper.createBuckets(dp.getBucketCountsList(), dp.getExplicitBoundsList())) - .withAttributes(OTelMetricsProtoHelper.mergeAllAttributes( - Arrays.asList( - OTelMetricsProtoHelper.unpackKeyValueList(dp.getAttributesList()), - resourceAttributes, - ils - ) - )) - .build()) + .map(dp -> { + JacksonHistogram.Builder builder = JacksonHistogram.builder() + .withUnit(metric.getUnit()) + .withName(metric.getName()) + .withDescription(metric.getDescription()) + .withStartTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getStartTimeUnixNano())) + .withTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getTimeUnixNano())) + .withServiceName(serviceName) + .withSum(dp.getSum()) + .withCount(dp.getCount()) + .withBucketCount(dp.getBucketCountsCount()) + .withExplicitBoundsCount(dp.getExplicitBoundsCount()) + .withAggregationTemporality(metric.getHistogram().getAggregationTemporality().toString()) + .withBucketCountsList(dp.getBucketCountsList()) + .withExplicitBoundsList(dp.getExplicitBoundsList()) + .withAttributes(OTelProtoCodec.mergeAllAttributes( + Arrays.asList( + OTelProtoCodec.unpackKeyValueList(dp.getAttributesList()), + resourceAttributes, + ils + ) + )) + .withSchemaUrl(schemaUrl) + .withExemplars(OTelProtoCodec.convertExemplars(dp.getExemplarsList())) + .withFlags(dp.getFlags()); + if (otelMetricsRawProcessorConfig.getCalculateHistogramBuckets()) { + builder.withBuckets(OTelProtoCodec.createBuckets(dp.getBucketCountsList(), dp.getExplicitBoundsList())); + } + return builder.build(); + + }) .map(Record::new) .collect(Collectors.toList()); } + private List> mapExponentialHistogram(io.opentelemetry.proto.metrics.v1.Metric metric, String serviceName, Map ils, Map resourceAttributes, String schemaUrl) { + return 
metric.getExponentialHistogram().getDataPointsList().stream() + .filter(dp -> { + if (otelMetricsRawProcessorConfig.getCalculateExponentialHistogramBuckets() && + otelMetricsRawProcessorConfig.getExponentialHistogramMaxAllowedScale() < Math.abs(dp.getScale())){ + LOG.error("Exponential histogram can not be processed since its scale of {} is bigger than the configured max of {}.", dp.getScale(), otelMetricsRawProcessorConfig.getExponentialHistogramMaxAllowedScale()); + return false; + } else { + return true; + } + }) + .map(dp -> { + JacksonExponentialHistogram.Builder builder = JacksonExponentialHistogram.builder() + .withUnit(metric.getUnit()) + .withName(metric.getName()) + .withDescription(metric.getDescription()) + .withStartTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getStartTimeUnixNano())) + .withTime(OTelProtoCodec.convertUnixNanosToISO8601(dp.getTimeUnixNano())) + .withServiceName(serviceName) + .withSum(dp.getSum()) + .withCount(dp.getCount()) + .withZeroCount(dp.getZeroCount()) + .withScale(dp.getScale()) + .withPositive(dp.getPositive().getBucketCountsList()) + .withPositiveOffset(dp.getPositive().getOffset()) + .withNegative(dp.getNegative().getBucketCountsList()) + .withNegativeOffset(dp.getNegative().getOffset()) + .withAggregationTemporality(metric.getHistogram().getAggregationTemporality().toString()) + .withAttributes(OTelProtoCodec.mergeAllAttributes( + Arrays.asList( + OTelProtoCodec.unpackKeyValueList(dp.getAttributesList()), + resourceAttributes, + ils + ) + )) + .withSchemaUrl(schemaUrl) + .withExemplars(OTelProtoCodec.convertExemplars(dp.getExemplarsList())) + .withFlags(dp.getFlags()); + + if (otelMetricsRawProcessorConfig.getCalculateExponentialHistogramBuckets()) { + builder.withPositiveBuckets(OTelProtoCodec.createExponentialBuckets(dp.getPositive(), dp.getScale())); + builder.withNegativeBuckets(OTelProtoCodec.createExponentialBuckets(dp.getNegative(), dp.getScale())); + } + + return builder.build(); + }) + .map(Record::new) + .collect(Collectors.toList()); + } + + @Override public void prepareForShutdown() { } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java new file mode 100644 index 0000000000..998d36acd0 --- /dev/null +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.otelmetrics; + +public class OtelMetricsRawProcessorConfig { + + private Boolean calculateHistogramBuckets = true; + + private Boolean calculateExponentialHistogramBuckets = true; + + private Integer exponentialHistogramMaxAllowedScale = 10; + + public Boolean getCalculateExponentialHistogramBuckets() { + return calculateExponentialHistogramBuckets; + } + + public Boolean getCalculateHistogramBuckets() { + return calculateHistogramBuckets; + } + + public Integer getExponentialHistogramMaxAllowedScale() { + return exponentialHistogramMaxAllowedScale; + } +} diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginExponentialHistogramTest.java 
b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginExponentialHistogramTest.java new file mode 100644 index 0000000000..59447e5bb4 --- /dev/null +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginExponentialHistogramTest.java @@ -0,0 +1,197 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.otelmetrics; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogram; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; +import io.opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics; +import io.opentelemetry.proto.metrics.v1.ResourceMetrics; +import io.opentelemetry.proto.resource.v1.Resource; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.opensearch.dataprepper.model.configuration.PluginSetting; +import org.opensearch.dataprepper.model.metric.Bucket; +import org.opensearch.dataprepper.model.metric.DefaultBucket; +import org.opensearch.dataprepper.model.metric.Metric; +import org.opensearch.dataprepper.model.record.Record; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.entry; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class MetricsPluginExponentialHistogramTest { + + private static final Double MAX_ERROR = 0.00001; + + private OTelMetricsRawProcessor rawProcessor; + + @Mock + private OtelMetricsRawProcessorConfig config; + + private static final ExponentialHistogramDataPoint EXPONENTIAL_HISTOGRAM_DATA_POINT = ExponentialHistogramDataPoint.newBuilder() + .setNegative(ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(1) + .addBucketCounts(2) + .addBucketCounts(3).setOffset(0).build()) + .setPositive(ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(5) + .addBucketCounts(6).setOffset(1).build()) + .setScale(3) + .setCount(4) + .setSum(1d / 3d) + .setFlags(1) + .build(); + + @Before + public void init() { + PluginSetting testsettings = new PluginSetting("testsettings", Collections.emptyMap()); + testsettings.setPipelineName("testpipeline"); + rawProcessor = new OTelMetricsRawProcessor(testsettings, config); + } + + @Test + public void testWithMaxScaleExceedingConfiguredNegativeScale() { + when(config.getExponentialHistogramMaxAllowedScale()).thenReturn(-2); + lenient().when(config.getCalculateExponentialHistogramBuckets()).thenReturn(true); + ExponentialHistogram histogram = ExponentialHistogram.newBuilder() + .addDataPoints(EXPONENTIAL_HISTOGRAM_DATA_POINT).build(); + + List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); + 
assertThat(processedRecords).isEmpty(); + } + + @Test + public void testWithMaxScaleExceedingConfiguredPositiveScale() { + when(config.getExponentialHistogramMaxAllowedScale()).thenReturn(2); + lenient().when(config.getCalculateExponentialHistogramBuckets()).thenReturn(true); + ExponentialHistogram histogram = ExponentialHistogram.newBuilder() + .addDataPoints(EXPONENTIAL_HISTOGRAM_DATA_POINT).build(); + + List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); + assertThat(processedRecords).isEmpty(); + } + + @Test + public void test() throws JsonProcessingException { + when(config.getExponentialHistogramMaxAllowedScale()).thenReturn(10); + lenient().when(config.getCalculateExponentialHistogramBuckets()).thenReturn(true); + ExponentialHistogram histogram = ExponentialHistogram.newBuilder() + .addDataPoints(EXPONENTIAL_HISTOGRAM_DATA_POINT).build(); + + List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); + Record record = processedRecords.get(0); + ObjectMapper objectMapper = new ObjectMapper(); + Map map = objectMapper.readValue(record.getData().toJsonString(), Map.class); + + DefaultBucket negative_1 = new DefaultBucket(1D, 1.0905077326652577, 1L); + DefaultBucket negative_2 = new DefaultBucket(1.0905077326652577, 1.189207115002721, 2L); + DefaultBucket negative_3 = new DefaultBucket(1.189207115002721, 1.2968395546510096, 3L); + + + DefaultBucket positive_1 = new DefaultBucket(1.0905077326652577, 1.189207115002721, 4L); + DefaultBucket positive_2 = new DefaultBucket(1.189207115002721, 1.2968395546510096, 5L); + DefaultBucket positive_3 = new DefaultBucket(1.2968395546510096, 1.4142135623730951, 6L); + + assertHistogramProcessing(map, Arrays.asList(negative_1, negative_2, negative_3, positive_1, positive_2, positive_3)); + } + + @Test + public void testWithHistogramCalculationFlagDisabled() throws JsonProcessingException { + when(config.getCalculateExponentialHistogramBuckets()).thenReturn(false); + lenient().when(config.getExponentialHistogramMaxAllowedScale()).thenReturn(10); + + ExponentialHistogram histogram = ExponentialHistogram.newBuilder() + .addDataPoints(EXPONENTIAL_HISTOGRAM_DATA_POINT).build(); + + List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); + Record record = processedRecords.get(0); + ObjectMapper objectMapper = new ObjectMapper(); + Map map = objectMapper.readValue(record.getData().toJsonString(), Map.class); + + assertHistogramProcessing(map, Collections.emptyList()); + } + + private ExportMetricsServiceRequest fillServiceRequest(ExponentialHistogram histogram) { + io.opentelemetry.proto.metrics.v1.Metric metric = io.opentelemetry.proto.metrics.v1.Metric.newBuilder() + .setExponentialHistogram(histogram) + .setUnit("seconds") + .setName("name") + .setDescription("description") + .build(); + InstrumentationLibraryMetrics instLib = InstrumentationLibraryMetrics.newBuilder() + .addMetrics(metric).build(); + + Resource resource = Resource.newBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey("service.name") + .setValue(AnyValue.newBuilder().setStringValue("service").build()) + ).build(); + ResourceMetrics resourceMetrics = ResourceMetrics.newBuilder() + .setResource(resource) + .addInstrumentationLibraryMetrics(instLib) + .build(); + return ExportMetricsServiceRequest.newBuilder().addResourceMetrics(resourceMetrics).build(); + } + + 
private void assertHistogramProcessing(Map map, List expectedBuckets) { + assertThat(map).contains(entry("kind", Metric.KIND.EXPONENTIAL_HISTOGRAM.toString())); + assertThat(map).contains(entry("unit", "seconds")); + assertThat(map).contains(entry("description", "description")); + assertThat(map).contains(entry("name", "name")); + assertThat(map).contains(entry("sum", (1d / 3d))); + assertThat(map).contains(entry("count", 4)); + assertThat(map).contains(entry("serviceName", "service")); + assertThat(map).contains(entry("aggregationTemporality", "AGGREGATION_TEMPORALITY_UNSPECIFIED")); + assertThat(map).contains(entry("flags", 1)); + assertThat(map).contains(entry("negativeOffset", 0)); + assertThat(map).contains(entry("positiveOffset", 1)); + assertThat(map).contains(entry("positive", Arrays.asList(4, 5, 6))); + assertThat(map).contains(entry("negative", Arrays.asList(1, 2, 3))); + assertThat(map).contains(entry("scale", 3)); + + if (expectedBuckets.isEmpty()) { + assertThat(map).doesNotContainKey("negativeBuckets"); + assertThat(map).doesNotContainKey("positiveBuckets"); + } else { + assertThat(map).containsKey("negativeBuckets"); + assertThat(map).containsKey("positiveBuckets"); + List negativeBuckets = (List) map.get("negativeBuckets"); + List positiveBuckets = (List) map.get("positiveBuckets"); + negativeBuckets.addAll(positiveBuckets); + assertThat(negativeBuckets).hasSize(expectedBuckets.size()); + + for (int i = 0; i < expectedBuckets.size(); i++) { + Bucket expectedBucket = expectedBuckets.get(i); + Map actualBucket = negativeBuckets.get(i); + Double min = (Double) actualBucket.get("min"); + Double max = (Double) actualBucket.get("max"); + Integer count = (Integer) actualBucket.get("count"); + + MatcherAssert.assertThat(expectedBucket.getMin(), Matchers.closeTo(min, MAX_ERROR)); + MatcherAssert.assertThat(expectedBucket.getMax(), Matchers.closeTo(max, MAX_ERROR)); + assertThat(Integer.toUnsignedLong(count)).isEqualTo(expectedBucket.getCount()); + } + } + } +} diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginGaugeTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginGaugeTest.java index b08175a003..b5c9eefa1b 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginGaugeTest.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginGaugeTest.java @@ -7,14 +7,20 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.protobuf.ByteString; import io.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest; import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.InstrumentationLibrary; +import io.opentelemetry.proto.common.v1.InstrumentationScope; import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.Exemplar; import io.opentelemetry.proto.metrics.v1.Gauge; import io.opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics; import io.opentelemetry.proto.metrics.v1.NumberDataPoint; import io.opentelemetry.proto.metrics.v1.ResourceMetrics; +import io.opentelemetry.proto.metrics.v1.ScopeMetrics; import io.opentelemetry.proto.resource.v1.Resource; +import 
org.apache.commons.codec.binary.Hex; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -23,30 +29,51 @@ import org.opensearch.dataprepper.model.metric.Metric; import org.opensearch.dataprepper.model.record.Record; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import static org.opensearch.dataprepper.plugins.processor.otelmetrics.OTelMetricsProtoHelperTest.getRandomBytes; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; - @RunWith(MockitoJUnitRunner.class) public class MetricsPluginGaugeTest { + private static final Long START_TIME = TimeUnit.MILLISECONDS.toNanos(ZonedDateTime.of( + LocalDateTime.of(2020, 5, 24, 14, 0, 0), + ZoneOffset.UTC).toInstant().toEpochMilli()); + + private static final Long TIME = TimeUnit.MILLISECONDS.toNanos(ZonedDateTime.of( + LocalDateTime.of(2020, 5, 24, 14, 1, 0), + ZoneOffset.UTC).toInstant().toEpochMilli()); + + private OTelMetricsRawProcessor rawProcessor; + private static final Random RANDOM = new Random(); + + private byte[] getRandomBytes(int len) { + byte[] bytes = new byte[len]; + RANDOM.nextBytes(bytes); + return bytes; + } @Before public void init() { PluginSetting testsettings = new PluginSetting("testsettings", Collections.emptyMap()); testsettings.setPipelineName("testpipeline"); - rawProcessor = new OTelMetricsRawProcessor(testsettings); + rawProcessor = new OTelMetricsRawProcessor(testsettings, new OtelMetricsRawProcessorConfig()); } @Test - public void test() throws JsonProcessingException { + public void testInstrumentationLibrary() throws JsonProcessingException { NumberDataPoint.Builder p1 = NumberDataPoint.newBuilder().setAsInt(4); Gauge gauge = Gauge.newBuilder().addDataPoints(p1).build(); @@ -56,7 +83,13 @@ public void test() throws JsonProcessingException { .setName("name") .setDescription("description"); - InstrumentationLibraryMetrics isntLib = InstrumentationLibraryMetrics.newBuilder().addMetrics(metric).build(); + InstrumentationLibraryMetrics isntLib = InstrumentationLibraryMetrics.newBuilder() + .addMetrics(metric) + .setInstrumentationLibrary(InstrumentationLibrary.newBuilder() + .setName("ilname") + .setVersion("ilversion") + .build()) + .build(); Resource resource = Resource.newBuilder() .addAttributes(KeyValue.newBuilder() @@ -79,18 +112,156 @@ public void test() throws JsonProcessingException { Record dataPrepperResult = list.get(0); ObjectMapper objectMapper = new ObjectMapper(); - Map map = objectMapper.readValue(dataPrepperResult.getData().toJsonString(), Map.class); - assertSumProcessing(map); + Map map = objectMapper.readValue(dataPrepperResult.getData().toJsonString(), Map.class); + assertThat(map).contains(entry("kind", Metric.KIND.GAUGE.toString())); + assertThat(map).contains(entry("unit", "seconds")); + assertThat(map).contains(entry("serviceName", "service")); + assertThat(map).contains(entry("resource.attributes.service@name", "service")); + assertThat(map).contains(entry("description", "description")); + assertThat(map).contains(entry("value", 4.0D)); + assertThat(map).contains(entry("startTime", "1970-01-01T00:00:00Z")); + assertThat(map).contains(entry("time", "1970-01-01T00:00:00Z")); + assertThat(map).contains(entry("instrumentationLibrary.name", "ilname")); + 
assertThat(map).contains(entry("instrumentationLibrary.version", "ilversion")); + } - private void assertSumProcessing(Map map) { + @Test + public void testScopeMetricsLibrary() throws JsonProcessingException { + NumberDataPoint.Builder p1 = NumberDataPoint.newBuilder().setAsInt(4); + Gauge gauge = Gauge.newBuilder().addDataPoints(p1).build(); + + io.opentelemetry.proto.metrics.v1.Metric.Builder metric = io.opentelemetry.proto.metrics.v1.Metric.newBuilder() + .setGauge(gauge) + .setUnit("seconds") + .setName("name") + .setDescription("description"); + + ScopeMetrics scopeMetrics = ScopeMetrics.newBuilder() + .addMetrics(metric) + .setScope(InstrumentationScope.newBuilder() + .setName("smname") + .setVersion("smversion")) + .build(); + + Resource resource = Resource.newBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey("service.name") + .setValue(AnyValue.newBuilder().setStringValue("service").build()) + ).build(); + + ResourceMetrics resourceMetrics = ResourceMetrics.newBuilder() + .addScopeMetrics(scopeMetrics) + .setResource(resource) + .build(); + + ExportMetricsServiceRequest exportMetricRequest = ExportMetricsServiceRequest.newBuilder() + .addResourceMetrics(resourceMetrics).build(); + + Record record = new Record<>(exportMetricRequest); + + Collection> records = rawProcessor.doExecute(Collections.singletonList(record)); + List> list = new ArrayList<>(records); + + Record dataPrepperResult = list.get(0); + ObjectMapper objectMapper = new ObjectMapper(); + Map map = objectMapper.readValue(dataPrepperResult.getData().toJsonString(), Map.class); + assertThat(map).contains(entry("kind", Metric.KIND.GAUGE.toString())); assertThat(map).contains(entry("unit", "seconds")); assertThat(map).contains(entry("serviceName", "service")); assertThat(map).contains(entry("resource.attributes.service@name", "service")); assertThat(map).contains(entry("description", "description")); assertThat(map).contains(entry("value", 4.0D)); - assertThat(map).contains(entry("startTime","1970-01-01T00:00:00Z")); - assertThat(map).contains(entry("time","1970-01-01T00:00:00Z")); + assertThat(map).contains(entry("startTime", "1970-01-01T00:00:00Z")); + assertThat(map).contains(entry("time", "1970-01-01T00:00:00Z")); + assertThat(map).contains(entry("instrumentationScope.name", "smname")); + assertThat(map).contains(entry("instrumentationScope.version", "smversion")); + + } + + @Test + public void testWithExemplar() throws JsonProcessingException { + + byte[] spanId = getRandomBytes(8); + byte[] traceId = getRandomBytes(8); + + Exemplar e1 = Exemplar.newBuilder() + .addFilteredAttributes(KeyValue.newBuilder() + .setKey("key") + .setValue(AnyValue.newBuilder().setBoolValue(true).build()).build()) + .setAsDouble(3) + .setSpanId(ByteString.copyFrom(spanId)) + .setTimeUnixNano(TIME) + .setTraceId(ByteString.copyFrom(traceId)) + .build(); + + NumberDataPoint.Builder p1 = NumberDataPoint.newBuilder() + .addExemplars(e1) + .setStartTimeUnixNano(START_TIME) + .setTimeUnixNano(TIME) + .setAsInt(4) + .setFlags(1); + + Gauge gauge = Gauge.newBuilder().addDataPoints(p1).build(); + + io.opentelemetry.proto.metrics.v1.Metric.Builder metric = io.opentelemetry.proto.metrics.v1.Metric.newBuilder() + .setGauge(gauge) + .setUnit("seconds") + .setName("name") + .setDescription("description"); + + ScopeMetrics scopeMetrics = ScopeMetrics.newBuilder() + .addMetrics(metric) + .setScope(InstrumentationScope.newBuilder() + .setName("smname") + .setVersion("smversion")) + .build(); + + Resource resource = Resource.newBuilder() + 
.addAttributes(KeyValue.newBuilder() + .setKey("service.name") + .setValue(AnyValue.newBuilder().setStringValue("service").build()) + ).build(); + + ResourceMetrics resourceMetrics = ResourceMetrics.newBuilder() + .addScopeMetrics(scopeMetrics) + .setResource(resource) + .build(); + + ExportMetricsServiceRequest exportMetricRequest = ExportMetricsServiceRequest.newBuilder() + .addResourceMetrics(resourceMetrics).build(); + + Record record = new Record<>(exportMetricRequest); + + Collection> records = rawProcessor.doExecute(Collections.singletonList(record)); + List> list = new ArrayList<>(records); + + Record dataPrepperResult = list.get(0); + ObjectMapper objectMapper = new ObjectMapper(); + Map map = objectMapper.readValue(dataPrepperResult.getData().toJsonString(), Map.class); + + assertThat(map).contains(entry("kind", Metric.KIND.GAUGE.toString())); + assertThat(map).contains(entry("unit", "seconds")); + assertThat(map).contains(entry("serviceName", "service")); + assertThat(map).contains(entry("resource.attributes.service@name", "service")); + assertThat(map).contains(entry("description", "description")); + assertThat(map).contains(entry("value", 4.0D)); + assertThat(map).contains(entry("startTime", "2020-05-24T14:00:00Z")); + assertThat(map).contains(entry("time", "2020-05-24T14:01:00Z")); + assertThat(map).contains(entry("instrumentationScope.name", "smname")); + assertThat(map).contains(entry("instrumentationScope.version", "smversion")); + assertThat(map).contains(entry("flags", 1)); + + List> exemplars = (List>) map.get("exemplars"); + assertThat(exemplars.size()).isEqualTo(1); + Map eTest = exemplars.get(0); + + assertThat(eTest).contains(entry("time", "2020-05-24T14:01:00Z")); + assertThat(eTest).contains(entry("value", 3.0)); + assertThat(eTest).contains(entry("spanId", Hex.encodeHexString(spanId))); + assertThat(eTest).contains(entry("traceId", Hex.encodeHexString(traceId))); + Map atts = (Map) eTest.get("attributes"); + assertThat(atts).contains(entry("exemplar.attributes.key", true)); } } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginHistogramTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginHistogramTest.java index 4fb2deaf72..7e7be137bf 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginHistogramTest.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginHistogramTest.java @@ -18,6 +18,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.metric.Bucket; @@ -32,49 +33,65 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; +import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class MetricsPluginHistogramTest { private OTelMetricsRawProcessor rawProcessor; + @Mock + private OtelMetricsRawProcessorConfig config; + + private final HistogramDataPoint HISTOGRAM_DATA_POINT = HistogramDataPoint.newBuilder() + .addBucketCounts(0) + .addBucketCounts(5) + .addBucketCounts(17) + .addBucketCounts(33) + 
.addExplicitBounds(10.0) + .addExplicitBounds(100.0) + .addExplicitBounds(1000.0) + .setCount(4) + .setSum(1d / 3d) + .setFlags(1) + .build(); + @Before public void init() { PluginSetting testsettings = new PluginSetting("testsettings", Collections.emptyMap()); testsettings.setPipelineName("testpipeline"); - rawProcessor = new OTelMetricsRawProcessor(testsettings); + rawProcessor = new OTelMetricsRawProcessor(testsettings, config); } @Test public void test() throws JsonProcessingException { + when(config.getCalculateHistogramBuckets()).thenReturn(true); + Histogram histogram = Histogram.newBuilder().addDataPoints(HISTOGRAM_DATA_POINT).build(); - final double bound_0 = 10.0; - final double bound_1 = 100.0; - final double bound_2 = 1000.0; - HistogramDataPoint dp = HistogramDataPoint.newBuilder() - .addBucketCounts(0) - .addBucketCounts(5) - .addBucketCounts(17) - .addBucketCounts(33) - .addExplicitBounds(bound_0) - .addExplicitBounds(bound_1) - .addExplicitBounds(bound_2) - .setCount(4) - .setSum(1d / 3d) - .build(); + List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); + Record record = processedRecords.get(0); + ObjectMapper objectMapper = new ObjectMapper(); + Map map = objectMapper.readValue(record.getData().toJsonString(), Map.class); - Histogram histogram = Histogram.newBuilder().addDataPoints(dp).build(); + DefaultBucket bucket_0 = new DefaultBucket((double) -Float.MAX_VALUE, 10.0, 0L); + DefaultBucket bucket_1 = new DefaultBucket(10.0, 100.0, 5L); + DefaultBucket bucket_2 = new DefaultBucket(100.0, 1000.0, 17L); + DefaultBucket bucket_3 = new DefaultBucket(1000.0, (double) Float.MAX_VALUE, 33L); + assertHistogramProcessing(map, Arrays.asList(bucket_0, bucket_1, bucket_2, bucket_3)); + } + + @Test + public void testWithConfigFlagDisabled() throws JsonProcessingException { + when(config.getCalculateHistogramBuckets()).thenReturn(false); + + Histogram histogram = Histogram.newBuilder().addDataPoints(HISTOGRAM_DATA_POINT).build(); List> processedRecords = (List>) rawProcessor.doExecute(Collections.singletonList(new Record<>(fillServiceRequest(histogram)))); Record record = processedRecords.get(0); ObjectMapper objectMapper = new ObjectMapper(); Map map = objectMapper.readValue(record.getData().toJsonString(), Map.class); - DefaultBucket bucket_0 = new DefaultBucket((double) -Float.MAX_VALUE, bound_0, 0L); - DefaultBucket bucket_1 = new DefaultBucket(bound_0, bound_1, 5L); - DefaultBucket bucket_2 = new DefaultBucket(bound_1, bound_2, 17L); - DefaultBucket bucket_3 = new DefaultBucket(bound_2, (double) Float.MAX_VALUE, 33L); - assertHistogramProcessing(map, Arrays.asList(bucket_0, bucket_1, bucket_2, bucket_3)); + assertHistogramProcessing(map, Collections.emptyList()); } private ExportMetricsServiceRequest fillServiceRequest(Histogram histogram) { @@ -109,21 +126,27 @@ private void assertHistogramProcessing(Map map, List listOfMaps = (List) map.get("buckets"); - assertThat(listOfMaps).hasSize(expectedBuckets.size()); - - for (int i = 0; i < expectedBuckets.size(); i++) { - Bucket expectedBucket = expectedBuckets.get(i); - Map actualBucket = listOfMaps.get(i); - - assertThat(actualBucket) - .contains(entry("min", expectedBucket.getMin())) - .contains(entry("max", expectedBucket.getMax())) - .contains(entry("count", expectedBucket.getCount().intValue())); - + assertThat(map).contains(entry("flags", 1)); + assertThat(map).contains(entry("bucketCountsList", Arrays.asList(0, 5, 17, 33))); + 
assertThat(map).contains(entry("explicitBounds", Arrays.asList(10.0, 100.0, 1000.0))); + + if (expectedBuckets.isEmpty()) { + assertThat(map).doesNotContainKey("buckets"); + } else { + assertThat(map).containsKey("buckets"); + List listOfMaps = (List) map.get("buckets"); + assertThat(listOfMaps).hasSize(expectedBuckets.size()); + + for (int i = 0; i < expectedBuckets.size(); i++) { + Bucket expectedBucket = expectedBuckets.get(i); + Map actualBucket = listOfMaps.get(i); + + assertThat(actualBucket) + .contains(entry("min", expectedBucket.getMin())) + .contains(entry("max", expectedBucket.getMax())) + .contains(entry("count", expectedBucket.getCount().intValue())); + + } } } } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSumTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSumTest.java index cb187ef53b..7224eed8b7 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSumTest.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSumTest.java @@ -44,7 +44,7 @@ public class MetricsPluginSumTest { public void init() { PluginSetting testsettings = new PluginSetting("testsettings", Collections.emptyMap()); testsettings.setPipelineName("testpipeline"); - rawProcessor = new OTelMetricsRawProcessor(testsettings); + rawProcessor = new OTelMetricsRawProcessor(testsettings, new OtelMetricsRawProcessorConfig()); } @Test @@ -71,6 +71,7 @@ public void test() throws JsonProcessingException { NumberDataPoint dataPoint = NumberDataPoint.newBuilder() .setAsInt(3) .addAllAttributes(Arrays.asList(attribute1, attribute2, attribute3)) + .setFlags(0) .build(); Sum sum = Sum.newBuilder().addAllDataPoints(Collections.singletonList(dataPoint)).build(); @@ -104,11 +105,11 @@ public void test() throws JsonProcessingException { Record firstRecord = rec.get(0); ObjectMapper objectMapper = new ObjectMapper(); - Map map = objectMapper.readValue(firstRecord.getData().toJsonString(), Map.class); + Map map = objectMapper.readValue(firstRecord.getData().toJsonString(), Map.class); assertSumProcessing(map); } - private void assertSumProcessing(Map map) { + private void assertSumProcessing(Map map) { assertThat(map).contains(entry("kind", org.opensearch.dataprepper.model.metric.Metric.KIND.SUM.toString())); assertThat(map).contains(entry("unit", "seconds")); assertThat(map).contains(entry("description", "description")); @@ -124,6 +125,7 @@ private void assertSumProcessing(Map map) { assertThat(map).contains(entry("instrumentationLibrary.version", "v1")); assertThat(map).contains(entry("instrumentationLibrary.name", "name")); assertThat(map).contains(entry("metric.attributes.aws@details", "[\"asdf\",2000.123,\"{\\\"statement@params\\\":\\\"us-east-1\\\",\\\"statement\\\":1000}\"]")); + assertThat(map).contains(entry("flags", 0)); } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSummaryTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSummaryTest.java index c46146c0c6..d915a48e37 100644 --- 
a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSummaryTest.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/MetricsPluginSummaryTest.java @@ -41,7 +41,7 @@ public class MetricsPluginSummaryTest { public void init() { PluginSetting testsettings = new PluginSetting("testsettings", Collections.emptyMap()); testsettings.setPipelineName("testpipeline"); - rawProcessor = new OTelMetricsRawProcessor(testsettings); + rawProcessor = new OTelMetricsRawProcessor(testsettings, new OtelMetricsRawProcessorConfig()); } @Test @@ -55,6 +55,7 @@ public void testSummaryProcessing() throws JsonProcessingException { .setQuantile(0.7) .setValue(250) .build()) + .setFlags(1) .build(); Summary summary = Summary.newBuilder().addDataPoints(dataPoint).build(); Metric metric = Metric.newBuilder() @@ -86,17 +87,18 @@ public void testSummaryProcessing() throws JsonProcessingException { Record firstRecord = rec.get(0); ObjectMapper objectMapper = new ObjectMapper(); - Map map = objectMapper.readValue(firstRecord.getData().toJsonString(), Map.class); + Map map = objectMapper.readValue(firstRecord.getData().toJsonString(), Map.class); assertSumProcessing(map); } - private void assertSumProcessing(Map map) { + private void assertSumProcessing(Map map) { assertThat(map).contains(entry("kind", org.opensearch.dataprepper.model.metric.Metric.KIND.SUMMARY.toString())); assertThat(map).contains(entry("unit", "seconds")); assertThat(map).contains(entry("description", "description")); assertThat(map).contains(entry("name", "name")); assertThat(map).contains(entry("serviceName", "service")); assertThat(map).contains(entry("quantileValuesCount", 2)); + assertThat(map).contains(entry("flags", 1)); List> quantileValues = (List>) map.get("quantiles"); assertThat(quantileValues).hasSize(2); diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelperTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelperTest.java index 55a6677379..259e1b03cd 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelperTest.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OTelMetricsProtoHelperTest.java @@ -7,16 +7,29 @@ import com.google.protobuf.ByteString; import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.ArrayValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.Exemplar; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; import io.opentelemetry.proto.metrics.v1.NumberDataPoint; +import org.apache.commons.codec.binary.Hex; +import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.opensearch.dataprepper.model.metric.Bucket; +import java.time.Clock; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import static org.assertj.core.api.Assertions.entry; import static org.hamcrest.MatcherAssert.assertThat; +import static 
org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -24,9 +37,20 @@ /** * This test exists purely to satisfy the test coverage because OtelMetricsHelper must be merged with * OtelProtoCodec when #546 is integrated since it shares most of the code with OTelProtoCodec - */ + */ public class OTelMetricsProtoHelperTest { + private static final Clock CLOCK = Clock.fixed(Instant.ofEpochSecond(1_700_000_000), ZoneOffset.UTC); + + private static final Double MAX_ERROR = 0.00001; + private static final Random RANDOM = new Random(); + + public static byte[] getRandomBytes(int len) { + byte[] bytes = new byte[len]; + RANDOM.nextBytes(bytes); + return bytes; + } + @Test void getValueAsDouble() { assertNull(OTelMetricsProtoHelper.getValueAsDouble(NumberDataPoint.newBuilder().build())); @@ -34,7 +58,7 @@ void getValueAsDouble() { @Test public void testCreateBucketsEmpty() { - assertThat(OTelMetricsProtoHelper.createBuckets(new ArrayList<>(),new ArrayList<>()).size(),equalTo(0)); + assertThat(OTelMetricsProtoHelper.createBuckets(new ArrayList<>(), new ArrayList<>()).size(), equalTo(0)); } @Test @@ -84,4 +108,194 @@ public void testUnsupportedTypeToAnyValue() { assertThrows(RuntimeException.class, () -> OTelMetricsProtoHelper.convertAnyValue(AnyValue.newBuilder().setBytesValue(ByteString.EMPTY).build())); } -} \ No newline at end of file + + @Test + void convertExemplars() { + long t1 = TimeUnit.MILLISECONDS.toNanos(Instant.now(CLOCK).toEpochMilli()); + long t2 = t1 + 100_000; + + Exemplar e1 = Exemplar.newBuilder() + .addFilteredAttributes(KeyValue.newBuilder() + .setKey("key") + .setValue(AnyValue.newBuilder().setBoolValue(true).build()).build()) + .setAsDouble(3) + .setSpanId(ByteString.copyFrom(getRandomBytes(8))) + .setTimeUnixNano(t1) + .setTraceId(ByteString.copyFrom(getRandomBytes(8))) + .build(); + + + Exemplar e2 = Exemplar.newBuilder() + .addFilteredAttributes(KeyValue.newBuilder() + .setKey("key2") + .setValue(AnyValue.newBuilder() + .setArrayValue(ArrayValue.newBuilder().addValues(AnyValue.newBuilder().setStringValue("test").build()).build()) + .build()).build()) + .setAsInt(42) + .setSpanId(ByteString.copyFrom(getRandomBytes(8))) + .setTimeUnixNano(t2) + .setTraceId(ByteString.copyFrom(getRandomBytes(8))) + .build(); + + List exemplars = Arrays.asList(e1, e2); + List convertedExemplars = OTelMetricsProtoHelper.convertExemplars(exemplars); + assertThat(convertedExemplars.size(), equalTo(2)); + + org.opensearch.dataprepper.model.metric.Exemplar conv1 = convertedExemplars.get(0); + assertThat(conv1.getSpanId(), equalTo(Hex.encodeHexString(e1.getSpanId().toByteArray()))); + assertThat(conv1.getTime(), equalTo("2023-11-14T22:13:20Z")); + assertThat(conv1.getTraceId(), equalTo(Hex.encodeHexString(e1.getTraceId().toByteArray()))); + assertThat(conv1.getValue(), equalTo(3.0)); + Assertions.assertThat(conv1.getAttributes()).contains(entry("exemplar.attributes.key", true)); + + org.opensearch.dataprepper.model.metric.Exemplar conv2 = convertedExemplars.get(1); + assertThat(conv2.getSpanId(), equalTo(Hex.encodeHexString(e2.getSpanId().toByteArray()))); + assertThat(conv2.getTime(), equalTo("2023-11-14T22:13:20.000100Z")); + assertThat(conv2.getTraceId(), equalTo(Hex.encodeHexString(e2.getTraceId().toByteArray()))); + assertThat(conv2.getValue(), equalTo(42.0)); + Assertions.assertThat(conv2.getAttributes()).contains(entry("exemplar.attributes.key2", 
"[\"test\"]")); + + } + + + /** + * See: The example table with scale 3 + */ + @Test + public void testExponentialHistogram() { + List b = OTelMetricsProtoHelper.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .addBucketCounts(1) + .addBucketCounts(4) + .addBucketCounts(6) + .addBucketCounts(4) + .setOffset(0) + .build(), 3); + + assertThat(b.size(), equalTo(8)); + + Bucket b1 = b.get(0); + assertThat(b1.getCount(), equalTo(4L)); + assertThat(b1.getMin(), equalTo(1D)); + assertThat(b1.getMax(), closeTo(1.09051, MAX_ERROR)); + + Bucket b2 = b.get(1); + assertThat(b2.getCount(), equalTo(2L)); + assertThat(b2.getMin(), closeTo(1.09051, MAX_ERROR)); + assertThat(b2.getMax(), closeTo(1.18921, MAX_ERROR)); + + Bucket b3 = b.get(2); + assertThat(b3.getCount(), equalTo(3L)); + assertThat(b3.getMin(), closeTo(1.18921, MAX_ERROR)); + assertThat(b3.getMax(), closeTo(1.29684, MAX_ERROR)); + + Bucket b4 = b.get(3); + assertThat(b4.getCount(), equalTo(2L)); + assertThat(b4.getMin(), closeTo(1.29684, MAX_ERROR)); + assertThat(b4.getMax(), closeTo(1.41421, MAX_ERROR)); + + Bucket b5 = b.get(4); + assertThat(b5.getCount(), equalTo(1L)); + assertThat(b5.getMin(), closeTo(1.41421, MAX_ERROR)); + assertThat(b5.getMax(), closeTo(1.54221, MAX_ERROR)); + + Bucket b6 = b.get(5); + assertThat(b6.getCount(), equalTo(4L)); + assertThat(b6.getMin(), closeTo(1.54221, MAX_ERROR)); + assertThat(b6.getMax(), closeTo(1.68179, MAX_ERROR)); + + Bucket b7 = b.get(6); + assertThat(b7.getCount(), equalTo(6L)); + assertThat(b7.getMin(), closeTo(1.68179, MAX_ERROR)); + assertThat(b7.getMax(), closeTo(1.83401, MAX_ERROR)); + + Bucket b8 = b.get(7); + assertThat(b8.getCount(), equalTo(4L)); + assertThat(b8.getMin(), closeTo(1.83401, MAX_ERROR)); + assertThat(b8.getMax(), closeTo(2, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithOffset() { + List b = OTelMetricsProtoHelper.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .addBucketCounts(1) + .addBucketCounts(4) + .setOffset(2) + .build(), 3); + + assertThat(b.size(), equalTo(6)); + + Bucket b1 = b.get(0); + assertThat(b1.getCount(), equalTo(4L)); + assertThat(b1.getMin(), closeTo(1.18920, MAX_ERROR)); + assertThat(b1.getMax(), closeTo(1.29684, MAX_ERROR)); + + Bucket b2 = b.get(1); + assertThat(b2.getCount(), equalTo(2L)); + assertThat(b2.getMin(), closeTo(1.29684, MAX_ERROR)); + assertThat(b2.getMax(), closeTo(1.41421, MAX_ERROR)); + + Bucket b3 = b.get(2); + assertThat(b3.getCount(), equalTo(3L)); + assertThat(b3.getMin(), closeTo(1.41421, MAX_ERROR)); + assertThat(b3.getMax(), closeTo(1.54221, MAX_ERROR)); + + Bucket b4 = b.get(3); + assertThat(b4.getCount(), equalTo(2L)); + assertThat(b4.getMin(), closeTo(1.54221, MAX_ERROR)); + assertThat(b4.getMax(), closeTo(1.68179, MAX_ERROR)); + + Bucket b5 = b.get(4); + assertThat(b5.getCount(), equalTo(1L)); + assertThat(b5.getMin(), closeTo(1.68179, MAX_ERROR)); + assertThat(b5.getMax(), closeTo(1.83401, MAX_ERROR)); + + Bucket b6 = b.get(5); + assertThat(b6.getCount(), equalTo(4L)); + assertThat(b6.getMin(), closeTo(1.83401, MAX_ERROR)); + assertThat(b6.getMax(), closeTo(2, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithNegativeScale() { + List b = OTelMetricsProtoHelper.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + 
.addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(0) + .build(), -3); + + assertThat(b.size(), equalTo(4)); + + Bucket b1 = b.get(0); + assertThat(b1.getCount(), equalTo(4L)); + assertThat(b1.getMin(), closeTo(2, MAX_ERROR)); + assertThat(b1.getMax(), closeTo(4, MAX_ERROR)); + + Bucket b2 = b.get(1); + assertThat(b2.getCount(), equalTo(2L)); + assertThat(b2.getMin(), closeTo(4, MAX_ERROR)); + assertThat(b2.getMax(), closeTo(16, MAX_ERROR)); + + Bucket b3 = b.get(2); + assertThat(b3.getCount(), equalTo(3L)); + assertThat(b3.getMin(), closeTo(16, MAX_ERROR)); + assertThat(b3.getMax(), closeTo(256, MAX_ERROR)); + + Bucket b4 = b.get(3); + assertThat(b4.getCount(), equalTo(2L)); + assertThat(b4.getMin(), closeTo(256, MAX_ERROR)); + assertThat(b4.getMax(), closeTo(65536, MAX_ERROR)); + } +} diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfigTest.java b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfigTest.java new file mode 100644 index 0000000000..64606b27d9 --- /dev/null +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfigTest.java @@ -0,0 +1,25 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.dataprepper.plugins.processor.otelmetrics; + +import org.junit.jupiter.api.Test; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; + +public class OtelMetricsRawProcessorConfigTest { + + @Test + void testDefaultConfig() { + final OtelMetricsRawProcessorConfig dateProcessorConfig = new OtelMetricsRawProcessorConfig(); + + assertThat(dateProcessorConfig.getCalculateExponentialHistogramBuckets(), equalTo(true)); + assertThat(dateProcessorConfig.getCalculateHistogramBuckets(), equalTo(true)); + assertThat(dateProcessorConfig.getExponentialHistogramMaxAllowedScale(), equalTo(10)); + } + +} diff --git a/data-prepper-plugins/otel-metrics-source/build.gradle b/data-prepper-plugins/otel-metrics-source/build.gradle index 1b19ecafc4..7d81e0a2af 100644 --- a/data-prepper-plugins/otel-metrics-source/build.gradle +++ b/data-prepper-plugins/otel-metrics-source/build.gradle @@ -14,7 +14,7 @@ dependencies { implementation 'commons-codec:commons-codec:1.15' implementation project(':data-prepper-plugins:armeria-common') testImplementation project(':data-prepper-api').sourceSets.test.output - implementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" implementation "commons-io:commons-io:2.11.0" implementation 'software.amazon.awssdk:acm' implementation 'software.amazon.awssdk:auth' diff --git a/data-prepper-plugins/otel-proto-common/build.gradle b/data-prepper-plugins/otel-proto-common/build.gradle index 2e9c92ae70..75ba3ad2fb 100644 --- a/data-prepper-plugins/otel-proto-common/build.gradle +++ b/data-prepper-plugins/otel-proto-common/build.gradle @@ -9,10 +9,12 @@ plugins { dependencies { implementation project(':data-prepper-api') - implementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" 
implementation "com.google.protobuf:protobuf-java-util:${versionMap.protobufJavaUtil}" implementation 'com.fasterxml.jackson.core:jackson-databind' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' implementation "org.apache.commons:commons-lang3:3.12.0" implementation 'commons-codec:commons-codec:1.15' + testImplementation "org.hamcrest:hamcrest:2.2" + testImplementation 'org.assertj:assertj-core:3.22.0' } diff --git a/data-prepper-plugins/otel-proto-common/src/main/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodec.java b/data-prepper-plugins/otel-proto-common/src/main/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodec.java index ca9b846f16..d724c25778 100644 --- a/data-prepper-plugins/otel-proto-common/src/main/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodec.java +++ b/data-prepper-plugins/otel-proto-common/src/main/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodec.java @@ -11,15 +11,27 @@ import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest; import io.opentelemetry.proto.common.v1.AnyValue; import io.opentelemetry.proto.common.v1.InstrumentationLibrary; +import io.opentelemetry.proto.common.v1.InstrumentationScope; import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; +import io.opentelemetry.proto.metrics.v1.NumberDataPoint; +import io.opentelemetry.proto.metrics.v1.SummaryDataPoint; import io.opentelemetry.proto.resource.v1.Resource; import io.opentelemetry.proto.trace.v1.InstrumentationLibrarySpans; import io.opentelemetry.proto.trace.v1.ResourceSpans; import io.opentelemetry.proto.trace.v1.Status; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Hex; +import org.opensearch.dataprepper.model.metric.Bucket; +import org.opensearch.dataprepper.model.metric.DefaultBucket; +import org.opensearch.dataprepper.model.metric.DefaultExemplar; +import org.opensearch.dataprepper.model.metric.DefaultQuantile; +import org.opensearch.dataprepper.model.metric.Exemplar; +import org.opensearch.dataprepper.model.metric.Quantile; import org.opensearch.dataprepper.model.trace.DefaultLink; import org.opensearch.dataprepper.model.trace.DefaultSpanEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.opensearch.dataprepper.model.trace.DefaultTraceGroupFields; import org.opensearch.dataprepper.model.trace.JacksonSpan; import org.opensearch.dataprepper.model.trace.Link; @@ -36,7 +48,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; @@ -44,6 +58,9 @@ * OTelProtoCodec is for encoding/decoding between {@link org.opensearch.dataprepper.model.trace} and {@link io.opentelemetry.proto}. 
 */
 public class OTelProtoCodec {
+
+    private static final Logger LOG = LoggerFactory.getLogger(OTelProtoCodec.class);
+
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
     private static final long NANO_MULTIPLIER = 1_000 * 1_000 * 1_000;
     protected static final String SERVICE_NAME = "service.name";
@@ -53,11 +70,18 @@ public class OTelProtoCodec {
     static final String INSTRUMENTATION_LIBRARY_VERSION = "instrumentationLibrary.version";
     static final String STATUS_CODE = "status.code";
     static final String STATUS_MESSAGE = "status.message";
+
+
     /**
      * To make it OpenSearch friendly we will replace '.' in keys with '@' in all the keys in {@link io.opentelemetry.proto.common.v1.KeyValue}
      */
     private static final String DOT = ".";
     private static final String AT = "@";
+    private static final String METRIC_ATTRIBUTES = "metric.attributes";
+    private static final String EXEMPLAR_ATTRIBUTES = "exemplar.attributes";
+    private static final String INSTRUMENTATION_SCOPE_NAME = "instrumentationScope.name";
+    private static final String INSTRUMENTATION_SCOPE_VERSION = "instrumentationScope.version";
+
     public static final Function<String, String> REPLACE_DOT_WITH_AT = i -> i.replace(DOT, AT);
     /**
      * Span and Resource attributes are essential for OpenSearch so they should not be nested. So we will prefix them with "span.attributes"
@@ -66,7 +90,44 @@ public class OTelProtoCodec {
      */
     public static final Function<String, String> SPAN_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> SPAN_ATTRIBUTES + DOT + i.replace(DOT, AT);
     public static final Function<String, String> RESOURCE_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> RESOURCE_ATTRIBUTES + DOT + i.replace(DOT, AT);
+    public static final Function<String, String> PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> METRIC_ATTRIBUTES + DOT + i.replace(DOT, AT);
+    public static final Function<String, String> PREFIX_AND_RESOURCE_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> RESOURCE_ATTRIBUTES + DOT + i.replace(DOT, AT);
+    public static final Function<String, String> PREFIX_AND_EXEMPLAR_ATTRIBUTES_REPLACE_DOT_WITH_AT = i -> EXEMPLAR_ATTRIBUTES + DOT + i.replace(DOT, AT);
+
+    private static final Map<BoundsKey, double[]> EXPONENTIAL_BUCKET_BOUNDS = new ConcurrentHashMap<>();
+
+    static class BoundsKey {
+        private final Integer scale;
+        private final Sign sign;
+
+        public enum Sign {POSITIVE, NEGATIVE}
+
+        public BoundsKey(Integer scale, Sign sign) {
+            this.scale = scale;
+            this.sign = sign;
+        }
+
+        public Integer getScale() {
+            return scale;
+        }
+
+        public Sign getSign() {
+            return sign;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            BoundsKey boundsKey = (BoundsKey) o;
+            return scale.equals(boundsKey.scale) && sign == boundsKey.sign;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(scale, sign);
+        }
+    }
 
     public static String convertUnixNanosToISO8601(final long unixNano) {
         return Instant.ofEpochSecond(0L, unixNano).toString();
     }
@@ -457,4 +518,311 @@ protected AnyValue objectToAnyValue(final Object obj) throws UnsupportedEncoding
             return anyValueBuilder.build();
         }
     }
+
+    /**
+     * Converts an {@link AnyValue} into its appropriate data type.
+     *
+     * @param value The value to convert
+     * @return the converted value as object
+     */
+    public static Object convertAnyValue(final AnyValue value) {
+        switch (value.getValueCase()) {
+            case VALUE_NOT_SET:
+            case STRING_VALUE:
+                return value.getStringValue();
+            case BOOL_VALUE:
+                return value.getBoolValue();
+            case INT_VALUE:
+                return value.getIntValue();
+            case DOUBLE_VALUE:
+                return value.getDoubleValue();
+            /**
+             * Both {@link AnyValue.ARRAY_VALUE_FIELD_NUMBER} and {@link AnyValue.KVLIST_VALUE_FIELD_NUMBER} are
+             * nested objects. Storing them in a flattened structure is not OpenSearch-friendly, so they are stored
+             * as a JSON string.
+             */
+            case ARRAY_VALUE:
+                try {
+                    return OBJECT_MAPPER.writeValueAsString(value.getArrayValue().getValuesList().stream()
+                            .map(OTelProtoCodec::convertAnyValue)
+                            .collect(Collectors.toList()));
+                } catch (JsonProcessingException e) {
+                    throw new RuntimeException(e);
+                }
+            case KVLIST_VALUE:
+                try {
+                    return OBJECT_MAPPER.writeValueAsString(value.getKvlistValue().getValuesList().stream()
+                            .collect(Collectors.toMap(i -> REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue()))));
+                } catch (JsonProcessingException e) {
+                    throw new RuntimeException(e);
+                }
+            default:
+                throw new RuntimeException(String.format("Cannot convert AnyValue of type %s", value.getValueCase()));
+        }
+    }
+
+    /**
+     * Converts the keys of all attributes in the {@link NumberDataPoint}.
+     * Also casts the underlying data into its actual type.
+     *
+     * @param numberDataPoint The point to process
+     * @return A Map containing all attributes of `numberDataPoint` with keys converted into an OS-friendly format
+     */
+    public static Map<String, Object> convertKeysOfDataPointAttributes(final NumberDataPoint numberDataPoint) {
+        return numberDataPoint.getAttributesList().stream()
+                .collect(Collectors.toMap(i -> PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue())));
+    }
+
+    /**
+     * Unpacks the List of {@link KeyValue} objects into a Map.
+     * <p>

+     * Converts the keys into an OS-friendly format and casts the underlying data into its actual type.
+     *
+     * @param attributesList The list of {@link KeyValue} objects to process
+     * @return A Map containing unpacked {@link KeyValue} data
+     */
+    public static Map<String, Object> unpackKeyValueList(List<KeyValue> attributesList) {
+        return attributesList.stream()
+                .collect(Collectors.toMap(i -> PREFIX_AND_METRIC_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue())));
+    }
+
+    /**
+     * Unpacks the List of {@link KeyValue} objects into a Map.
+     * <p>
+     * Converts the keys into an OS-friendly format and casts the underlying data into its actual type.
+     *
+     * @param attributesList The list of {@link KeyValue} objects to process
+     * @return A Map containing unpacked {@link KeyValue} data
+     */
+    public static Map<String, Object> unpackExemplarValueList(List<KeyValue> attributesList) {
+        return attributesList.stream()
+                .collect(Collectors.toMap(i -> PREFIX_AND_EXEMPLAR_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue())));
+    }
+
+
+    /**
+     * Extracts a value from the passed {@link NumberDataPoint} into a double representation.
+     *
+     * @param ndp The {@link NumberDataPoint} whose data should be turned into a double value
+     * @return A double representing the numerical value of the passed {@link NumberDataPoint}.
+     * Null if the numerical data point is not present
+     */
+    public static Double getValueAsDouble(final NumberDataPoint ndp) {
+        NumberDataPoint.ValueCase ndpCase = ndp.getValueCase();
+        if (NumberDataPoint.ValueCase.AS_DOUBLE == ndpCase) {
+            return ndp.getAsDouble();
+        } else if (NumberDataPoint.ValueCase.AS_INT == ndpCase) {
+            return (double) ndp.getAsInt();
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Extracts a value from the passed {@link io.opentelemetry.proto.metrics.v1.Exemplar} into a double representation.
+     *
+     * @param exemplar The {@link io.opentelemetry.proto.metrics.v1.Exemplar} whose data should be turned into a double value
+     * @return A double representing the numerical value of the passed {@link io.opentelemetry.proto.metrics.v1.Exemplar}.
+     * Null if the numerical data point is not present
+     */
+    public static Double getExemplarValueAsDouble(final io.opentelemetry.proto.metrics.v1.Exemplar exemplar) {
+        io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase valueCase = exemplar.getValueCase();
+        if (io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase.AS_DOUBLE == valueCase) {
+            return exemplar.getAsDouble();
+        } else if (io.opentelemetry.proto.metrics.v1.Exemplar.ValueCase.AS_INT == valueCase) {
+            return (double) exemplar.getAsInt();
+        } else {
+            return null;
+        }
+    }
+
+    public static Map<String, Object> getResourceAttributes(final Resource resource) {
+        return resource.getAttributesList().stream()
+                .collect(Collectors.toMap(i -> PREFIX_AND_RESOURCE_ATTRIBUTES_REPLACE_DOT_WITH_AT.apply(i.getKey()), i -> convertAnyValue(i.getValue())));
+    }
+
+    /**
+     * Extracts the name and version of the instrumentation library used.
+     *
+     * @param instrumentationLibrary the instrumentation library
+     * @return A map containing information about the instrumentation library
+     */
+    public static Map<String, Object> getInstrumentationLibraryAttributes(final InstrumentationLibrary instrumentationLibrary) {
+        final Map<String, Object> instrumentationAttr = new HashMap<>();
+        if (!instrumentationLibrary.getName().isEmpty()) {
+            instrumentationAttr.put(INSTRUMENTATION_LIBRARY_NAME, instrumentationLibrary.getName());
+        }
+        if (!instrumentationLibrary.getVersion().isEmpty()) {
+            instrumentationAttr.put(INSTRUMENTATION_LIBRARY_VERSION, instrumentationLibrary.getVersion());
+        }
+        return instrumentationAttr;
+    }
+
+    /**
+     * Extracts the name and version of the instrumentation scope used.
+     *
+     * @param instrumentationScope the instrumentation scope
+     * @return A map containing information about the instrumentation scope
+     */
+    public static Map<String, Object> getInstrumentationScopeAttributes(final InstrumentationScope instrumentationScope) {
+        final Map<String, Object> instrumentationScopeAttr = new HashMap<>();
+        if (!instrumentationScope.getName().isEmpty()) {
+            instrumentationScopeAttr.put(INSTRUMENTATION_SCOPE_NAME, instrumentationScope.getName());
+        }
+        if (!instrumentationScope.getVersion().isEmpty()) {
+            instrumentationScopeAttr.put(INSTRUMENTATION_SCOPE_VERSION, instrumentationScope.getVersion());
+        }
+        return instrumentationScopeAttr;
+    }
+
+
+    public static String getStartTimeISO8601(final NumberDataPoint numberDataPoint) {
+        return convertUnixNanosToISO8601(numberDataPoint.getStartTimeUnixNano());
+    }
+
+    public static String getTimeISO8601(final NumberDataPoint ndp) {
+        return convertUnixNanosToISO8601(ndp.getTimeUnixNano());
+    }
+
+    public static Optional<String> getServiceName(final Resource resource) {
+        return resource.getAttributesList().stream()
+                .filter(keyValue -> keyValue.getKey().equals(SERVICE_NAME) && !keyValue.getValue().getStringValue().isEmpty())
+                .findFirst()
+                .map(i -> i.getValue().getStringValue());
+    }
+
+
+    public static Map<String, Object> mergeAllAttributes(final Collection<Map<String, Object>> attributes) {
+        return attributes.stream()
+                .flatMap(map -> map.entrySet().stream())
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+
+    public static List<Quantile> getQuantileValues(List<SummaryDataPoint.ValueAtQuantile> quantileValues) {
+        return quantileValues.stream()
+                .map(q -> new DefaultQuantile(q.getQuantile(), q.getValue()))
+                .collect(Collectors.toList());
+    }
+
+    /**
+     * Create the buckets; see the OTel metrics proto spec.
+     * <p>

+     * The boundaries for bucket at index i are:
+     * <p>
+     * <pre>{@code
+     * (-infinity, explicit_bounds[i]) for i == 0
+     * (explicit_bounds[i-1], +infinity) for i == size(explicit_bounds)
+     * (explicit_bounds[i-1], explicit_bounds[i]) for 0 < i < size(explicit_bounds)
+     * }</pre>
+     * <p>
+     * NOTE: here we map infinity as +/- Float.MAX_VALUE since JSON RFC 4627 only supports finite numbers and
+     * OpenSearch maps double values to floats by default.
+     *
+     * @param bucketCountsList a list with the bucket counts
+     * @param explicitBoundsList a list with the bounds
+     * @return buckets list
+     */
+    public static List<Bucket> createBuckets(List<Long> bucketCountsList, List<Double> explicitBoundsList) {
+        List<Bucket> buckets = new ArrayList<>();
+        if (bucketCountsList.isEmpty()) {
+            return buckets;
+        }
+        if (bucketCountsList.size() - 1 != explicitBoundsList.size()) {
+            LOG.error("bucket counts list size does not match explicit bounds list size: {} vs. {}", bucketCountsList.size(), explicitBoundsList.size());
+            throw new IllegalArgumentException("OpenTelemetry protocol mandates that the number of elements in the bucket_counts array "
+                    + "is one greater than the number of elements in the explicit_bounds array.");
+        } else {
+            for (int i = 0; i < bucketCountsList.size(); i++) {
+                if (i == 0) {
+                    double min = -Float.MAX_VALUE; // "-Infinity"
+                    double max = explicitBoundsList.get(i);
+                    Long bucketCount = bucketCountsList.get(i);
+                    buckets.add(new DefaultBucket(min, max, bucketCount));
+                } else if (i == bucketCountsList.size() - 1) {
+                    double min = explicitBoundsList.get(i - 1);
+                    double max = Float.MAX_VALUE; // "Infinity"
+                    Long bucketCount = bucketCountsList.get(i);
+                    buckets.add(new DefaultBucket(min, max, bucketCount));
+                } else {
+                    double min = explicitBoundsList.get(i - 1);
+                    double max = explicitBoundsList.get(i);
+                    Long bucketCount = bucketCountsList.get(i);
+                    buckets.add(new DefaultBucket(min, max, bucketCount));
+                }
+            }
+        }
+        return buckets;
+    }
+
+    /**
+     * Converts a List of {@link io.opentelemetry.proto.metrics.v1.Exemplar} values to {@link DefaultExemplar}, the
+     * internal representation for Data Prepper.
+     *
+     * @param exemplarsList the List of Exemplars
+     * @return a mapped list of DefaultExemplars
+     */
+    public static List<Exemplar> convertExemplars(List<io.opentelemetry.proto.metrics.v1.Exemplar> exemplarsList) {
+        return exemplarsList.stream().map(exemplar ->
+                        new DefaultExemplar(convertUnixNanosToISO8601(exemplar.getTimeUnixNano()),
+                                getExemplarValueAsDouble(exemplar),
+                                Hex.encodeHexString(exemplar.getSpanId().toByteArray()),
+                                Hex.encodeHexString(exemplar.getTraceId().toByteArray()),
+                                unpackExemplarValueList(exemplar.getFilteredAttributesList())))
+                .collect(Collectors.toList());
+    }
+
+    /**
+     * Pre-calculates all possible bucket bounds for this scale.
+     * Uses the entire range of Double values.
+     *
+     * @param key a tuple with scale and offset sign for bounds calculation
+     * @return an array with all possible bucket bounds for the scale
+     */
+    static double[] calculateBoundsForScale(BoundsKey key) {
+
+        // base = 2**(2**(-scale))
+        double base = Math.pow(2., Math.pow(2., -key.getScale()));
+
+        // calculate all possible buckets in the Double range and consider the offset sign
+        int maxIndex = Math.toIntExact(Math.round(Math.log(Double.MAX_VALUE) / Math.log(base)));
+        double[] boundaries = new double[maxIndex + 1];
+        for (int i = 0; i <= maxIndex; i++) {
+            boundaries[i] = Math.pow(base, key.getSign() == BoundsKey.Sign.POSITIVE ? i : -i);
+        }
+        return boundaries;
+    }
+
+    /**
+     * Maps a List of {@link io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets} to an
+     * internal representation for Data Prepper.
+ * See data model + * + * @param buckets the list of buckets + * @param scale the scale of the exponential histogram + * @return a mapped list of Buckets + */ + public static List createExponentialBuckets(ExponentialHistogramDataPoint.Buckets buckets, int scale) { + int offset = buckets.getOffset(); + BoundsKey key = new BoundsKey(scale, offset < 0 ? BoundsKey.Sign.NEGATIVE : BoundsKey.Sign.POSITIVE); + double[] bucketBounds = EXPONENTIAL_BUCKET_BOUNDS.computeIfAbsent(key, boundsKey -> calculateBoundsForScale(key)); + + List mappedBuckets = new ArrayList<>(); + List bucketsList = buckets.getBucketCountsList(); + + int boundOffset = Math.abs(offset); // Offset can be negative, but we always want positive offsets for array access + if (bucketsList.size() + boundOffset >= bucketBounds.length) { + LOG.error("Max offset is out of range for Double data type, ignoring buckets"); + } else { + for (int i = 0; i < bucketsList.size(); i++) { + Long value = bucketsList.get(i); + double lowerBound = bucketBounds[boundOffset + i]; + double upperBound = bucketBounds[boundOffset + i + 1]; + mappedBuckets.add(new DefaultBucket(lowerBound, upperBound, value)); + } + } + return mappedBuckets; + } } diff --git a/data-prepper-plugins/otel-proto-common/src/test/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodecTest.java b/data-prepper-plugins/otel-proto-common/src/test/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodecTest.java index 47ab04eb6b..abf1f89248 100644 --- a/data-prepper-plugins/otel-proto-common/src/test/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodecTest.java +++ b/data-prepper-plugins/otel-proto-common/src/test/java/org/opensearch/dataprepper/plugins/otel/codec/OTelProtoCodecTest.java @@ -16,14 +16,21 @@ import io.opentelemetry.proto.common.v1.InstrumentationLibrary; import io.opentelemetry.proto.common.v1.KeyValue; import io.opentelemetry.proto.common.v1.KeyValueList; +import io.opentelemetry.proto.metrics.v1.Exemplar; +import io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint; +import io.opentelemetry.proto.metrics.v1.NumberDataPoint; import io.opentelemetry.proto.resource.v1.Resource; import io.opentelemetry.proto.trace.v1.InstrumentationLibrarySpans; import io.opentelemetry.proto.trace.v1.ResourceSpans; import io.opentelemetry.proto.trace.v1.Status; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Hex; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.model.metric.Bucket; import org.opensearch.dataprepper.model.trace.DefaultLink; import org.opensearch.dataprepper.model.trace.DefaultSpanEvent; import org.opensearch.dataprepper.model.trace.DefaultTraceGroupFields; @@ -40,6 +47,9 @@ import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -48,14 +58,18 @@ import java.util.Objects; import java.util.Random; import java.util.UUID; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import static org.assertj.core.api.Assertions.entry; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; 
import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertThrows; public class OTelProtoCodecTest { @@ -65,6 +79,16 @@ public class OTelProtoCodecTest { private static final String TEST_REQUEST_JSON_FILE = "test-request.json"; private static final String TEST_SPAN_EVENT_JSON_FILE = "test-span-event.json"; + private static final Long START_TIME = TimeUnit.MILLISECONDS.toNanos(ZonedDateTime.of( + LocalDateTime.of(2020, 5, 24, 14, 0, 0), + ZoneOffset.UTC).toInstant().toEpochMilli()); + + private static final Long TIME = TimeUnit.MILLISECONDS.toNanos(ZonedDateTime.of( + LocalDateTime.of(2020, 5, 24, 14, 1, 0), + ZoneOffset.UTC).toInstant().toEpochMilli()); + + private static final Double MAX_ERROR = 0.00001; + private final OTelProtoCodec.OTelProtoDecoder decoderUnderTest = new OTelProtoCodec.OTelProtoDecoder(); private final OTelProtoCodec.OTelProtoEncoder encoderUnderTest = new OTelProtoCodec.OTelProtoEncoder(); @@ -85,7 +109,7 @@ private List returnList(final String jsonStr) throws JsonProcessingExcep private ExportTraceServiceRequest buildExportTraceServiceRequestFromJsonFile(String requestJsonFileName) throws IOException { final StringBuilder jsonBuilder = new StringBuilder(); try (final InputStream inputStream = Objects.requireNonNull( - OTelProtoCodecTest.class.getClassLoader().getResourceAsStream(requestJsonFileName))){ + OTelProtoCodecTest.class.getClassLoader().getResourceAsStream(requestJsonFileName))) { final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream)); bufferedReader.lines().forEach(jsonBuilder::append); } @@ -102,7 +126,7 @@ public void testParseExportTraceServiceRequest() throws IOException { final ExportTraceServiceRequest exportTraceServiceRequest = buildExportTraceServiceRequestFromJsonFile(TEST_REQUEST_JSON_FILE); final List spans = decoderUnderTest.parseExportTraceServiceRequest(exportTraceServiceRequest); assertThat(spans.size(), is(equalTo(3))); - for (final Span span: spans) { + for (final Span span : spans) { if (span.getParentSpanId().isEmpty()) { assertThat(span.getTraceGroup(), notNullValue()); assertThat(span.getTraceGroupFields().getEndTime(), notNullValue()); @@ -616,8 +640,9 @@ public void testEncodeResourceSpans() throws DecoderException, UnsupportedEncodi private Span buildSpanFromJsonFile(final String jsonFileName) { JacksonSpan.Builder spanBuilder = JacksonSpan.builder(); try (final InputStream inputStream = Objects.requireNonNull( - OTelProtoCodecTest.class.getClassLoader().getResourceAsStream(jsonFileName))){ - final Map spanMap = OBJECT_MAPPER.readValue(inputStream, new TypeReference>() {}); + OTelProtoCodecTest.class.getClassLoader().getResourceAsStream(jsonFileName))) { + final Map spanMap = OBJECT_MAPPER.readValue(inputStream, new TypeReference>() { + }); final String traceId = (String) spanMap.get("traceId"); final String spanId = (String) spanMap.get("spanId"); final String parentSpanId = (String) spanMap.get("parentSpanId"); @@ -655,7 +680,8 @@ private Span buildSpanFromJsonFile(final String jsonFileName) { return spanBuilder.build(); } - private class UnsupportedEncodingClass { } + private class UnsupportedEncodingClass { + } } @Test @@ -673,7 +699,7 @@ public void testOTelProtoCodecConsistency() throws IOException, DecoderException 
final ExportTraceServiceRequest request = buildExportTraceServiceRequestFromJsonFile(TEST_REQUEST_JSON_FILE); final List spansFirstDec = decoderUnderTest.parseExportTraceServiceRequest(request); final List resourceSpansList = new ArrayList<>(); - for (final Span span: spansFirstDec) { + for (final Span span : spansFirstDec) { resourceSpansList.add(encoderUnderTest.convertToResourceSpans(span)); } final List spansSecondDec = resourceSpansList.stream() @@ -683,4 +709,366 @@ public void testOTelProtoCodecConsistency() throws IOException, DecoderException assertThat(spansFirstDec.get(i).toJsonString(), equalTo(spansSecondDec.get(i).toJsonString())); } } + + @Test + void getValueAsDouble() { + Assertions.assertNull(OTelProtoCodec.getValueAsDouble(NumberDataPoint.newBuilder().build())); + } + + @Test + public void testCreateBucketsEmpty() { + MatcherAssert.assertThat(OTelProtoCodec.createBuckets(new ArrayList<>(), new ArrayList<>()).size(), Matchers.equalTo(0)); + } + + @Test + public void testCreateBuckets() { + List bucketsCountList = List.of(1L, 2L, 3L, 4L); + List explicitBOundsList = List.of(5D, 10D, 25D); + List buckets = OTelProtoCodec.createBuckets(bucketsCountList, explicitBOundsList); + MatcherAssert.assertThat(buckets.size(), Matchers.equalTo(4)); + Bucket b1 = buckets.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(1L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.equalTo((double) -Float.MAX_VALUE)); + MatcherAssert.assertThat(b1.getMax(), Matchers.equalTo(5D)); + + Bucket b2 = buckets.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.equalTo(5D)); + MatcherAssert.assertThat(b2.getMax(), Matchers.equalTo(10D)); + + Bucket b3 = buckets.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.equalTo(10D)); + MatcherAssert.assertThat(b3.getMax(), Matchers.equalTo(25D)); + + Bucket b4 = buckets.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.equalTo(25D)); + MatcherAssert.assertThat(b4.getMax(), Matchers.equalTo((double) Float.MAX_VALUE)); + } + + @Test + public void testCreateBuckets_illegal_argument() { + List bucketsCountList = List.of(1L, 2L, 3L, 4L); + List boundsList = Collections.emptyList(); + Assertions.assertThrows(IllegalArgumentException.class, () -> OTelProtoCodec.createBuckets(bucketsCountList, boundsList)); + } + + @Test + public void testConvertAnyValueBool() { + Object o = OTelProtoCodec.convertAnyValue(AnyValue.newBuilder().setBoolValue(true).build()); + MatcherAssert.assertThat(o instanceof Boolean, Matchers.equalTo(true)); + MatcherAssert.assertThat(((boolean) o), Matchers.equalTo(true)); + } + + @Test + public void testUnsupportedTypeToAnyValue() { + Assertions.assertThrows(RuntimeException.class, + () -> OTelProtoCodec.convertAnyValue(AnyValue.newBuilder().setBytesValue(ByteString.EMPTY).build())); + } + + @Test + void convertExemplars() { + Exemplar e1 = Exemplar.newBuilder() + .addFilteredAttributes(KeyValue.newBuilder() + .setKey("key") + .setValue(AnyValue.newBuilder().setBoolValue(true).build()).build()) + .setAsDouble(3) + .setSpanId(ByteString.copyFrom(getRandomBytes(8))) + .setTimeUnixNano(TIME) + .setTraceId(ByteString.copyFrom(getRandomBytes(8))) + .build(); + + + Exemplar e2 = Exemplar.newBuilder() + .addFilteredAttributes(KeyValue.newBuilder() + .setKey("key2") + .setValue(AnyValue.newBuilder() + 
.setArrayValue(ArrayValue.newBuilder().addValues(AnyValue.newBuilder().setStringValue("test").build()).build()) + .build()).build()) + .setAsInt(42) + .setSpanId(ByteString.copyFrom(getRandomBytes(8))) + .setTimeUnixNano(TIME) + .setTraceId(ByteString.copyFrom(getRandomBytes(8))) + .build(); + + List exemplars = Arrays.asList(e1, e2); + List convertedExemplars = OTelProtoCodec.convertExemplars(exemplars); + MatcherAssert.assertThat(convertedExemplars.size(), Matchers.equalTo(2)); + + org.opensearch.dataprepper.model.metric.Exemplar conv1 = convertedExemplars.get(0); + MatcherAssert.assertThat(conv1.getSpanId(), Matchers.equalTo(Hex.encodeHexString(e1.getSpanId().toByteArray()))); + MatcherAssert.assertThat(conv1.getTime(), Matchers.equalTo("2020-05-24T14:01:00Z")); + MatcherAssert.assertThat(conv1.getTraceId(), Matchers.equalTo(Hex.encodeHexString(e1.getTraceId().toByteArray()))); + MatcherAssert.assertThat(conv1.getValue(), Matchers.equalTo(3.0)); + org.assertj.core.api.Assertions.assertThat(conv1.getAttributes()).contains(entry("exemplar.attributes.key", true)); + + org.opensearch.dataprepper.model.metric.Exemplar conv2 = convertedExemplars.get(1); + MatcherAssert.assertThat(conv2.getSpanId(), Matchers.equalTo(Hex.encodeHexString(e2.getSpanId().toByteArray()))); + MatcherAssert.assertThat(conv2.getTime(), Matchers.equalTo("2020-05-24T14:01:00Z")); + MatcherAssert.assertThat(conv2.getTraceId(), Matchers.equalTo(Hex.encodeHexString(e2.getTraceId().toByteArray()))); + MatcherAssert.assertThat(conv2.getValue(), Matchers.equalTo(42.0)); + org.assertj.core.api.Assertions.assertThat(conv2.getAttributes()).contains(entry("exemplar.attributes.key2", "[\"test\"]")); + + } + + + /** + * See: The example table with scale 3 + */ + @Test + public void testExponentialHistogram() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .addBucketCounts(1) + .addBucketCounts(4) + .addBucketCounts(6) + .addBucketCounts(4) + .setOffset(0) + .build(), 3); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(8)); + + Bucket b1 = b.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.equalTo(1D)); + MatcherAssert.assertThat(b1.getMax(), Matchers.closeTo(1.09051, MAX_ERROR)); + + Bucket b2 = b.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.closeTo(1.09051, MAX_ERROR)); + MatcherAssert.assertThat(b2.getMax(), Matchers.closeTo(1.18921, MAX_ERROR)); + + Bucket b3 = b.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.closeTo(1.18921, MAX_ERROR)); + MatcherAssert.assertThat(b3.getMax(), Matchers.closeTo(1.29684, MAX_ERROR)); + + Bucket b4 = b.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.closeTo(1.29684, MAX_ERROR)); + MatcherAssert.assertThat(b4.getMax(), Matchers.closeTo(1.41421, MAX_ERROR)); + + Bucket b5 = b.get(4); + MatcherAssert.assertThat(b5.getCount(), Matchers.equalTo(1L)); + MatcherAssert.assertThat(b5.getMin(), Matchers.closeTo(1.41421, MAX_ERROR)); + MatcherAssert.assertThat(b5.getMax(), Matchers.closeTo(1.54221, MAX_ERROR)); + + Bucket b6 = b.get(5); + MatcherAssert.assertThat(b6.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b6.getMin(), 
Matchers.closeTo(1.54221, MAX_ERROR)); + MatcherAssert.assertThat(b6.getMax(), Matchers.closeTo(1.68179, MAX_ERROR)); + + Bucket b7 = b.get(6); + MatcherAssert.assertThat(b7.getCount(), Matchers.equalTo(6L)); + MatcherAssert.assertThat(b7.getMin(), Matchers.closeTo(1.68179, MAX_ERROR)); + MatcherAssert.assertThat(b7.getMax(), Matchers.closeTo(1.83401, MAX_ERROR)); + + Bucket b8 = b.get(7); + MatcherAssert.assertThat(b8.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b8.getMin(), Matchers.closeTo(1.83401, MAX_ERROR)); + MatcherAssert.assertThat(b8.getMax(), Matchers.closeTo(2, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithOffset() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .addBucketCounts(1) + .addBucketCounts(4) + .setOffset(2) + .build(), 3); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(6)); + + Bucket b1 = b.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.closeTo(1.18920, MAX_ERROR)); + MatcherAssert.assertThat(b1.getMax(), Matchers.closeTo(1.29684, MAX_ERROR)); + + Bucket b2 = b.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.closeTo(1.29684, MAX_ERROR)); + MatcherAssert.assertThat(b2.getMax(), Matchers.closeTo(1.41421, MAX_ERROR)); + + Bucket b3 = b.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.closeTo(1.41421, MAX_ERROR)); + MatcherAssert.assertThat(b3.getMax(), Matchers.closeTo(1.54221, MAX_ERROR)); + + Bucket b4 = b.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.closeTo(1.54221, MAX_ERROR)); + MatcherAssert.assertThat(b4.getMax(), Matchers.closeTo(1.68179, MAX_ERROR)); + + Bucket b5 = b.get(4); + MatcherAssert.assertThat(b5.getCount(), Matchers.equalTo(1L)); + MatcherAssert.assertThat(b5.getMin(), Matchers.closeTo(1.68179, MAX_ERROR)); + MatcherAssert.assertThat(b5.getMax(), Matchers.closeTo(1.83401, MAX_ERROR)); + + Bucket b6 = b.get(5); + MatcherAssert.assertThat(b6.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b6.getMin(), Matchers.closeTo(1.83401, MAX_ERROR)); + MatcherAssert.assertThat(b6.getMax(), Matchers.closeTo(2, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithLargeOffset() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(20) + .build(), 2); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(4)); + + Bucket b1 = b.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.closeTo(32.0, MAX_ERROR)); + MatcherAssert.assertThat(b1.getMax(), Matchers.closeTo(38.05462, MAX_ERROR)); + + Bucket b2 = b.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.closeTo(38.05462, MAX_ERROR)); + MatcherAssert.assertThat(b2.getMax(), Matchers.closeTo(45.254833, MAX_ERROR)); + + Bucket b3 = b.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.closeTo(45.254833, MAX_ERROR)); + 
MatcherAssert.assertThat(b3.getMax(), Matchers.closeTo(53.81737, MAX_ERROR)); + + Bucket b4 = b.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.closeTo(53.81737, MAX_ERROR)); + MatcherAssert.assertThat(b4.getMax(), Matchers.closeTo(63.99999, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithNegativeOffset() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(-5) + .build(), 2); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(4)); + + Bucket b1 = b.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.closeTo(0.42044820762685736, MAX_ERROR)); + MatcherAssert.assertThat(b1.getMax(), Matchers.closeTo(0.35355339059327384, MAX_ERROR)); + + Bucket b2 = b.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.closeTo(0.35355339059327384, MAX_ERROR)); + MatcherAssert.assertThat(b2.getMax(), Matchers.closeTo(0.2973017787506803, MAX_ERROR)); + + Bucket b3 = b.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.closeTo(0.2973017787506803, MAX_ERROR)); + MatcherAssert.assertThat(b3.getMax(), Matchers.closeTo(0.2500000000, MAX_ERROR)); + + Bucket b4 = b.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.closeTo(0.2500000000, MAX_ERROR)); + MatcherAssert.assertThat(b4.getMax(), Matchers.closeTo(0.2102241038134287, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithNegativeScale() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(0) + .build(), -2); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(4)); + + Bucket b1 = b.get(0); + MatcherAssert.assertThat(b1.getCount(), Matchers.equalTo(4L)); + MatcherAssert.assertThat(b1.getMin(), Matchers.closeTo(1, MAX_ERROR)); + MatcherAssert.assertThat(b1.getMax(), Matchers.closeTo(16, MAX_ERROR)); + + Bucket b2 = b.get(1); + MatcherAssert.assertThat(b2.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b2.getMin(), Matchers.closeTo(16, MAX_ERROR)); + MatcherAssert.assertThat(b2.getMax(), Matchers.closeTo(256, MAX_ERROR)); + + Bucket b3 = b.get(2); + MatcherAssert.assertThat(b3.getCount(), Matchers.equalTo(3L)); + MatcherAssert.assertThat(b3.getMin(), Matchers.closeTo(256, MAX_ERROR)); + MatcherAssert.assertThat(b3.getMax(), Matchers.closeTo(4096, MAX_ERROR)); + + Bucket b4 = b.get(3); + MatcherAssert.assertThat(b4.getCount(), Matchers.equalTo(2L)); + MatcherAssert.assertThat(b4.getMin(), Matchers.closeTo(4096, MAX_ERROR)); + MatcherAssert.assertThat(b4.getMax(), Matchers.closeTo(65536, MAX_ERROR)); + } + + @Test + public void testExponentialHistogramWithMaxOffsetOutOfRange() { + List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(1025) + .build(), -2); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(0)); + } + + @Test + public void testExponentialHistogramWithMaxNegativeOffsetOutOfRange() { + 
List b = OTelProtoCodec.createExponentialBuckets( + ExponentialHistogramDataPoint.Buckets.newBuilder() + .addBucketCounts(4) + .addBucketCounts(2) + .addBucketCounts(3) + .addBucketCounts(2) + .setOffset(-1025) + .build(), -2); + + MatcherAssert.assertThat(b.size(), Matchers.equalTo(0)); + } + + @Test + public void testBoundsKeyEquals() { + OTelProtoCodec.BoundsKey k1 = new OTelProtoCodec.BoundsKey(2, OTelProtoCodec.BoundsKey.Sign.POSITIVE); + OTelProtoCodec.BoundsKey k2 = new OTelProtoCodec.BoundsKey(2, OTelProtoCodec.BoundsKey.Sign.POSITIVE); + assertEquals(k1, k2); + } + + @Test + public void testBoundsKeyNotEqualsScale() { + OTelProtoCodec.BoundsKey k1 = new OTelProtoCodec.BoundsKey(2, OTelProtoCodec.BoundsKey.Sign.POSITIVE); + OTelProtoCodec.BoundsKey k2 = new OTelProtoCodec.BoundsKey(-2, OTelProtoCodec.BoundsKey.Sign.POSITIVE); + assertNotEquals(k1, k2); + } + + @Test + public void testBoundsKeyNotEqualsSign() { + OTelProtoCodec.BoundsKey k1 = new OTelProtoCodec.BoundsKey(2, OTelProtoCodec.BoundsKey.Sign.POSITIVE); + OTelProtoCodec.BoundsKey k2 = new OTelProtoCodec.BoundsKey(2, OTelProtoCodec.BoundsKey.Sign.NEGATIVE); + assertNotEquals(k1, k2); + } + } diff --git a/data-prepper-plugins/otel-trace-raw-prepper/build.gradle b/data-prepper-plugins/otel-trace-raw-prepper/build.gradle new file mode 100644 index 0000000000..0bb0e09504 --- /dev/null +++ b/data-prepper-plugins/otel-trace-raw-prepper/build.gradle @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +plugins { + id 'java' +} + +dependencies { + implementation project(':data-prepper-api') + implementation project(':data-prepper-plugins:common') + implementation 'commons-codec:commons-codec:1.15' + testImplementation project(':data-prepper-api').sourceSets.test.output + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation 'com.google.protobuf:protobuf-java-util:3.19.4' + implementation 'com.linecorp.armeria:armeria:1.9.2' + implementation 'com.linecorp.armeria:armeria-grpc:1.9.2' + implementation 'com.fasterxml.jackson.core:jackson-databind' + implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' + implementation 'com.google.guava:guava:31.1-jre' + testImplementation 'org.assertj:assertj-core:3.22.0' + testImplementation "org.mockito:mockito-inline:${versionMap.mockito}" + testImplementation 'org.hamcrest:hamcrest:2.2' + testImplementation 'org.awaitility:awaitility:4.1.1' +} + +jacocoTestCoverageVerification { + dependsOn jacocoTestReport + violationRules { + rule { //in addition to core projects rule + limit { + minimum = 0.95 //Increase this to 0.90 + } + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/otel-trace-raw-processor/build.gradle b/data-prepper-plugins/otel-trace-raw-processor/build.gradle index 681b67ae49..d30de7c6d0 100644 --- a/data-prepper-plugins/otel-trace-raw-processor/build.gradle +++ b/data-prepper-plugins/otel-trace-raw-processor/build.gradle @@ -12,7 +12,7 @@ dependencies { implementation project(':data-prepper-plugins:common') implementation 'commons-codec:commons-codec:1.15' testImplementation project(':data-prepper-api').sourceSets.test.output - implementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" implementation "com.google.protobuf:protobuf-java-util:${versionMap.protobufJavaUtil}" implementation 
"com.linecorp.armeria:armeria:${versionMap.armeria}" implementation "com.linecorp.armeria:armeria-grpc:${versionMap.armeriaGrpc}" diff --git a/data-prepper-plugins/otel-trace-source/build.gradle b/data-prepper-plugins/otel-trace-source/build.gradle index 066473d574..1231de81f2 100644 --- a/data-prepper-plugins/otel-trace-source/build.gradle +++ b/data-prepper-plugins/otel-trace-source/build.gradle @@ -15,7 +15,7 @@ dependencies { implementation project(':data-prepper-plugins:otel-proto-common') implementation 'commons-codec:commons-codec:1.15' testImplementation project(':data-prepper-api').sourceSets.test.output - implementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" implementation "commons-io:commons-io:2.11.0" implementation 'software.amazon.awssdk:s3' implementation 'software.amazon.awssdk:acm' diff --git a/data-prepper-plugins/peer-forwarder/build.gradle b/data-prepper-plugins/peer-forwarder/build.gradle new file mode 100644 index 0000000000..80cb6d0241 --- /dev/null +++ b/data-prepper-plugins/peer-forwarder/build.gradle @@ -0,0 +1,45 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +plugins { + id 'java' +} + +group 'com.amazon' +version '0.1.0' + +repositories { + mavenCentral() +} + +dependencies { + implementation project(':data-prepper-api') + implementation project(':data-prepper-plugins:otel-proto-common') + testImplementation project(':data-prepper-api').sourceSets.test.output + implementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" + implementation "com.linecorp.armeria:armeria:1.9.2" + implementation "com.linecorp.armeria:armeria-grpc:1.9.2" + implementation 'com.amazonaws:aws-java-sdk-s3' + implementation 'com.amazonaws:aws-java-sdk-acm' + implementation 'software.amazon.awssdk:servicediscovery' + implementation "commons-io:commons-io:2.11.0" + implementation "org.apache.commons:commons-lang3:3.12.0" + implementation "commons-validator:commons-validator:1.7" + testImplementation "org.hamcrest:hamcrest:2.2" + testImplementation "org.mockito:mockito-inline:${versionMap.mockito}" + testImplementation "commons-io:commons-io:2.10.0" + testImplementation 'org.awaitility:awaitility:4.1.1' +} + +jacocoTestCoverageVerification { + dependsOn jacocoTestReport + violationRules { + rule { + limit { + minimum = 0.90 + } + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/service-map-stateful/build.gradle b/data-prepper-plugins/service-map-stateful/build.gradle index 2b6d1e0cc9..e1aa701692 100644 --- a/data-prepper-plugins/service-map-stateful/build.gradle +++ b/data-prepper-plugins/service-map-stateful/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation 'commons-codec:commons-codec:1.15' implementation 'io.micrometer:micrometer-core' implementation 'com.fasterxml.jackson.core:jackson-databind' - implementation("io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}") { + implementation("io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}") { exclude group: 'com.google.protobuf', module: 'protobuf-java' } implementation "com.google.protobuf:protobuf-java:${versionMap.protobufJava}" diff --git a/e2e-test/peerforwarder/build.gradle b/e2e-test/peerforwarder/build.gradle index 071f7f7fd4..ed56198512 100644 --- a/e2e-test/peerforwarder/build.gradle +++ b/e2e-test/peerforwarder/build.gradle @@ -192,7 +192,7 @@ 
dependencies { integrationTestImplementation project(':data-prepper-plugins:opensearch') integrationTestImplementation project(':data-prepper-plugins:otel-trace-group-processor') integrationTestImplementation 'org.awaitility:awaitility:4.2.0' - integrationTestImplementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + integrationTestImplementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" integrationTestImplementation "com.google.protobuf:protobuf-java-util:${versionMap.protobufJavaUtil}" integrationTestImplementation "com.linecorp.armeria:armeria:${versionMap.armeria}" integrationTestImplementation "com.linecorp.armeria:armeria-grpc:${versionMap.armeriaGrpc}" diff --git a/e2e-test/trace/build.gradle b/e2e-test/trace/build.gradle index b169e3f264..96ecfec735 100644 --- a/e2e-test/trace/build.gradle +++ b/e2e-test/trace/build.gradle @@ -233,7 +233,7 @@ dependencies { integrationTestImplementation project(':data-prepper-plugins:opensearch') integrationTestImplementation project(':data-prepper-plugins:otel-trace-group-processor') integrationTestImplementation 'org.awaitility:awaitility:4.2.0' - integrationTestImplementation "io.opentelemetry:opentelemetry-proto:${versionMap.opentelemetryProto}" + integrationTestImplementation "io.opentelemetry.proto:opentelemetry-proto:${versionMap.opentelemetryProto}" integrationTestImplementation "com.google.protobuf:protobuf-java-util:${versionMap.protobufJavaUtil}" integrationTestImplementation "com.linecorp.armeria:armeria:${versionMap.armeria}" integrationTestImplementation "com.linecorp.armeria:armeria-grpc:${versionMap.armeriaGrpc}"
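
A note on the expected values in the exponential-histogram tests above: they follow directly from the OpenTelemetry base formula base = 2^(2^-scale), with bucket i covering [base^(offset+i), base^(offset+i+1)). The following standalone sketch (the class name and printing are illustrative only, not part of this patch) reproduces the scale-3 table asserted in testExponentialHistogram:

class ExponentialBoundsDemo {
    public static void main(String[] args) {
        int scale = 3;
        int offset = 0;
        // base = 2^(2^-scale); for scale 3 this is 2^(1/8), roughly 1.09051.
        double base = Math.pow(2.0, Math.pow(2.0, -scale));
        for (int i = 0; i < 8; i++) {
            double lower = Math.pow(base, offset + i);
            double upper = Math.pow(base, offset + i + 1);
            System.out.printf("bucket %d: [%.5f, %.5f)%n", i, lower, upper);
        }
    }
}

At scale 3 the base is 2^(1/8), so the first bucket ends near 1.09051 and the eighth closes at exactly 2.0; raising the offset to 2, as testExponentialHistogramWithOffset does, simply shifts the first lower bound to base^2, roughly 1.18921.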
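calculateBoundsForScale is paired with the EXPONENTIAL_BUCKET_BOUNDS ConcurrentHashMap so each bounds array is computed once per (scale, sign) key and then reused for every data point. A condensed sketch of that caching pattern, simplified here to the positive-sign case (the patch additionally distinguishes the offset sign via BoundsKey):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class BoundsCacheSketch {
    // One bounds array per scale, computed lazily and shared across threads.
    private static final Map<Integer, double[]> CACHE = new ConcurrentHashMap<>();

    static double[] boundsForScale(int scale) {
        return CACHE.computeIfAbsent(scale, s -> {
            double base = Math.pow(2.0, Math.pow(2.0, -s));
            // Highest index whose bound still fits into a finite double.
            int maxIndex = Math.toIntExact(Math.round(Math.log(Double.MAX_VALUE) / Math.log(base)));
            double[] bounds = new double[maxIndex + 1];
            for (int i = 0; i <= maxIndex; i++) {
                bounds[i] = Math.pow(base, i);
            }
            return bounds;
        });
    }

    public static void main(String[] args) {
        // 8193 bounds cover the positive double range at scale 3.
        System.out.println(boundsForScale(3).length);
    }
}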
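A usage sketch for the createBuckets API added to OTelProtoCodec, mirroring testCreateBuckets; it assumes the data-prepper-api and otel-proto-common modules are on the classpath:

import java.util.List;

import org.opensearch.dataprepper.model.metric.Bucket;
import org.opensearch.dataprepper.plugins.otel.codec.OTelProtoCodec;

class CreateBucketsDemo {
    public static void main(String[] args) {
        List<Long> counts = List.of(1L, 2L, 3L, 4L);   // N + 1 bucket counts
        List<Double> bounds = List.of(5D, 10D, 25D);   // N explicit bounds
        List<Bucket> buckets = OTelProtoCodec.createBuckets(counts, bounds);
        // Prints (-3.40282e+38, 5) -> 1 first and (25, 3.40282e+38) -> 4 last,
        // since +/- Float.MAX_VALUE stands in for the infinities.
        buckets.forEach(b -> System.out.printf("(%g, %g) -> %d%n", b.getMin(), b.getMax(), b.getCount()));
    }
}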
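Similarly, a usage sketch for convertExemplars, showing that span and trace ids come back hex encoded, the timestamp comes back as an ISO 8601 string, and filtered attribute keys are prefixed with "exemplar.attributes."; the zeroed ids and the fixed timestamp are chosen purely for illustration, matching the time used in the codec tests above:

import com.google.protobuf.ByteString;
import io.opentelemetry.proto.common.v1.AnyValue;
import io.opentelemetry.proto.common.v1.KeyValue;
import io.opentelemetry.proto.metrics.v1.Exemplar;
import org.opensearch.dataprepper.plugins.otel.codec.OTelProtoCodec;

import java.util.List;

class ConvertExemplarsDemo {
    public static void main(String[] args) {
        Exemplar protoExemplar = Exemplar.newBuilder()
                .addFilteredAttributes(KeyValue.newBuilder()
                        .setKey("key")
                        .setValue(AnyValue.newBuilder().setBoolValue(true).build()).build())
                .setAsDouble(3)
                .setSpanId(ByteString.copyFrom(new byte[8]))
                .setTraceId(ByteString.copyFrom(new byte[16]))
                .setTimeUnixNano(1_590_328_860_000_000_000L) // 2020-05-24T14:01:00Z
                .build();

        // The model Exemplar is referenced fully qualified to avoid the name clash
        // with the proto Exemplar imported above.
        List<org.opensearch.dataprepper.model.metric.Exemplar> converted =
                OTelProtoCodec.convertExemplars(List.of(protoExemplar));
        System.out.println(converted.get(0).getTime()); // 2020-05-24T14:01:00Z
    }
}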