Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(metric_unify): refactor and add diff command #1165

Draft
wants to merge 2 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions ci/scripts/metric_unify/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
clap = { version = "4.0", features = ["derive"] }
num-format = "0.4"
itertools = { workspace = true }
72 changes: 72 additions & 0 deletions ci/scripts/metric_unify/src/aggregate.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
use std::{collections::HashSet, path::Path};

use crate::metric::{AggregationEntry, AggregationFile, AggregationOperation, Metric};

/// Load aggregation metrics from a file.
///
/// Opens the JSON file at `aggregation_file_path`, deserializes it as an
/// [`AggregationFile`], and returns its list of aggregation entries.
///
/// # Errors
/// Returns an error if the file cannot be opened or if its contents do not
/// deserialize into the expected `AggregationFile` schema.
pub fn load_aggregation_metrics<P: AsRef<Path>>(
    aggregation_file_path: P,
) -> Result<Vec<AggregationEntry>, Box<dyn std::error::Error>> {
    // Buffer the reads: serde_json::from_reader on a bare File performs many
    // tiny reads, which the serde_json docs call out as a performance pitfall.
    let reader = std::io::BufReader::new(std::fs::File::open(aggregation_file_path)?);
    let aggregation_file: AggregationFile = serde_json::from_reader(reader)?;
    Ok(aggregation_file.aggregations)
}

/// Generate aggregation tables.
///
/// For each aggregation entry, keeps the metrics whose name matches the
/// entry and whose primary labels contain every `group_by` label, groups the
/// survivors by their secondary labels, and reduces each group with the
/// entry's operation (`Sum` or `Unique`). One aggregated `Metric` is emitted
/// per (entry, secondary-label set) pair; entries that match nothing are
/// skipped.
pub fn aggregate_metrics(agg_entries: Vec<AggregationEntry>, metrics: Vec<Metric>) -> Vec<Metric> {
    let mut results = Vec::new();
    for agg_entry in agg_entries {
        let group_by = &agg_entry.group_by;
        let name = &agg_entry.name;

        // 1. Filter metrics by name and by group_by (all labels must appear
        //    among the metric's primary labels).
        let matching: Vec<&Metric> = metrics
            .iter()
            .filter(|m| name == &m.name && group_by.iter().all(|g| m.primary_labels.contains(g)))
            .collect();
        if matching.is_empty() {
            continue;
        }

        // 2. Collect the distinct secondary-label sets present among the
        //    matching metrics; each set defines one aggregation group.
        let label_sets: HashSet<Vec<String>> =
            matching.iter().map(|m| m.secondary_labels.clone()).collect();

        // 3. Reduce each group with the entry's operation and emit the result.
        for secondary_labels in label_sets {
            let group: Vec<&Metric> = matching
                .iter()
                .copied()
                .filter(|m| m.secondary_labels == secondary_labels)
                .collect();
            // `group` is non-empty by construction: every label set came from
            // at least one matching metric.
            let aggregated_value: f64 = match agg_entry.operation {
                AggregationOperation::Sum => group.iter().map(|m| m.value).sum(),
                AggregationOperation::Unique => {
                    // `Unique` asserts every metric in the group reports the
                    // same value. Comparing against the first element (rather
                    // than folding from a 0.0 sentinel, as before) also
                    // catches mismatches when a legitimate value is 0.0,
                    // which the sentinel approach silently accepted.
                    let first = group[0].value;
                    assert!(
                        group.iter().all(|m| m.value == first),
                        "non-unique values for metric '{}'",
                        name
                    );
                    first
                }
            };
            results.push(Metric {
                name: name.to_string(),
                primary_labels: group_by.clone(),
                secondary_labels,
                value: aggregated_value,
                ..Default::default()
            });
        }
    }

    results
}
21 changes: 21 additions & 0 deletions ci/scripts/metric_unify/src/diff.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
use crate::metric::Metric;

/// Annotate `news` with diff information against `olds`.
///
/// For every new metric, looks for a matching old metric — matching per
/// `Metric`'s equality (presumably identity fields only, not `value`;
/// otherwise the changed-value condition below could never hold — TODO
/// confirm against `Metric`'s `PartialEq` impl) — whose value changed.
/// When found, the new metric is annotated with the absolute and percentage
/// difference; otherwise it is passed through unchanged.
pub fn diff_metrics(news: Vec<Metric>, olds: Vec<Metric>) -> Vec<Metric> {
    let mut results = Vec::with_capacity(news.len());
    for new in news {
        let changed = olds
            .iter()
            .find(|old| **old == new && old.value != new.value);
        match changed {
            Some(old) => {
                let diff = new.value - old.value;
                // Guard the percentage: dividing by an old value of 0.0 would
                // produce +/-infinity, which is meaningless in a report.
                let diff_percentage = (old.value != 0.0).then(|| diff / old.value * 100.0);
                results.push(Metric {
                    diff: Some(diff),
                    diff_percentage,
                    ..new
                });
            }
            // No changed counterpart: move the metric through as-is.
            // (The original cloned here even though `new` is owned.)
            None => results.push(new),
        }
    }

    results
}
Loading