This is an automated email from the ASF dual-hosted git repository.
piotr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iggy.git
The following commit(s) were added to refs/heads/master by this push:
new 1ca37f199 perf(bench): downsample time series charts with LTTB (#2831)
1ca37f199 is described below
commit 1ca37f19903a9ec2ba3d1493d9b5ea6a6072d6d5
Author: Hubert Gruszecki <[email protected]>
AuthorDate: Fri Feb 27 20:09:00 2026 +0100
perf(bench): downsample time series charts with LTTB (#2831)
ECharts becomes sluggish rendering 100k+ data points,
common with multi-series 10-minute benchmarks at 10ms
sampling. LTTB (Largest-Triangle-Three-Buckets) reduces
each series to 3000 points at the chart boundary while
preserving visual shape -- peaks, valleys, and trends.
Full-resolution data remains in report.json; only the
charming conversion path is downsampled. The latency
distribution chart (50 pre-computed bins) is unaffected.
---
Cargo.lock | 6 +-
DEPENDENCIES.md | 6 +-
core/bench/dashboard/frontend/Cargo.toml | 2 +-
core/bench/dashboard/server/Cargo.toml | 2 +-
core/bench/report/Cargo.toml | 2 +-
core/bench/report/src/lib.rs | 16 ++--
core/bench/report/src/types/time_series.rs | 11 +++
core/bench/report/src/utils.rs | 140 ++++++++++++++++++++++++++++-
8 files changed, 169 insertions(+), 16 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index bdbedc8d4..7ce811cec 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1052,7 +1052,7 @@ checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
[[package]]
name = "bench-dashboard-frontend"
-version = "0.5.0"
+version = "0.5.1-edge.1"
dependencies = [
"bench-dashboard-shared",
"bench-report",
@@ -1082,7 +1082,7 @@ dependencies = [
[[package]]
name = "bench-report"
-version = "0.2.2"
+version = "0.2.3-edge.1"
dependencies = [
"charming",
"colored",
@@ -4553,7 +4553,7 @@ dependencies = [
[[package]]
name = "iggy-bench-dashboard-server"
-version = "0.6.0"
+version = "0.6.1-edge.1"
dependencies = [
"actix-cors",
"actix-files",
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 2f3a6e0b9..537fcd7b3 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -84,9 +84,9 @@ base64: 0.22.1, "Apache-2.0 OR MIT",
base64ct: 1.8.3, "Apache-2.0 OR MIT",
bdd: 0.0.1, "Apache-2.0",
beef: 0.5.2, "Apache-2.0 OR MIT",
-bench-dashboard-frontend: 0.5.0, "Apache-2.0",
+bench-dashboard-frontend: 0.5.1-edge.1, "Apache-2.0",
bench-dashboard-shared: 0.1.0, "Apache-2.0",
-bench-report: 0.2.2, "Apache-2.0",
+bench-report: 0.2.3-edge.1, "Apache-2.0",
bench-runner: 0.1.0, "Apache-2.0",
bigdecimal: 0.4.10, "Apache-2.0 OR MIT",
bimap: 0.6.3, "Apache-2.0 OR MIT",
@@ -392,7 +392,7 @@ idna: 1.1.0, "Apache-2.0 OR MIT",
idna_adapter: 1.2.1, "Apache-2.0 OR MIT",
iggy: 0.9.0, "Apache-2.0",
iggy-bench: 0.4.0, "Apache-2.0",
-iggy-bench-dashboard-server: 0.6.0, "Apache-2.0",
+iggy-bench-dashboard-server: 0.6.1-edge.1, "Apache-2.0",
iggy-cli: 0.11.0, "Apache-2.0",
iggy-connectors: 0.3.0, "Apache-2.0",
iggy-mcp: 0.3.0, "Apache-2.0",
diff --git a/core/bench/dashboard/frontend/Cargo.toml b/core/bench/dashboard/frontend/Cargo.toml
index b35c5670c..7e02e1e71 100644
--- a/core/bench/dashboard/frontend/Cargo.toml
+++ b/core/bench/dashboard/frontend/Cargo.toml
@@ -18,7 +18,7 @@
[package]
name = "bench-dashboard-frontend"
license = "Apache-2.0"
-version = "0.5.0"
+version = "0.5.1-edge.1"
edition = "2024"
[package.metadata.cargo-machete]
diff --git a/core/bench/dashboard/server/Cargo.toml b/core/bench/dashboard/server/Cargo.toml
index 5353d9b55..4ec3e5a24 100644
--- a/core/bench/dashboard/server/Cargo.toml
+++ b/core/bench/dashboard/server/Cargo.toml
@@ -18,7 +18,7 @@
[package]
name = "iggy-bench-dashboard-server"
license = "Apache-2.0"
-version = "0.6.0"
+version = "0.6.1-edge.1"
edition = "2024"
[dependencies]
diff --git a/core/bench/report/Cargo.toml b/core/bench/report/Cargo.toml
index 45cbde6bc..9041442d2 100644
--- a/core/bench/report/Cargo.toml
+++ b/core/bench/report/Cargo.toml
@@ -17,7 +17,7 @@
[package]
name = "bench-report"
-version = "0.2.2"
+version = "0.2.3-edge.1"
edition = "2024"
description = "Benchmark report and chart generation library for iggy-bench binary and iggy-benchmarks-dashboard web app"
license = "Apache-2.0"
diff --git a/core/bench/report/src/lib.rs b/core/bench/report/src/lib.rs
index 8a180752f..ae979607e 100644
--- a/core/bench/report/src/lib.rs
+++ b/core/bench/report/src/lib.rs
@@ -52,7 +52,7 @@ pub fn create_throughput_chart(
chart = chart.add_dual_time_line_series(
&format!("{} {} [MB/s]", actor_type, metrics.summary.actor_id),
- metrics.throughput_mb_ts.as_charming_points(),
+ metrics.throughput_mb_ts.as_downsampled_charming_points(),
None,
0.4,
0,
@@ -60,7 +60,7 @@ pub fn create_throughput_chart(
);
chart = chart.add_dual_time_line_series(
&format!("{} {} [msg/s]", actor_type, metrics.summary.actor_id),
- metrics.throughput_msg_ts.as_charming_points(),
+ metrics.throughput_msg_ts.as_downsampled_charming_points(),
None,
0.4,
1,
@@ -77,7 +77,9 @@ pub fn create_throughput_chart(
chart = chart.add_dual_time_line_series(
&format!("All {}s [MB/s]", metrics.summary.kind.actor()),
- metrics.avg_throughput_mb_ts.as_charming_points(),
+ metrics
+ .avg_throughput_mb_ts
+ .as_downsampled_charming_points(),
None,
1.0,
0,
@@ -85,7 +87,9 @@ pub fn create_throughput_chart(
);
chart = chart.add_dual_time_line_series(
&format!("All {}s [msg/s]", metrics.summary.kind.actor()),
- metrics.avg_throughput_msg_ts.as_charming_points(),
+ metrics
+ .avg_throughput_msg_ts
+ .as_downsampled_charming_points(),
None,
1.0,
1,
@@ -117,7 +121,7 @@ pub fn create_latency_chart(
chart = chart.add_time_series(
&format!("{} {} [ms]", actor_type, metrics.summary.actor_id),
- metrics.latency_ts.as_charming_points(),
+ metrics.latency_ts.as_downsampled_charming_points(),
None,
0.3,
);
@@ -131,7 +135,7 @@ pub fn create_latency_chart(
chart = chart.add_dual_time_line_series(
&format!("Avg {}s [ms]", metrics.summary.kind.actor()),
- metrics.avg_latency_ts.as_charming_points(),
+ metrics.avg_latency_ts.as_downsampled_charming_points(),
None,
1.0,
0,
diff --git a/core/bench/report/src/types/time_series.rs b/core/bench/report/src/types/time_series.rs
index 73f343713..bb1912067 100644
--- a/core/bench/report/src/types/time_series.rs
+++ b/core/bench/report/src/types/time_series.rs
@@ -48,6 +48,8 @@ pub enum TimeSeriesKind {
Latency,
}
+const MAX_CHART_POINTS: usize = 3000;
+
impl TimeSeries {
pub fn as_charming_points(&self) -> Vec<Vec<f64>> {
self.points
@@ -55,4 +57,13 @@ impl TimeSeries {
.map(|p| vec![p.time_s, p.value])
.collect()
}
+
+ /// LTTB-downsampled points for chart rendering.
+ /// Caps output at `MAX_CHART_POINTS` to keep ECharts responsive.
+ pub fn as_downsampled_charming_points(&self) -> Vec<Vec<f64>> {
+ crate::utils::lttb_downsample(&self.points, MAX_CHART_POINTS)
+ .iter()
+ .map(|p| vec![p.time_s, p.value])
+ .collect()
+ }
}
diff --git a/core/bench/report/src/utils.rs b/core/bench/report/src/utils.rs
index 7627b5b5e..b4d0abcc7 100644
--- a/core/bench/report/src/utils.rs
+++ b/core/bench/report/src/utils.rs
@@ -16,7 +16,7 @@
* under the License.
*/
-use crate::time_series::TimeSeries;
+use crate::time_series::{TimePoint, TimeSeries};
use serde::Serializer;
pub(crate) fn round_float<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
@@ -48,6 +48,66 @@ pub fn max(series: &TimeSeries) -> Option<f64> {
.max_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
}
+/// LTTB (Largest-Triangle-Three-Buckets) downsampling.
+///
+/// Reduces `points` to at most `threshold` points while preserving visual shape
+/// (peaks, valleys, trends). Returns a clone if `points.len() <= threshold`.
+pub fn lttb_downsample(points: &[TimePoint], threshold: usize) -> Vec<TimePoint> {
+ let len = points.len();
+ if len <= threshold || threshold < 3 {
+ return points.to_vec();
+ }
+
+ let bucket_count = threshold - 2;
+ let bucket_size = (len - 2) as f64 / bucket_count as f64;
+
+ let mut result = Vec::with_capacity(threshold);
+ result.push(points[0].clone());
+
+ let mut prev_selected = 0usize;
+
+ for bucket_idx in 0..bucket_count {
+ // Compute average of the *next* bucket (used as the third triangle vertex)
+ let next_start = ((bucket_idx + 1) as f64 * bucket_size) as usize + 1;
+ let next_end = (((bucket_idx + 2) as f64 * bucket_size) as usize + 1).min(len - 1);
+
+ let mut avg_time = 0.0;
+ let mut avg_value = 0.0;
+ let next_count = (next_end - next_start + 1) as f64;
+ for p in &points[next_start..=next_end] {
+ avg_time += p.time_s;
+ avg_value += p.value;
+ }
+ avg_time /= next_count;
+ avg_value /= next_count;
+
+ // Current bucket range
+ let cur_start = (bucket_idx as f64 * bucket_size) as usize + 1;
+ let cur_end = next_start;
+
+ // Pick the point in this bucket that forms the largest triangle with
+ // the previously selected point and the next-bucket average.
+ let prev = &points[prev_selected];
+ let mut max_area = -1.0;
+ let mut best = cur_start;
+ for (i, p) in points[cur_start..cur_end].iter().enumerate() {
+ let area = ((prev.time_s - avg_time) * (p.value - prev.value)
+ - (prev.time_s - p.time_s) * (avg_value - prev.value))
+ .abs();
+ if area > max_area {
+ max_area = area;
+ best = cur_start + i;
+ }
+ }
+
+ result.push(points[best].clone());
+ prev_selected = best;
+ }
+
+ result.push(points[len - 1].clone());
+ result
+}
+
/// Calculate the standard deviation of values from a TimeSeries
///
/// Returns None if the TimeSeries has fewer than 2 points
@@ -73,3 +133,81 @@ pub fn std_dev(series: &TimeSeries) -> Option<f64> {
Some(variance.sqrt())
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn make_points(values: impl IntoIterator<Item = f64>) -> Vec<TimePoint> {
+ values
+ .into_iter()
+ .enumerate()
+ .map(|(i, v)| TimePoint::new(i as f64, v))
+ .collect()
+ }
+
+ #[test]
+ fn lttb_passthrough_when_below_threshold() {
+ let pts = make_points([1.0, 2.0, 3.0]);
+ let result = lttb_downsample(&pts, 5);
+ assert_eq!(result, pts);
+ }
+
+ #[test]
+ fn lttb_passthrough_when_equal_to_threshold() {
+ let pts = make_points([1.0, 2.0, 3.0, 4.0, 5.0]);
+ let result = lttb_downsample(&pts, 5);
+ assert_eq!(result, pts);
+ }
+
+ #[test]
+ fn lttb_reduces_count() {
+ let pts = make_points((0..10_000).map(|i| (i as f64).sin()));
+ let result = lttb_downsample(&pts, 100);
+ assert_eq!(result.len(), 100);
+ }
+
+ #[test]
+ fn lttb_preserves_endpoints() {
+ let pts = make_points((0..1000).map(|i| i as f64 * 0.1));
+ let result = lttb_downsample(&pts, 50);
+ assert_eq!(result.first().unwrap().time_s, pts.first().unwrap().time_s);
+ assert_eq!(result.last().unwrap().time_s, pts.last().unwrap().time_s);
+ }
+
+ #[test]
+ fn lttb_preserves_peaks() {
+ // Triangle wave with clear peaks at indices 50, 150, 250, ...
+ let pts: Vec<TimePoint> = (0..500)
+ .map(|i| {
+ let v = if (i / 50) % 2 == 0 {
+ (i % 50) as f64
+ } else {
+ (50 - i % 50) as f64
+ };
+ TimePoint::new(i as f64, v)
+ })
+ .collect();
+
+ let result = lttb_downsample(&pts, 100);
+ let result_values: Vec<f64> = result.iter().map(|p| p.value).collect();
+ let max_val = result_values
+ .iter()
+ .cloned()
+ .fold(f64::NEG_INFINITY, f64::max);
+ // LTTB should retain the peaks (value = 50) in the downsampled output
+ assert!(
+ (max_val - 50.0).abs() < f64::EPSILON,
+ "peak 50.0 not preserved, got max {max_val}"
+ );
+ }
+
+ #[test]
+ fn lttb_edge_cases() {
+ assert!(lttb_downsample(&[], 10).is_empty());
+ let one = make_points([42.0]);
+ assert_eq!(lttb_downsample(&one, 10), one);
+ let two = make_points([1.0, 2.0]);
+ assert_eq!(lttb_downsample(&two, 10), two);
+ }
+}