From 6b3a54ee7cf9b4fedf6df33926cb5004bb8ab0eb Mon Sep 17 00:00:00 2001
From: andsel
Date: Thu, 16 Apr 2026 10:22:43 +0200
Subject: [PATCH 1/2] Updates the report generated when taking Logstash
 diagnostics to include the max metric, but only for the batch's byte_size.
 Furthermore, adds the lifetime window for all batch-related metrics
 (byte_size and event_count)

---
 .../flow_metrics.html.ftlh | 64 ++++++++++++++++---
 1 file changed, 56 insertions(+), 8 deletions(-)

diff --git a/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh b/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
index 21238926..9c6a449e 100644
--- a/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
+++ b/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
@@ -159,6 +159,30 @@
         }
     }
 
+    const _batchByteSizeAverageRolling = (pipelineName) => {
+        try {
+            const a = data.stats.pipelines[pipelineName].batch.byte_size.average;
+            if (!a) return "N/A";
+            for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
+                const v = a[k];
+                if (v !== null && v !== undefined && v !== "") return v;
+            }
+        } catch (e) {}
+        return "N/A";
+    }
+
+    const _batchEventCountAverageRolling = (pipelineName) => {
+        try {
+            const a = data.stats.pipelines[pipelineName].batch.event_count.average;
+            if (!a) return "N/A";
+            for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
+                const v = a[k];
+                if (v !== null && v !== undefined && v !== "") return v;
+            }
+        } catch (e) {}
+        return "N/A";
+    }
+
     const createComparisonFormatter = (
         analysisWindow,
         flow,
@@ -757,10 +781,9 @@
         };
     }
 
-    const hasBatchP50 = _try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50) !== ""
-        || _try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50) !== "";
-    const hasBatchP90 = _try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90) !== ""
-        || _try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90) !== "";
+    const hasBatchP50 = _try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50) !== "";
+    const hasBatchP90 = _try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90) !== "";
+    const hasBatchMax = _try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max) !== "";
 
     return (
@@ -865,8 +888,9 @@
                 Metric
                 Current
                 Average
-                {hasBatchP50 && p50}
-                {hasBatchP90 && p90}
+                {hasBatchP50 && p50}
+                {hasBatchP90 && p90}
+                {hasBatchMax && max}
@@ -876,11 +900,19 @@
                 {hasBatchP50 &&
                     1 min
                     5 min
                     15 min
+                    Lifetime
                 }
                 {hasBatchP90 &&
                     1 min
                     5 min
                     15 min
+                    Lifetime
+                }
+                {hasBatchMax &&
+                    1 min
+                    5 min
+                    15 min
+                    Lifetime
                 }
@@ -888,31 +920,47 @@
                 Event count
                 {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.current)}
-                {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.average.lifetime)}
+                {_batchEventCountAverageRolling(selectedPipeline.name)}
                 {hasBatchP50 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_5_minutes)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_15_minutes)}
+                    N/A
                 }
                 {hasBatchP90 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_5_minutes)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_15_minutes)}
+                    N/A
+                }
+                {hasBatchMax &&
+                    N/A
+                    N/A
+                    N/A
+                    N/A
                 }
 
                 Byte size
                 {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.current)}
-                {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.average.lifetime)}
+                {_batchByteSizeAverageRolling(selectedPipeline.name)}
                 {hasBatchP50 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_5_minutes)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_15_minutes)}
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.lifetime)}
                 }
                 {hasBatchP90 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_5_minutes)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_15_minutes)}
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.lifetime)}
+                }
+                {hasBatchMax &&
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_1_minute)}
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_5_minutes)}
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_15_minutes)}
+                    {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.lifetime)}
                }
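The two pieces of logic patch 1 introduces are easiest to check outside the template: the hasBatch* presence gates built on _try, and the shortest-window-first fallback inside the _batch*AverageRolling helpers. The following is a minimal standalone sketch, not part of the patch; the data fixture is invented for illustration, and _try is assumed to normalize a throwing or empty accessor to the empty string, since its real definition sits outside this diff.

    // Standalone sketch of the gating and fallback logic from patch 1.
    // ASSUMPTION: _try evaluates an accessor and normalizes any failure or
    // missing value to ""; its real definition lives elsewhere in the template.
    const _try = (fn) => {
        try {
            const v = fn();
            return v === null || v === undefined ? "" : v;
        } catch (e) {
            return "";
        }
    };

    // Invented fixture: a pipeline that reports byte_size p50/p90 but no max.
    const data = {
        stats: {
            pipelines: {
                main: {
                    batch: {
                        byte_size: {
                            average: { last_1_minute: "", last_5_minutes: 2048, last_15_minutes: 1900 },
                            p50: { last_1_minute: 1024 },
                            p90: { last_1_minute: 4096 },
                        },
                    },
                },
            },
        },
    };

    // Same shape as the hasBatchP50/hasBatchP90/hasBatchMax checks above.
    const name = "main";
    const hasBatchP50 = _try(() => data.stats.pipelines[name].batch.byte_size.p50) !== "";
    const hasBatchP90 = _try(() => data.stats.pipelines[name].batch.byte_size.p90) !== "";
    const hasBatchMax = _try(() => data.stats.pipelines[name].batch.byte_size.max) !== "";
    console.log(hasBatchP50, hasBatchP90, hasBatchMax); // true true false

    // The rolling-average helpers walk the windows shortest-first and return
    // the first populated one, so an empty 1-minute window falls back to 5 minutes.
    const firstPopulatedWindow = (a) => {
        if (!a) return "N/A";
        for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
            const v = a[k];
            if (v !== null && v !== undefined && v !== "") return v;
        }
        return "N/A";
    };
    console.log(firstPopulatedWindow(data.stats.pipelines.main.batch.byte_size.average)); // 2048

Note the changed semantics in the reworked gates: they probe byte_size alone, so after patch 1 the p50/p90 columns no longer render for a pipeline that exposes only event_count percentiles.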
From d87912b65f36347741109503818bb1fda4773d9e Mon Sep 17 00:00:00 2001
From: andsel
Date: Wed, 22 Apr 2026 11:06:13 +0200
Subject: [PATCH 2/2] Refactored two almost identical functions to share the
 same code

---
 .../flow_metrics.html.ftlh | 29 +++++++++----------
 1 file changed, 13 insertions(+), 16 deletions(-)

diff --git a/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh b/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
index 9c6a449e..ddcc2616 100644
--- a/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
+++ b/src/main/resources/logstash-diagnostic-templates/flow_metrics.html.ftlh
@@ -159,21 +159,18 @@
         }
     }
 
-    const _batchByteSizeAverageRolling = (pipelineName) => {
+    const BatchMetricType = Object.freeze({
+        BYTE_SIZE: 'byte_size',
+        EVENT_COUNT: 'event_count',
+    });
+
+    /**
+     * @param {string} pipelineName
+     * @param {string} metricType One of BatchMetricType.BYTE_SIZE or BatchMetricType.EVENT_COUNT.
+     */
+    const _batchAverageRollingFor = (pipelineName, metricType) => {
         try {
-            const a = data.stats.pipelines[pipelineName].batch.byte_size.average;
-            if (!a) return "N/A";
-            for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
-                const v = a[k];
-                if (v !== null && v !== undefined && v !== "") return v;
-            }
-        } catch (e) {}
-        return "N/A";
-    }
-
-    const _batchEventCountAverageRolling = (pipelineName) => {
-        try {
-            const a = data.stats.pipelines[pipelineName].batch.event_count.average;
+            const a = data.stats.pipelines[pipelineName].batch[metricType].average;
             if (!a) return "N/A";
             for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
                 const v = a[k];
@@ -920,7 +917,7 @@
                 Event count
                 {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.current)}
-                {_batchEventCountAverageRolling(selectedPipeline.name)}
+                {_batchAverageRollingFor(selectedPipeline.name, BatchMetricType.EVENT_COUNT)}
                 {hasBatchP50 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_5_minutes)}
@@ -943,7 +940,7 @@
                 Byte size
                 {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.current)}
-                {_batchByteSizeAverageRolling(selectedPipeline.name)}
+                {_batchAverageRollingFor(selectedPipeline.name, BatchMetricType.BYTE_SIZE)}
                 {hasBatchP50 &&
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_1_minute)}
                     {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_5_minutes)}
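As a closing illustration of patch 2's refactor, the shared helper also runs standalone. This is a sketch, not the template itself; the data fixture is again invented. The point is that the frozen BatchMetricType values double as the JSON keys under batch, which is what lets one batch[metricType] lookup replace the two hard-coded paths.

    // Standalone sketch of the shared helper introduced in patch 2.
    const BatchMetricType = Object.freeze({
        BYTE_SIZE: 'byte_size',
        EVENT_COUNT: 'event_count',
    });

    // Invented fixture for illustration.
    const data = {
        stats: {
            pipelines: {
                main: {
                    batch: {
                        byte_size: { average: { last_1_minute: 512 } },
                        event_count: { average: { last_1_minute: "", last_5_minutes: 125 } },
                    },
                },
            },
        },
    };

    // One body instead of two near-identical copies: the metric name is now
    // data rather than code, so a fix to the fallback logic lands in both
    // metrics at once.
    const _batchAverageRollingFor = (pipelineName, metricType) => {
        try {
            const a = data.stats.pipelines[pipelineName].batch[metricType].average;
            if (!a) return "N/A";
            for (const k of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
                const v = a[k];
                if (v !== null && v !== undefined && v !== "") return v;
            }
        } catch (e) {} // missing pipeline or metric path: fall through to "N/A"
        return "N/A";
    };

    console.log(_batchAverageRollingFor('main', BatchMetricType.BYTE_SIZE));   // 512
    console.log(_batchAverageRollingFor('main', BatchMetricType.EVENT_COUNT)); // 125

Object.freeze makes the lookup table effectively constant, so a typo in a metric name can only originate at the two call sites rather than deep inside duplicated function bodies.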