const BatchMetricType = Object.freeze({
  BYTE_SIZE: 'byte_size',
  EVENT_COUNT: 'event_count',
});

/**
 * Best-effort lookup of the most recent rolling average for a pipeline's
 * batch metric, preferring the shortest (freshest) window that has data.
 *
 * @param {string} pipelineName Key into data.stats.pipelines.
 * @param {string} metricType One of BatchMetricType.BYTE_SIZE or BatchMetricType.EVENT_COUNT.
 * @returns {*} The first non-empty of last_1_minute / last_5_minutes /
 *   last_15_minutes, or the string "N/A" when the metric path is missing
 *   or every window is unset/blank.
 */
const _batchAverageRollingFor = (pipelineName, metricType) => {
  try {
    const averages = data.stats.pipelines[pipelineName].batch[metricType].average;
    if (!averages) return "N/A";
    // Prefer the freshest window; skip slots that are unset or blank.
    for (const windowKey of ["last_1_minute", "last_5_minutes", "last_15_minutes"]) {
      const value = averages[windowKey];
      if (value != null && value !== "") return value;
    }
  } catch (ignored) {
    // Missing pipeline / batch / metric path — fall through to "N/A",
    // mirroring the _try() best-effort convention used elsewhere in this template.
  }
  return "N/A";
};
} {hasBatchP90 && 1 min 5 min 15 min + Lifetime + } + {hasBatchMax && + 1 min + 5 min + 15 min + Lifetime } @@ -888,31 +917,47 @@ Event count {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.current)} - {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.average.lifetime)} + {_batchAverageRollingFor(selectedPipeline.name, BatchMetricType.EVENT_COUNT)} {hasBatchP50 && {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_1_minute)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_5_minutes)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p50.last_15_minutes)} + N/A } {hasBatchP90 && {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_1_minute)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_5_minutes)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.event_count.p90.last_15_minutes)} + N/A + } + {hasBatchMax && + N/A + N/A + N/A + N/A } Byte size {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.current)} - {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.average.lifetime)} + {_batchAverageRollingFor(selectedPipeline.name, BatchMetricType.BYTE_SIZE)} {hasBatchP50 && {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_1_minute)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_5_minutes)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.last_15_minutes)} + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p50.lifetime)} } {hasBatchP90 && {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_1_minute)} {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_5_minutes)} {_try(() => 
data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.last_15_minutes)} + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.p90.lifetime)} + } + {hasBatchMax && + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_1_minute)} + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_5_minutes)} + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.last_15_minutes)} + {_try(() => data.stats.pipelines[selectedPipeline.name].batch.byte_size.max.lifetime)} }