Skip to content

Commit 5a1efa4

Browse files
committed
Fix "metric was collected before" error
The networksecurity.googleapis.com/https/request_count metric sometimes returns duplicate time series. Filter these duplicates out to avoid "metric was collected before" errors. Signed-Off-By: Colin Leroy-Mira <[email protected]>
1 parent 93b6031 commit 5a1efa4

File tree

1 file changed

+30
-1
lines changed

1 file changed

+30
-1
lines changed

collectors/monitoring_collector.go

+30-1
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ import (
1818
"fmt"
1919
"log/slog"
2020
"math"
21+
"reflect"
2122
"strings"
2223
"sync"
2324
"time"
@@ -399,6 +400,28 @@ func (c *MonitoringCollector) reportMonitoringMetrics(ch chan<- prometheus.Metri
399400
return <-errChannel
400401
}
401402

403+
func (c *MonitoringCollector) filterDuplicateTimeSeries(
404+
timeSeries []*monitoring.TimeSeries,
405+
) []*monitoring.TimeSeries {
406+
var keptTimeSeries []*monitoring.TimeSeries
407+
408+
for _, timeSerie := range timeSeries {
409+
var skip = false
410+
411+
for _, keptTimeSerie := range keptTimeSeries {
412+
// Did we already find a timeSerie with the exact same key-value labels?
413+
if reflect.DeepEqual(timeSerie.Metric.Labels, keptTimeSerie.Metric.Labels) {
414+
skip = true
415+
break
416+
}
417+
}
418+
if !skip {
419+
keptTimeSeries = append(keptTimeSeries, timeSerie)
420+
}
421+
}
422+
return keptTimeSeries
423+
}
424+
402425
func (c *MonitoringCollector) reportTimeSeriesMetrics(
403426
page *monitoring.ListTimeSeriesResponse,
404427
metricDescriptor *monitoring.MetricDescriptor,
@@ -408,6 +431,7 @@ func (c *MonitoringCollector) reportTimeSeriesMetrics(
408431
var metricValue float64
409432
var metricValueType prometheus.ValueType
410433
var newestTSPoint *monitoring.Point
434+
var uniqueTimeSeries []*monitoring.TimeSeries
411435

412436
timeSeriesMetrics, err := newTimeSeriesMetrics(metricDescriptor,
413437
ch,
@@ -419,7 +443,12 @@ func (c *MonitoringCollector) reportTimeSeriesMetrics(
419443
if err != nil {
420444
return fmt.Errorf("error creating the TimeSeriesMetrics %v", err)
421445
}
422-
for _, timeSeries := range page.TimeSeries {
446+
447+
// Make sure we don't feed Prometheus duplicate time series if the
448+
// metrics page gives us some.
449+
uniqueTimeSeries = c.filterDuplicateTimeSeries(page.TimeSeries)
450+
451+
for _, timeSeries := range uniqueTimeSeries {
423452
newestEndTime := time.Unix(0, 0)
424453
for _, point := range timeSeries.Points {
425454
endTime, err := time.Parse(time.RFC3339Nano, point.Interval.EndTime)

0 commit comments

Comments
 (0)