hashicorp/go-metrics

Using labels causes Prometheus sink to panic

dougfort opened this issue · 0 comments

Note this code in prometheus.go:

func (p *PrometheusSink) SetGaugeWithLabels(parts []string, val float32, labels []metrics.Label) {
        p.mu.Lock()
        defer p.mu.Unlock()
        key, hash := p.flattenKey(parts, labels)
        g, ok := p.gauges[hash] // lookup uses the hash...
        if !ok {
                g = prometheus.NewGauge(prometheus.GaugeOpts{
                        Name:        key,
                        Help:        key,
                        ConstLabels: prometheusLabels(labels),
                })
                prometheus.MustRegister(g)
                p.gauges[key] = g // ...but the store uses the key
        }
        g.Set(float64(val))
}

The lookup uses p.gauges[hash], but the store uses p.gauges[key] = g. When labels are present, key and hash differ, so the stored gauge is never found on later calls: every call with the same name and labels builds a fresh collector with identical Name and ConstLabels and calls prometheus.MustRegister, which panics with "duplicate metrics collector registration attempted". The trace below shows the same pattern panicking in IncrCounterWithLabels.
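
For illustration, here is a minimal program that hits the gauge path twice with the same labels; the metric name and label values are made up, and it assumes NewPrometheusSink as the sink constructor. The second call should panic just like the log below.

package main

import (
	metrics "github.com/armon/go-metrics"
	"github.com/armon/go-metrics/prometheus"
)

func main() {
	sink, err := prometheus.NewPrometheusSink()
	if err != nil {
		panic(err)
	}

	labels := []metrics.Label{{Name: "name", Value: "value"}}

	// First call: the hash lookup misses, a gauge is registered and stored under key.
	sink.SetGaugeWithLabels([]string{"store", "gauge"}, 1, labels)

	// Second call: the hash lookup misses again because the entry was stored
	// under key, so a second gauge with the same Name and ConstLabels is built
	// and prometheus.MustRegister panics with a duplicate registration error.
	sink.SetGaugeWithLabels([]string{"store", "gauge"}, 2, labels)
}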

storehttpserver_1  | 2018-05-25T12:13:51Z |DEBU| labels = [{name value}] service=store-http-server
storehttpserver_1  | panic: duplicate metrics collector registration attempted
storehttpserver_1  |
storehttpserver_1  | goroutine 13 [running]:
storehttpserver_1  | github.com/prometheus/client_golang/prometheus.(*Registry).MustRegister(0xc42005e6c0, 0xc42029c7a0, 0x1, 0x1)
storehttpserver_1  | 	/Users/dougfort/go/src/github.com/prometheus/client_golang/prometheus/registry.go:362 +0x9e
storehttpserver_1  | github.com/prometheus/client_golang/prometheus.MustRegister(0xc42029c7a0, 0x1, 0x1)
storehttpserver_1  | 	/Users/dougfort/go/src/github.com/prometheus/client_golang/prometheus/registry.go:154 +0x53
storehttpserver_1  | github.com/armon/go-metrics/prometheus.(*PrometheusSink).IncrCounterWithLabels(0xc420184f60, 0xc42029c740, 0x1, 0x1, 0x0, 0xc420277a00, 0x1, 0x1)
storehttpserver_1  | 	/Users/dougfort/go/src/github.com/armon/go-metrics/prometheus/prometheus.go:117 +0x2b6