phlare
phlare copied to clipboard
Panics from registering metrics when disk is full
I think there is still a problem with registering metrics on the wrapped registry in our ops environment, and I only see it when the disk is full:
2022-10-14 14:34:08.631606 I | http2: panic serving 10.137.90.100:56660: duplicate metrics collector registration attempted
goroutine 438449 [running]:
golang.org/x/net/http2.(*serverConn).runHandler.func1()
/go/pkg/mod/golang.org/x/[email protected]/http2/server.go:2245 +0x145
panic({0x3443a80, 0xc000ba59c0})
/usr/local/go/src/runtime/panic.go:884 +0x212
github.com/opentracing-contrib/go-stdlib/nethttp.MiddlewareFunc.func5.1()
/go/pkg/mod/github.com/opentracing-contrib/[email protected]/nethttp/server.go:150 +0x126
panic({0x3443a80, 0xc000ba59c0})
/usr/local/go/src/runtime/panic.go:884 +0x212
github.com/prometheus/client_golang/prometheus.(*wrappingRegisterer).MustRegister(0xc001037980, {0xc000e6a890?, 0x1, 0x0?})
/go/pkg/mod/github.com/prometheus/[email protected]/prometheus/wrap.go:106 +0x151
github.com/prometheus/client_golang/prometheus/promauto.Factory.NewCounterVec({{0x3fbc290?, 0xc001037980?}}, {{0x0, 0x0}, {0x0, 0x0}, {0x39bc941, 0x25}, {0x39cec75, 0x2a}, ...}, ...)
/go/pkg/mod/github.com/prometheus/[email protected]/prometheus/promauto/auto.go:276 +0x133
github.com/grafana/phlare/pkg/phlaredb.newHeadMetrics({0x3fbc290, 0xc001037980})
/src/phlare/pkg/phlaredb/metrics.go:38 +0xb5
github.com/grafana/phlare/pkg/phlaredb.New({0x3fccd08, 0xc0010379e0}, {{0xc000830420?, 0x9?}, 0x0?, 0x0?})
/src/phlare/pkg/phlaredb/phlaredb.go:101 +0x7a
github.com/grafana/phlare/pkg/ingester.newInstance({0x3fccd08, 0xc000e8f290}, {{0x3953a8c?, 0x0?}, 0x9d29229e000, 0x0}, {0x395b189, 0x9}, {0x3fe5420, 0xc000ba57a0})
/src/phlare/pkg/ingester/instance.go:35 +0x112
github.com/grafana/phlare/pkg/ingester.(*Ingester).GetOrCreateInstance(0xc000e38900, {0x395b189, 0x9})
/src/phlare/pkg/ingester/ingester.go:139 +0x15a
github.com/grafana/phlare/pkg/ingester.(*Ingester).forInstance(0xc000f99798?, {0x3fccd08?, 0xc001037920?}, 0xc000d7d878)
/src/phlare/pkg/ingester/ingester.go:175 +0xc5
github.com/grafana/phlare/pkg/ingester.forInstanceUnary[...]({0x3fccd08?, 0xc001037920?}, 0x40d95f?, 0x12?)
/src/phlare/pkg/ingester/ingester.go:159 +0x65
github.com/grafana/phlare/pkg/ingester.(*Ingester).Push(0x7f3fc6869108?, {0x3fccd08?, 0xc001037920?}, 0xc001037920?)
/src/phlare/pkg/ingester/ingester.go:183 +0x59
github.com/bufbuild/connect-go.NewUnaryHandler[...].func1({0x3fd0420, 0xc000e71d00})
/go/pkg/mod/github.com/bufbuild/[email protected]/handler.go:50 +0x95
github.com/grafana/phlare/pkg/tenant.(*authInterceptor).WrapUnary.func1({0x3fcccd0, 0xc002183e60}, {0x3fd0420, 0xc000e71d00})
/src/phlare/pkg/tenant/interceptor.go:46 +0x122
github.com/bufbuild/connect-go.NewUnaryHandler[...].func2({0x7f3f9ee60360, 0xc000ba5920})
/go/pkg/mod/github.com/bufbuild/[email protected]/handler.go:68 +0x205
github.com/bufbuild/connect-go.(*Handler).ServeHTTP(0xc000b9fe00, {0x3fcbad0, 0xc000e71c40}, 0xc001a4b000)
/go/pkg/mod/github.com/bufbuild/[email protected]/handler.go:213 +0x4d5
github.com/gorilla/mux.(*Router).ServeHTTP(0xc0001815c0, {0x3fcbad0, 0xc000e71c40}, 0xc001a4ad00)
/go/pkg/mod/github.com/gorilla/[email protected]/mux.go:210 +0x1cf
github.com/opentracing-contrib/go-stdlib/nethttp.MiddlewareFunc.func5({0x3fbe9f0?, 0xc00038ed90}, 0xc001a4ac00)
/go/pkg/mod/github.com/opentracing-contrib/[email protected]/nethttp/server.go:154 +0x4ee
net/http.HandlerFunc.ServeHTTP(0xc001c8ff98?, {0x3fbe9f0?, 0xc00038ed90?}, 0x3fce0b8?)
/usr/local/go/src/net/http/server.go:2109 +0x2f
golang.org/x/net/http2.(*serverConn).runHandler(0x3fb8780?, 0x5ceb950?, 0x0?, 0x0?)
/go/pkg/mod/golang.org/x/[email protected]/http2/server.go:2252 +0x83
created by golang.org/x/net/http2.(*serverConn).processHeaders