From 6a40d0f3ceb7e7c418de4b22e3dd94472bf97c70 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miguel=20=C3=81ngel=20Ortu=C3=B1o?=
Date: Mon, 12 Dec 2022 10:35:43 +0100
Subject: [PATCH] removed cortex_ and thanos_ prefixes from memcached metrics names
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Miguel Ángel Ortuño
---
 cache/lru.go                  |  6 +++---
 cache/lru_test.go             | 26 +++++++++++++-------------
 cache/memcached.go            |  4 ++--
 cache/tracing.go              |  2 --
 cacheutil/memcached_client.go | 16 ++++++++--------
 5 files changed, 26 insertions(+), 28 deletions(-)

diff --git a/cache/lru.go b/cache/lru.go
index d4d9c3160..477c41b92 100644
--- a/cache/lru.go
+++ b/cache/lru.go
@@ -47,19 +47,19 @@ func WrapWithLRUCache(c Cache, name string, reg prometheus.Registerer, lruSize i
 		defaultTTL: defaultTTL,
 
 		requests: promauto.With(reg).NewCounter(prometheus.CounterOpts{
-			Name:        "cortex_cache_memory_requests_total",
+			Name:        "cache_memory_requests_total",
 			Help:        "Total number of requests to the in-memory cache.",
 			ConstLabels: map[string]string{"name": name},
 		}),
 		hits: promauto.With(reg).NewCounter(prometheus.CounterOpts{
-			Name:        "cortex_cache_memory_hits_total",
+			Name:        "cache_memory_hits_total",
 			Help:        "Total number of requests to the in-memory cache that were a hit.",
 			ConstLabels: map[string]string{"name": name},
 		}),
 	}
 
 	cache.items = promauto.With(reg).NewGaugeFunc(prometheus.GaugeOpts{
-		Name:        "cortex_cache_memory_items_count",
+		Name:        "cache_memory_items_count",
 		Help:        "Total number of items currently in the in-memory cache.",
 		ConstLabels: map[string]string{"name": name},
 	}, func() float64 {
diff --git a/cache/lru_test.go b/cache/lru_test.go
index bace75a82..880ccfda6 100644
--- a/cache/lru_test.go
+++ b/cache/lru_test.go
@@ -46,15 +46,15 @@ func TestLRUCache_StoreFetch(t *testing.T) {
 	require.True(t, time.Until(item.(*Item).ExpiresAt) > 1*time.Hour)
 
 	require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
-		# HELP cortex_cache_memory_items_count Total number of items currently in the in-memory cache.
-		# TYPE cortex_cache_memory_items_count gauge
-		cortex_cache_memory_items_count{name="test"} 3
-		# HELP cortex_cache_memory_hits_total Total number of requests to the in-memory cache that were a hit.
-		# TYPE cortex_cache_memory_hits_total counter
-		cortex_cache_memory_hits_total{name="test"} 2
-		# HELP cortex_cache_memory_requests_total Total number of requests to the in-memory cache.
-		# TYPE cortex_cache_memory_requests_total counter
-		cortex_cache_memory_requests_total{name="test"} 4
+		# HELP cache_memory_items_count Total number of items currently in the in-memory cache.
+		# TYPE cache_memory_items_count gauge
+		cache_memory_items_count{name="test"} 3
+		# HELP cache_memory_hits_total Total number of requests to the in-memory cache that were a hit.
+		# TYPE cache_memory_hits_total counter
+		cache_memory_hits_total{name="test"} 2
+		# HELP cache_memory_requests_total Total number of requests to the in-memory cache.
+		# TYPE cache_memory_requests_total counter
+		cache_memory_requests_total{name="test"} 4
 	`)))
 }
 
@@ -72,8 +72,8 @@ func TestLRUCache_Evictions(t *testing.T) {
 	}, time.Minute)
 
 	require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
-		# HELP cortex_cache_memory_items_count Total number of items currently in the in-memory cache.
-		# TYPE cortex_cache_memory_items_count gauge
-		cortex_cache_memory_items_count{name="test"} 2
-	`), "cortex_cache_memory_items_count"))
+		# HELP cache_memory_items_count Total number of items currently in the in-memory cache.
+		# TYPE cache_memory_items_count gauge
+		cache_memory_items_count{name="test"} 2
+	`), "cache_memory_items_count"))
 }
diff --git a/cache/memcached.go b/cache/memcached.go
index 512ddfa03..c64699ba6 100644
--- a/cache/memcached.go
+++ b/cache/memcached.go
@@ -32,13 +32,13 @@ func NewMemcachedCache(name string, logger log.Logger, memcached cacheutil.Remot
 	}
 
 	c.requests = promauto.With(reg).NewCounter(prometheus.CounterOpts{
-		Name:        "thanos_cache_memcached_requests_total",
+		Name:        "cache_memcached_requests_total",
 		Help:        "Total number of items requests to memcached.",
 		ConstLabels: prometheus.Labels{"name": name},
 	})
 
 	c.hits = promauto.With(reg).NewCounter(prometheus.CounterOpts{
-		Name:        "thanos_cache_memcached_hits_total",
+		Name:        "cache_memcached_hits_total",
 		Help:        "Total number of items requests to the cache that were a hit.",
 		ConstLabels: prometheus.Labels{"name": name},
 	})
diff --git a/cache/tracing.go b/cache/tracing.go
index 8eb0a9286..fe93e706b 100644
--- a/cache/tracing.go
+++ b/cache/tracing.go
@@ -11,8 +11,6 @@ import (
 )
 
 // SpanlessTracingCache wraps a Cache and logs Fetch operation in the parent spans.
-// This is different than Thanos' TracingCache because this logs in the parent span
-// without creating a new span.
 type SpanlessTracingCache struct {
 	c        Cache
 	resolver spanlogger.TenantResolver
diff --git a/cacheutil/memcached_client.go b/cacheutil/memcached_client.go
index cbf2ec779..257884de8 100644
--- a/cacheutil/memcached_client.go
+++ b/cacheutil/memcached_client.go
@@ -259,7 +259,7 @@ func newMemcachedClient(
 ) (*memcachedClient, error) {
 	addressProvider := dns.NewProvider(
 		logger,
-		prometheus.WrapRegistererWithPrefix("thanos_memcached_", reg),
+		prometheus.WrapRegistererWithPrefix("memcached_", reg),
 		dns.MiekgdnsResolverType,
 	)
 
@@ -272,13 +272,13 @@ func newMemcachedClient(
 		asyncQueue: make(chan func(), config.MaxAsyncBufferSize),
 		stop:       make(chan struct{}, 1),
 		getMultiGate: gate.New(
-			prometheus.WrapRegistererWithPrefix("thanos_memcached_getmulti_", reg),
+			prometheus.WrapRegistererWithPrefix("memcached_getmulti_", reg),
 			config.MaxGetMultiConcurrency,
 		),
 	}
 
 	c.clientInfo = promauto.With(reg).NewGaugeFunc(prometheus.GaugeOpts{
-		Name: "thanos_memcached_client_info",
+		Name: "memcached_client_info",
 		Help: "A metric with a constant '1' value labeled by configuration options from which memcached client was configured.",
 		ConstLabels: prometheus.Labels{
 			"timeout": config.Timeout.String(),
@@ -295,7 +295,7 @@ func newMemcachedClient(
 	)
 
 	c.operations = promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
-		Name: "thanos_memcached_operations_total",
+		Name: "memcached_operations_total",
 		Help: "Total number of operations against memcached.",
 	}, []string{"operation"})
 	c.operations.WithLabelValues(opGetMulti)
@@ -303,7 +303,7 @@ func newMemcachedClient(
 	c.operations.WithLabelValues(opDelete)
 
 	c.failures = promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
-		Name: "thanos_memcached_operation_failures_total",
+		Name: "memcached_operation_failures_total",
 		Help: "Total number of operations against memcached that failed.",
 	}, []string{"operation", "reason"})
 	for _, op := range []string{opGetMulti, opSet, opDelete} {
@@ -315,7 +315,7 @@ func newMemcachedClient(
 	}
 
 	c.skipped = promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
-		Name: "thanos_memcached_operation_skipped_total",
+		Name: "memcached_operation_skipped_total",
 		Help: "Total number of operations against memcached that have been skipped.",
 	}, []string{"operation", "reason"})
 	c.skipped.WithLabelValues(opGetMulti, reasonMaxItemSize)
@@ -323,7 +323,7 @@ func newMemcachedClient(
 	c.skipped.WithLabelValues(opSet, reasonAsyncBufferFull)
 
 	c.duration = promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
-		Name:    "thanos_memcached_operation_duration_seconds",
+		Name:    "memcached_operation_duration_seconds",
 		Help:    "Duration of operations against memcached.",
 		Buckets: []float64{0.001, 0.005, 0.01, 0.025, 0.05, 0.1, 0.2, 0.5, 1, 3, 6, 10},
 	}, []string{"operation"})
@@ -332,7 +332,7 @@ func newMemcachedClient(
 	c.duration.WithLabelValues(opGetMulti)
 	c.duration.WithLabelValues(opSet)
 	c.duration.WithLabelValues(opDelete)
 
 	c.dataSize = promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
-		Name: "thanos_memcached_operation_data_size_bytes",
+		Name: "memcached_operation_data_size_bytes",
 		Help: "Tracks the size of the data stored in and fetched from memcached.",
 		Buckets: []float64{
 			32, 256, 512, 1024, 32 * 1024, 256 * 1024, 512 * 1024, 1024 * 1024, 32 * 1024 * 1024, 256 * 1024 * 1024, 512 * 1024 * 1024,