
feat: instrument cache hit and misses (#208)
Co-authored-by: Michael Cooper <mythmon@gmail.com>
Dexterp37 and mythmon authored Nov 9, 2021
1 parent 5b2d553 commit 3df72ed
Showing 3 changed files with 51 additions and 4 deletions.
18 changes: 18 additions & 0 deletions docs/data.md
@@ -111,6 +111,18 @@ the level and type of the log is listed.
- `cache-status` - If the response was pulled from the cache or regenerated.
`"hit"`, `"miss"`, `"error"`, or `"none"`.

- `cache.memory.hit` - A counter that is incremented every time the in-memory
cache is queried and a cached suggestion is found.

- `cache.memory.miss` - A counter that is incremented every time the in-memory
cache is queried and a cached suggestion is not found.

- `cache.memory.pointers-len` - A gauge representing the number of entries in
the first level of hashing in the in-memory deduped hashmap.

- `cache.memory.storage-len` - A gauge representing the number of entries in the
second level of hashing in the in-memory deduped hashmap.

- `cache.redis.duration-us` - A histogram that records the amount of time, in
microseconds, that the Redis cache took to provide a suggestion. Includes the
time it takes to fall back to the inner provider for cache misses and errors.
@@ -119,3 +131,9 @@ the level and type of the log is listed.

- `cache-status` - If the response was pulled from the cache or regenerated.
`"hit"`, `"miss"`, `"error"`, or `"none"`.

- `cache.redis.hit` - A counter that is incremented every time the Redis cache
  is queried and a cached suggestion is found.

- `cache.redis.miss` - A counter that is incremented every time the Redis cache
  is queried and a cached suggestion is not found.
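
For reference, here is a minimal sketch (not part of this commit) of how metrics like the ones documented above are emitted with the `cadence` crate used by the code below: hits and misses map to counters, the deduped-map sizes to gauges, and the Redis timing to a histogram. The `NopMetricSink`, the `"merino"` prefix, and the numeric values are placeholders so the snippet runs without a statsd daemon.

```rust
use cadence::{CountedExt, Gauged, Histogrammed, NopMetricSink, StatsdClient};

fn main() {
    // NopMetricSink discards everything; a real deployment would use a UDP sink.
    let metrics = StatsdClient::from_sink("merino", NopMetricSink);

    // Counters: bumped once per cache lookup, depending on the outcome.
    metrics.incr("cache.memory.hit").ok();
    metrics.incr("cache.redis.miss").ok();

    // Gauges: current sizes of the two levels of the deduped in-memory map.
    metrics.gauge("cache.memory.pointers-len", 42_u64).ok();
    metrics.gauge("cache.memory.storage-len", 17_u64).ok();

    // Histogram: time spent answering from the Redis cache, in microseconds.
    metrics.histogram("cache.redis.duration-us", 1_250_u64).ok();
}
```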
33 changes: 30 additions & 3 deletions merino-cache/src/memory.rs
@@ -7,7 +7,7 @@

use anyhow::anyhow;
use async_trait::async_trait;
use cadence::{Histogrammed, StatsdClient};
use cadence::{CountedExt, Gauged, Histogrammed, StatsdClient};
use deduped_dashmap::{ControlFlow, DedupedMap};
use lazy_static::lazy_static;
use merino_settings::providers::MemoryCacheConfig;
@@ -108,6 +108,7 @@ impl Suggester {
let items = Arc::new(DedupedMap::new());

{
let metrics = metrics_client.clone();
let task_items = items.clone();
let task_interval = config.cleanup_interval;
tokio::spawn(async move {
@@ -118,7 +119,7 @@ impl Suggester {
timer.tick().await;
loop {
timer.tick().await;
Self::remove_expired_entries(&task_items);
Self::remove_expired_entries(&task_items, &metrics);
}
});
}
@@ -138,6 +139,7 @@ impl Suggester {
#[tracing::instrument(level = "debug", skip(items))]
fn remove_expired_entries<K: Eq + Hash + Debug>(
items: &Arc<DedupedMap<K, Instant, Vec<Suggestion>>>,
metrics_client: &StatsdClient,
) {
let start = Instant::now();
let count_before_storage = items.len_storage();
@@ -174,6 +176,13 @@ impl Suggester {
?removed_storage,
"finished removing expired entries from cache"
);

metrics_client
.gauge("cache.memory.storage-len", items.len_storage() as u64)
.ok();
metrics_client
.gauge("cache.memory.pointers-len", items.len_pointers() as u64)
.ok();
}
}

@@ -207,6 +216,7 @@ impl SuggestionProvider for Suggester {
}
Some((expiration, suggestions)) => {
tracing::debug!("cache hit");
self.metrics_client.incr("cache.memory.hit").ok();
rv = Some(SuggestionResponse {
cache_status: CacheStatus::Hit,
cache_ttl: Some(expiration - now),
@@ -215,6 +225,7 @@ impl SuggestionProvider for Suggester {
}
None => {
tracing::debug!("cache miss");
self.metrics_client.incr("cache.memory.miss").ok();
}
}

@@ -271,6 +282,7 @@ impl SuggestionProvider for Suggester {
#[cfg(test)]
mod tests {
use super::{Suggester, LOCK_TABLE};
use cadence::{SpyMetricSink, StatsdClient};
use deduped_dashmap::DedupedMap;
use fake::{Fake, Faker};
use merino_suggest::Suggestion;
@@ -299,12 +311,27 @@ mod tests {
assert!(cache.contains_key(&"current".to_owned()));
assert!(cache.contains_key(&"expired".to_owned()));

Suggester::remove_expired_entries(&cache);
// Provide an inspectable metrics sink to validate the collected data.
let (rx, sink) = SpyMetricSink::new();
let metrics_client = StatsdClient::from_sink("merino-test", sink);

Suggester::remove_expired_entries(&cache, &metrics_client);

assert_eq!(cache.len_storage(), 1);
assert_eq!(cache.len_pointers(), 1);
assert!(cache.contains_key(&"current".to_owned()));
assert!(!cache.contains_key(&"expired".to_owned()));

// Verify the reported metric.
assert_eq!(rx.len(), 2);
let collected_data: Vec<String> = rx
.iter()
.take(2)
.map(|x| String::from_utf8(x).unwrap())
.collect();
dbg!(&collected_data);
assert!(collected_data.contains(&"merino-test.cache.memory.storage-len:1|g".to_string()));
assert!(collected_data.contains(&"merino-test.cache.memory.pointers-len:1|g".to_string()));
}

#[test]
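
The new unit test above relies on cadence's `SpyMetricSink`, which hands back a channel receiver and captures each emitted metric as the raw statsd line that would go over the wire. A stripped-down sketch of that pattern follows; the prefix and metric names mirror the test, while the standalone `main` wrapper and use of `try_iter` are illustrative assumptions rather than project code.

```rust
use cadence::{CountedExt, Gauged, SpyMetricSink, StatsdClient};

fn main() {
    // The sink returns a receiver; every emitted metric arrives as a byte buffer.
    let (rx, sink) = SpyMetricSink::new();
    let metrics = StatsdClient::from_sink("merino-test", sink);

    metrics.incr("cache.memory.hit").ok();
    metrics.gauge("cache.memory.storage-len", 1_u64).ok();

    // Counters are serialized with a `|c` suffix, gauges with `|g`.
    let lines: Vec<String> = rx
        .try_iter()
        .map(|bytes| String::from_utf8(bytes).unwrap())
        .collect();
    assert!(lines.contains(&"merino-test.cache.memory.hit:1|c".to_string()));
    assert!(lines.contains(&"merino-test.cache.memory.storage-len:1|g".to_string()));
}
```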
4 changes: 3 additions & 1 deletion merino-cache/src/redis/mod.rs
@@ -10,7 +10,7 @@ use std::{
use crate::redis::domain::RedisSuggestions;
use anyhow::Context;
use async_trait::async_trait;
use cadence::{Histogrammed, StatsdClient};
use cadence::{CountedExt, Histogrammed, StatsdClient};
use fix_hidden_lifetime_bug::fix_hidden_lifetime_bug;
use merino_settings::{providers::RedisCacheConfig, Settings};
use merino_suggest::{
@@ -380,6 +380,7 @@ impl SuggestionProvider for Suggester {

let rv = if let CacheCheckResult::Hit(suggestions) = cache_result {
tracing::debug!(%key, "cache hit");
self.metrics_client.incr("cache.redis.hit").ok();
suggestions
} else if rlock.is_locked(&key).await? {
tracing::debug!(%key, "cache updating...");
@@ -396,6 +397,7 @@ impl SuggestionProvider for Suggester {

if let CacheCheckResult::Miss = cache_result {
tracing::debug!(%key, "cache miss");
self.metrics_client.incr("cache.redis.miss").ok();
response.with_cache_status(CacheStatus::Miss)
} else {
debug_assert!(matches!(cache_result, CacheCheckResult::ErrorAsMiss));
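
The Redis provider change boils down to the same counting pattern: branch on the cache outcome and increment the matching counter, with `.ok()` swallowing metric errors so instrumentation can never fail a request. A hedged sketch of that pattern in isolation; `CacheOutcome` is a hypothetical stand-in for the provider's internal `CacheCheckResult`, not the real type.

```rust
use cadence::{CountedExt, NopMetricSink, StatsdClient};

// Hypothetical stand-in for merino's internal cache-check result.
enum CacheOutcome<T> {
    Hit(T),
    Miss,
    ErrorAsMiss,
}

fn record_outcome<T>(metrics: &StatsdClient, outcome: &CacheOutcome<T>) {
    match outcome {
        // `.ok()` drops the MetricResult so a statsd failure never bubbles up.
        CacheOutcome::Hit(_) => {
            metrics.incr("cache.redis.hit").ok();
        }
        CacheOutcome::Miss => {
            metrics.incr("cache.redis.miss").ok();
        }
        // The error-as-miss case is deliberately not counted, mirroring the change above.
        CacheOutcome::ErrorAsMiss => {}
    }
}

fn main() {
    let metrics = StatsdClient::from_sink("merino", NopMetricSink);
    record_outcome(&metrics, &CacheOutcome::<()>::Miss);
}
```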
