fix(insights): Invalidate persons cache when insight is refreshed (#14953)

* fix(insights): Invalidate persons cache when insight is refreshed

* Revert "fix(insights): Invalidate persons cache when insight is refreshed"

This reverts commit 178c42b.

* Backend only approach

* Update query snapshots

* Update query snapshots

* Fix tests

* Fix tests

* Update query snapshots

* Update query snapshots

---------

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
timgl and github-actions[bot] authored Apr 6, 2023
1 parent 2701208 commit a180691
Showing 8 changed files with 117 additions and 60 deletions.
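In outline, the backend-only approach has two halves: each insight calculation generates one short random cache_invalidation_key and appends it to every persons URL it returns, and the caching decorator on the persons endpoints folds that key into the cache key it hashes. Refreshing the insight mints a new key, so the persons URLs change and stale cached person lists are skipped. A minimal sketch of the URL-building half, with make_short_id as a hypothetical stand-in for posthog.utils.generate_short_id (the decorator half is sketched after the posthog/decorators.py diff below):

import secrets
import urllib.parse


def make_short_id() -> str:
    # Stand-in for posthog.utils.generate_short_id: a short random token per insight run.
    return secrets.token_urlsafe(6)


def build_persons_urls(team_id: int, filter_params: dict, date_froms: list) -> list:
    # One key per calculation, shared by every persons URL in the insight's response.
    cache_invalidation_key = make_short_id()
    urls = []
    for date_from in date_froms:
        qs = urllib.parse.urlencode({**filter_params, "date_from": date_from})
        urls.append(
            f"api/projects/{team_id}/persons/trends/?{qs}&cache_invalidation_key={cache_invalidation_key}"
        )
    return urls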
62 changes: 31 additions & 31 deletions posthog/api/test/__snapshots__/test_query.ambr

Large diffs are not rendered by default.

45 changes: 45 additions & 0 deletions posthog/api/test/test_person.py
@@ -646,6 +646,51 @@ def test_rate_limits_for_persons_are_independent(self, rate_limit_enabled_mock,
},
)

@freeze_time("2021-08-25T22:09:14.252Z")
def test_person_cache_invalidation(self):
_create_person(
team=self.team, distinct_ids=["person_1", "anonymous_id"], properties={"$os": "Chrome"}, immediate=True
)
_create_event(event="test", team=self.team, distinct_id="person_1")
_create_event(event="test", team=self.team, distinct_id="anonymous_id")
_create_event(event="test", team=self.team, distinct_id="someone_else")
data = {"events": json.dumps([{"id": "test", "type": "events"}]), "entity_type": "events", "entity_id": "test"}

trend_response = self.client.get(
f"/api/projects/{self.team.id}/insights/trend/",
data=data,
content_type="application/json",
).json()
response = self.client.get("/" + trend_response["result"][0]["persons_urls"][-1]["url"]).json()
self.assertEqual(response["results"][0]["count"], 1)
self.assertEqual(response["is_cached"], False)

# Create another person
_create_person(team=self.team, distinct_ids=["person_2"], properties={"$os": "Chrome"}, immediate=True)
_create_event(event="test", team=self.team, distinct_id="person_2")

# Check cached response hasn't changed
response = self.client.get("/" + trend_response["result"][0]["persons_urls"][-1]["url"]).json()
self.assertEqual(response["results"][0]["count"], 1)
self.assertEqual(response["is_cached"], True)

new_trend_response = self.client.get(
f"/api/projects/{self.team.id}/insights/trend/",
data={**data, "refresh": True},
content_type="application/json",
).json()

self.assertEqual(new_trend_response["is_cached"], False)
self.assertNotEqual(
new_trend_response["result"][0]["persons_urls"][-1]["url"],
trend_response["result"][0]["persons_urls"][-1]["url"],
)

# Cached response should have been updated
response = self.client.get("/" + new_trend_response["result"][0]["persons_urls"][-1]["url"]).json()
self.assertEqual(response["results"][0]["count"], 2)
self.assertEqual(response["is_cached"], False)

def _get_person_activity(self, person_id: Optional[str] = None, *, expected_status: int = status.HTTP_200_OK):
if person_id:
url = f"/api/person/{person_id}/activity"
9 changes: 8 additions & 1 deletion posthog/decorators.py
@@ -41,7 +41,14 @@ def wrapper(self, request) -> T:
return f(self, request)

filter = get_filter(request=request, team=team)
cache_key = generate_cache_key(f"{filter.toJSON()}_{team.pk}")
cache_key = f"{filter.toJSON()}_{team.pk}"
if request.data.get("cache_invalidation_key"):
cache_key += f"_{request.data['cache_invalidation_key']}"

if request.GET.get("cache_invalidation_key"):
cache_key += f"_{request.GET['cache_invalidation_key']}"

cache_key = generate_cache_key(cache_key)

tag_queries(cache_key=cache_key)

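The decorator now folds an optional cache_invalidation_key, taken from either the request body or the query string, into the string it hashes, so the same filter and team can map to different cache entries across insight refreshes. A rough illustration of the effect, using md5 purely as a stand-in for whatever generate_cache_key does internally:

import hashlib


def demo_cache_key(filter_json: str, team_pk: int, cache_invalidation_key: str = "") -> str:
    # Illustrative only; the real generate_cache_key may hash differently.
    key = f"{filter_json}_{team_pk}"
    if cache_invalidation_key:
        key += f"_{cache_invalidation_key}"
    return hashlib.md5(key.encode("utf-8")).hexdigest()


same_filter = '{"insight": "TRENDS", "events": [{"id": "test"}]}'
# Same filter and team with no key: the cached result is reused.
assert demo_cache_key(same_filter, 1) == demo_cache_key(same_filter, 1)
# A refreshed insight hands out a new key, so the persons endpoint misses the old entry.
assert demo_cache_key(same_filter, 1, "abc123") != demo_cache_key(same_filter, 1, "def456")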
8 changes: 6 additions & 2 deletions posthog/queries/stickiness/stickiness.py
@@ -12,7 +12,7 @@
from posthog.queries.stickiness.stickiness_actors import StickinessActors
from posthog.queries.stickiness.stickiness_event_query import StickinessEventsQuery
from posthog.queries.util import correct_result_for_sampling
from posthog.utils import encode_get_request_params
from posthog.utils import encode_get_request_params, generate_short_id


class Stickiness:
@@ -97,6 +97,7 @@ def _serialize_entity(self, entity: Entity, filter: StickinessFilter, team: Team

def _get_persons_url(self, filter: StickinessFilter, entity: Entity) -> List[Dict[str, Any]]:
persons_url = []
cache_invalidation_key = generate_short_id()
for interval_idx in range(1, filter.total_intervals):
filter_params = filter.to_params()
extra_params = {
@@ -108,6 +109,9 @@ def _get_persons_url(self, filter: StickinessFilter, entity: Entity) -> List[Dic
}
parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
persons_url.append(
{"filter": extra_params, "url": f"api/person/stickiness/?{urllib.parse.urlencode(parsed_params)}"}
{
"filter": extra_params,
"url": f"api/person/stickiness/?{urllib.parse.urlencode(parsed_params)}&cache_invalidation_key={cache_invalidation_key}",
}
)
return persons_url
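Note that generate_short_id() is called once, outside the interval loop, so every persons URL from a single stickiness run shares the same key, and the key is appended after urlencode rather than through it. Assuming the generated ids are plain URL-safe strings, they still round-trip as ordinary query parameters, which is what the updated trends test below checks with parse_qsl (the example URL and id here are made up):

from urllib.parse import parse_qsl, urlparse

url = "api/person/stickiness/?insight=STICKINESS&stickiness_days=1&cache_invalidation_key=kx7Qw2"
params = dict(parse_qsl(urlparse(url).query))
assert params["stickiness_days"] == "1"
assert params["cache_invalidation_key"] == "kx7Qw2"  # parsed like any other parameter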
38 changes: 18 additions & 20 deletions posthog/queries/test/test_trends.py
@@ -1,7 +1,7 @@
import json
from datetime import datetime
from typing import Dict, List, Optional, Tuple, Union
from unittest.mock import patch
from unittest.mock import patch, ANY
from urllib.parse import parse_qsl, urlparse

import pytz
@@ -5115,25 +5115,23 @@ def test_timezones_hourly(self):
)
self.assertEqual(response[0]["data"], [0.0, 0.0, 0.0, 0.0, 0, 0, 0, 1, 1, 0, 0])

self.assertEqual(
dict(parse_qsl(urlparse(response[0]["persons_urls"][7]["url"]).query)),
{
"breakdown_attribution_type": "first_touch",
"breakdown_normalize_url": "False",
"date_from": f"2020-01-05T07:00:00{utc_offset_sign}{abs(utc_offset_hours):02.0f}:00",
"date_to": f"2020-01-05T08:00:00{utc_offset_sign}{abs(utc_offset_hours):02.0f}:00",
"display": "ActionsLineGraph",
"entity_id": "sign up",
"entity_math": "dau",
"entity_type": "events",
"events": '[{"id": "sign up", "type": "events", "order": null, "name": "sign '
'up", "custom_name": null, "math": "dau", "math_property": null, '
'"math_group_type_index": null, "properties": {}}]',
"insight": "TRENDS",
"interval": "hour",
"smoothing_intervals": "1",
},
)
assert dict(parse_qsl(urlparse(response[0]["persons_urls"][7]["url"]).query)) == {
"breakdown_attribution_type": "first_touch",
"breakdown_normalize_url": "False",
"date_from": f"2020-01-05T07:00:00{utc_offset_sign}{abs(utc_offset_hours):02.0f}:00",
"date_to": f"2020-01-05T08:00:00{utc_offset_sign}{abs(utc_offset_hours):02.0f}:00",
"display": "ActionsLineGraph",
"entity_id": "sign up",
"entity_math": "dau",
"entity_type": "events",
"events": '[{"id": "sign up", "type": "events", "order": null, "name": "sign '
'up", "custom_name": null, "math": "dau", "math_property": null, '
'"math_group_type_index": null, "properties": {}}]',
"insight": "TRENDS",
"interval": "hour",
"smoothing_intervals": "1",
"cache_invalidation_key": ANY,
}
persons = self.client.get("/" + response[0]["persons_urls"][7]["url"]).json()
self.assertEqual(persons["results"][0]["count"], 1)

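Because the appended key is random on every run, the expected query-string dict in the test uses unittest.mock.ANY for it. ANY compares equal to anything, which lets the rest of the dict still be asserted exactly; a small illustration with a made-up key value:

from unittest.mock import ANY

parsed = {"insight": "TRENDS", "interval": "hour", "cache_invalidation_key": "kx7Qw2"}
assert parsed == {"insight": "TRENDS", "interval": "hour", "cache_invalidation_key": ANY}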
5 changes: 3 additions & 2 deletions posthog/queries/trends/breakdown.py
@@ -63,7 +63,7 @@
process_math,
)
from posthog.queries.util import PersonPropertiesMode
from posthog.utils import PersonOnEventsMode, encode_get_request_params
from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id


class TrendsBreakdown:
@@ -569,6 +569,7 @@ def _get_persons_url(
self, filter: Filter, entity: Entity, team_id: int, dates: List[datetime], breakdown_value: Union[str, int]
) -> List[Dict[str, Any]]:
persons_url = []
cache_invalidation_key = generate_short_id()
for date in dates:
date_in_utc = datetime(
date.year,
@@ -593,7 +594,7 @@ persons_url.append(
persons_url.append(
{
"filter": extra_params,
"url": f"api/projects/{team_id}/persons/trends/?{urllib.parse.urlencode(parsed_params)}",
"url": f"api/projects/{team_id}/persons/trends/?{urllib.parse.urlencode(parsed_params)}&cache_invalidation_key={cache_invalidation_key}",
}
)
return persons_url
5 changes: 3 additions & 2 deletions posthog/queries/trends/lifecycle.py
@@ -17,7 +17,7 @@
from posthog.queries.trends.sql import LIFECYCLE_EVENTS_QUERY, LIFECYCLE_PEOPLE_SQL, LIFECYCLE_SQL
from posthog.queries.trends.util import parse_response
from posthog.queries.util import get_person_properties_mode
from posthog.utils import PersonOnEventsMode, encode_get_request_params
from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id

# Lifecycle takes an event/action, time range, interval and for every period, splits the users who did the action into 4:
#
@@ -85,6 +85,7 @@ def get_people(self, filter: Filter, team: Team, target_date: datetime, lifecycl

def _get_persons_urls(self, filter: Filter, entity: Entity, times: List[str], status) -> List[Dict[str, Any]]:
persons_url = []
cache_invalidation_key = generate_short_id()
for target_date in times:
filter_params = filter.to_params()
extra_params = {
@@ -100,7 +101,7 @@ def _get_persons_urls(self, filter: Filter, entity: Entity, times: List[str], st
persons_url.append(
{
"filter": extra_params,
"url": f"api/person/lifecycle/?{urllib.parse.urlencode(parsed_params)}",
"url": f"api/person/lifecycle/?{urllib.parse.urlencode(parsed_params)}&cache_invalidation_key={cache_invalidation_key}",
}
)
return persons_url
5 changes: 3 additions & 2 deletions posthog/queries/trends/total_volume.py
@@ -39,7 +39,7 @@
process_math,
)
from posthog.queries.util import TIME_IN_SECONDS, get_interval_func_ch, get_trunc_func_ch
from posthog.utils import PersonOnEventsMode, encode_get_request_params
from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id


class TrendsTotalVolume:
@@ -234,6 +234,7 @@ def _get_persons_url(
self, filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime]
) -> List[Dict[str, Any]]:
persons_url = []
cache_invalidation_key = generate_short_id()
for point_datetime in point_datetimes:
filter_params = filter.to_params()
extra_params = {
@@ -249,7 +250,7 @@ persons_url.append(
persons_url.append(
{
"filter": extra_params,
"url": f"api/projects/{team.pk}/persons/trends/?{urllib.parse.urlencode(parsed_params)}",
"url": f"api/projects/{team.pk}/persons/trends/?{urllib.parse.urlencode(parsed_params)}&cache_invalidation_key={cache_invalidation_key}",
}
)
return persons_url
