diff --git a/CHANGELOG.md b/CHANGELOG.md index 8bdb024c..e740846b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ - Breaking: Go version 1.17 is now the minimum required version to build this. (#292) - Breaking: Thumbnail generation now requires libvips. See [docs/build.md](./docs/build.md) for prerequisite instructions. (#366, #369) - Breaking: Resolver caches are now stored in PostgreSQL. See [docs/build.md](./docs/build.md) for prerequisite instructions. (#271) +- Twitter: Generate thumbnails with all images of a tweet. (#373) - YouTube: Added support for 'YouTube shorts' URLs. (#299) - Fix: SevenTV emotes now resolve correctly. (#281, #288, #307) - Fix: YouTube videos are no longer resolved as channels. (#284) diff --git a/internal/caches/twitchusernamecache/cache.go b/internal/caches/twitchusernamecache/cache.go index 4f168534..0e49ea2c 100644 --- a/internal/caches/twitchusernamecache/cache.go +++ b/internal/caches/twitchusernamecache/cache.go @@ -19,5 +19,7 @@ func New(ctx context.Context, cfg config.APIConfig, pool db.Pool, helixClient *h helixClient: helixClient, } - return cache.NewPostgreSQLCache(ctx, cfg, pool, "twitch:username", usernameLoader, 1*time.Hour) + return cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("twitch:username"), usernameLoader, 1*time.Hour, + ) } diff --git a/internal/db/pool.go b/internal/db/pool.go index 9b10fbe1..83b13b96 100644 --- a/internal/db/pool.go +++ b/internal/db/pool.go @@ -12,6 +12,7 @@ import ( ) type Pool interface { + Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error) QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error) Ping(ctx context.Context) error @@ -21,7 +22,6 @@ type Pool interface { func NewPool(ctx context.Context, dsn string) (Pool, error) { pool, err := pgxpool.Connect(ctx, dsn) - if err != nil { return nil, fmt.Errorf("error connecting to pool: %w", err) } diff --git a/internal/migration/3_dependent_cache.go b/internal/migration/3_dependent_cache.go new file mode 100644 index 00000000..a8438f19 --- /dev/null +++ b/internal/migration/3_dependent_cache.go @@ -0,0 +1,45 @@ +//go:build !test || migrationtest + +package migration + +import ( + "context" + + "github.com/jackc/pgx/v4" +) + +func init() { + // The version of this migration + const migrationVersion = 3 + + Register( + migrationVersion, + func(ctx context.Context, tx pgx.Tx) error { + // The Up action of this migration + _, err := tx.Exec(ctx, ` +CREATE TABLE dependent_values ( + key TEXT UNIQUE NOT NULL, + parent_key TEXT NOT NULL, + value bytea NOT NULL, + http_content_type TEXT NOT NULL, + committed BOOLEAN NOT NULL DEFAULT FALSE, + expiration_timestamp TIMESTAMP NOT NULL +); + +CREATE INDEX idx_dependent_values_key ON dependent_values(key); +CREATE INDEX idx_dependent_values_parent_entry_key ON dependent_values(parent_key); +CREATE INDEX idx_dependent_values_committed ON dependent_values(committed); + `) + + return err + }, + func(ctx context.Context, tx pgx.Tx) error { + // The Down action of this migration + _, err := tx.Exec(ctx, ` +DROP TABLE dependent_values; + `) + + return err + }, + ) +} diff --git a/internal/resolvers/betterttv/emote_resolver.go b/internal/resolvers/betterttv/emote_resolver.go index ae7e312e..552d2f53 100644 --- a/internal/resolvers/betterttv/emote_resolver.go +++ b/internal/resolvers/betterttv/emote_resolver.go @@ -49,7 +49,9 @@ func NewEmoteResolver(ctx context.Context, 
cfg config.APIConfig, pool db.Pool, e emoteLoader := NewEmoteLoader(emoteAPIURL) r := &EmoteResolver{ - emoteCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "betterttv:emote", resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), + emoteCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("betterttv:emote"), + resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), } return r diff --git a/internal/resolvers/default/initialize.go b/internal/resolvers/default/initialize.go index e5243784..5511dd1f 100644 --- a/internal/resolvers/default/initialize.go +++ b/internal/resolvers/default/initialize.go @@ -23,16 +23,16 @@ const ( URL: {{.URL}}` ) -var ( - defaultTooltip = template.Must(template.New("default_tooltip").Parse(defaultTooltipString)) -) +var defaultTooltip = template.Must(template.New("default_tooltip").Parse(defaultTooltipString)) func Initialize(ctx context.Context, cfg config.APIConfig, pool db.Pool, router *chi.Mux, helixClient *helix.Client) { defaultLinkResolver := New(ctx, cfg, pool, helixClient) cached := stampede.Handler(512, 10*time.Second) imageCached := stampede.Handler(256, 2*time.Second) + generatedValuesCached := stampede.Handler(256, 2*time.Second) router.With(cached).Get("/link_resolver/{url}", defaultLinkResolver.HandleRequest) router.With(imageCached).Get("/thumbnail/{url}", defaultLinkResolver.HandleThumbnailRequest) + router.With(generatedValuesCached).Get("/generated/{url}", defaultLinkResolver.HandleGeneratedValueRequest) } diff --git a/internal/resolvers/default/link_resolver.go b/internal/resolvers/default/link_resolver.go index 7272402a..7d63d820 100644 --- a/internal/resolvers/default/link_resolver.go +++ b/internal/resolvers/default/link_resolver.go @@ -33,6 +33,7 @@ type LinkResolver struct { linkCache cache.Cache thumbnailCache cache.Cache + generatedCache cache.DependentCache } func (r *LinkResolver) HandleRequest(w http.ResponseWriter, req *http.Request) { @@ -148,7 +149,6 @@ func (r *LinkResolver) HandleThumbnailRequest(w http.ResponseWriter, req *http.R } response, err := r.thumbnailCache.Get(ctx, url, req) - if err != nil { log.Errorw("Error in thumbnail request", "url", url, @@ -167,7 +167,50 @@ func (r *LinkResolver) HandleThumbnailRequest(w http.ResponseWriter, req *http.R } } +func (r *LinkResolver) HandleGeneratedValueRequest(w http.ResponseWriter, req *http.Request) { + ctx := req.Context() + log := logger.FromContext(ctx) + + url, err := utils.UnescapeURLArgument(req, "url") + if err != nil { + _, err = resolver.WriteInvalidURL(w) + if err != nil { + log.Errorw("Error writing response", + "error", err, + ) + } + return + } + + payload, contentType, err := r.generatedCache.Get(ctx, url) + if err != nil { + log.Errorw("Error in request for generated value", + "url", url, + "error", err, + ) + return + } + + if payload == nil { + log.Warnw("Requested generated value does not exist", + "url", url, + ) + return + } + + w.Header().Add("Content-Type", contentType) + w.WriteHeader(http.StatusOK) + _, err = w.Write(payload) + if err != nil { + log.Errorw("Error writing response", + "error", err, + ) + } +} + func New(ctx context.Context, cfg config.APIConfig, pool db.Pool, helixClient *helix.Client) *LinkResolver { + generatedCache := cache.NewPostgreSQLDependentCache(ctx, cfg, pool, cache.NewPrefixKeyProvider("default:dependent")) + customResolvers := []resolver.Resolver{} // Register Link Resolvers from internal/resolvers/ @@ -179,7 +222,7 @@ func New(ctx context.Context, cfg config.APIConfig, pool db.Pool, helixClient 
*h oembed.Initialize(ctx, cfg, pool, &customResolvers) supinic.Initialize(ctx, cfg, pool, &customResolvers) twitch.Initialize(ctx, cfg, pool, helixClient, &customResolvers) - twitter.Initialize(ctx, cfg, pool, &customResolvers) + twitter.Initialize(ctx, cfg, pool, &customResolvers, generatedCache) wikipedia.Initialize(ctx, cfg, pool, &customResolvers) youtube.Initialize(ctx, cfg, pool, &customResolvers) seventv.Initialize(ctx, cfg, pool, &customResolvers) @@ -195,11 +238,20 @@ func New(ctx context.Context, cfg config.APIConfig, pool db.Pool, helixClient *h enableLilliput: cfg.EnableLilliput, } + thumbnailCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("default:thumbnail"), thumbnailLoader, + 10*time.Minute, + ) + linkCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("default:link"), linkLoader, 10*time.Minute, + ) + r := &LinkResolver{ customResolvers: customResolvers, - linkCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "default:link", linkLoader, 10*time.Minute), - thumbnailCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "default:thumbnail", thumbnailLoader, 10*time.Minute), + linkCache: linkCache, + thumbnailCache: thumbnailCache, + generatedCache: generatedCache, } return r diff --git a/internal/resolvers/discord/invite_resolver.go b/internal/resolvers/discord/invite_resolver.go index e23f96d8..7dfa7527 100644 --- a/internal/resolvers/discord/invite_resolver.go +++ b/internal/resolvers/discord/invite_resolver.go @@ -45,7 +45,9 @@ func NewInviteResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool) // We cache invites longer on purpose as the API is pretty strict with its rate limiting, and the information changes very seldomly anyway // TODO: Log 429 errors from the loader r := &InviteResolver{ - inviteCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "discord:invite", resolver.NewResponseMarshaller(inviteLoader), 6*time.Hour), + inviteCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("discord:invite"), + resolver.NewResponseMarshaller(inviteLoader), 6*time.Hour), } return r diff --git a/internal/resolvers/frankerfacez/emote_resolver.go b/internal/resolvers/frankerfacez/emote_resolver.go index 9b6ec3e3..357a4a18 100644 --- a/internal/resolvers/frankerfacez/emote_resolver.go +++ b/internal/resolvers/frankerfacez/emote_resolver.go @@ -47,7 +47,9 @@ func NewEmoteResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, e emoteLoader := NewEmoteLoader(emoteAPIURL) r := &EmoteResolver{ - emoteCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "frankerfacez:emote", resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), + emoteCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("frankerfacez:emote"), + resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), } return r diff --git a/internal/resolvers/imgur/resolver.go b/internal/resolvers/imgur/resolver.go index 7b3e49a0..82662d1f 100644 --- a/internal/resolvers/imgur/resolver.go +++ b/internal/resolvers/imgur/resolver.go @@ -45,7 +45,9 @@ func NewResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, imgurC } r := &Resolver{ - imgurCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "imgur", resolver.NewResponseMarshaller(loader), 1*time.Hour), + imgurCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("imgur"), + resolver.NewResponseMarshaller(loader), 1*time.Hour), } return r diff --git a/internal/resolvers/livestreamfails/clip_resolver.go 
b/internal/resolvers/livestreamfails/clip_resolver.go index 1fed3959..a7329ab0 100644 --- a/internal/resolvers/livestreamfails/clip_resolver.go +++ b/internal/resolvers/livestreamfails/clip_resolver.go @@ -51,7 +51,9 @@ func NewClipResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, ap } r := &ClipResolver{ - clipCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "livestreamfails:clip", resolver.NewResponseMarshaller(clipLoader), 1*time.Hour), + clipCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("livestreamfails:clip"), + resolver.NewResponseMarshaller(clipLoader), 1*time.Hour), } return r diff --git a/internal/resolvers/oembed/resolver.go b/internal/resolvers/oembed/resolver.go index 36022106..2156a63f 100644 --- a/internal/resolvers/oembed/resolver.go +++ b/internal/resolvers/oembed/resolver.go @@ -55,8 +55,11 @@ func NewResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, data [ } r := &Resolver{ - oEmbedCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "oembed", resolver.NewResponseMarshaller(loader), 1*time.Hour), - oEmbed: oEmbed, + oEmbedCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("oembed"), + resolver.NewResponseMarshaller(loader), 1*time.Hour, + ), + oEmbed: oEmbed, } return r, nil diff --git a/internal/resolvers/seventv/emote_resolver.go b/internal/resolvers/seventv/emote_resolver.go index fcfe3840..1c32996a 100644 --- a/internal/resolvers/seventv/emote_resolver.go +++ b/internal/resolvers/seventv/emote_resolver.go @@ -43,7 +43,9 @@ func NewEmoteResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, a emoteLoader := NewEmoteLoader(cfg, apiURL) r := &EmoteResolver{ - emoteCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "seventv:emote", resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), + emoteCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("seventv:emote"), + resolver.NewResponseMarshaller(emoteLoader), 1*time.Hour), } return r diff --git a/internal/resolvers/supinic/track_resolver.go b/internal/resolvers/supinic/track_resolver.go index f3ac9fe0..6abe7247 100644 --- a/internal/resolvers/supinic/track_resolver.go +++ b/internal/resolvers/supinic/track_resolver.go @@ -48,7 +48,9 @@ func NewTrackResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool) * trackLoader := &TrackLoader{} r := &TrackResolver{ - trackCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "supinic:track", resolver.NewResponseMarshaller(trackLoader), 1*time.Hour), + trackCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("supinic:track"), + resolver.NewResponseMarshaller(trackLoader), 1*time.Hour), } return r diff --git a/internal/resolvers/twitch/clip_resolver.go b/internal/resolvers/twitch/clip_resolver.go index fae7fcf6..20460879 100644 --- a/internal/resolvers/twitch/clip_resolver.go +++ b/internal/resolvers/twitch/clip_resolver.go @@ -13,9 +13,7 @@ import ( "github.com/Chatterino/api/pkg/resolver" ) -var ( - clipSlugRegex = regexp.MustCompile(`^\/(\w{2,25}\/clip\/)?(clip\/)?([a-zA-Z0-9]+(?:-[-\w]{16})?)$`) -) +var clipSlugRegex = regexp.MustCompile(`^\/(\w{2,25}\/clip\/)?(clip\/)?([a-zA-Z0-9]+(?:-[-\w]{16})?)$`) type ClipResolver struct { clipCache cache.Cache @@ -76,7 +74,10 @@ func NewClipResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, he } r := &ClipResolver{ - clipCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "twitch:clip", resolver.NewResponseMarshaller(clipLoader), 1*time.Hour), + clipCache: cache.NewPostgreSQLCache( 
+ ctx, cfg, pool, cache.NewPrefixKeyProvider("twitch:clip"), + resolver.NewResponseMarshaller(clipLoader), 1*time.Hour, + ), } return r diff --git a/internal/resolvers/twitter/api.go b/internal/resolvers/twitter/api.go deleted file mode 100644 index c96e2244..00000000 --- a/internal/resolvers/twitter/api.go +++ /dev/null @@ -1,42 +0,0 @@ -package twitter - -import ( - "time" - - "github.com/Chatterino/api/pkg/humanize" -) - -func buildTweetTooltip(tweet *TweetApiResponse) *tweetTooltipData { - data := &tweetTooltipData{} - data.Text = tweet.Text - data.Name = tweet.User.Name - data.Username = tweet.User.Username - data.Likes = humanize.Number(tweet.Likes) - data.Retweets = humanize.Number(tweet.Retweets) - - // TODO: what time format is this exactly? can we move to humanize a la CreationDteRFC3339? - timestamp, err := time.Parse("Mon Jan 2 15:04:05 -0700 2006", tweet.Timestamp) - if err != nil { - data.Timestamp = "" - } else { - data.Timestamp = humanize.CreationDateTime(timestamp) - } - - if len(tweet.Entities.Media) > 0 { - // If tweet contains an image, it will be used as thumbnail - data.Thumbnail = tweet.Entities.Media[0].Url - } - - return data -} - -func buildTwitterUserTooltip(user *TwitterUserApiResponse) *twitterUserTooltipData { - data := &twitterUserTooltipData{} - data.Name = user.Name - data.Username = user.Username - data.Description = user.Description - data.Followers = humanize.Number(user.Followers) - data.Thumbnail = user.ProfileImageUrl - - return data -} diff --git a/internal/resolvers/twitter/initialize.go b/internal/resolvers/twitter/initialize.go index fd7c93b3..4d80a2a9 100644 --- a/internal/resolvers/twitter/initialize.go +++ b/internal/resolvers/twitter/initialize.go @@ -7,6 +7,7 @@ import ( "github.com/Chatterino/api/internal/db" "github.com/Chatterino/api/internal/logger" + "github.com/Chatterino/api/pkg/cache" "github.com/Chatterino/api/pkg/config" "github.com/Chatterino/api/pkg/resolver" "github.com/Chatterino/api/pkg/utils" @@ -33,6 +34,7 @@ const ( ) var ( + cfg config.APIConfig tweetRegexp = regexp.MustCompile(`^/.*\/status(?:es)?\/([^\/\?]+)`) twitterUserRegexp = regexp.MustCompile(`^/([^\/\?\s]+)(?:\/?$|\?.*)$`) @@ -60,15 +62,27 @@ var ( twitterUserTooltipTemplate = template.Must(template.New("twitterUserTooltip").Parse(twitterUserTooltip)) ) -func Initialize(ctx context.Context, cfg config.APIConfig, pool db.Pool, resolvers *[]resolver.Resolver) { +func Initialize( + ctx context.Context, + conf config.APIConfig, + pool db.Pool, + resolvers *[]resolver.Resolver, + collageCache cache.DependentCache, +) { log := logger.FromContext(ctx) - if cfg.TwitterBearerToken == "" { + if conf.TwitterBearerToken == "" { log.Warnw("Twitter credentials missing, won't do special responses for Twitter") return } + cfg = conf const userEndpointURLFormat = "https://api.twitter.com/1.1/users/show.json?screen_name=%s" const tweetEndpointURLFormat = "https://api.twitter.com/1.1/statuses/show.json?id=%s&tweet_mode=extended" - *resolvers = append(*resolvers, NewTwitterResolver(ctx, cfg, pool, userEndpointURLFormat, tweetEndpointURLFormat)) + *resolvers = append( + *resolvers, + NewTwitterResolver( + ctx, cfg, pool, userEndpointURLFormat, tweetEndpointURLFormat, collageCache, + ), + ) } diff --git a/internal/resolvers/twitter/initialize_test.go b/internal/resolvers/twitter/initialize_test.go index dcad0238..ea7ad85e 100644 --- a/internal/resolvers/twitter/initialize_test.go +++ b/internal/resolvers/twitter/initialize_test.go @@ -5,6 +5,7 @@ import ( "testing" 
"github.com/Chatterino/api/internal/logger" + "github.com/Chatterino/api/pkg/cache" "github.com/Chatterino/api/pkg/config" "github.com/Chatterino/api/pkg/resolver" "github.com/pashagolub/pgxmock" @@ -22,8 +23,9 @@ func TestInitialize(t *testing.T) { c.Run("No credentials", func(c *qt.C) { cfg := config.APIConfig{} customResolvers := []resolver.Resolver{} + collageCache := cache.NewPostgreSQLDependentCache(ctx, cfg, pool, cache.NewPrefixKeyProvider("test")) c.Assert(customResolvers, qt.HasLen, 0) - Initialize(ctx, cfg, pool, &customResolvers) + Initialize(ctx, cfg, pool, &customResolvers, collageCache) c.Assert(customResolvers, qt.HasLen, 0) }) @@ -32,8 +34,9 @@ func TestInitialize(t *testing.T) { TwitterBearerToken: "test", } customResolvers := []resolver.Resolver{} + collageCache := cache.NewPostgreSQLDependentCache(ctx, cfg, pool, cache.NewPrefixKeyProvider("test")) c.Assert(customResolvers, qt.HasLen, 0) - Initialize(ctx, cfg, pool, &customResolvers) + Initialize(ctx, cfg, pool, &customResolvers, collageCache) c.Assert(customResolvers, qt.HasLen, 1) }) } diff --git a/internal/resolvers/twitter/resolver.go b/internal/resolvers/twitter/resolver.go index a566816b..27ff15ed 100644 --- a/internal/resolvers/twitter/resolver.go +++ b/internal/resolvers/twitter/resolver.go @@ -70,20 +70,45 @@ func (r *TwitterResolver) Name() string { return "twitter" } -func NewTwitterResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, userEndpointURLFormat, tweetEndpointURLFormat string) *TwitterResolver { - tweetLoader := &TweetLoader{ - bearerKey: cfg.TwitterBearerToken, - endpointURLFormat: tweetEndpointURLFormat, - } +func NewTwitterResolver( + ctx context.Context, + cfg config.APIConfig, + pool db.Pool, + userEndpointURLFormat string, + tweetEndpointURLFormat string, + collageCache cache.DependentCache, +) *TwitterResolver { + tweetCacheKeyProvider := cache.NewPrefixKeyProvider("twitter:tweet") + userCacheKeyProvider := cache.NewPrefixKeyProvider("twitter:user") + + tweetLoader := NewTweetLoader( + cfg.BaseURL, + cfg.TwitterBearerToken, + tweetEndpointURLFormat, + tweetCacheKeyProvider, + collageCache, + cfg.MaxThumbnailSize, + ) userLoader := &UserLoader{ bearerKey: cfg.TwitterBearerToken, endpointURLFormat: userEndpointURLFormat, } + tweetCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, tweetCacheKeyProvider, resolver.NewResponseMarshaller(tweetLoader), + 24*time.Hour, + ) + tweetCache.RegisterDependent(ctx, collageCache) + + userCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, userCacheKeyProvider, resolver.NewResponseMarshaller(userLoader), + 24*time.Hour, + ) + r := &TwitterResolver{ - tweetCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "twitter:tweet", resolver.NewResponseMarshaller(tweetLoader), 24*time.Hour), - userCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "twitter:user", resolver.NewResponseMarshaller(userLoader), 24*time.Hour), + tweetCache: tweetCache, + userCache: userCache, } return r diff --git a/internal/resolvers/twitter/resolver_test.go b/internal/resolvers/twitter/resolver_test.go index 6b34756a..6865e264 100644 --- a/internal/resolvers/twitter/resolver_test.go +++ b/internal/resolvers/twitter/resolver_test.go @@ -40,6 +40,7 @@ func TestResolver(t *testing.T) { pool, ts.URL+"/1.1/users/show.json?screen_name=%s", ts.URL+"/1.1/statuses/show.json?id=%s&tweet_mode=extended", + cache.NewPostgreSQLDependentCache(ctx, cfg, pool, cache.NewPrefixKeyProvider("test")), ) c.Assert(r, qt.IsNotNil) diff --git a/internal/resolvers/twitter/tweet_loader.go 
b/internal/resolvers/twitter/tweet_loader.go index 312085ad..b5f3064f 100644 --- a/internal/resolvers/twitter/tweet_loader.go +++ b/internal/resolvers/twitter/tweet_loader.go @@ -6,13 +6,19 @@ import ( "encoding/json" "errors" "fmt" + "io" + "math" "net/http" "net/url" + "sync" "time" "github.com/Chatterino/api/internal/logger" "github.com/Chatterino/api/pkg/cache" + "github.com/Chatterino/api/pkg/humanize" "github.com/Chatterino/api/pkg/resolver" + "github.com/Chatterino/api/pkg/utils" + "github.com/davidbyttow/govips/v2/vips" ) type APIUser struct { @@ -36,7 +42,7 @@ type TweetApiResponse struct { Likes uint64 `json:"favorite_count"` Retweets uint64 `json:"retweet_count"` User APIUser `json:"user"` - Entities APIEntities `json:"entities"` + Entities APIEntities `json:"extended_entities"` } type tweetTooltipData struct { @@ -50,14 +56,41 @@ type tweetTooltipData struct { } type TweetLoader struct { - bearerKey string - endpointURLFormat string + baseURL string + bearerKey string + endpointURLFormat string + tweetCacheKeyProvider cache.KeyProvider + collageCache cache.DependentCache + maxThumbnailSize uint } var ( - errTweetNotFound = errors.New("tweet not found") + errTweetNotFound = errors.New("tweet not found") + errNoMediaDownloaded = errors.New("couldn't download any of the attached media items") ) +func NewTweetLoader( + baseURL string, + bearerKey string, + endpointURLFormat string, + tweetCacheKeyProvider cache.KeyProvider, + collageCache cache.DependentCache, + maxThumbnailSize uint, +) *TweetLoader { + return &TweetLoader{ + baseURL: baseURL, + bearerKey: bearerKey, + endpointURLFormat: endpointURLFormat, + tweetCacheKeyProvider: tweetCacheKeyProvider, + collageCache: collageCache, + maxThumbnailSize: maxThumbnailSize, + } +} + +func buildCollageKey(tweetID string) string { + return fmt.Sprintf("twitter:collage:%s", tweetID) +} + func (l *TweetLoader) getTweetByID(id string) (*TweetApiResponse, error) { endpointUrl := fmt.Sprintf(l.endpointURLFormat, id) extraHeaders := map[string]string{ @@ -87,7 +120,11 @@ func (l *TweetLoader) getTweetByID(id string) (*TweetApiResponse, error) { return tweet, nil } -func (l *TweetLoader) Load(ctx context.Context, tweetID string, r *http.Request) (*resolver.Response, time.Duration, error) { +func (l *TweetLoader) Load( + ctx context.Context, + tweetID string, + r *http.Request, +) (*resolver.Response, time.Duration, error) { log := logger.FromContext(ctx) log.Debugw("[Twitter] Get tweet", @@ -106,15 +143,192 @@ func (l *TweetLoader) Load(ctx context.Context, tweetID string, r *http.Request) return resolver.Errorf("Twitter tweet API error: %s", err) } - tweetData := buildTweetTooltip(tweetResp) + tooltipData := l.buildTweetTooltip(ctx, tweetResp, r) + var tooltip bytes.Buffer - if err := tweetTooltipTemplate.Execute(&tooltip, tweetData); err != nil { + if err := tweetTooltipTemplate.Execute(&tooltip, tooltipData); err != nil { return resolver.Errorf("Twitter tweet template error: %s", err) } return &resolver.Response{ Status: http.StatusOK, Tooltip: url.PathEscape(tooltip.String()), - Thumbnail: tweetData.Thumbnail, + Thumbnail: tooltipData.Thumbnail, }, cache.NoSpecialDur, nil } + +func (l *TweetLoader) buildTweetTooltip( + ctx context.Context, + tweet *TweetApiResponse, + r *http.Request, +) *tweetTooltipData { + data := &tweetTooltipData{} + data.Text = tweet.Text + data.Name = tweet.User.Name + data.Username = tweet.User.Username + data.Likes = humanize.Number(tweet.Likes) + data.Retweets = humanize.Number(tweet.Retweets) + + // TODO: what 
time format is this exactly? can we move to humanize a la CreationDteRFC3339? + timestamp, err := time.Parse("Mon Jan 2 15:04:05 -0700 2006", tweet.Timestamp) + if err != nil { + data.Timestamp = "" + } else { + data.Timestamp = humanize.CreationDateTime(timestamp) + } + + data.Thumbnail = l.buildThumbnailURL(ctx, tweet, r) + + return data +} + +func (l *TweetLoader) buildThumbnailURL( + ctx context.Context, + tweet *TweetApiResponse, + r *http.Request, +) string { + log := logger.FromContext(ctx) + + numMedia := len(tweet.Entities.Media) + if numMedia == 1 { + // If tweet contains exactly one image, it will be used as thumbnail + return tweet.Entities.Media[0].Url + } + + // More than one media item, need to compose a thumbnail + thumb, err := l.composeThumbnail(ctx, tweet.Entities.Media) + if err != nil { + log.Errorw("Couldn't compose Twitter collage", + "err", err, + ) + return "" + } + + outputBuf, metaData, err := thumb.ExportNative() + if err != nil { + log.Errorw("Couldn't export Twitter collage thumbnail", + "err", err, + ) + return "" + } + + parentKey := l.tweetCacheKeyProvider.CacheKey(ctx, tweet.ID) + collageKey := buildCollageKey(tweet.ID) + contentType := utils.MimeType(metaData.Format) + + err = l.collageCache.Insert(ctx, collageKey, parentKey, outputBuf, contentType) + if err != nil { + log.Errorw("Couldn't insert Twitter collage thumbnail into cache", + "err", err, + ) + return "" + } + + return utils.FormatGeneratedThumbnailURL(l.baseURL, r, collageKey) +} + +func (l *TweetLoader) composeThumbnail( + ctx context.Context, + mediaEntities []APIEntitiesMedia, +) (*vips.ImageRef, error) { + log := logger.FromContext(ctx) + + numMedia := len(mediaEntities) + + // First, download all images + downloaded := make([]*vips.ImageRef, numMedia) + wg := new(sync.WaitGroup) + wg.Add(numMedia) + + for idx, media := range mediaEntities { + idx := idx + media := media + + go func() { + defer wg.Done() + + resp, err := resolver.RequestGET(ctx, media.Url) + if err != nil { + log.Errorw("Couldn't download Twitter media", + "url", media.Url, + "err", err, + ) + return + } + + buf, err := io.ReadAll(resp.Body) + if err != nil { + log.Errorw("Couldn't read response body", + "url", media.Url, + "err", err, + ) + return + } + + ref, err := vips.NewImageFromBuffer(buf) + if err != nil { + log.Errorw("Couldn't convert buffer to vips.ImageRef", + "err", err, + ) + return + } + + downloaded[idx] = ref + }() + } + + wg.Wait() + + // Prepare downloaded media for collage + var collageSource []*vips.ImageRef + + // Keep track of smallest dimension for proper resizing later + smallestDimensionFound := math.MaxFloat64 + + // In a first pass, check downloaded media to determine the smallest dimension + for _, ref := range downloaded { + if ref != nil { + smallerDimensionCur := math.Min(float64(ref.Width()), float64(ref.Height())) + smallestDimensionFound = math.Min(smallestDimensionFound, smallerDimensionCur) + + collageSource = append(collageSource, ref) + } + } + + // In the second pass, resize the images according to smallest dimension + for _, ref := range collageSource { + ref.ThumbnailWithSize( + int(smallestDimensionFound), int(smallestDimensionFound), vips.InterestingCentre, + vips.SizeDown, + ) + } + + if len(collageSource) == 0 { + log.Errorw("No Twitter media could be downloaded, cannot build collage") + return nil, errNoMediaDownloaded + } + + // Now compose the thumbnail + stem := collageSource[0] + + err := stem.ArrayJoin(collageSource[1:], 2) + if err != nil { + log.Errorw("Couldn't 
ArrayJoin images", + "err", err, + ) + return nil, err + } + + maxThumbnailSize := int(l.maxThumbnailSize) + err = stem.ThumbnailWithSize( + maxThumbnailSize, maxThumbnailSize, vips.InterestingNone, vips.SizeDown, + ) + if err != nil { + log.Errorw("Couldn't generate thumbnail", + "err", err, + ) + return nil, err + } + + return stem, nil +} diff --git a/internal/resolvers/twitter/user_loader.go b/internal/resolvers/twitter/user_loader.go index 23f3161d..ca4efdc6 100644 --- a/internal/resolvers/twitter/user_loader.go +++ b/internal/resolvers/twitter/user_loader.go @@ -13,6 +13,7 @@ import ( "github.com/Chatterino/api/internal/logger" "github.com/Chatterino/api/pkg/cache" + "github.com/Chatterino/api/pkg/humanize" "github.com/Chatterino/api/pkg/resolver" ) @@ -37,9 +38,7 @@ type UserLoader struct { endpointURLFormat string } -var ( - errUserNotFound = errors.New("user not found") -) +var errUserNotFound = errors.New("user not found") func (l *UserLoader) getUserByName(userName string) (*TwitterUserApiResponse, error) { endpointUrl := fmt.Sprintf(l.endpointURLFormat, userName) @@ -107,3 +106,14 @@ func (l *UserLoader) Load(ctx context.Context, userName string, r *http.Request) Thumbnail: userData.Thumbnail, }, cache.NoSpecialDur, nil } + +func buildTwitterUserTooltip(user *TwitterUserApiResponse) *twitterUserTooltipData { + data := &twitterUserTooltipData{} + data.Name = user.Name + data.Username = user.Username + data.Description = user.Description + data.Followers = humanize.Number(user.Followers) + data.Thumbnail = user.ProfileImageUrl + + return data +} diff --git a/internal/resolvers/wikipedia/article_resolver.go b/internal/resolvers/wikipedia/article_resolver.go index 055711eb..dcb49bc5 100644 --- a/internal/resolvers/wikipedia/article_resolver.go +++ b/internal/resolvers/wikipedia/article_resolver.go @@ -75,7 +75,10 @@ func NewArticleResolver(ctx context.Context, cfg config.APIConfig, pool db.Pool, } r := &ArticleResolver{ - articleCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "wikipedia:article", resolver.NewResponseMarshaller(articleLoader), 1*time.Hour), + articleCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("wikipedia:article"), + resolver.NewResponseMarshaller(articleLoader), 1*time.Hour, + ), } return r diff --git a/internal/resolvers/youtube/channel_resolver.go b/internal/resolvers/youtube/channel_resolver.go index 5a445e5d..9a96bc9c 100644 --- a/internal/resolvers/youtube/channel_resolver.go +++ b/internal/resolvers/youtube/channel_resolver.go @@ -16,9 +16,7 @@ import ( youtubeAPI "google.golang.org/api/youtube/v3" ) -var ( - youtubeChannelRegex = regexp.MustCompile(`^/(c\/|channel\/|user\/)?([a-zA-Z0-9\-]{1,})$`) -) +var youtubeChannelRegex = regexp.MustCompile(`^/(c\/|channel\/|user\/)?([a-zA-Z0-9\-]{1,})$`) type YouTubeChannelResolver struct { channelCache cache.Cache @@ -62,7 +60,9 @@ func NewYouTubeChannelResolver(ctx context.Context, cfg config.APIConfig, pool d loader := NewYouTubeChannelLoader(youtubeClient) r := &YouTubeChannelResolver{ - channelCache: cache.NewPostgreSQLCache(ctx, cfg, pool, "youtube:channel", loader, 24*time.Hour), + channelCache: cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("youtube:channel"), loader, 24*time.Hour, + ), } return r diff --git a/internal/resolvers/youtube/initialize.go b/internal/resolvers/youtube/initialize.go index f4eb29ab..f263bbd1 100644 --- a/internal/resolvers/youtube/initialize.go +++ b/internal/resolvers/youtube/initialize.go @@ -43,7 +43,9 @@ var ( func 
NewYouTubeVideoResolvers(ctx context.Context, cfg config.APIConfig, pool db.Pool, youtubeClient *youtubeAPI.Service) (resolver.Resolver, resolver.Resolver) { videoLoader := NewVideoLoader(youtubeClient) - videoCache := cache.NewPostgreSQLCache(ctx, cfg, pool, "youtube:video", videoLoader, 24*time.Hour) + videoCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("youtube:video"), videoLoader, 24*time.Hour, + ) videoResolver := NewYouTubeVideoResolver(videoCache) videoShortURLResolver := NewYouTubeVideoShortURLResolver(videoCache) diff --git a/internal/resolvers/youtube/video_resolver_test.go b/internal/resolvers/youtube/video_resolver_test.go index 74aee95b..38427f14 100644 --- a/internal/resolvers/youtube/video_resolver_test.go +++ b/internal/resolvers/youtube/video_resolver_test.go @@ -55,7 +55,9 @@ func TestVideoResolver(t *testing.T) { c.Assert(err, qt.IsNil) loader := NewVideoLoader(youtubeClient) - videoCache := cache.NewPostgreSQLCache(ctx, cfg, pool, "youtube:video", loader, 24*time.Hour) + videoCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("youtube:video"), loader, 24*time.Hour, + ) resolver := NewYouTubeVideoResolver(videoCache) diff --git a/internal/resolvers/youtube/video_shorturl_resolver_test.go b/internal/resolvers/youtube/video_shorturl_resolver_test.go index f279dd4f..32620ccd 100644 --- a/internal/resolvers/youtube/video_shorturl_resolver_test.go +++ b/internal/resolvers/youtube/video_shorturl_resolver_test.go @@ -55,7 +55,9 @@ func TestVideoShortURLResolver(t *testing.T) { c.Assert(err, qt.IsNil) loader := NewVideoLoader(youtubeClient) - videoCache := cache.NewPostgreSQLCache(ctx, cfg, pool, "youtube:video", loader, 24*time.Hour) + videoCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("youtube:video"), loader, 24*time.Hour, + ) resolver := NewYouTubeVideoShortURLResolver(videoCache) diff --git a/internal/routes/twitchemotes/initialize.go b/internal/routes/twitchemotes/initialize.go index a463793c..68e2b454 100644 --- a/internal/routes/twitchemotes/initialize.go +++ b/internal/routes/twitchemotes/initialize.go @@ -58,7 +58,10 @@ func Initialize(ctx context.Context, cfg config.APIConfig, pool db.Pool, router helixAPI: helixClient, helixUsernameCache: helixUsernameCache, } - twitchemotesCache := cache.NewPostgreSQLCache(ctx, cfg, pool, "twitchemotes", loader, time.Duration(30)*time.Minute) + twitchemotesCache := cache.NewPostgreSQLCache( + ctx, cfg, pool, cache.NewPrefixKeyProvider("twitchemotes"), loader, + time.Duration(30)*time.Minute, + ) router.Get("/twitchemotes/set/{setID}", func(w http.ResponseWriter, r *http.Request) { setHandler(ctx, twitchemotesCache, w, r) diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go index 274f15c2..db1ed59a 100644 --- a/pkg/cache/cache.go +++ b/pkg/cache/cache.go @@ -17,15 +17,35 @@ type Cache interface { // GetOnly returns the cached value, and doesn't try to load it if it doesn't exist GetOnly(ctx context.Context, key string) *Response + + RegisterDependent(ctx context.Context, dependent DependentCache) + + // Commits dependent values that belong to the key + commitDependents(ctx context.Context, key string) error + + // Rolls back uncommitted dependent values that belong to the key + rollbackDependents(ctx context.Context, key string) error } type Loader interface { Load(ctx context.Context, key string, r *http.Request) ([]byte, *int, *string, time.Duration, error) } +type DependentCache interface { + // Returns (value, content type, error) + 
Get(ctx context.Context, key string) ([]byte, string, error) + + Insert(ctx context.Context, key string, parentKey string, value []byte, contentType string) error + + commit(ctx context.Context, parentKey string) error + rollback(ctx context.Context, parentKey string) error +} + var NoSpecialDur time.Duration var NewDefaultCache = NewPostgreSQLCache -var defaultStatusCode int = 200 -var defaultContentType string = "application/json" +var ( + defaultStatusCode int = 200 + defaultContentType string = "application/json" +) diff --git a/pkg/cache/db.go b/pkg/cache/db.go index 8c867edb..b07d98a5 100644 --- a/pkg/cache/db.go +++ b/pkg/cache/db.go @@ -8,7 +8,6 @@ import ( "github.com/Chatterino/api/internal/db" "github.com/Chatterino/api/internal/logger" "github.com/Chatterino/api/pkg/config" - "github.com/jackc/pgconn" "github.com/jackc/pgx/v4" "github.com/prometheus/client_golang/prometheus" ) @@ -45,19 +44,67 @@ type PostgreSQLCache struct { cacheDuration time.Duration - prefix string + keyProvider KeyProvider pool db.Pool + + dependentCaches []DependentCache } -var ( - // TODO: Make the "internal error" tooltip an actual tooltip - tooltipInternalError = []byte("internal error") -) +// TODO: Make the "internal error" tooltip an actual tooltip +var tooltipInternalError = []byte("internal error") + +// Returns the number of deleted tooltip entries +func clearOldTooltips(ctx context.Context, pool db.Pool) (int, error) { + log := logger.FromContext(ctx) + + const query = "DELETE FROM cache WHERE now() > cached_until RETURNING key;" + + rows, err := pool.Query(ctx, query) + if err != nil { + log.Errorw("Error deleting old tooltips from cache", + "error", err, + ) + return -1, err + } + + // Remember the deleted keys: they may be parent keys of dependent values + var deletedParents []string + for rows.Next() { + var curKey string + err := rows.Scan(&curKey) + if err != nil { + log.Warnw("Could not scan a deleted key") + continue + } + deletedParents = append(deletedParents, curKey) + } -func clearOldTooltips(ctx context.Context, pool db.Pool) (pgconn.CommandTag, error) { - const query = "DELETE FROM cache WHERE now() > cached_until;" - return pool.Exec(ctx, query) + _, err = pool.Exec(ctx, "DELETE FROM dependent_values WHERE parent_key = ANY($1)", deletedParents) + if err != nil { + log.Errorw("Error deleting dependent values", + "err", err, + ) + return -1, err + } + + return len(deletedParents), nil +} + +// This is meant as a safety measure in case a DependentCache user does not properly manage parent +// keys. 
+func clearExpiredDependentValues(ctx context.Context, pool db.Pool) error { + log := logger.FromContext(ctx) + + _, err := pool.Exec(ctx, "DELETE FROM dependent_values WHERE now() > expiration_timestamp") + if err != nil { + log.Errorw("Error clearing expired dependent values", + "err", err, + ) + return err + } + + return nil } func startTooltipClearer(ctx context.Context, pool db.Pool) { @@ -67,6 +114,8 @@ func (c *PostgreSQLCache) load(ctx context.Context, key string, r *http.Request) log := logger.FromContext(ctx) payload, statusCode, contentType, overrideDuration, err := c.loader.Load(ctx, key, r) + // If the parent cannot be inserted into the cache, rollback the dependents + defer c.rollbackDependents(ctx, key) if statusCode == nil { log.Warnw("Missing status code, setting to 200 default") @@ -77,7 +126,7 @@ func (c *PostgreSQLCache) load(ctx context.Context, key string, r *http.Request) contentType = &defaultContentType } - var dur = c.cacheDuration + dur := c.cacheDuration if overrideDuration != 0 { dur = overrideDuration } @@ -86,14 +135,17 @@ func (c *PostgreSQLCache) load(ctx context.Context, key string, r *http.Request) return nil, err } - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) if _, err := c.pool.Exec(ctx, "INSERT INTO cache (key, value, http_status_code, http_content_type, cached_until) VALUES ($1, $2, $3, $4, $5)", cacheKey, payload, *statusCode, *contentType, time.Now().Add(dur)); err != nil { log.Errorw("Error inserting tooltip into cache", - "prefix", c.prefix, + "cacheKey", cacheKey, "key", key, "error", err, ) } + // Parent entry was inserted correctly, commit the dependents to prevent them from being rolled + // back + c.commitDependents(ctx, key) return &Response{ Payload: payload, @@ -118,7 +170,7 @@ func (c *PostgreSQLCache) loadFromDatabase(ctx context.Context, cacheKey string) func (c *PostgreSQLCache) Get(ctx context.Context, key string, r *http.Request) (*Response, error) { log := logger.FromContext(ctx) - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) cacheResponse, err := c.loadFromDatabase(ctx, cacheKey) if err != nil { @@ -131,18 +183,18 @@ func (c *PostgreSQLCache) Get(ctx context.Context, key string, r *http.Request) return &tooltipInternalError, err } else if cacheResponse != nil { cacheHits.Inc() - log.Debugw("DB Get cache hit", "prefix", c.prefix, "key", key) + log.Debugw("DB Get cache hit", "cacheKey", cacheKey) return cacheResponse, nil } cacheMisses.Inc() - log.Debugw("DB Get cache miss", "prefix", c.prefix, "key", key) + log.Debugw("DB Get cache miss", "cacheKey", cacheKey) return c.load(ctx, key, r) } func (c *PostgreSQLCache) GetOnly(ctx context.Context, key string) *Response { log := logger.FromContext(ctx) - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) value, err := c.loadFromDatabase(ctx, cacheKey) if err != nil { @@ -150,39 +202,81 @@ func (c *PostgreSQLCache) GetOnly(ctx context.Context, key string) *Response { return nil } else if value != nil { cacheHits.Inc() - log.Debugw("DB GetOnly cache hit", "prefix", c.prefix, "key", key) + log.Debugw("DB GetOnly cache hit", "cacheKey", cacheKey) return value } cacheMisses.Inc() - log.Debugw("DB GetOnly cache miss", "prefix", c.prefix, "key", key) + log.Debugw("DB GetOnly cache miss", "cacheKey", cacheKey) return nil } func StartCacheClearer(ctx context.Context, pool db.Pool) { log := logger.FromContext(ctx) - ticker := time.NewTicker(1 * time.Minute) + tooltipTicker := time.NewTicker(1 * 
time.Minute) + dependentValuesTicker := time.NewTicker(12 * time.Hour) for { select { case <-ctx.Done(): return - case <-ticker.C: - if ct, err := clearOldTooltips(ctx, pool); err != nil { + case <-tooltipTicker.C: + if numDeleted, err := clearOldTooltips(ctx, pool); err != nil { log.Errorw("Error clearing old tooltips") } else { - clearedEntries.Add(float64(ct.RowsAffected())) - log.Debugw("Cleared old tooltips", "rowsAffected", ct.RowsAffected()) + clearedEntries.Add(float64(numDeleted)) + log.Debugw("Cleared old tooltips", "rowsAffected", numDeleted) } + + case <-dependentValuesTicker.C: + if err := clearExpiredDependentValues(ctx, pool); err != nil { + log.Errorw("Error clearing expired dependent values") + } + } + } +} + +func (c *PostgreSQLCache) RegisterDependent(ctx context.Context, dependent DependentCache) { + c.dependentCaches = append(c.dependentCaches, dependent) +} + +func (c *PostgreSQLCache) commitDependents(ctx context.Context, key string) error { + parentKey := c.keyProvider.CacheKey(ctx, key) + + // XXX: If we knew whether all dependent caches were PostgreSQLDependentCaches, this could be + // optimized (since all commit queries will be the same). But there might be other + // DependentCache implementations and thus we must delegate to each registered one. + for _, dependent := range c.dependentCaches { + err := dependent.commit(ctx, parentKey) + if err != nil { + continue + } + } + + return nil +} + +func (c *PostgreSQLCache) rollbackDependents(ctx context.Context, key string) error { + parentKey := c.keyProvider.CacheKey(ctx, key) + + // XXX: If we knew whether all dependent caches were PostgreSQLDependentCaches, this could be + // optimized (since all rollback queries will be the same). But there might be other + // DependentCache implementations and thus we must delegate to each registered one. + for _, dependent := range c.dependentCaches { + err := dependent.rollback(ctx, parentKey) + if err != nil { + continue } } + + return nil } -func NewPostgreSQLCache(ctx context.Context, cfg config.APIConfig, pool db.Pool, prefix string, loader Loader, cacheDuration time.Duration) *PostgreSQLCache { +func NewPostgreSQLCache(ctx context.Context, cfg config.APIConfig, pool db.Pool, keyProvider KeyProvider, loader Loader, cacheDuration time.Duration) *PostgreSQLCache { // Create connection pool if it's not already initialized return &PostgreSQLCache{ - prefix: prefix, + keyProvider: keyProvider, loader: loader, cacheDuration: cacheDuration, pool: pool, @@ -190,3 +284,128 @@ func NewPostgreSQLCache(ctx context.Context, cfg config.APIConfig, pool db.Pool, } var _ Cache = (*PostgreSQLCache)(nil) + +type PostgreSQLDependentCache struct { + keyProvider KeyProvider + + pool db.Pool +} + +// The time after which dependent values will get cleaned up regardless of whether the parent key +// exists or not. This is done as a fail-safe in case of improper parent key management. 
+var dependentExpirationDuration = 24 * time.Hour + +func (c *PostgreSQLDependentCache) loadFromDatabase(ctx context.Context, cacheKey string) (*Response, error) { + var response Response + err := c.pool.QueryRow( + ctx, + "SELECT value, http_status_code, http_content_type FROM cache WHERE key=$1", + cacheKey, + ).Scan(&response.Payload, &response.StatusCode, &response.ContentType) + if err == nil { + return &response, nil + } + + if err != pgx.ErrNoRows { + return nil, err + } + + return nil, nil +} + +func (c *PostgreSQLDependentCache) Get(ctx context.Context, key string) ([]byte, string, error) { + log := logger.FromContext(ctx) + + cacheKey := c.keyProvider.CacheKey(ctx, key) + var value []byte + var contentType string + + err := c.pool.QueryRow( + ctx, + "SELECT value, http_content_type FROM dependent_values WHERE key=$1", + cacheKey, + ).Scan(&value, &contentType) + if err != nil { + if err != pgx.ErrNoRows { + // An actual error + log.Warnw("Unhandled sql error", "error", err) + return nil, "", err + } + + // Cache entry didn't exist + return nil, "", nil + } + + return value, contentType, nil +} + +func (c *PostgreSQLDependentCache) Insert( + ctx context.Context, key string, parentKey string, value []byte, contentType string, +) error { + log := logger.FromContext(ctx) + + cacheKey := c.keyProvider.CacheKey(ctx, key) + if _, err := c.pool.Exec( + ctx, + "INSERT INTO dependent_values (key, parent_key, value, http_content_type, "+ + "expiration_timestamp) VALUES ($1, $2, $3, $4, $5)", + cacheKey, parentKey, value, contentType, time.Now().Add(dependentExpirationDuration), + ); err != nil { + log.Errorw("Error inserting dependent value", + "cacheKey", cacheKey, + "parentKey", parentKey, + "error", err, + ) + return err + } + + return nil +} + +func (c *PostgreSQLDependentCache) commit(ctx context.Context, parentKey string) error { + log := logger.FromContext(ctx) + + _, err := c.pool.Exec( + ctx, + "UPDATE dependent_values SET committed = TRUE WHERE parent_key = $1 AND NOT committed", + parentKey, + ) + if err != nil { + log.Errorw("Error committing dependent values", + "parentKey", parentKey, + "err", err, + ) + return err + } + + return nil +} + +func (c *PostgreSQLDependentCache) rollback(ctx context.Context, parentKey string) error { + log := logger.FromContext(ctx) + + _, err := c.pool.Exec( + ctx, + "DELETE FROM dependent_values WHERE parent_key = $1 AND NOT committed", + parentKey, + ) + if err != nil { + log.Errorw("Error rolling back dependent values", + "err", err, + ) + return err + } + + return nil +} + +func NewPostgreSQLDependentCache( + ctx context.Context, cfg config.APIConfig, pool db.Pool, keyProvider KeyProvider, +) *PostgreSQLDependentCache { + return &PostgreSQLDependentCache{ + keyProvider: keyProvider, + pool: pool, + } +} + +var _ DependentCache = (*PostgreSQLDependentCache)(nil) diff --git a/pkg/cache/key_provider.go b/pkg/cache/key_provider.go new file mode 100644 index 00000000..a8aca841 --- /dev/null +++ b/pkg/cache/key_provider.go @@ -0,0 +1,22 @@ +package cache + +import "context" + +type KeyProvider interface { + // Returns the name of the cache key generated for the query + CacheKey(ctx context.Context, query string) string +} + +type PrefixKeyProvider struct { + prefix string +} + +func NewPrefixKeyProvider(prefix string) *PrefixKeyProvider { + return &PrefixKeyProvider{ + prefix: prefix, + } +} + +func (p *PrefixKeyProvider) CacheKey(ctx context.Context, query string) string { + return p.prefix + ":" + query +} diff --git a/pkg/cache/memory.go 
b/pkg/cache/memory.go index 9cc36873..89a1e798 100644 --- a/pkg/cache/memory.go +++ b/pkg/cache/memory.go @@ -27,7 +27,22 @@ type MemoryCache struct { cacheDuration time.Duration - prefix string + keyProvider KeyProvider +} + +func (*MemoryCache) RegisterDependent(ctx context.Context, dependent DependentCache) { + // Required for Cache interface + panic("DependentCache management is unimplemented for MemoryCache") +} + +func (*MemoryCache) commitDependents(ctx context.Context, key string) error { + // Required for Cache interface + panic("DependentCache management is unimplemented for MemoryCache") +} + +func (*MemoryCache) rollbackDependents(ctx context.Context, key string) error { + // Required for Cache interface + panic("DependentCache management is unimplemented for MemoryCache") } func (c *MemoryCache) load(ctx context.Context, key string, r *http.Request) { @@ -44,7 +59,7 @@ func (c *MemoryCache) load(ctx context.Context, key string, r *http.Request) { contentType = &defaultContentType } - var dur = c.cacheDuration + dur := c.cacheDuration if overrideDuration != 0 { dur = overrideDuration } @@ -57,7 +72,7 @@ func (c *MemoryCache) load(ctx context.Context, key string, r *http.Request) { // Cache it if err == nil { - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) kvCache.Set(cacheKey, response, dur) } else { fmt.Println("Error when some load function was called:", err) @@ -73,11 +88,11 @@ func (c *MemoryCache) load(ctx context.Context, key string, r *http.Request) { func (c *MemoryCache) Get(ctx context.Context, key string, r *http.Request) (*Response, error) { log := logger.FromContext(ctx) - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) // If key is in cache, return value if value, found := kvCache.Get(cacheKey); found && value != nil { - log.Debugw("Memory Get cache hit", "prefix", c.prefix, "key", key) + log.Debugw("Memory Get cache hit", "cacheKey", cacheKey) if response, ok := value.(Response); ok { return &response, nil } @@ -96,7 +111,7 @@ func (c *MemoryCache) Get(ctx context.Context, key string, r *http.Request) (*Re c.requestsMutex.Unlock() if first { - log.Debugw("Memory Get cache miss", "prefix", c.prefix, "key", key) + log.Debugw("Memory Get cache miss", "cacheKey", cacheKey) go c.load(ctx, key, r) } @@ -108,25 +123,25 @@ func (c *MemoryCache) Get(ctx context.Context, key string, r *http.Request) (*Re func (c *MemoryCache) GetOnly(ctx context.Context, key string) *Response { log := logger.FromContext(ctx) - cacheKey := c.prefix + ":" + key + cacheKey := c.keyProvider.CacheKey(ctx, key) if value, _ := kvCache.Get(cacheKey); value != nil { - log.Debugw("Memory GetOnly cache hit", "prefix", c.prefix, "key", key) + log.Debugw("Memory GetOnly cache hit", "cacheKey", cacheKey) if response, ok := value.(Response); ok { return &response } - log.Debugw("Memory GetOnly cache type mismatch", "prefix", c.prefix, "key", key) + log.Debugw("Memory GetOnly cache type mismatch", "cacheKey", cacheKey) return nil } - log.Debugw("Memory GetOnly cache miss", "prefix", c.prefix, "key", key) + log.Debugw("Memory GetOnly cache miss", "cacheKey", cacheKey) return nil } -func NewMemoryCache(cfg config.APIConfig, prefix string, loader Loader, cacheDuration time.Duration) *MemoryCache { +func NewMemoryCache(cfg config.APIConfig, keyProvider KeyProvider, loader Loader, cacheDuration time.Duration) *MemoryCache { return &MemoryCache{ - prefix: prefix, + keyProvider: keyProvider, loader: loader, requests: make(map[string][]chan 
Response), cacheDuration: cacheDuration, diff --git a/pkg/utils/image.go b/pkg/utils/image.go new file mode 100644 index 00000000..7c909633 --- /dev/null +++ b/pkg/utils/image.go @@ -0,0 +1,13 @@ +package utils + +import "github.com/davidbyttow/govips/v2/vips" + +// MimeType turns a vips.ImageType into a MIME type string +func MimeType(imgType vips.ImageType) string { + subtype, ok := vips.ImageTypes[imgType] + if ok { + return "image/" + subtype + } + + return "" +} diff --git a/pkg/utils/url.go b/pkg/utils/url.go index 7f823627..74d31e69 100644 --- a/pkg/utils/url.go +++ b/pkg/utils/url.go @@ -29,6 +29,28 @@ func FormatThumbnailURL(baseURL string, r *http.Request, urlString string) strin return fmt.Sprintf("%s%s/thumbnail/%s", scheme, r.Host, url.QueryEscape(urlString)) } +func FormatGeneratedThumbnailURL(baseURL string, r *http.Request, urlString string) string { + if baseURL != "" { + return fmt.Sprintf( + "%s/generated/%s", strings.TrimSuffix(baseURL, "/"), url.QueryEscape(urlString), + ) + } + + forwardedProtocol := r.Header.Get("X-Forwarded-Proto") + + scheme := "https://" + + if forwardedProtocol == "https" { + scheme = "https://" + } else if forwardedProtocol == "http" { + scheme = "http://" + } else if r.TLS == nil { + scheme = "http://" // https://github.com/golang/go/issues/28940#issuecomment-441749380 + } + + return fmt.Sprintf("%s%s/generated/%s", scheme, r.Host, url.QueryEscape(urlString)) +} + func UnescapeURLArgument(r *http.Request, key string) (string, error) { escapedURL := chi.URLParam(r, key) url, err := url.PathUnescape(escapedURL)
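A minimal sketch (not part of the diff itself) of how the KeyProvider/DependentCache wiring introduced above is intended to be used. Only cache.NewPrefixKeyProvider, cache.NewPostgreSQLCache, cache.NewPostgreSQLDependentCache, RegisterDependent, DependentCache.Insert, and the cache.Loader signature come from this change set; the names exampleLoader, wireCaches, and the "example:*" prefixes are invented for illustration.

package cachesketch

import (
	"context"
	"net/http"
	"time"

	"github.com/Chatterino/api/internal/db"
	"github.com/Chatterino/api/pkg/cache"
	"github.com/Chatterino/api/pkg/config"
)

// exampleLoader is an invented Loader that, besides its own payload, produces a
// derived artifact (e.g. a generated image) and stores it in a DependentCache.
type exampleLoader struct {
	parentKeys cache.KeyProvider
	derived    cache.DependentCache
}

func (l *exampleLoader) Load(
	ctx context.Context, key string, r *http.Request,
) ([]byte, *int, *string, time.Duration, error) {
	statusCode := 200
	contentType := "application/json"

	// The derived row references the parent's cache key; the parent cache commits it
	// after its own entry is inserted successfully and rolls it back otherwise.
	parentKey := l.parentKeys.CacheKey(ctx, key)
	_ = l.derived.Insert(ctx, key, parentKey, []byte("derived bytes"), "image/png")

	return []byte(`{"ok":true}`), &statusCode, &contentType, cache.NoSpecialDur, nil
}

// wireCaches wires one parent cache to one dependent cache via RegisterDependent.
func wireCaches(ctx context.Context, cfg config.APIConfig, pool db.Pool) *cache.PostgreSQLCache {
	parentKeys := cache.NewPrefixKeyProvider("example:parent")
	derived := cache.NewPostgreSQLDependentCache(
		ctx, cfg, pool, cache.NewPrefixKeyProvider("example:derived"),
	)

	loader := &exampleLoader{parentKeys: parentKeys, derived: derived}

	parent := cache.NewPostgreSQLCache(ctx, cfg, pool, parentKeys, loader, 1*time.Hour)
	parent.RegisterDependent(ctx, derived)

	return parent
}

This mirrors how NewTwitterResolver passes tweetCacheKeyProvider and collageCache into NewTweetLoader, so collage rows are committed only when the cached tweet entry is written and are rolled back otherwise.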