Implement cache support (#5)
fisker authored Jun 30, 2024
1 parent b149399 commit 793979c
Showing 8 changed files with 217 additions and 28 deletions.
8 changes: 8 additions & 0 deletions .vscode/extensions.json
@@ -0,0 +1,8 @@
{
  "recommendations": [
    "esbenp.prettier-vscode",
    "editorconfig.editorconfig",
    "dbaeumer.vscode-eslint",
    "streetsidesoftware.code-spell-checker"
  ]
}
17 changes: 17 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,17 @@
{
  "[javascript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode",
    "editor.formatOnSave": true,
    "editor.codeActionsOnSave": [
      "source.fixAll.eslint"
    ]
  },
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit"
  },
  "prettier.requireConfig": true,
  "eslint.experimental.useFlatConfig": true,
  "files.exclude": {
    "dist*": true
  }
}
105 changes: 105 additions & 0 deletions cache.js
@@ -0,0 +1,105 @@
import fs from 'node:fs'
import * as path from 'node:path'
import crypto from 'node:crypto'
import packageJson from './package-json-proxy.cjs'
import temporaryDirectory from 'temp-dir'

function hash(data) {
  return crypto.createHash('sha1').update(data).digest('hex')
}

class Cache {
  #root

  #cacheDirectory

  #metaFile

  #files

  #updated = new Map()

  constructor(root) {
    this.#root = root

    let cacheDirectory
    if (fs.existsSync(path.join(root, 'node_modules'))) {
      cacheDirectory = path.join(
        root,
        `node_modules/${packageJson.name}-cache/`,
      )
    } else {
      cacheDirectory = path.join(
        temporaryDirectory,
        `${packageJson.name}-cache/${hash(root)}/`,
      )
    }

    this.#cacheDirectory = cacheDirectory

    this.#metaFile = path.join(this.#cacheDirectory, 'meta.json')

    this.#files = this.#load() ?? new Set()
  }

  #load() {
    let data
    try {
      data = JSON.parse(fs.readFileSync(this.#metaFile))
    } catch {}

    if (
      !data ||
      data.version !== packageJson.version ||
      data.root !== this.#root ||
      !Array.isArray(data.files)
    ) {
      return
    }

    return new Set(data.files)
  }

  getCachedData(content) {
    if (!this.#files) return

    const contentHash = hash(content)

    if (!this.#files.has(contentHash)) {
      return
    }

    try {
      return fs.readFileSync(path.join(this.#cacheDirectory, contentHash))
    } catch {}
  }

  updateCache(content, data) {
    const contentHash = hash(content)
    this.#updated.set(contentHash, data)
  }

  writeFile() {
    fs.mkdirSync(this.#cacheDirectory, {recursive: true})

    for (const [contentHash, data] of this.#updated) {
      fs.writeFileSync(path.join(this.#cacheDirectory, contentHash), data)
    }

    fs.writeFileSync(
      this.#metaFile,
      JSON.stringify(
        {
          version: packageJson.version,
          root: this.#root,
          files: [...new Set([...this.#files, ...this.#updated.keys()])],
          time: new Date(),
        },
        undefined,
        2,
      ),
    )
  }
}

export default Cache
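
For orientation, a minimal sketch of how this class is meant to be consumed, mirroring the wiring added to index.js below; the root path, the sample content, and the minify helper are placeholders, not part of the commit:

// Illustrative only — placeholder values, not shipped code.
import Cache from './cache.js'

const cache = new Cache(process.cwd())
const original = Buffer.from('<svg xmlns="http://www.w3.org/2000/svg"></svg>')

// Reuse a previous result when the content hash is already known, otherwise do the work.
const minified = cache.getCachedData(original) ?? minify(original)
cache.updateCache(original, minified)

// Persist the cached blobs plus meta.json (version, root, known hashes) in one pass.
cache.writeFile()

function minify(buffer) {
  return buffer // stand-in for the real svgo/squoosh step
}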
66 changes: 46 additions & 20 deletions index.js
@@ -1,8 +1,11 @@
import fs from 'node:fs/promises'

[GitHub Actions / Lint warning on line 1 of index.js: 'fs' is defined but never used]
import assert from 'node:assert/strict'
import {isSupportedImage, squooshImages} from './squoosh.js'
import optimizeSvg from './svgo.js'
import Cache from './cache.js'
import packageJson from './package-json-proxy.cjs'

async function minifyWithSquoosh(bundle) {
async function minifyWithSquoosh(bundle, cache) {
/** @type {import('vite').Rollup.OutputAsset[]} */
const images = Object.values(bundle).filter(
(assetOrChunk) =>
@@ -15,41 +18,64 @@ async function minifyWithSquoosh(bundle) {

const compressed = await squooshImages(
images.map((image) => ({content: image.source, name: image.fileName})),
cache,
)

for (const [index, image] of images.entries()) {
bundle[image.fileName].source = compressed[index]
const original = image.source
const updated = compressed[index]
cache.updateCache(original, updated)

bundle[image.fileName].source = updated
}
}

async function minifySvg(bundle) {
async function minifySvg(bundle, cache) {
/** @type {import('vite').Rollup.OutputAsset[]} */
const images = Object.values(bundle).filter(
(assetOrChunk) =>
assetOrChunk.type === 'asset' && /\.svg$/i.test(assetOrChunk.fileName),
)

for (const {fileName: name, source: content} of images) {
bundle[name].source = optimizeSvg(content)
const cached = cache.getCachedData(content)
const compressed = cached ?? optimizeSvg(content)
cache.updateCache(content, compressed)
bundle[name].source = compressed
}
}

/**
* @type {import('vite').Plugin}
*/
const vitePluginImageMinify = {
name: 'vite-plugin-image-minify',
apply: 'build',
async generateBundle(_options, bundle) {
for (const [fileName, assetOrChunk] of Object.entries(bundle)) {
assert.equal(fileName, assetOrChunk.fileName, 'Unexpected asset')
}

await minifyWithSquoosh(bundle)
await minifySvg(bundle)
},
}
function createVitePluginImageMinify(options) {
const cacheEnabled = options?.__test_enable_cache !== false
let root

/**
* @type {import('vite').Plugin}
*/
return {
name: packageJson.name,
apply: 'build',
config(config) {
;({root} = config)
},
async generateBundle(options, bundle) {
for (const [fileName, assetOrChunk] of Object.entries(bundle)) {
assert.equal(fileName, assetOrChunk.fileName, 'Unexpected asset')
}

const createVitePluginImageMinify = () => vitePluginImageMinify
const cache = cacheEnabled
? new Cache(root)
: {
getCachedData() {},
updateCache() {},
writeFile() {},
}
await minifyWithSquoosh(bundle, cache)
await minifySvg(bundle, cache)

cache.writeFile()
},
}
}

export default createVitePluginImageMinify
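
Since the default export is now a factory rather than a plain plugin object, consumers call it from their Vite config. A minimal sketch (illustrative only; the __test_enable_cache flag is meant for the test suite, as the index.test.js change below shows):

// vite.config.js — illustrative sketch, not part of this commit.
import {defineConfig} from 'vite'
import vitePluginImageMinify from 'vite-plugin-image-minify'

export default defineConfig({
  plugins: [
    // The cache is enabled by default; the tests pass {__test_enable_cache: false} to bypass it.
    vitePluginImageMinify(),
  ],
})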
2 changes: 1 addition & 1 deletion index.test.js
@@ -16,7 +16,7 @@ async function runVite() {
envFile: false,
write: false,
logLevel: 'warn',
plugins: [vitePluginImageSquoosh()],
plugins: [vitePluginImageSquoosh({__test_enable_cache: false})],
build: {assetsInlineLimit: 0},
})

1 change: 1 addition & 0 deletions package-json-proxy.cjs
@@ -0,0 +1 @@
module.exports = require('./package.json')
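
This one-line CommonJS proxy lets the ESM sources read the package name and version through a plain import, presumably to sidestep JSON import attributes; cache.js and index.js above consume it exactly as in this sketch:

// Illustrative only — matches the imports already added in cache.js and index.js.
import packageJson from './package-json-proxy.cjs'

console.log(packageJson.name, packageJson.version) // "vite-plugin-image-minify", "0.0.3"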
16 changes: 14 additions & 2 deletions package.json
@@ -1,7 +1,16 @@
{
"name": "vite-plugin-image-minify",
"version": "0.0.2",
"version": "0.0.3",
"description": "Vite plugin to minify images.",
"keywords": [
"vite-plugin",
"Vite",
"Image",
"Minify",
"Minifier",
"squoosh",
"svgo"
],
"homepage": "https://github.com/fisker/vite-plugin-image-minify#readme",
"bugs": {
"url": "https://github.com/fisker/vite-plugin-image-minify/issues"
@@ -19,6 +28,8 @@
"exports": "./index.js",
"files": [
"index.js",
"package-json-proxy.cjs",
"cache.js",
"squoosh.js",
"svgo.js"
],
@@ -44,7 +55,8 @@
},
"dependencies": {
"@frostoven/libsquoosh": "^0.4.8",
"svgo": "^3.3.2"
"svgo": "^3.3.2",
"temp-dir": "3.0.0"
},
"devDependencies": {
"@fisker/eslint-config": "13.1.0",
30 changes: 25 additions & 5 deletions squoosh.js
@@ -33,13 +33,23 @@ function getEncoder(filename) {
* @param {{content: Buffer, name: string}[]} files
* @returns {}
*/
async function squooshImages(files) {
async function squooshImages(files, cache) {
if (files.length === 0) {
return []
}

const {ImagePool} = await importLibrarySquoosh()
const imagePool = new ImagePool(os.cpus().length)
let imagePoolLoadPromise
let imagePool

function getImagePool() {
imagePoolLoadPromise ??= (async () => {
const {ImagePool} = await importLibrarySquoosh()
imagePool = new ImagePool(os.cpus().length)
return imagePool
})()

return imagePoolLoadPromise
}

let result

@@ -50,15 +60,25 @@ async function squooshImages(files) {
if (!encoder) {
return original
}

const cached = cache.getCachedData(original)

if (cached) {
return cached
}

const imagePool = await getImagePool()
const image = imagePool.ingestImage(original)
await image.encode({[encoder]: {}})
const result = await image.encodedWith[encoder]
const compressed = result.binary
return compressed.length < original.length ? compressed : original
const data = compressed.length < original.length ? compressed : original

return data
}),
)
} finally {
imagePool.close()
imagePool?.close()
}

return result
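
The ImagePool is now created lazily, so a build whose images are all served from the cache never spawns Squoosh worker threads, and the optional chaining on close() covers the case where no pool was ever created. A generic sketch of the same lazy shared-initialization pattern, with placeholder names:

// Illustrative pattern only; createExpensiveResource stands in for
// importLibrarySquoosh() + new ImagePool(os.cpus().length).
let resourcePromise
let resource

function getResource() {
  // Concurrent callers share one promise, so the resource is created at most once.
  resourcePromise ??= (async () => {
    resource = await createExpensiveResource()
    return resource
  })()
  return resourcePromise
}

async function processAll(items) {
  try {
    return await Promise.all(
      items.map(async (item) => (await getResource()).process(item)),
    )
  } finally {
    // Close only if something actually triggered creation.
    resource?.close()
  }
}

async function createExpensiveResource() {
  return {process: async (item) => item, close() {}}
}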
