diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..a0ee928
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,8 @@
+{
+  "recommendations": [
+    "esbenp.prettier-vscode",
+    "editorconfig.editorconfig",
+    "dbaeumer.vscode-eslint",
+    "streetsidesoftware.code-spell-checker"
+  ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..62d4909
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,17 @@
+{
+  "[javascript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.codeActionsOnSave": [
+      "source.fixAll.eslint"
+    ]
+  },
+  "editor.codeActionsOnSave": {
+    "source.fixAll.eslint": "explicit"
+  },
+  "prettier.requireConfig": true,
+  "eslint.experimental.useFlatConfig": true,
+  "files.exclude": {
+    "dist*": true
+  }
+}
diff --git a/cache.js b/cache.js
new file mode 100644
index 0000000..b850c36
--- /dev/null
+++ b/cache.js
@@ -0,0 +1,105 @@
+import fs from 'node:fs'
+import * as path from 'node:path'
+import crypto from 'node:crypto'
+import packageJson from './package-json-proxy.cjs'
+import temporaryDirectory from 'temp-dir'
+
+function hash(data) {
+  return crypto.createHash('sha1').update(data).digest('hex')
+}
+
+class Cache {
+  #root
+
+  #cacheDirectory
+
+  #metaFile
+
+  #files
+
+  #updated = new Map()
+
+  constructor(root) {
+    this.#root = root
+
+    let cacheDirectory
+    if (fs.existsSync(path.join(root, 'node_modules'))) {
+      cacheDirectory = path.join(
+        root,
+        `node_modules/${packageJson.name}-cache/`,
+      )
+    } else {
+      cacheDirectory = path.join(
+        temporaryDirectory,
+        `${packageJson.name}-cache/${hash(root)}/`,
+      )
+    }
+
+    this.#cacheDirectory = cacheDirectory
+
+    this.#metaFile = path.join(this.#cacheDirectory, 'meta.json')
+
+    this.#files = this.#load() ?? new Set()
+  }
+
+  #load() {
+    let data
+    try {
+      data = JSON.parse(fs.readFileSync(this.#metaFile))
+    } catch {}
+
+    if (
+      !data ||
+      data.version !== packageJson.version ||
+      data.root !== this.#root ||
+      !Array.isArray(data.files)
+    ) {
+      return
+    }
+
+    return new Set(data.files)
+  }
+
+  getCachedData(content) {
+    if (!this.#files) return
+
+    const contentHash = hash(content)
+
+    if (!this.#files.has(contentHash)) {
+      return
+    }
+
+    try {
+      return fs.readFileSync(path.join(this.#cacheDirectory, contentHash))
+    } catch {}
+  }
+
+  updateCache(content, data) {
+    const contentHash = hash(content)
+    this.#updated.set(contentHash, data)
+  }
+
+  writeFile() {
+    fs.mkdirSync(this.#cacheDirectory, {recursive: true})
+
+    for (const [contentHash, data] of this.#updated) {
+      fs.writeFileSync(path.join(this.#cacheDirectory, contentHash), data)
+    }
+
+    fs.writeFileSync(
+      this.#metaFile,
+      JSON.stringify(
+        {
+          version: packageJson.version,
+          root: this.#root,
+          files: [...new Set([...this.#files, ...this.#updated.keys()])],
+          time: new Date(),
+        },
+        undefined,
+        2,
+      ),
+    )
+  }
+}
+
+export default Cache
diff --git a/index.js b/index.js
index 9408d55..e7dd62a 100644
--- a/index.js
+++ b/index.js
@@ -1,8 +1,11 @@
+import fs from 'node:fs/promises'
 import assert from 'node:assert/strict'
 import {isSupportedImage, squooshImages} from './squoosh.js'
 import optimizeSvg from './svgo.js'
+import Cache from './cache.js'
+import packageJson from './package-json-proxy.cjs'
 
-async function minifyWithSquoosh(bundle) {
+async function minifyWithSquoosh(bundle, cache) {
   /** @type {import('vite').Rollup.OutputAsset[]} */
   const images = Object.values(bundle).filter(
     (assetOrChunk) =>
@@ -15,14 +18,19 @@ async function minifyWithSquoosh(bundle) {
 
   const compressed = await squooshImages(
     images.map((image) => ({content: image.source, name: image.fileName})),
+    cache,
   )
 
   for (const [index, image] of images.entries()) {
-    bundle[image.fileName].source = compressed[index]
+    const original = image.source
+    const updated = compressed[index]
+    cache.updateCache(original, updated)
+
+    bundle[image.fileName].source = updated
   }
 }
 
-async function minifySvg(bundle) {
+async function minifySvg(bundle, cache) {
   /** @type {import('vite').Rollup.OutputAsset[]} */
   const images = Object.values(bundle).filter(
     (assetOrChunk) =>
@@ -30,26 +38,44 @@
   )
 
   for (const {fileName: name, source: content} of images) {
-    bundle[name].source = optimizeSvg(content)
+    const cached = cache.getCachedData(content)
+    const compressed = cached ?? optimizeSvg(content)
+    cache.updateCache(content, compressed)
+    bundle[name].source = compressed
   }
 }
 
-/**
- * @type {import('vite').Plugin}
- */
-const vitePluginImageMinify = {
-  name: 'vite-plugin-image-minify',
-  apply: 'build',
-  async generateBundle(_options, bundle) {
-    for (const [fileName, assetOrChunk] of Object.entries(bundle)) {
-      assert.equal(fileName, assetOrChunk.fileName, 'Unexpected asset')
-    }
-
-    await minifyWithSquoosh(bundle)
-    await minifySvg(bundle)
-  },
-}
+function createVitePluginImageMinify(options) {
+  const cacheEnabled = options?.__test_enable_cache !== false
+  let root
+
+  /**
+   * @type {import('vite').Plugin}
+   */
+  return {
+    name: packageJson.name,
+    apply: 'build',
+    config(config) {
+      ;({root} = config)
+    },
+    async generateBundle(options, bundle) {
+      for (const [fileName, assetOrChunk] of Object.entries(bundle)) {
+        assert.equal(fileName, assetOrChunk.fileName, 'Unexpected asset')
+      }
 
-const createVitePluginImageMinify = () => vitePluginImageMinify
+      const cache = cacheEnabled
+        ? new Cache(root)
+        : {
+            getCachedData() {},
+            updateCache() {},
+            writeFile() {},
+          }
+      await minifyWithSquoosh(bundle, cache)
+      await minifySvg(bundle, cache)
+
+      cache.writeFile()
+    },
+  }
+}
 
 export default createVitePluginImageMinify
diff --git a/index.test.js b/index.test.js
index a6b3f61..2b24b40 100644
--- a/index.test.js
+++ b/index.test.js
@@ -16,7 +16,7 @@ async function runVite() {
     envFile: false,
     write: false,
     logLevel: 'warn',
-    plugins: [vitePluginImageSquoosh()],
+    plugins: [vitePluginImageSquoosh({__test_enable_cache: false})],
     build: {assetsInlineLimit: 0},
   })
 
diff --git a/package-json-proxy.cjs b/package-json-proxy.cjs
new file mode 100644
index 0000000..81e7af6
--- /dev/null
+++ b/package-json-proxy.cjs
@@ -0,0 +1 @@
+module.exports = require('./package.json')
diff --git a/package.json b/package.json
index f82df71..3320fe0 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,16 @@
 {
   "name": "vite-plugin-image-minify",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "description": "Vite plugin to minify images.",
+  "keywords": [
+    "vite-plugin",
+    "Vite",
+    "Image",
+    "Minify",
+    "Minifier",
+    "squoosh",
+    "svgo"
+  ],
   "homepage": "https://github.com/fisker/vite-plugin-image-minify#readme",
   "bugs": {
     "url": "https://github.com/fisker/vite-plugin-image-minify/issues"
@@ -19,6 +28,8 @@
   "exports": "./index.js",
   "files": [
     "index.js",
+    "package-json-proxy.cjs",
+    "cache.js",
     "squoosh.js",
     "svgo.js"
   ],
@@ -44,7 +55,8 @@
   },
   "dependencies": {
     "@frostoven/libsquoosh": "^0.4.8",
-    "svgo": "^3.3.2"
+    "svgo": "^3.3.2",
+    "temp-dir": "3.0.0"
   },
   "devDependencies": {
     "@fisker/eslint-config": "13.1.0",
diff --git a/squoosh.js b/squoosh.js
index 0514135..7961948 100644
--- a/squoosh.js
+++ b/squoosh.js
@@ -33,13 +33,23 @@ function getEncoder(filename) {
  * @param {{content: Buffer, name: string}[]} files
  * @returns {}
  */
-async function squooshImages(files) {
+async function squooshImages(files, cache) {
   if (files.length === 0) {
     return []
   }
 
-  const {ImagePool} = await importLibrarySquoosh()
-  const imagePool = new ImagePool(os.cpus().length)
+  let imagePoolLoadPromise
+  let imagePool
+
+  function getImagePool() {
+    imagePoolLoadPromise ??= (async () => {
+      const {ImagePool} = await importLibrarySquoosh()
+      imagePool = new ImagePool(os.cpus().length)
+      return imagePool
+    })()
+
+    return imagePoolLoadPromise
+  }
 
   let result
 
@@ -50,15 +60,25 @@ async function squooshImages(files) {
         if (!encoder) {
           return original
         }
+
+        const cached = cache.getCachedData(original)
+
+        if (cached) {
+          return cached
+        }
+
+        const imagePool = await getImagePool()
         const image = imagePool.ingestImage(original)
         await image.encode({[encoder]: {}})
         const result = await image.encodedWith[encoder]
         const compressed = result.binary
-        return compressed.length < original.length ? compressed : original
+        const data = compressed.length < original.length ? compressed : original
+
+        return data
       }),
     )
   } finally {
-    imagePool.close()
+    imagePool?.close()
   }
 
   return result