
feat(integrity): full Subresource Integrity support (#10)
Fixes: #7
zkat authored Apr 3, 2017
1 parent 11774a1 commit a590159
Showing 5 changed files with 171 additions and 20 deletions.
4 changes: 1 addition & 3 deletions README.md
@@ -281,11 +281,9 @@ fetch('http://reliable.site.com', {

#### <a name="opts-integrity"></a> `> opts.integrity`

**(NOT IMPLEMENTED YET)**

Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EBADCHECKSUM` error.

`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) Integrity-like.
`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like.

##### Example
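The README's own example block is collapsed in this diff view. As a rough sketch of the option documented above (hypothetical URL; the digest is computed inline here purely for illustration, where in practice it would come from trusted metadata such as a lockfile):

```javascript
const fetch = require('make-fetch-happen')
const ssri = require('ssri')

// Hypothetical integrity string; normally supplied from package metadata
// rather than computed locally.
const integrity = ssri.fromData('expected response body').toString()

fetch('https://registry.example.com/some/tarball', { integrity })
  .then(res => res.buffer())
  .then(body => {
    // body matched `integrity`; a mismatch would have rejected the
    // promise with an EBADCHECKSUM error instead
  })
```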

34 changes: 24 additions & 10 deletions cache.js
Expand Up @@ -4,6 +4,7 @@ const cacache = require('cacache')
const fetch = require('node-fetch')
const fs = require('fs')
const pipe = require('mississippi').pipe
const ssri = require('ssri')
const through = require('mississippi').through
const to = require('mississippi').to
const url = require('url')
@@ -37,12 +38,14 @@ module.exports = class Cache {

// Returns a Promise that resolves to the response associated with the first
// matching request in the Cache object.
match (req) {
match (req, opts) {
return cacache.get.info(this._path, cacheKey(req)).then(info => {
if (info && matchDetails(req, {
url: info.metadata.url,
reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
resHeaders: new fetch.Headers(info.metadata.resHeaders)
resHeaders: new fetch.Headers(info.metadata.resHeaders),
cacheIntegrity: info.integrity,
integrity: opts && opts.integrity
})) {
if (req.method === 'HEAD') {
return new fetch.Response(null, {
@@ -70,13 +73,14 @@ } else {
} else {
disturbed = true
if (stat.size > MAX_MEM_SIZE) {
pipe(cacache.get.stream.byDigest(cachePath, info.digest, {
hashAlgorithm: info.hashAlgorithm
}), body, () => {})
pipe(
cacache.get.stream.byDigest(cachePath, info.integrity),
body,
() => {}
)
} else {
// cacache is much faster at bulk reads
cacache.get.byDigest(cachePath, info.digest, {
hashAlgorithm: info.hashAlgorithm,
cacache.get.byDigest(cachePath, info.integrity, {
memoize: true
}).then(data => {
body.write(data, () => {
@@ -120,11 +124,10 @@ // Update metadata without writing
// Update metadata without writing
return cacache.get.info(this._path, cacheKey(req)).then(info => {
// Providing these will bypass content write
opts.hashAlgorithm = info.hashAlgorithm
opts.digest = info.digest
opts.integrity = info.integrity
return new this.Promise((resolve, reject) => {
pipe(
cacache.get.stream.byDigest(this._path, info.digest, opts),
cacache.get.stream.byDigest(this._path, info.integrity, opts),
cacache.put.stream(this._path, cacheKey(req), opts),
err => err ? reject(err) : resolve(response)
)
@@ -212,6 +215,17 @@ function matchDetails (req, cached) {
}
}
}
if (cached.integrity) {
const cachedSri = ssri.parse(cached.cacheIntegrity)
const sri = ssri.parse(cached.integrity)
const algo = sri.pickAlgorithm()
if (cachedSri[algo] && !sri[algo].some(hash => {
// cachedSri always has exactly one item per algorithm
return cachedSri[algo][0].digest === hash.digest
})) {
return false
}
}
reqUrl.hash = null
cacheUrl.hash = null
return url.format(reqUrl) === url.format(cacheUrl)
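The integrity check added to `matchDetails()` above compares the caller's `opts.integrity` against the integrity value cacache recorded for the stored body, consulting only the strongest algorithm the caller supplied. A standalone sketch of that comparison with ssri (sample data and variable names are illustrative, not taken from the cache):

```javascript
const ssri = require('ssri')

// cachedSri: what cacache recorded for the cached body (one hash per algorithm).
// sri: what the caller passed via opts.integrity.
const cachedSri = ssri.fromData('hello, world!')
const sri = ssri.parse(cachedSri.toString())

// Mirrors the check above: reject only when the cache *does* have a hash for
// the strongest requested algorithm and none of the caller's digests match it.
const algo = sri.pickAlgorithm()
const mismatch = Boolean(cachedSri[algo]) && !sri[algo].some(hash =>
  cachedSri[algo][0].digest === hash.digest
)
console.log(mismatch) // false here, since sri was parsed from cachedSri itself
```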
20 changes: 19 additions & 1 deletion index.js
@@ -7,6 +7,7 @@ const https = require('https')
let ProxyAgent
const pkg = require('./package.json')
const retry = require('promise-retry')
let ssri
const Stream = require('stream')
const url = require('url')

@@ -45,6 +46,9 @@ function cachingFetch (uri, _opts) {
// Default cacache-based cache
Cache = require('./cache')
}
if (opts.integrity && !ssri) {
ssri = require('ssri')
}
opts.cacheManager = opts.cacheManager && (
typeof opts.cacheManager === 'string'
? new Cache(opts.cacheManager, opts.cacheOpts)
@@ -71,7 +75,7 @@
method: opts.method,
headers: opts.headers
})
return opts.cacheManager.match(req, opts.cacheOpts).then(res => {
return opts.cacheManager.match(req, opts).then(res => {
if (res) {
const warningCode = (res.headers.get('Warning') || '').match(/^\d+/)
if (warningCode && +warningCode >= 100 && +warningCode < 200) {
@@ -238,6 +242,20 @@ function remoteFetch (uri, opts) {
return retry((retryHandler, attemptNum) => {
const req = new fetch.Request(uri, reqOpts)
return fetch(req).then(res => {
if (opts.integrity) {
const oldBod = res.body
const newBod = ssri.integrityStream({
integrity: opts.integrity
})
oldBod.pipe(newBod)
res.body = newBod
oldBod.once('error', err => {
newBod.emit('error', err)
})
newBod.once('error', err => {
oldBod.emit('error', err)
})
}
const cacheCtrl = res.headers.get('cache-control') || ''
if (
(req.method === 'GET' || req.method === 'HEAD') &&
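In `remoteFetch()` above, the raw response body is piped through `ssri.integrityStream()`, which verifies the data against `opts.integrity` as it flows and errors out on a mismatch; errors are forwarded in both directions so a failure on either stream tears down the other. A self-contained sketch of that wiring, with a `PassThrough` standing in for `res.body`:

```javascript
const ssri = require('ssri')
const { PassThrough } = require('stream')

const integrity = ssri.fromData('hello, world!')
const oldBod = new PassThrough()                    // stands in for res.body
const newBod = ssri.integrityStream({ integrity })  // verifying replacement body

oldBod.pipe(newBod)
oldBod.once('error', err => newBod.emit('error', err))
newBod.once('error', err => oldBod.emit('error', err))

// On a digest mismatch the verifying stream emits an error (code
// EBADCHECKSUM in the ssri release this commit depends on).
newBod.on('error', err => console.error(err.code))
newBod.resume()                                     // drain so the check completes

oldBod.end('hello, world!')                         // matching data: ends cleanly
```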
5 changes: 3 additions & 2 deletions package.json
@@ -34,14 +34,15 @@
"license": "CC0-1.0",
"dependencies": {
"bluebird": "^3.5.0",
"cacache": "^6.3.0",
"cacache": "^7.0.1",
"checksum-stream": "^1.0.2",
"lru-cache": "^4.0.2",
"mississippi": "^1.2.0",
"node-fetch": "^2.0.0-alpha.3",
"promise-retry": "^1.1.1",
"proxy-agent": "^2.0.0",
"safe-buffer": "^5.0.1"
"safe-buffer": "^5.0.1",
"ssri": "^3.0.2"
},
"devDependencies": {
"mkdirp": "^0.5.1",
128 changes: 124 additions & 4 deletions test/integrity.js
@@ -1,8 +1,128 @@
'use strict'

const Buffer = require('safe-buffer').Buffer

const ssri = require('ssri')
const test = require('tap').test
const tnock = require('./util/tnock')

const CACHE = require('./util/test-dir')(__filename)
const CONTENT = Buffer.from('hello, world!', 'utf8')
const INTEGRITY = ssri.fromData(CONTENT)
const HOST = 'https://make-fetch-happen-safely.npm'

const fetch = require('..').defaults({retry: false})

test('basic integrity verification', t => {
const srv = tnock(t, HOST)
srv.get('/wowsosafe').reply(200, CONTENT)
srv.get('/wowsobad').reply(200, Buffer.from('pwnd'))
const safetch = fetch.defaults({
integrity: INTEGRITY
})
return safetch(`${HOST}/wowsosafe`).then(res => {
return res.buffer()
}).then(buf => {
t.deepEqual(buf, CONTENT, 'good content passed scrutiny 👍🏼')
return safetch(`${HOST}/wowsobad`).then(res => {
return res.buffer()
}).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'content failed checksum!')
})
})
})

test('picks the "best" algorithm', t => {
const integrity = ssri.fromData(CONTENT, {
algorithms: ['md5', 'sha384', 'sha1', 'sha256']
})
integrity['md5'][0].digest = 'badc0ffee'
integrity['sha1'][0].digest = 'badc0ffee'
const safetch = fetch.defaults({integrity})
const srv = tnock(t, HOST)
srv.get('/good').times(3).reply(200, CONTENT)
srv.get('/bad').reply(200, 'pwnt')
return safetch(`${HOST}/good`).then(res => res.buffer()).then(buf => {
t.deepEqual(buf, CONTENT, 'data passed integrity check')
return safetch(`${HOST}/bad`).then(res => {
return res.buffer()
}).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'content validated with either sha256 or sha384 (likely the latter)')
})
}).then(() => {
// invalidate sha384. sha256 is still valid, in theory
integrity['sha384'][0].digest = 'pwnt'
return safetch(`${HOST}/good`).then(res => {
return res.buffer()
}).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'strongest algorithm (sha384) treated as authoritative -- sha256 not used')
})
}).then(() => {
// remove bad sha384 altogether. sha256 remains valid
delete integrity['sha384']
return safetch(`${HOST}/good`).then(res => res.buffer())
}).then(buf => {
t.deepEqual(buf, CONTENT, 'data passed integrity check with sha256')
})
})

test('supports multiple hashes per algorithm', t => {
const ALTCONTENT = Buffer.from('alt-content is like content but not really')
const integrity = ssri.fromData(CONTENT, {
algorithms: ['md5', 'sha384', 'sha1', 'sha256']
}).concat(ssri.fromData(ALTCONTENT, {
algorithms: ['sha384']
}))
const safetch = fetch.defaults({integrity})
const srv = tnock(t, HOST)
srv.get('/main').reply(200, CONTENT)
srv.get('/alt').reply(200, ALTCONTENT)
srv.get('/bad').reply(200, 'nope')
return safetch(`${HOST}/main`).then(res => res.buffer()).then(buf => {
t.deepEqual(buf, CONTENT, 'main content validated against sha384')
return safetch(`${HOST}/alt`).then(res => res.buffer())
}).then(buf => {
t.deepEqual(buf, ALTCONTENT, 'alt content validated against sha384')
return safetch(`${HOST}/bad`).then(res => res.buffer()).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'only the two valid contents pass')
})
})
})

test('basic integrity verification')
test('picks the "best" algorithm')
test('fails with EBADCHECKSUM if integrity fails')
test('checks integrity on cache fetch too')
test('checks integrity on cache fetch too', t => {
const srv = tnock(t, HOST)
srv.get('/test').reply(200, CONTENT)
const safetch = fetch.defaults({
cacheManager: CACHE,
integrity: INTEGRITY,
cache: 'must-revalidate'
})
return safetch(`${HOST}/test`).then(res => res.buffer()).then(buf => {
t.deepEqual(buf, CONTENT, 'good content passed scrutiny 👍🏼')
srv.get('/test').reply(200, 'nope')
return safetch(`${HOST}/test`).then(res => res.buffer()).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'cached content failed checksum!')
})
}).then(() => {
srv.get('/test').reply(200, 'nope')
return safetch(`${HOST}/test`, {
// try to use local cached version
cache: 'force-cache',
integrity: {algorithm: 'sha512', digest: 'doesnotmatch'}
}).then(res => res.buffer()).then(buf => {
throw new Error(`bad data: ${buf.toString('utf8')}`)
}).catch(err => {
t.equal(err.code, 'EBADCHECKSUM', 'cached content failed checksum!')
})
})
})
