From bc32f8b73a3165b9e14596af2bffceeb2be89211 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Kat=20March=C3=A1n?=
Date: Tue, 28 Aug 2018 15:14:31 -0700
Subject: [PATCH] search: stop using npm-registry-client

---
 lib/search.js                         |  70 +++---
 lib/search/all-package-metadata.js    | 306 +++++++++++++-------------
 lib/search/all-package-search.js      |   2 +-
 lib/search/esearch.js                 |  64 ------
 test/tap/search.all-package-search.js |  61 ++---
 test/tap/search.esearch.js            | 192 ----------------
 test/tap/search.js                    | 104 ++++++---
 7 files changed, 306 insertions(+), 493 deletions(-)
 delete mode 100644 lib/search/esearch.js
 delete mode 100644 test/tap/search.esearch.js

diff --git a/lib/search.js b/lib/search.js
index 3987be135c9ae..a566d5481c70e 100644
--- a/lib/search.js
+++ b/lib/search.js
@@ -2,14 +2,16 @@
 
 module.exports = exports = search
 
-var npm = require('./npm.js')
-var allPackageSearch = require('./search/all-package-search')
-var esearch = require('./search/esearch.js')
-var formatPackageStream = require('./search/format-package-stream.js')
-var usage = require('./utils/usage')
-var output = require('./utils/output.js')
-var log = require('npmlog')
-var ms = require('mississippi')
+const npm = require('./npm.js')
+const allPackageSearch = require('./search/all-package-search')
+const figgyPudding = require('figgy-pudding')
+const formatPackageStream = require('./search/format-package-stream.js')
+const libSearch = require('libnpmsearch')
+const log = require('npmlog')
+const ms = require('mississippi')
+const npmConfig = require('./config/figgy-config.js')
+const output = require('./utils/output.js')
+const usage = require('./utils/usage')
 
 search.usage = usage(
   'search',
@@ -20,46 +22,50 @@ search.completion = function (opts, cb) {
   cb(null, [])
 }
 
+const SearchOpts = figgyPudding({
+  description: {},
+  exclude: {},
+  include: {},
+  limit: {},
+  log: {},
+  staleness: {},
+  unicode: {}
+})
+
 function search (args, cb) {
-  var searchOpts = {
+  const opts = SearchOpts(npmConfig()).concat({
     description: npm.config.get('description'),
     exclude: prepareExcludes(npm.config.get('searchexclude')),
     include: prepareIncludes(args, npm.config.get('searchopts')),
-    limit: npm.config.get('searchlimit'),
+    limit: npm.config.get('searchlimit') || 20,
     log: log,
     staleness: npm.config.get('searchstaleness'),
    unicode: npm.config.get('unicode')
-  }
-
-  if (searchOpts.include.length === 0) {
+  })
+  if (opts.include.length === 0) {
    return cb(new Error('search must be called with arguments'))
   }
 
   // Used later to figure out whether we had any packages go out
-  var anyOutput = false
+  let anyOutput = false
 
-  var entriesStream = ms.through.obj()
+  const entriesStream = ms.through.obj()
 
-  var esearchWritten = false
-  esearch(searchOpts).on('data', function (pkg) {
+  let esearchWritten = false
+  libSearch.stream(opts.include, opts).on('data', pkg => {
     entriesStream.write(pkg)
     !esearchWritten && (esearchWritten = true)
-  }).on('error', function (e) {
+  }).on('error', err => {
     if (esearchWritten) {
       // If esearch errored after already starting output, we can't fall back.
-      return entriesStream.emit('error', e)
+      return entriesStream.emit('error', err)
     }
     log.warn('search', 'fast search endpoint errored. Using old search.')
-    allPackageSearch(searchOpts).on('data', function (pkg) {
-      entriesStream.write(pkg)
-    }).on('error', function (e) {
-      entriesStream.emit('error', e)
-    }).on('end', function () {
-      entriesStream.end()
-    })
-  }).on('end', function () {
-    entriesStream.end()
-  })
+    allPackageSearch(opts)
+      .on('data', pkg => entriesStream.write(pkg))
+      .on('error', err => entriesStream.emit('error', err))
+      .on('end', () => entriesStream.end())
+  }).on('end', () => entriesStream.end())
 
   // Grab a configured output stream that will spit out packages in the
   // desired format.
@@ -71,14 +77,14 @@ function search (args, cb) {
     parseable: npm.config.get('parseable'),
     color: npm.color
   })
-  outputStream.on('data', function (chunk) {
+  outputStream.on('data', chunk => {
     if (!anyOutput) { anyOutput = true }
     output(chunk.toString('utf8'))
   })
 
   log.silly('search', 'searching packages')
-  ms.pipe(entriesStream, outputStream, function (er) {
-    if (er) return cb(er)
+  ms.pipe(entriesStream, outputStream, err => {
+    if (err) return cb(err)
     if (!anyOutput && !npm.config.get('json') && !npm.config.get('parseable')) {
       output('No matches found for ' + (args.map(JSON.stringify).join(' ')))
     }
diff --git a/lib/search/all-package-metadata.js b/lib/search/all-package-metadata.js
index 5a27bdbcee658..16be7e5b88add 100644
--- a/lib/search/all-package-metadata.js
+++ b/lib/search/all-package-metadata.js
@@ -1,21 +1,28 @@
 'use strict'
 
-var fs = require('graceful-fs')
-var path = require('path')
-var mkdir = require('mkdirp')
-var chownr = require('chownr')
-var npm = require('../npm.js')
-var log = require('npmlog')
-var cacheFile = require('npm-cache-filename')
-var correctMkdir = require('../utils/correct-mkdir.js')
-var mapToRegistry = require('../utils/map-to-registry.js')
-var jsonstream = require('JSONStream')
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var ms = require('mississippi')
-var sortedUnionStream = require('sorted-union-stream')
-var once = require('once')
-var gunzip = require('../utils/gunzip-maybe')
+const BB = require('bluebird')
+const cacheFile = require('npm-cache-filename')
+const chownr = BB.promisify(require('chownr'))
+const correctMkdir = BB.promisify(require('../utils/correct-mkdir.js'))
+const figgyPudding = require('figgy-pudding')
+const fs = require('graceful-fs')
+const JSONStream = require('JSONStream')
+const log = require('npmlog')
+const mkdir = BB.promisify(require('mkdirp'))
+const ms = require('mississippi')
+const npmFetch = require('npm-registry-fetch')
+const path = require('path')
+const sortedUnionStream = require('sorted-union-stream')
+const url = require('url')
+const writeStreamAtomic = require('fs-write-stream-atomic')
+
+const statAsync = BB.promisify(fs.stat)
+
+const APMOpts = figgyPudding({
+  cache: {},
+  registry: {}
+})
 
 // Returns a sorted stream of all package metadata. Internally, takes care of
 // maintaining its metadata cache and making partial or full remote requests,
 // according to staleness, validity, etc.
 //
@@ -27,63 +34,70 @@ var gunzip = require('../utils/gunzip-maybe')
 // 4. It must include all entries that exist in the metadata endpoint as of
 //    the value in `_updated`
 module.exports = allPackageMetadata
-function allPackageMetadata (staleness) {
-  var stream = ms.through.obj()
-
-  mapToRegistry('-/all', npm.config, function (er, uri, auth) {
-    if (er) return stream.emit('error', er)
+function allPackageMetadata (opts) {
+  const staleness = opts.staleness
+  const stream = ms.through.obj()
 
-    var cacheBase = cacheFile(npm.config.get('cache'))(uri)
-    var cachePath = path.join(cacheBase, '.cache.json')
-
-    createEntryStream(cachePath, uri, auth, staleness, function (err, entryStream, latest, newEntries) {
-      if (err) return stream.emit('error', err)
-      log.silly('all-package-metadata', 'entry stream created')
-      if (entryStream && newEntries) {
-        createCacheWriteStream(cachePath, latest, function (err, writeStream) {
-          if (err) return stream.emit('error', err)
-          log.silly('all-package-metadata', 'output stream created')
-          ms.pipeline.obj(entryStream, writeStream, stream)
-        })
-      } else if (entryStream) {
-        ms.pipeline.obj(entryStream, stream)
-      } else {
-        stream.emit('error', new Error('No search sources available'))
-      }
-    })
-  })
+  opts = APMOpts(opts)
+  const cacheBase = cacheFile(path.resolve(opts.cache, '..'))(url.resolve(opts.registry, '/-/all'))
+  const cachePath = path.join(cacheBase, '.cache.json')
+  createEntryStream(
+    cachePath, staleness, opts
+  ).then(({entryStream, latest, newEntries}) => {
+    log.silly('all-package-metadata', 'entry stream created')
+    if (entryStream && newEntries) {
+      return createCacheWriteStream(cachePath, latest, opts).then(writer => {
+        log.silly('all-package-metadata', 'output stream created')
+        ms.pipeline.obj(entryStream, writer, stream)
+      })
+    } else if (entryStream) {
+      ms.pipeline.obj(entryStream, stream)
+    } else {
+      stream.emit('error', new Error('No search sources available'))
+    }
+  }).catch(err => stream.emit('error', err))
   return stream
 }
 
 // Creates a stream of the latest available package metadata.
 // Metadata will come from a combination of the local cache and remote data.
 module.exports._createEntryStream = createEntryStream
-function createEntryStream (cachePath, uri, auth, staleness, cb) {
-  createCacheEntryStream(cachePath, function (err, cacheStream, cacheLatest) {
+function createEntryStream (cachePath, staleness, opts) {
+  return createCacheEntryStream(
+    cachePath, opts
+  ).catch(err => {
+    log.warn('', 'Failed to read search cache. Rebuilding')
+    log.silly('all-package-metadata', 'cache read error: ', err)
+    return {}
+  }).then(({
+    updateStream: cacheStream,
+    updatedLatest: cacheLatest
+  }) => {
     cacheLatest = cacheLatest || 0
-    if (err) {
-      log.warn('', 'Failed to read search cache. Rebuilding')
-      log.silly('all-package-metadata', 'cache read error: ', err)
-    }
-    createEntryUpdateStream(uri, auth, staleness, cacheLatest, function (err, updateStream, updatedLatest) {
+    return createEntryUpdateStream(staleness, cacheLatest, opts).catch(err => {
+      log.warn('', 'Search data request failed, search might be stale')
+      log.silly('all-package-metadata', 'update request error: ', err)
+      return {}
+    }).then(({updateStream, updatedLatest}) => {
       updatedLatest = updatedLatest || 0
-      var latest = updatedLatest || cacheLatest
+      const latest = updatedLatest || cacheLatest
       if (!cacheStream && !updateStream) {
-        return cb(new Error('No search sources available'))
-      }
-      if (err) {
-        log.warn('', 'Search data request failed, search might be stale')
-        log.silly('all-package-metadata', 'update request error: ', err)
+        throw new Error('No search sources available')
      }
       if (cacheStream && updateStream) {
         // Deduped, unioned, sorted stream from the combination of both.
-        cb(null,
-          createMergedStream(cacheStream, updateStream),
+        return {
+          entryStream: createMergedStream(cacheStream, updateStream),
           latest,
-          !!updatedLatest)
+          newEntries: !!updatedLatest
+        }
       } else {
         // Either one works if one or the other failed
-        cb(null, cacheStream || updateStream, latest, !!updatedLatest)
+        return {
+          entryStream: cacheStream || updateStream,
+          latest,
+          newEntries: !!updatedLatest
+        }
       }
     })
   })
 }
 
@@ -96,66 +110,54 @@ function createEntryStream (cachePath, uri, auth, staleness, cb) {
 module.exports._createMergedStream = createMergedStream
 function createMergedStream (a, b) {
   linkStreams(a, b)
-  return sortedUnionStream(b, a, function (pkg) { return pkg.name })
+  return sortedUnionStream(b, a, ({name}) => name)
 }
 
 // Reads the local index and returns a stream that spits out package data.
 module.exports._createCacheEntryStream = createCacheEntryStream
-function createCacheEntryStream (cacheFile, cb) {
+function createCacheEntryStream (cacheFile, opts) {
   log.verbose('all-package-metadata', 'creating entry stream from local cache')
   log.verbose('all-package-metadata', cacheFile)
-  fs.stat(cacheFile, function (err, stat) {
-    if (err) return cb(err)
+  return statAsync(cacheFile).then(stat => {
     // TODO - This isn't very helpful if `cacheFile` is empty or just `{}`
-    var entryStream = ms.pipeline.obj(
+    const entryStream = ms.pipeline.obj(
       fs.createReadStream(cacheFile),
-      jsonstream.parse('*'),
+      JSONStream.parse('*'),
       // I believe this passthrough is necessary cause `jsonstream` returns
       // weird custom streams that behave funny sometimes.
       ms.through.obj()
     )
-    extractUpdated(entryStream, 'cached-entry-stream', cb)
+    return extractUpdated(entryStream, 'cached-entry-stream', opts)
   })
 }
 
 // Stream of entry updates from the server. If `latest` is `0`, streams the
 // entire metadata object from the registry.
 module.exports._createEntryUpdateStream = createEntryUpdateStream
-function createEntryUpdateStream (all, auth, staleness, latest, cb) {
+function createEntryUpdateStream (staleness, latest, opts) {
   log.verbose('all-package-metadata', 'creating remote entry stream')
-  var params = {
-    timeout: 600,
-    follow: true,
-    staleOk: true,
-    auth: auth,
-    streaming: true
-  }
-  var partialUpdate = false
+  let partialUpdate = false
+  let uri = '/-/all'
   if (latest && (Date.now() - latest < (staleness * 1000))) {
     // Skip the request altogether if our `latest` isn't stale.
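     // (`staleness` is expressed in seconds while `latest` is a millisecond
     // timestamp, so a cache updated within the last `staleness` seconds is
     // still fresh and search runs entirely from the local cache.)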
log.verbose('all-package-metadata', 'Local data up to date, skipping update') - return cb(null) + return BB.resolve({}) } else if (latest === 0) { log.warn('', 'Building the local index for the first time, please be patient') log.verbose('all-package-metadata', 'No cached data: requesting full metadata db') } else { log.verbose('all-package-metadata', 'Cached data present with timestamp:', latest, 'requesting partial index update') - all += '/since?stale=update_after&startkey=' + latest + uri += '/since?stale=update_after&startkey=' + latest partialUpdate = true } - npm.registry.request(all, params, function (er, res) { - if (er) return cb(er) + return npmFetch(uri, opts).then(res => { log.silly('all-package-metadata', 'request stream opened, code:', res.statusCode) // NOTE - The stream returned by `request` seems to be very persnickety // and this is almost a magic incantation to get it to work. // Modify how `res` is used here at your own risk. - var entryStream = ms.pipeline.obj( - res, - ms.through(function (chunk, enc, cb) { - cb(null, chunk) - }), - gunzip(), - jsonstream.parse('*', function (pkg, key) { + let entryStream = ms.pipeline.obj( + res.body, + JSONStream.parse('*', (pkg, key) => { if (key[0] === '_updated' || key[0][0] !== '_') { return pkg } @@ -164,9 +166,12 @@ function createEntryUpdateStream (all, auth, staleness, latest, cb) { if (partialUpdate) { // The `/all/since` endpoint doesn't return `_updated`, so we // just use the request's own timestamp. - cb(null, entryStream, Date.parse(res.headers.date)) + return { + updateStream: entryStream, + updatedLatest: Date.parse(res.headers.get('date')) + } } else { - extractUpdated(entryStream, 'entry-update-stream', cb) + return extractUpdated(entryStream, 'entry-update-stream', opts) } }) } @@ -175,36 +180,40 @@ function createEntryUpdateStream (all, auth, staleness, latest, cb) { // first returned entries. This is the "latest" unix timestamp for the metadata // in question. This code does a bit of juggling with the data streams // so that we can pretend that field doesn't exist, but still extract `latest` -function extractUpdated (entryStream, label, cb) { - cb = once(cb) +function extractUpdated (entryStream, label, opts) { log.silly('all-package-metadata', 'extracting latest') - function nope (msg) { - return function () { - log.warn('all-package-metadata', label, msg) - entryStream.removeAllListeners() - entryStream.destroy() - cb(new Error(msg)) - } - } - var onErr = nope('Failed to read stream') - var onEnd = nope('Empty or invalid stream') - entryStream.on('error', onErr) - entryStream.on('end', onEnd) - entryStream.once('data', function (latest) { - log.silly('all-package-metadata', 'got first stream entry for', label, latest) - entryStream.removeListener('error', onErr) - entryStream.removeListener('end', onEnd) - // Because `.once()` unpauses the stream, we re-pause it after the first - // entry so we don't vomit entries into the void. - entryStream.pause() - if (typeof latest === 'number') { - // The extra pipeline is to return a stream that will implicitly unpause - // after having an `.on('data')` listener attached, since using this - // `data` event broke its initial state. 
- cb(null, ms.pipeline.obj(entryStream, ms.through.obj()), latest) - } else { - cb(new Error('expected first entry to be _updated')) + return new BB((resolve, reject) => { + function nope (msg) { + return function () { + log.warn('all-package-metadata', label, msg) + entryStream.removeAllListeners() + entryStream.destroy() + reject(new Error(msg)) + } } + const onErr = nope('Failed to read stream') + const onEnd = nope('Empty or invalid stream') + entryStream.on('error', onErr) + entryStream.on('end', onEnd) + entryStream.once('data', latest => { + log.silly('all-package-metadata', 'got first stream entry for', label, latest) + entryStream.removeListener('error', onErr) + entryStream.removeListener('end', onEnd) + // Because `.once()` unpauses the stream, we re-pause it after the first + // entry so we don't vomit entries into the void. + entryStream.pause() + if (typeof latest === 'number') { + // The extra pipeline is to return a stream that will implicitly unpause + // after having an `.on('data')` listener attached, since using this + // `data` event broke its initial state. + resolve({ + updateStream: entryStream, + updatedLatest: latest + }) + } else { + reject(new Error('expected first entry to be _updated')) + } + }) }) } @@ -213,59 +222,52 @@ function extractUpdated (entryStream, label, cb) { // The stream is also passthrough, so entries going through it will also // be output from it. module.exports._createCacheWriteStream = createCacheWriteStream -function createCacheWriteStream (cacheFile, latest, cb) { - _ensureCacheDirExists(cacheFile, function (err) { - if (err) return cb(err) +function createCacheWriteStream (cacheFile, latest, opts) { + return _ensureCacheDirExists(cacheFile, opts).then(() => { log.silly('all-package-metadata', 'creating output stream') - var outStream = _createCacheOutStream() - var cacheFileStream = writeStreamAtomic(cacheFile) - var inputStream = _createCacheInStream(cacheFileStream, outStream, latest) + const outStream = _createCacheOutStream() + const cacheFileStream = writeStreamAtomic(cacheFile) + const inputStream = _createCacheInStream( + cacheFileStream, outStream, latest + ) // Glue together the various streams so they fail together. // `cacheFileStream` errors are already handled by the `inputStream` // pipeline - var errEmitted = false - linkStreams(inputStream, outStream, function () { errEmitted = true }) + let errEmitted = false + linkStreams(inputStream, outStream, () => { errEmitted = true }) - cacheFileStream.on('close', function () { !errEmitted && outStream.end() }) + cacheFileStream.on('close', () => !errEmitted && outStream.end()) - cb(null, ms.duplex.obj(inputStream, outStream)) + return ms.duplex.obj(inputStream, outStream) }) } -function _ensureCacheDirExists (cacheFile, cb) { +function _ensureCacheDirExists (cacheFile, opts) { var cacheBase = path.dirname(cacheFile) log.silly('all-package-metadata', 'making sure cache dir exists at', cacheBase) - correctMkdir(npm.cache, function (er, st) { - if (er) return cb(er) - mkdir(cacheBase, function (er, made) { - if (er) return cb(er) - chownr(made || cacheBase, st.uid, st.gid, cb) + return correctMkdir(opts.cache).then(st => { + return mkdir(cacheBase).then(made => { + return chownr(made || cacheBase, st.uid, st.gid) }) }) } function _createCacheOutStream () { - return ms.pipeline.obj( - // These two passthrough `through` streams compensate for some - // odd behavior with `jsonstream`. 
- ms.through(), - jsonstream.parse('*', function (obj, key) { - // This stream happens to get _updated passed through it, for - // implementation reasons. We make sure to filter it out cause - // the fact that it comes t - if (typeof obj === 'object') { - return obj - } - }), - ms.through.obj() - ) + return JSONStream.parse('*', (obj, key) => { + // This stream happens to get _updated passed through it, for + // implementation reasons. We make sure to filter it out cause + // the fact that it comes t + if (typeof obj === 'object') { + return obj + } + }) } function _createCacheInStream (writer, outStream, latest) { - var updatedWritten = false - var inStream = ms.pipeline.obj( - ms.through.obj(function (pkg, enc, cb) { + let updatedWritten = false + const inStream = ms.pipeline.obj( + ms.through.obj((pkg, enc, cb) => { if (!updatedWritten && typeof pkg === 'number') { // This is the `_updated` value getting sent through. updatedWritten = true @@ -277,13 +279,11 @@ function _createCacheInStream (writer, outStream, latest) { cb(null, [pkg.name, pkg]) } }), - jsonstream.stringifyObject('{', ',', '}'), - ms.through(function (chunk, enc, cb) { + JSONStream.stringifyObject('{', ',', '}'), + ms.through((chunk, enc, cb) => { // This tees off the buffer data to `outStream`, and then continues // the pipeline as usual - outStream.write(chunk, enc, function () { - cb(null, chunk) - }) + outStream.write(chunk, enc, () => cb(null, chunk)) }), // And finally, we write to the cache file. writer @@ -300,14 +300,14 @@ function linkStreams (a, b, cb) { if (err !== lastError) { lastError = err b.emit('error', err) - cb(err) + cb && cb(err) } }) b.on('error', function (err) { if (err !== lastError) { lastError = err a.emit('error', err) - cb(err) + cb && cb(err) } }) } diff --git a/lib/search/all-package-search.js b/lib/search/all-package-search.js index 7a893d517b82c..fef343bcbc3ba 100644 --- a/lib/search/all-package-search.js +++ b/lib/search/all-package-search.js @@ -8,7 +8,7 @@ function allPackageSearch (opts) { // Get a stream with *all* the packages. This takes care of dealing // with the local cache as well, but that's an internal detail. - var allEntriesStream = allPackageMetadata(opts.staleness) + var allEntriesStream = allPackageMetadata(opts) // Grab a stream that filters those packages according to given params. 
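   // Each entry is checked against the search terms in `opts.include` and
   // the `opts.exclude` patterns; only matching packages continue downstream.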
   var filterStream = streamFilter(function (pkg) {
diff --git a/lib/search/esearch.js b/lib/search/esearch.js
deleted file mode 100644
index f4beb7ade66b1..0000000000000
--- a/lib/search/esearch.js
+++ /dev/null
@@ -1,64 +0,0 @@
-'use strict'
-
-var npm = require('../npm.js')
-var log = require('npmlog')
-var mapToRegistry = require('../utils/map-to-registry.js')
-var jsonstream = require('JSONStream')
-var ms = require('mississippi')
-var gunzip = require('../utils/gunzip-maybe')
-
-module.exports = esearch
-
-function esearch (opts) {
-  var stream = ms.through.obj()
-
-  mapToRegistry('-/v1/search', npm.config, function (er, uri, auth) {
-    if (er) return stream.emit('error', er)
-    createResultStream(uri, auth, opts, function (err, resultStream) {
-      if (err) return stream.emit('error', err)
-      ms.pipeline.obj(resultStream, stream)
-    })
-  })
-  return stream
-}
-
-function createResultStream (uri, auth, opts, cb) {
-  log.verbose('esearch', 'creating remote entry stream')
-  var params = {
-    timeout: 600,
-    follow: true,
-    staleOk: true,
-    auth: auth,
-    streaming: true
-  }
-  var q = buildQuery(opts)
-  npm.registry.request(uri + '?text=' + encodeURIComponent(q) + '&size=' + opts.limit, params, function (err, res) {
-    if (err) return cb(err)
-    log.silly('esearch', 'request stream opened, code:', res.statusCode)
-    // NOTE - The stream returned by `request` seems to be very persnickety
-    // and this is almost a magic incantation to get it to work.
-    // Modify how `res` is used here at your own risk.
-    var entryStream = ms.pipeline.obj(
-      res,
-      ms.through(function (chunk, enc, cb) {
-        cb(null, chunk)
-      }),
-      gunzip(),
-      jsonstream.parse('objects.*.package', function (data) {
-        return {
-          name: data.name,
-          description: data.description,
-          maintainers: data.maintainers,
-          keywords: data.keywords,
-          version: data.version,
-          date: data.date ? new Date(data.date) : null
-        }
-      })
-    )
-    return cb(null, entryStream)
-  })
-}
-
-function buildQuery (opts) {
-  return opts.include.join(' ')
-}
diff --git a/test/tap/search.all-package-search.js b/test/tap/search.all-package-search.js
index c70f4f8e7ef07..51c1ffcf90157 100644
--- a/test/tap/search.all-package-search.js
+++ b/test/tap/search.all-package-search.js
@@ -1,21 +1,25 @@
-var path = require('path')
-var mkdirp = require('mkdirp')
-var mr = require('npm-registry-mock')
-var osenv = require('osenv')
-var rimraf = require('rimraf')
-var cacheFile = require('npm-cache-filename')
-var test = require('tap').test
-var Tacks = require('tacks')
-var File = Tacks.File
+'use strict'
 
-var common = require('../common-tap.js')
+const cacheFile = require('npm-cache-filename')
+const mkdirp = require('mkdirp')
+const mr = require('npm-registry-mock')
+const osenv = require('osenv')
+const path = require('path')
+const qs = require('querystring')
+const rimraf = require('rimraf')
+const Tacks = require('tacks')
+const test = require('tap').test
 
-var PKG_DIR = path.resolve(__dirname, 'search')
-var CACHE_DIR = path.resolve(PKG_DIR, 'cache')
-var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all')
-var cachePath = path.join(cacheBase, '.cache.json')
+const {File} = Tacks
 
-var server
+const common = require('../common-tap.js')
+
+const PKG_DIR = path.resolve(__dirname, 'search')
+const CACHE_DIR = path.resolve(PKG_DIR, 'cache')
+const cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all')
+const cachePath = path.join(cacheBase, '.cache.json')
+
+let server
 
 test('setup', function (t) {
   mr({port: common.port, throwOnUnmatched: true}, function (err, s) {
@@ -26,7 +30,7 @@ test('setup', function (t) {
   })
 })
 
-var searches = [
+const searches = [
   {
     term: 'cool',
     description: 'non-regex search',
@@ -139,19 +143,26 @@ var searches = [
 searches.forEach(function (search) {
   test(search.description, function (t) {
     setup()
-    server.get('/-/v1/search?text=' + encodeURIComponent(search.term) + '&size=20').once().reply(404, {})
-    var now = Date.now()
-    var cacheContents = {
+    const query = qs.stringify({
+      text: search.term,
+      size: 20,
+      quality: 0.65,
+      popularity: 0.98,
+      maintenance: 0.5
+    })
+    server.get(`/-/v1/search?${query}`).once().reply(404, {})
+    const now = Date.now()
+    const cacheContents = {
       '_updated': now,
       bar: { name: 'bar', version: '1.0.0' },
       cool: { name: 'cool', version: '5.0.0' },
       foo: { name: 'foo', version: '2.0.0' },
       other: { name: 'other', version: '1.0.0' }
     }
-    for (var k in search.inject) {
+    for (let k in search.inject) {
       cacheContents[k] = search.inject[k]
     }
-    var fixture = new Tacks(File(cacheContents))
+    const fixture = new Tacks(File(cacheContents))
     fixture.create(cachePath)
     common.npm([
       'search', search.term,
@@ -167,12 +178,12 @@ searches.forEach(function (search) {
       t.equal(code, 0, 'search finished successfully')
      t.ifErr(err, 'search finished successfully')
       // \033 == \u001B
-      var markStart = '\u001B\\[[0-9][0-9]m'
-      var markEnd = '\u001B\\[0m'
+      const markStart = '\u001B\\[[0-9][0-9]m'
+      const markEnd = '\u001B\\[0m'
 
-      var re = new RegExp(markStart + '.*?' + markEnd)
+      const re = new RegExp(markStart + '.*?' + markEnd)
 
-      var cnt = stdout.search(re)
+      const cnt = stdout.search(re)
       t.equal(
         cnt,
         search.location,
diff --git a/test/tap/search.esearch.js b/test/tap/search.esearch.js
deleted file mode 100644
index d892aec95759c..0000000000000
--- a/test/tap/search.esearch.js
+++ /dev/null
@@ -1,192 +0,0 @@
-var common = require('../common-tap.js')
-var finished = require('mississippi').finished
-var mkdirp = require('mkdirp')
-var mr = require('npm-registry-mock')
-var npm = require('../../')
-var osenv = require('osenv')
-var path = require('path')
-var rimraf = require('rimraf')
-var test = require('tap').test
-
-var SEARCH = '/-/v1/search'
-var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream')
-var CACHE_DIR = path.resolve(PKG_DIR, 'cache')
-
-var esearch = require('../../lib/search/esearch')
-
-var server
-
-function setup () {
-  cleanup()
-  mkdirp.sync(CACHE_DIR)
-  process.chdir(CACHE_DIR)
-}
-
-function cleanup () {
-  process.chdir(osenv.tmpdir())
-  rimraf.sync(PKG_DIR)
-}
-
-test('setup', function (t) {
-  mr({port: common.port, throwOnUnmatched: true}, function (err, s) {
-    t.ifError(err, 'registry mocked successfully')
-    npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) {
-      t.ifError(err, 'npm loaded successfully')
-      server = s
-      t.pass('all set up')
-      t.done()
-    })
-  })
-})
-
-test('basic test', function (t) {
-  setup()
-  server.get(SEARCH + '?text=oo&size=1').once().reply(200, {
-    objects: [
-      { package: { name: 'cool', version: '1.0.0' } },
-      { package: { name: 'foo', version: '2.0.0' } }
-    ]
-  })
-  var results = []
-  var s = esearch({
-    include: ['oo'],
-    limit: 1
-  })
-  t.ok(s, 'got a stream')
-  s.on('data', function (d) {
-    results.push(d)
-  })
-  finished(s, function (err) {
-    if (err) { throw err }
-    t.ok(true, 'stream finished without error')
-    t.deepEquals(results, [{
-      name: 'cool',
-      version: '1.0.0',
-      description: null,
-      maintainers: null,
-      keywords: null,
-      date: null
-    }, {
-      name: 'foo',
-      version: '2.0.0',
-      description: null,
-      maintainers: null,
-      keywords: null,
-      date: null
-    }])
-    server.done()
-    t.done()
-  })
-})
-
-test('only returns certain fields for each package', function (t) {
-  setup()
-  var date = new Date()
-  server.get(SEARCH + '?text=oo&size=1').once().reply(200, {
-    objects: [{
-      package: {
-        name: 'cool',
-        version: '1.0.0',
-        description: 'desc',
-        maintainers: [
-          {username: 'x', email: 'a@b.c'},
-          {username: 'y', email: 'c@b.a'}
-        ],
-        keywords: ['a', 'b', 'c'],
-        date: date.toISOString(),
-        extra: 'lol'
-      }
-    }]
-  })
-  var results = []
-  var s = esearch({
-    include: ['oo'],
-    limit: 1
-  })
-  t.ok(s, 'got a stream')
-  s.on('data', function (d) {
-    results.push(d)
-  })
-  finished(s, function (err) {
-    if (err) { throw err }
-    t.ok(true, 'stream finished without error')
-    t.deepEquals(results, [{
-      name: 'cool',
-      version: '1.0.0',
-      description: 'desc',
-      maintainers: [
-        {username: 'x', email: 'a@b.c'},
-        {username: 'y', email: 'c@b.a'}
-      ],
-      keywords: ['a', 'b', 'c'],
-      date: date
-    }])
-    server.done()
-    t.done()
-  })
-})
-
-test('accepts a limit option', function (t) {
-  setup()
-  server.get(SEARCH + '?text=oo&size=3').once().reply(200, {
-    objects: [
-      { package: { name: 'cool', version: '1.0.0' } },
-      { package: { name: 'cool', version: '1.0.0' } }
-    ]
-  })
-  var results = 0
-  var s = esearch({
-    include: ['oo'],
-    limit: 3
-  })
-  s.on('data', function () { results++ })
-  finished(s, function (err) {
-    if (err) { throw err }
-    t.ok(true, 'request sent with correct size')
-    t.equal(results, 2, 'behaves fine with fewer results than size')
-    server.done()
-    t.done()
-  })
-})
-
-test('passes foo:bar syntax params directly', function (t) {
-  setup()
-  server.get(SEARCH + '?text=foo%3Abar&size=1').once().reply(200, {
-    objects: []
-  })
-  var s = esearch({
-    include: ['foo:bar'],
-    limit: 1
-  })
-  s.on('data', function () {})
-  finished(s, function (err) {
-    if (err) { throw err }
-    t.ok(true, 'request sent with correct params')
-    server.done()
-    t.done()
-  })
-})
-
-test('space-separates and URI-encodes multiple search params', function (t) {
-  setup()
-  server.get(SEARCH + '?text=foo%20bar%3Abaz%20quux%3F%3D&size=1').once().reply(200, {
-    objects: []
-  })
-  var s = esearch({
-    include: ['foo', 'bar:baz', 'quux?='],
-    limit: 1
-  })
-  s.on('data', function () {})
-  finished(s, function (err) {
-    if (err) { throw err }
-    t.ok(true, 'request sent with correct params')
-    server.done()
-    t.done()
-  })
-})
-
-test('cleanup', function (t) {
-  server.close()
-  cleanup()
-  t.done()
-})
diff --git a/test/tap/search.js b/test/tap/search.js
index df7ff0fe375b7..bbd293c3a1a3f 100644
--- a/test/tap/search.js
+++ b/test/tap/search.js
@@ -1,21 +1,25 @@
-var path = require('path')
-var mkdirp = require('mkdirp')
-var mr = require('npm-registry-mock')
-var osenv = require('osenv')
-var rimraf = require('rimraf')
-var cacheFile = require('npm-cache-filename')
-var test = require('tap').test
-var Tacks = require('tacks')
-var File = Tacks.File
+'use strict'
 
-var common = require('../common-tap.js')
+const cacheFile = require('npm-cache-filename')
+const mkdirp = require('mkdirp')
+const mr = require('npm-registry-mock')
+const osenv = require('osenv')
+const path = require('path')
+const qs = require('querystring')
+const rimraf = require('rimraf')
+const test = require('tap').test
 
-var PKG_DIR = path.resolve(__dirname, 'search')
-var CACHE_DIR = path.resolve(PKG_DIR, 'cache')
-var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all')
-var cachePath = path.join(cacheBase, '.cache.json')
+const Tacks = require('tacks')
+const File = Tacks.File
 
-var server
+const common = require('../common-tap.js')
+
+const PKG_DIR = path.resolve(__dirname, 'search')
+const CACHE_DIR = path.resolve(PKG_DIR, 'cache')
+const cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all')
+const cachePath = path.join(cacheBase, '.cache.json')
+
+let server
 
 test('setup', function (t) {
   mr({port: common.port, throwOnUnmatched: true}, function (err, s) {
@@ -28,7 +32,14 @@ test('setup', function (t) {
 
 test('notifies when there are no results', function (t) {
   setup()
-  server.get('/-/v1/search?text=none&size=20').once().reply(200, {
+  const query = qs.stringify({
+    text: 'none',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(200, {
     objects: []
   })
   common.npm([
@@ -46,10 +57,17 @@ test('notifies when there are no results', function (t) {
 
 test('spits out a useful error when no cache nor network', function (t) {
   setup()
-  server.get('/-/v1/search?text=foo&size=20').once().reply(404, {})
+  const query = qs.stringify({
+    text: 'foo',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(404, {})
   server.get('/-/all').many().reply(404, {})
-  var cacheContents = {}
-  var fixture = new Tacks(File(cacheContents))
+  const cacheContents = {}
+  const fixture = new Tacks(File(cacheContents))
   fixture.create(cachePath)
   common.npm([
     'search', 'foo',
@@ -70,7 +88,14 @@ test('spits out a useful error when no cache nor network', function (t) {
 
 test('can switch to JSON mode', function (t) {
   setup()
-  server.get('/-/v1/search?text=oo&size=20').once().reply(200, {
+  const query = qs.stringify({
+    text: 'oo',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(200, {
     objects: [
       { package: { name: 'cool', version: '1.0.0' } },
       { package: { name: 'foo', version: '2.0.0' } }
@@ -86,9 +111,15 @@ test('can switch to JSON mode', function (t) {
     if (err) throw err
     t.equal(stderr, '', 'no error output')
     t.equal(code, 0, 'search gives 0 error code even if no matches')
-    t.deepEquals(JSON.parse(stdout), [
-      { name: 'cool', version: '1.0.0', date: null },
-      { name: 'foo', version: '2.0.0', date: null }
+    t.similar(JSON.parse(stdout), [
+      {
+        name: 'cool',
+        version: '1.0.0'
+      },
+      {
+        name: 'foo',
+        version: '2.0.0'
+      }
     ], 'results returned as valid json')
     t.done()
   })
@@ -96,7 +127,14 @@ test('JSON mode does not notify on empty', function (t) {
   setup()
-  server.get('/-/v1/search?text=oo&size=20').once().reply(200, {
+  const query = qs.stringify({
+    text: 'oo',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(200, {
     objects: []
   })
   common.npm([
@@ -116,7 +154,14 @@ test('JSON mode does not notify on empty', function (t) {
 
 test('can switch to tab separated mode', function (t) {
   setup()
-  server.get('/-/v1/search?text=oo&size=20').once().reply(200, {
+  const query = qs.stringify({
+    text: 'oo',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(200, {
     objects: [
       { package: { name: 'cool', version: '1.0.0' } },
       { package: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' } }
@@ -139,7 +184,14 @@ test('can switch to tab separated mode', function (t) {
 
 test('tab mode does not notify on empty', function (t) {
   setup()
-  server.get('/-/v1/search?text=oo&size=20').once().reply(200, {
+  const query = qs.stringify({
+    text: 'oo',
+    size: 20,
+    quality: 0.65,
+    popularity: 0.98,
+    maintenance: 0.5
+  })
+  server.get(`/-/v1/search?${query}`).once().reply(200, {
     objects: []
   })
   common.npm([