diff --git a/lib/mkdir.js b/lib/mkdir.js index 29aa3be1..69e8c083 100644 --- a/lib/mkdir.js +++ b/lib/mkdir.js @@ -37,8 +37,17 @@ class CwdError extends Error { const cGet = (cache, key) => cache.get(normPath(key)) const cSet = (cache, key, val) => cache.set(normPath(key), val) +const checkCwd = (dir, cb) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) + er = new CwdError(dir, er && er.code || 'ENOTDIR') + cb(er) + }) +} + module.exports = (dir, opt, cb) => { dir = normPath(dir) + // if there's any overlap between mask and mode, // then we'll need an explicit chmod const umask = opt.umask @@ -74,16 +83,12 @@ module.exports = (dir, opt, cb) => { return done() if (dir === cwd) - return fs.stat(dir, (er, st) => { - if (er || !st.isDirectory()) - er = new CwdError(dir, er && er.code || 'ENOTDIR') - done(er) - }) + return checkCwd(dir, done) if (preserve) return mkdirp(dir, mode, done) - const sub = path.relative(cwd, dir) + const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) } @@ -92,7 +97,7 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { if (!parts.length) return cb(null, created) const p = parts.shift() - const part = base + '/' + p + const part = normPath(path.resolve(base + '/' + p)) if (cGet(cache, part)) return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) @@ -100,14 +105,11 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { if (er) { - if (er.path && path.dirname(er.path) === cwd && - (er.code === 'ENOTDIR' || er.code === 'ENOENT')) - return cb(new CwdError(cwd, er.code)) - fs.lstat(part, (statEr, st) => { - if (statEr) + if (statEr) { + statEr.path = statEr.path && normPath(statEr.path) cb(statEr) - else if (st.isDirectory()) + } else if (st.isDirectory()) mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) else if (unlink) fs.unlink(part, er => { @@ -126,6 +128,19 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { } } +const checkCwdSync = dir => { + let ok = false + let code = 'ENOTDIR' + try { + ok = fs.statSync(dir).isDirectory() + } catch (er) { + code = er.code + } finally { + if (!ok) + throw new CwdError(dir, code) + } +} + module.exports.sync = (dir, opt) => { dir = normPath(dir) // if there's any overlap between mask and mode, @@ -157,29 +172,20 @@ module.exports.sync = (dir, opt) => { return done() if (dir === cwd) { - let ok = false - let code = 'ENOTDIR' - try { - ok = fs.statSync(dir).isDirectory() - } catch (er) { - code = er.code - } finally { - if (!ok) - throw new CwdError(dir, code) - } - done() - return + checkCwdSync(cwd) + return done() } if (preserve) return done(mkdirp.sync(dir, mode)) - const sub = path.relative(cwd, dir) + const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') let created = null for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = normPath(path.resolve(part)) if (cGet(cache, part)) continue @@ -188,10 +194,6 @@ module.exports.sync = (dir, opt) => { created = created || part cSet(cache, part, true) } catch (er) { - if (er.path && path.dirname(er.path) === cwd && - (er.code === 'ENOTDIR' || er.code === 'ENOENT')) - return new CwdError(cwd, er.code) - const st = fs.lstatSync(part) if (st.isDirectory()) { cSet(cache, part, true) 
diff --git a/lib/normalize-windows-path.js b/lib/normalize-windows-path.js index 8e3c30a0..eb13ba01 100644 --- a/lib/normalize-windows-path.js +++ b/lib/normalize-windows-path.js @@ -5,4 +5,4 @@ const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform module.exports = platform !== 'win32' ? p => p - : p => p.replace(/\\/g, '/') + : p => p && p.replace(/\\/g, '/') diff --git a/lib/read-entry.js b/lib/read-entry.js index 8acee94b..6ea3135c 100644 --- a/lib/read-entry.js +++ b/lib/read-entry.js @@ -1,6 +1,7 @@ 'use strict' const types = require('./types.js') const MiniPass = require('minipass') +const normPath = require('./normalize-windows-path.js') const SLURP = Symbol('slurp') module.exports = class ReadEntry extends MiniPass { @@ -47,7 +48,7 @@ module.exports = class ReadEntry extends MiniPass { this.ignore = true } - this.path = header.path + this.path = normPath(header.path) this.mode = header.mode if (this.mode) this.mode = this.mode & 0o7777 @@ -59,7 +60,7 @@ module.exports = class ReadEntry extends MiniPass { this.mtime = header.mtime this.atime = header.atime this.ctime = header.ctime - this.linkpath = header.linkpath + this.linkpath = normPath(header.linkpath) this.uname = header.uname this.gname = header.gname @@ -92,7 +93,7 @@ module.exports = class ReadEntry extends MiniPass { // a global extended header, because that's weird. if (ex[k] !== null && ex[k] !== undefined && !(global && k === 'path')) - this[k] = ex[k] + this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] } } } diff --git a/lib/replace.js b/lib/replace.js index 571cee94..68d83a47 100644 --- a/lib/replace.js +++ b/lib/replace.js @@ -168,7 +168,8 @@ const replace = (opt, files, cb) => { fs.fstat(fd, (er, st) => { if (er) - return reject(er) + return fs.close(fd, () => reject(er)) + getPos(fd, st.size, (er, position) => { if (er) return reject(er) diff --git a/lib/unpack.js b/lib/unpack.js index 6ba11a21..a027ca02 100644 --- a/lib/unpack.js +++ b/lib/unpack.js @@ -43,13 +43,9 @@ const SKIP = Symbol('skip') const DOCHOWN = Symbol('doChown') const UID = Symbol('uid') const GID = Symbol('gid') +const CHECKED_CWD = Symbol('checkedCwd') const crypto = require('crypto') -/* istanbul ignore next */ -const neverCalled = () => { - throw new Error('sync function called cb somehow?!?') -} - // Unlinks on Windows are not atomic. // // This means that if you have a file entry, followed by another @@ -117,6 +113,8 @@ class Unpack extends Parser { super(opt) + this[CHECKED_CWD] = false + this.reservations = pathReservations() this.transform = typeof opt.transform === 'function' ? opt.transform : null @@ -206,8 +204,6 @@ class Unpack extends Parser { if (parts.length < this.strip) return false entry.path = parts.slice(this.strip).join('/') - if (entry.path === '' && entry.type !== 'Directory' && entry.type !== 'GNUDumpDir') - return false if (entry.type === 'Link') { const linkparts = normPath(entry.linkpath).split('/') @@ -232,18 +228,26 @@ class Unpack extends Parser { } } - // only encode : chars that aren't drive letter indicators - if (this.win32) { - const parsed = path.win32.parse(entry.path) - entry.path = parsed.root === '' ? 
wc.encode(entry.path) - : parsed.root + wc.encode(entry.path.substr(parsed.root.length)) - } - if (path.isAbsolute(entry.path)) - entry.absolute = normPath(entry.path) + entry.absolute = normPath(path.resolve(entry.path)) else entry.absolute = normPath(path.resolve(this.cwd, entry.path)) + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. + if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') + return false + + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(entry.absolute) + entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length)) + const { root: pRoot } = path.win32.parse(entry.path) + entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length)) + } + return true } @@ -327,16 +331,35 @@ class Unpack extends Parser { mode: mode, autoClose: false }) - stream.on('error', er => this[ONERROR](er, entry)) + stream.on('error', er => { + if (stream.fd) + fs.close(stream.fd, () => {}) + + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. + /* istanbul ignore next */ + stream.write = () => true + this[ONERROR](er, entry) + fullyDone() + }) let actions = 1 const done = er => { - if (er) - return this[ONERROR](er, entry) + if (er) { + /* istanbul ignore else - we should always have a fd by now */ + if (stream.fd) + fs.close(stream.fd, () => {}) + + this[ONERROR](er, entry) + fullyDone() + return + } if (--actions === 0) { fs.close(stream.fd, er => { fullyDone() + /* istanbul ignore next */ er ? this[ONERROR](er, entry) : this[UNPEND]() }) } @@ -451,7 +474,12 @@ class Unpack extends Parser { // check if a thing is there, and if so, try to clobber it [CHECKFS] (entry) { this[PEND]() - + const paths = [entry.path] + if (entry.linkpath) + paths.push(entry.linkpath) + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) + } + [CHECKFS2] (entry, done) { // if we are not creating a directory, and the path is in the dirCache, // then that means we are about to delete the directory we created // previously, and it is no longer going to be a directory, and neither @@ -459,36 +487,76 @@ class Unpack extends Parser { if (entry.type !== 'Directory') pruneCache(this.dirCache, entry.absolute) - const paths = [entry.path] - if (entry.linkpath) - paths.push(entry.linkpath) - this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) - } - [CHECKFS2] (entry, done) { - this[MKDIR](path.dirname(entry.absolute), this.dmode, er => { - if (er) { - done() - return this[ONERROR](er, entry) + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + this[CHECKED_CWD] = true + start() + }) + } + + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + afterMakeParent() + }) + } } - fs.lstat(entry.absolute, (er, st) => { + afterMakeParent() + } + + const afterMakeParent = () => { + fs.lstat(entry.absolute, (lstatEr, st) => { if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { this[SKIP](entry) done() - } else if (er || this[ISREUSABLE](entry, st)) - this[MAKEFS](null, entry, done) - else if 
(st.isDirectory()) { + return + } + if (lstatEr || this[ISREUSABLE](entry, st)) + return this[MAKEFS](null, entry, done) + + if (st.isDirectory()) { if (entry.type === 'Directory') { - if (!entry.mode || (st.mode & 0o7777) === entry.mode) - this[MAKEFS](null, entry, done) - else - fs.chmod(entry.absolute, entry.mode, - er => this[MAKEFS](er, entry, done)) - } else - fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry, done)) - } else - unlinkFile(entry.absolute, er => this[MAKEFS](er, entry, done)) + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const afterChmod = er => this[MAKEFS](er, entry, done) + if (!needChmod) + return afterChmod() + return fs.chmod(entry.absolute, entry.mode, afterChmod) + } + // not a dir entry, have to remove it. + if (entry.absolute !== this.cwd) { + return fs.rmdir(entry.absolute, er => + this[MAKEFS](er, entry, done)) + } + } + + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) + return this[MAKEFS](null, entry, done) + + unlinkFile(entry.absolute, er => + this[MAKEFS](er, entry, done)) }) - }) + } + + if (this[CHECKED_CWD]) + start() + else + checkCwd() } [MAKEFS] (er, entry, done) { @@ -525,42 +593,67 @@ class Unpack extends Parser { } } +const callSync = fn => { + try { + return [null, fn()] + } catch (er) { + return [er, null] + } +} class UnpackSync extends Unpack { - constructor (opt) { - super(opt) + [MAKEFS] (er, entry) { + return super[MAKEFS](er, entry, /* istanbul ignore next */ () => {}) } [CHECKFS] (entry) { if (entry.type !== 'Directory') pruneCache(this.dirCache, entry.absolute) - const er = this[MKDIR](path.dirname(entry.absolute), this.dmode, neverCalled) - if (er) - return this[ONERROR](er, entry) - try { - const st = fs.lstatSync(entry.absolute) - if (this.keep || this.newer && st.mtime > entry.mtime) - return this[SKIP](entry) - else if (this[ISREUSABLE](entry, st)) - return this[MAKEFS](null, entry, neverCalled) - else { - try { - if (st.isDirectory()) { - if (entry.type === 'Directory') { - if (entry.mode && (st.mode & 0o7777) !== entry.mode) - fs.chmodSync(entry.absolute, entry.mode) - } else - fs.rmdirSync(entry.absolute) - } else - unlinkFileSync(entry.absolute) - return this[MAKEFS](null, entry, neverCalled) - } catch (er) { - return this[ONERROR](er, entry) - } + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode) + if (er) + return this[ONERROR](er, entry) + this[CHECKED_CWD] = true + } + + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode) + if (mkParent) + return this[ONERROR](mkParent, entry) } - } catch (er) { - return this[MAKEFS](null, entry, neverCalled) } + + const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) + return this[SKIP](entry) + + if (lstatEr || this[ISREUSABLE](entry, st)) + return this[MAKEFS](null, entry) + + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const [er] = needChmod ? 
callSync(() => { + fs.chmodSync(entry.absolute, entry.mode) + }) : [] + return this[MAKEFS](er, entry) + } + // not a dir entry, have to remove it + const [er] = callSync(() => fs.rmdirSync(entry.absolute)) + this[MAKEFS](er, entry) + } + + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? [] + : callSync(() => unlinkFileSync(entry.absolute)) + this[MAKEFS](er, entry) } [FILE] (entry, _) { diff --git a/lib/write-entry.js b/lib/write-entry.js index b1df02a7..78ac49f5 100644 --- a/lib/write-entry.js +++ b/lib/write-entry.js @@ -35,6 +35,7 @@ const MODE = Symbol('mode') const AWAITDRAIN = Symbol('awaitDrain') const ONDRAIN = Symbol('ondrain') const PREFIX = Symbol('prefix') +const HAD_ERROR = Symbol('hadError') const warner = require('./warn-mixin.js') const winchars = require('./winchars.js') const stripAbsolutePath = require('./strip-absolute-path.js') @@ -51,18 +52,18 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { // suppress atime, ctime, uid, gid, uname, gname this.portable = !!opt.portable // until node has builtin pwnam functions, this'll have to do - this.myuid = process.getuid && process.getuid() + this.myuid = process.getuid && process.getuid() || 0 this.myuser = process.env.USER || '' this.maxReadSize = opt.maxReadSize || maxReadSize this.linkCache = opt.linkCache || new Map() this.statCache = opt.statCache || new Map() this.preservePaths = !!opt.preservePaths - this.cwd = opt.cwd || process.cwd() + this.cwd = normPath(opt.cwd || process.cwd()) this.strict = !!opt.strict this.noPax = !!opt.noPax this.noMtime = !!opt.noMtime this.mtime = opt.mtime || null - this.prefix = opt.prefix || null + this.prefix = opt.prefix ? normPath(opt.prefix) : null this.fd = null this.blockLen = null @@ -86,6 +87,8 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this.win32 = !!opt.win32 || process.platform === 'win32' if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. this.path = winchars.decode(this.path.replace(/\\/g, '/')) p = p.replace(/\\/g, '/') } @@ -101,6 +104,12 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this[LSTAT]() } + emit (ev, ...data) { + if (ev === 'error') + this[HAD_ERROR] = true + return super.emit(ev, ...data) + } + [LSTAT] () { fs.lstat(this.absolute, (er, stat) => { if (er) @@ -197,14 +206,14 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { } [ONREADLINK] (linkpath) { - this.linkpath = linkpath + this.linkpath = normPath(linkpath) this[HEADER]() this.end() } [HARDLINK] (linkpath) { this.type = 'Link' - this.linkpath = path.relative(this.cwd, linkpath) + this.linkpath = normPath(path.relative(this.cwd, linkpath)) this.stat.size = 0 this[HEADER]() this.end() @@ -238,6 +247,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { [ONOPENFILE] (fd) { this.fd = fd + if (this[HAD_ERROR]) + return this[CLOSE]() + this.blockLen = 512 * Math.ceil(this.stat.size / 512) this.blockRemain = this.blockLen const bufLen = Math.min(this.blockLen, this.maxReadSize) @@ -404,7 +416,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { this.prefix = opt.prefix || null - this.path = readEntry.path + this.path = normPath(readEntry.path) this.mode = this[MODE](readEntry.mode) this.uid = this.portable ? null : readEntry.uid this.gid = this.portable ? 
null : readEntry.gid @@ -414,7 +426,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime this.atime = this.portable ? null : readEntry.atime this.ctime = this.portable ? null : readEntry.ctime - this.linkpath = readEntry.linkpath + this.linkpath = normPath(readEntry.linkpath) if (typeof opt.onwarn === 'function') this.on('warn', opt.onwarn) diff --git a/test/create.js b/test/create.js index 2adc9e53..9aa66d19 100644 --- a/test/create.js +++ b/test/create.js @@ -1,5 +1,6 @@ 'use strict' +const isWindows = process.platform === 'win32' const t = require('tap') const c = require('../lib/create.js') const list = require('../lib/list.js') @@ -7,11 +8,12 @@ const fs = require('fs') const path = require('path') const dir = path.resolve(__dirname, 'fixtures/create') const tars = path.resolve(__dirname, 'fixtures/tars') -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') const spawn = require('child_process').spawn const Pack = require('../lib/pack.js') const mutateFS = require('mutate-fs') +const {promisify} = require('util') +const rimraf = promisify(require('rimraf')) +const mkdirp = promisify(require('mkdirp')) const readtar = (file, cb) => { const child = spawn('tar', ['tf', file]) @@ -21,12 +23,11 @@ const readtar = (file, cb) => { cb(code, signal, Buffer.concat(out).toString())) } -t.teardown(_ => rimraf.sync(dir)) +t.teardown(() => rimraf(dir)) -t.test('setup', t => { - rimraf.sync(dir) - mkdirp.sync(dir) - t.end() +t.test('setup', async () => { + await rimraf(dir) + await mkdirp(dir) }) t.test('no cb if sync or without file', t => { @@ -88,7 +89,7 @@ t.test('create file', t => { }) t.test('with specific mode', t => { - const mode = 0o740 + const mode = isWindows ? 
0o666 : 0o740 t.test('sync', t => { const file = path.resolve(dir, 'sync-mode.tar') c({ diff --git a/test/extract.js b/test/extract.js index f6c992b1..8c1064de 100644 --- a/test/extract.js +++ b/test/extract.js @@ -6,49 +6,47 @@ const path = require('path') const fs = require('fs') const extractdir = path.resolve(__dirname, 'fixtures/extract') const tars = path.resolve(__dirname, 'fixtures/tars') -const mkdirp = require('mkdirp') -const rimraf = require('rimraf') +const {promisify} = require('util') +const mkdirp = promisify(require('mkdirp')) +const rimraf = promisify(require('rimraf')) const mutateFS = require('mutate-fs') -t.teardown(_ => rimraf.sync(extractdir)) +t.teardown(_ => rimraf(extractdir)) t.test('basic extracting', t => { const file = path.resolve(tars, 'utf8.tar') const dir = path.resolve(extractdir, 'basic') - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { fs.lstatSync(dir + '/Ω.txt') fs.lstatSync(dir + '/🌟.txt') t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - rimraf.sync(dir) + await rimraf(dir) t.end() } const files = [ '🌟.txt', 'Ω.txt' ] t.test('sync', t => { x({ file: file, sync: true, C: dir }, files) - check(t) + return check(t) }) t.test('async promisey', t => { - return x({ file: file, cwd: dir }, files).then(_ => { - check(t) - }) + return x({ file: file, cwd: dir }, files).then(_ => check(t)) }) t.test('async cb', t => { return x({ file: file, cwd: dir }, files, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -59,32 +57,31 @@ t.test('file list and filter', t => { const file = path.resolve(tars, 'utf8.tar') const dir = path.resolve(extractdir, 'filter') - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { fs.lstatSync(dir + '/Ω.txt') t.throws(_ => fs.lstatSync(dir + '/🌟.txt')) t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - rimraf.sync(dir) + await rimraf(dir) t.end() } const filter = path => path === 'Ω.txt' t.test('sync', t => { - x({ filter: filter, file: file, sync: true, C: dir }, [ '🌟.txt', 'Ω.txt' ]) - check(t) + x({ filter: filter, file: file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt']) + return check(t) }) t.test('async promisey', t => { - return x({ filter: filter, file: file, cwd: dir }, [ '🌟.txt', 'Ω.txt' ]).then(_ => { - check(t) + return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt']).then(_ => { + return check(t) }) }) @@ -92,7 +89,7 @@ t.test('file list and filter', t => { return x({ filter: filter, file: file, cwd: dir }, [ '🌟.txt', 'Ω.txt' ], er => { if (er) throw er - check(t) + return check(t) }) }) @@ -103,29 +100,28 @@ t.test('no file list', t => { const file = path.resolve(tars, 'body-byte-counts.tar') const dir = path.resolve(extractdir, 'no-list') - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - rimraf.sync(dir) + 
await rimraf(dir) t.end() } t.test('sync', t => { x({ file: file, sync: true, C: dir }) - check(t) + return check(t) }) t.test('async promisey', t => { return x({ file: file, cwd: dir }).then(_ => { - check(t) + return check(t) }) }) @@ -133,7 +129,7 @@ t.test('no file list', t => { return x({ file: file, cwd: dir }, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -145,29 +141,28 @@ t.test('read in itty bits', t => { const file = path.resolve(tars, 'body-byte-counts.tar') const dir = path.resolve(extractdir, 'no-list') - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - rimraf.sync(dir) + await rimraf(dir) t.end() } t.test('sync', t => { x({ file: file, sync: true, C: dir, maxReadSize: maxReadSize }) - check(t) + return check(t) }) t.test('async promisey', t => { return x({ file: file, cwd: dir, maxReadSize: maxReadSize }).then(_ => { - check(t) + return check(t) }) }) @@ -175,7 +170,7 @@ t.test('read in itty bits', t => { return x({ file: file, cwd: dir, maxReadSize: maxReadSize }, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -210,16 +205,15 @@ t.test('read fail', t => { t.end() }) -t.test('sync gzip error edge case test', t => { - const zlib = require('minizlib') +t.test('sync gzip error edge case test', async t => { const file = path.resolve(__dirname, 'fixtures/sync-gzip-fail.tgz') const dir = path.resolve(__dirname, 'sync-gzip-fail') const cwd = process.cwd() - mkdirp.sync(dir + '/x') + await mkdirp(dir + '/x') process.chdir(dir) - t.teardown(() => { + t.teardown(async () => { process.chdir(cwd) - rimraf.sync(dir) + await rimraf(dir) }) x({ diff --git a/test/pack.js b/test/pack.js index 24b61ead..57e09008 100644 --- a/test/pack.js +++ b/test/pack.js @@ -20,6 +20,8 @@ const EE = require('events').EventEmitter const rimraf = require('rimraf') const mkdirp = require('mkdirp') const ReadEntry = require('../lib/read-entry.js') +const isWindows = process.platform === 'win32' +const normPath = require('../lib/normalize-windows-path.js') const ctime = new Date('2017-05-10T01:03:12.000Z') const atime = new Date('2017-04-17T00:00:00.000Z') @@ -58,7 +60,7 @@ t.test('pack a file', t => { cksumValid: true, needPax: false, path: 'one-byte.txt', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 1, mtime: mtime, cksum: Number, @@ -102,7 +104,7 @@ t.test('pack a file with a prefix', t => { cksumValid: true, needPax: false, path: 'package/.dotfile', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 2, mtime: mtime, cksum: Number, @@ -211,7 +213,7 @@ t.test('use process cwd if cwd not specified', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -239,7 +241,7 @@ t.test('use process cwd if cwd not specified', t => { cksumValid: true, needPax: false, path: 'dir/x', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 0, mtime: mtime, cksum: Number, @@ -277,7 +279,7 @@ t.test('filter', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 
0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -323,7 +325,7 @@ t.test('add the same dir twice (exercise cache code)', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -571,7 +573,7 @@ t.test('pipe into a slow reader', t => { cksumValid: true, needPax: false, path: 'long-path/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -629,7 +631,7 @@ t.test('pipe into a slow gzip reader', t => { cksumValid: true, needPax: false, path: 'long-path/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -695,11 +697,12 @@ t.test('warnings', t => { const data = Buffer.concat(out) t.equal(data.length, 2048) t.match(warnings, [[ - /stripping .* from absolute path/, f + /stripping .* from absolute path/, + normPath(f), ]]) t.match(new Header(data), { - path: f.replace(/^(\/|[a-z]:\\\\)/, '') + path: normPath(f).replace(/^(\/|[a-z]:\/)/i, ''), }) t.end() }) @@ -724,7 +727,7 @@ t.test('warnings', t => { t.equal(warnings.length, 0) t.match(new Header(data), { - path: f + path: normPath(f), }) t.end() }) @@ -737,7 +740,10 @@ t.test('warnings', t => { strict: true, cwd: files }).end(f).on('error', e => { - t.match(e, { message: /stripping .* from absolute path/, data: f }) + t.match(e, { + message: /stripping .* from absolute path/, + data: normPath(f), + }) t.end() }) }) @@ -791,7 +797,7 @@ t.test('no dir recurse', t => { t.end() }) -t.test('follow', t => { +t.test('follow', { skip: isWindows && 'file symlinks not available' }, t => { const check = (out, t) => { const data = Buffer.concat(out) t.equal(data.length, 2048) @@ -800,8 +806,8 @@ t.test('follow', t => { cksumValid: true, needPax: false, path: 'symlink', - mode: 0o644, - size: 26 + mode: isWindows ? 
0o666 : 0o644, + size: 26, }) t.match(data.slice(512).toString(), /this link is like diamond\n\0+$/) t.end() diff --git a/test/read-entry.js b/test/read-entry.js index e0feb7ed..40662ea4 100644 --- a/test/read-entry.js +++ b/test/read-entry.js @@ -6,7 +6,7 @@ const Header = require('../lib/header.js') t.test('create read entry', t => { const h = new Header({ - path: 'foo.txt', + path: 'oof.txt', mode: 0o755, uid: 24561, gid: 20, @@ -20,17 +20,17 @@ t.test('create read entry', t => { }) h.encode() - const entry = new ReadEntry(h, { x: 'y' }, { z: 0, a: null, b: undefined }) + const entry = new ReadEntry(h, { x: 'y', path: 'foo.txt' }, { z: 0, a: null, b: undefined }) t.ok(entry.header.cksumValid, 'header checksum should be valid') t.match(entry, { - extended: { x: 'y' }, + extended: { x: 'y', path: 'foo.txt' }, globalExtended: { z: 0, a: null, b: undefined }, header: { cksumValid: true, needPax: false, - path: 'foo.txt', + path: 'oof.txt', mode: 0o755, uid: 24561, gid: 20, diff --git a/test/replace.js b/test/replace.js index b6821582..1ce30d46 100644 --- a/test/replace.js +++ b/test/replace.js @@ -4,59 +4,29 @@ const t = require('tap') const r = require('../lib/replace.js') const path = require('path') const fs = require('fs') -const mkdirp = require('mkdirp') -const rimraf = require('rimraf') const mutateFS = require('mutate-fs') const list = require('../lib/list.js') +const {resolve} = require('path') const fixtures = path.resolve(__dirname, 'fixtures') -const dir = path.resolve(fixtures, 'replace') const tars = path.resolve(fixtures, 'tars') -const file = dir + '/body-byte-counts.tar' -const fileNoNulls = dir + '/no-null-eof.tar' -const fileTruncHead = dir + '/truncated-head.tar' -const fileTruncBody = dir + '/truncated-body.tar' -const fileNonExistent = dir + '/does-not-exist.tar' -const fileZeroByte = dir + '/zero.tar' -const fileEmpty = dir + '/empty.tar' -const fileCompressed = dir + '/compressed.tgz' const zlib = require('zlib') const spawn = require('child_process').spawn -t.teardown(_ => rimraf.sync(dir)) - -const reset = cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - const data = fs.readFileSync(tars + '/body-byte-counts.tar') - fs.writeFileSync(file, data) - - const dataNoNulls = data.slice(0, data.length - 1024) - fs.writeFileSync(fileNoNulls, dataNoNulls) - - const dataTruncHead = Buffer.concat([dataNoNulls, data.slice(0, 500)]) - fs.writeFileSync(fileTruncHead, dataTruncHead) - - const dataTruncBody = Buffer.concat([dataNoNulls, data.slice(0, 700)]) - fs.writeFileSync(fileTruncBody, dataTruncBody) - - fs.writeFileSync(fileZeroByte, '') - fs.writeFileSync(fileEmpty, Buffer.alloc(1024)) - - fs.writeFileSync(fileCompressed, zlib.gzipSync(data)) - - if (cb) - cb() +const data = fs.readFileSync(tars + '/body-byte-counts.tar') +const dataNoNulls = data.slice(0, data.length - 1024) +const fixtureDef = { + 'body-byte-counts.tar': data, + 'no-null-eof.tar': dataNoNulls, + 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), + 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'zero.tar': Buffer.from(''), + 'empty.tar': Buffer.alloc(512), + 'compressed.tgz': zlib.gzipSync(data), } -t.test('setup', t => { - reset(t.end) -}) - t.test('basic file add to archive (good or truncated)', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file], { stdio: [ 0, 'pipe', 2 ] }) const out = [] @@ -64,7 +34,7 @@ t.test('basic file add to archive (good or truncated)', t => { c.on('close', (code, signal) => { 
t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -76,50 +46,68 @@ t.test('basic file add to archive (good or truncated)', t => { }) } - ;[file, - fileNoNulls, - fileTruncHead, - fileTruncBody - ].forEach(file => { - const fileList = [ path.basename(__filename) ] - t.test(path.basename(file), t => { - t.test('sync', t => { + const files = [ + 'body-byte-counts.tar', + 'no-null-eof.tar', + 'truncated-head.tar', + 'truncated-body.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + const fileList = [path.basename(__filename)] + t.test('sync', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ sync: true, - file: file, - cwd: __dirname + file: resolve(dir, file), + cwd: __dirname, }, fileList) - check(file, t) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ - file: file, - cwd: __dirname + file: resolve(dir, file), + cwd: __dirname, }, fileList, er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ - file: file, - cwd: __dirname - }, fileList).then(_ => check(file, t)) + file: resolve(dir, file), + cwd: __dirname, + }, fileList).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) t.test('add to empty archive', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file]) const out = [] @@ -135,70 +123,91 @@ t.test('add to empty archive', t => { }) } - ;[fileNonExistent, - fileEmpty, - fileZeroByte - ].forEach(file => { - t.test(path.basename(file), t => { - t.test('sync', t => { + const files = [ + 'empty.tar', + 'zero.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + files.push('not-existing.tar') + + t.test('sync', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ sync: true, - file: file, - cwd: __dirname + file: resolve(dir, file), + cwd: __dirname, }, [path.basename(__filename)]) - check(file, t) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ - file: file, - cwd: __dirname + file: resolve(dir, file), + cwd: __dirname, }, [path.basename(__filename)], er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', async t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ - file: file, - cwd: __dirname - }, [path.basename(__filename)]).then(_ => check(file, t)) + file: resolve(dir, file), + cwd: __dirname, + }, [path.basename(__filename)]).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) -t.test('cannot append to gzipped archives', t => { - reset() +t.test('cannot append to gzipped archives', async t => { + const dir = 
t.testdir({ + 'compressed.tgz': fixtureDef['compressed.tgz'], + }) + const file = resolve(dir, 'compressed.tgz') const expect = new Error('cannot append to compressed archives') const expectT = new TypeError('cannot append to compressed archives') t.throws(_ => r({ - file: fileCompressed, + file, cwd: __dirname, gzip: true }, [path.basename(__filename)]), expectT) t.throws(_ => r({ - file: fileCompressed, + file, cwd: __dirname, sync: true }, [path.basename(__filename)]), expect) - r({ - file: fileCompressed, + return r({ + file, cwd: __dirname, - }, [path.basename(__filename)], er => { - t.match(er, expect) - t.end() - }) + }, [path.basename(__filename)], er => t.match(er, expect)) }) t.test('other throws', t => { @@ -209,37 +218,61 @@ t.test('other throws', t => { }) t.test('broken open', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('open', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + r({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) t.test('broken fstat', t => { + const td = { + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + } const poop = new Error('poop') - t.teardown(mutateFS.fail('fstat', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { - t.match(er, poop) + t.test('sync', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) t.end() }) + t.test('async', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + r({ file }, ['README.md'], async er => { + t.match(er, poop) + t.end() + }) + }) + t.end() }) t.test('broken read', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + r({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) -t.test('mtime cache', t => { - t.beforeEach(reset) +t.test('mtime cache', async t => { + const td = { + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + } let mtimeCache @@ -250,7 +283,7 @@ t.test('mtime cache', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -271,9 +304,11 @@ t.test('mtime cache', t => { } t.test('sync', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') r({ sync: true, - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map() }, [path.basename(__filename)]) @@ -281,8 +316,10 @@ t.test('mtime cache', t => { }) t.test('async cb', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') r({ - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map() }, 
[path.basename(__filename)], er => { @@ -293,8 +330,10 @@ t.test('mtime cache', t => { }) t.test('async promise', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') r({ - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map() }, [path.basename(__filename)]).then(_ => check(file, t)) @@ -304,40 +343,58 @@ t.test('mtime cache', t => { }) t.test('create tarball out of another tarball', t => { - const out = path.resolve(dir, 'out.tar') - - t.beforeEach(cb => { - fs.writeFile(out, fs.readFileSync(path.resolve(tars, 'dir.tar')), cb) - }) + const td = { + 'out.tar': fs.readFileSync(path.resolve(tars, 'dir.tar')), + } - const check = t => { + const check = (out, t) => { const expect = [ 'dir/', 'Ω.txt', '🌟.txt', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' ] - list({ f: out, sync: true, onentry: entry => { - t.equal(entry.path, expect.shift()) - }}) + list({ + f: out, + sync: true, + onentry: entry => { + t.equal(entry.path, expect.shift()) + }}) t.same(expect, []) t.end() } t.test('sync', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') r({ f: out, cwd: tars, sync: true }, ['@utf8.tar']) - check(t) + check(out, t) + }) + + t.test('async cb', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') + r({ + f: out, + cwd: tars, + }, ['@utf8.tar'], er => { + if (er) + throw er + check(out, t) + }) }) t.test('async', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') r({ f: out, - cwd: tars - }, ['@utf8.tar'], _ => check(t)) + cwd: tars, + }, ['@utf8.tar']).then(() => check(out, t)) }) t.end() diff --git a/test/unpack.js b/test/unpack.js index f19d126c..2c18ccc9 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -17,22 +17,31 @@ const files = path.resolve(fixtures, 'files') const tars = path.resolve(fixtures, 'tars') const parses = path.resolve(fixtures, 'parse') const unpackdir = path.resolve(fixtures, 'unpack') -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') +const {promisify} = require('util') +const rimraf = promisify(require('rimraf')) +const mkdirp = promisify(require('mkdirp')) const mutateFS = require('mutate-fs') const eos = require('end-of-stream') +const normPath = require('../lib/normalize-windows-path.js') +const requireInject = require('require-inject') -t.teardown(_ => rimraf.sync(unpackdir)) +// On Windows in particular, the "really deep folder path" file +// often tends to cause problems, which don't indicate a failure +// of this library, it's just what happens on Windows with super +// long file paths. 
+const isWindows = process.platform === 'win32' +const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) -t.test('setup', t => { - rimraf.sync(unpackdir) - mkdirp.sync(unpackdir) - t.end() +t.teardown(_ => rimraf(unpackdir)) + +t.test('setup', async () => { + await rimraf(unpackdir) + await mkdirp(unpackdir) }) t.test('basic file unpack tests', t => { const basedir = path.resolve(unpackdir, 'basic') - t.teardown(_ => rimraf.sync(basedir)) + t.teardown(_ => rimraf(basedir)) const cases = { 'emptypax.tar': { @@ -80,18 +89,19 @@ t.test('basic file unpack tests', t => { const tf = path.resolve(tars, tarfile) const dir = path.resolve(basedir, tarfile) const linkdir = path.resolve(basedir, tarfile + '.link') - t.beforeEach(cb => { - rimraf.sync(dir) - rimraf.sync(linkdir) - mkdirp.sync(dir) - fs.symlinkSync(dir, linkdir) - cb() + t.beforeEach(async () => { + await rimraf(dir) + await rimraf(linkdir) + await mkdirp(dir) + fs.symlinkSync(dir, linkdir, 'junction') }) const check = t => { const expect = cases[tarfile] Object.keys(expect).forEach(file => { const f = path.resolve(dir, file) + if (isWindows && isLongFile(file)) + return t.equal(fs.readFileSync(f, 'utf8'), expect[file], file) }) t.end() @@ -133,7 +143,7 @@ t.test('basic file unpack tests', t => { t.test('cwd default to process cwd', t => { const u = new Unpack() const us = new UnpackSync() - const cwd = process.cwd() + const cwd = normPath(process.cwd()) t.equal(u.cwd, cwd) t.equal(us.cwd, cwd) t.end() @@ -145,8 +155,8 @@ t.test('links!', t => { const stripData = fs.readFileSync(tars + '/links-strip.tar') t.plan(6) - t.beforeEach(cb => mkdirp(dir, cb)) - t.afterEach(cb => rimraf(dir, cb)) + t.beforeEach(() => mkdirp(dir)) + t.afterEach(() => rimraf(dir)) const check = t => { const hl1 = fs.lstatSync(dir + '/hardlink-1') @@ -155,9 +165,12 @@ t.test('links!', t => { t.equal(hl1.ino, hl2.ino) t.equal(hl1.nlink, 2) t.equal(hl2.nlink, 2) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + // doesn't work on win32 without special privs + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } const checkForStrip = t => { @@ -170,9 +183,11 @@ t.test('links!', t => { t.equal(hl1.ino, hl3.ino) t.equal(hl1.nlink, 3) t.equal(hl2.nlink, 3) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } const checkForStrip3 = t => { @@ -183,7 +198,8 @@ t.test('links!', t => { } catch(e) { err = e } - // can't be extracted because we've passed it in the tar (specially crafted tar for this not to work) + // can't be extracted because we've passed it in the tar + // (specially crafted tar for this not to work) t.equal(err.code, 'ENOENT') t.end() } @@ -205,7 +221,7 @@ t.test('links!', t => { t.test('sync strip', t => { const unpack = new UnpackSync({ cwd: dir, strip: 1 }) - unpack.end(fs.readFileSync(tars + '/links-strip.tar')) + unpack.end(stripData) checkForStrip(t) }) @@ -240,7 +256,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.plan(6) mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) t.beforeEach(cb => { // clobber this junk @@ -259,9 +275,11 @@ 
t.test('links without cleanup (exercise clobbering code)', t => { t.equal(hl1.ino, hl2.ino) t.equal(hl1.nlink, 2) t.equal(hl2.nlink, 2) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } @@ -309,7 +327,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('nested dir dupe', t => { const dir = path.resolve(unpackdir, 'nested-dir') mkdirp.sync(dir + '/d/e/e/p') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', @@ -338,14 +356,15 @@ t.test('nested dir dupe', t => { zip.end(data) }) -t.test('symlink in dir path', t => { +t.test('symlink in dir path', { + skip: isWindows && 'symlinks not fully supported', +}, t => { const dir = path.resolve(unpackdir, 'symlink-junk') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -399,18 +418,22 @@ t.test('symlink in dir path', t => { const warnings = [] const u = new Unpack({ cwd: dir, onwarn: (w,d) => warnings.push([w,d]) }) u.on('close', _ => { - t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, 0o755) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + if (!isWindows) { + t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + } + if (!isWindows) { + t.equal(warnings[0][0], 'Cannot extract through symbolic link') + t.match(warnings[0][1], { + name: 'SylinkError', + path: dir + '/d/i/r/symlink/', + symlink: dir + '/d/i/r/symlink', + }) + } t.equal(warnings.length, 1) - t.equal(warnings[0][0], 'Cannot extract through symbolic link') - t.match(warnings[0][1], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink' - }) t.end() }) u.end(data) @@ -423,10 +446,12 @@ t.test('symlink in dir path', t => { onwarn: (w,d) => warnings.push([w,d]) }) u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 
0o666 : 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + if (!isWindows) { + t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + } t.equal(warnings.length, 1) t.equal(warnings[0][0], 'Cannot extract through symbolic link') t.match(warnings[0][1], { @@ -586,7 +611,7 @@ t.test('symlink in dir path', t => { t.test('unsupported entries', t => { const dir = path.resolve(unpackdir, 'unsupported-entries') mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const unknown = new Header({ path: 'qux', type: 'File', size: 4 }) unknown.type = 'Z' unknown.encode() @@ -663,11 +688,10 @@ t.test('unsupported entries', t => { t.test('file in dir path', t => { const dir = path.resolve(unpackdir, 'file-junk') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -737,7 +761,7 @@ t.test('file in dir path', t => { t.test('set umask option', t => { const dir = path.resolve(unpackdir, 'umask') mkdirp.sync(dir) - t.tearDown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const data = makeTar([ { @@ -753,22 +777,24 @@ t.test('set umask option', t => { umask: 0o027, cwd: dir }).on('close', _ => { - t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, 0o750) - t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750) + t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) t.end() }).end(data) }) t.test('absolute paths', t => { const dir = path.join(unpackdir, 'absolute-paths') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const absolute = path.resolve(dir, 'd/i/r/absolute') + const root = path.parse(absolute).root + const extraAbsolute = root + root + root + absolute + t.ok(path.isAbsolute(extraAbsolute)) t.ok(path.isAbsolute(absolute)) const parsed = path.parse(absolute) const relative = absolute.substr(parsed.root.length) @@ -776,7 +802,7 @@ t.test('absolute paths', t => { const data = makeTar([ { - path: absolute, + path: extraAbsolute, type: 'File', size: 1, atime: new Date('1979-07-01T19:10:00.000Z'), @@ -790,9 +816,10 @@ t.test('absolute paths', t => { t.test('warn and correct', t => { const check = t => { - t.same(warnings, [[ - 'stripping / from absolute path', - absolute + const r = normPath(root) + t.match(warnings, [[ + `stripping ${r}${r}${r}${r} from absolute path`, + normPath(absolute), ]]) t.ok(fs.lstatSync(path.resolve(dir, relative)).isFile(), 'is file') t.end() @@ -821,6 +848,22 @@ t.test('absolute paths', t => { }) t.test('preserve absolute path', t => { + // if we use the extraAbsolute path here, we end up creating a dir + // like C:\C:\C:\C:\path\to\absolute, which is both 100% valid on + // windows, as well as SUUUUUPER annoying. + const data = makeTar([ + { + path: isWindows ? 
absolute : extraAbsolute, + type: 'File', + size: 1, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + 'a', + '', + '', + ]) const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(absolute).isFile(), 'is file') @@ -856,11 +899,10 @@ t.test('absolute paths', t => { t.test('.. paths', t => { const dir = path.join(unpackdir, 'dotted-paths') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(cb => { - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const fmode = 0o755 @@ -919,7 +961,7 @@ t.test('.. paths', t => { const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(resolved).isFile(), 'is file') - t.equal(fs.lstatSync(resolved).mode & 0o777, fmode, 'mode is 0755') + t.equal(fs.lstatSync(resolved).mode & 0o777, isWindows ? 0o666 : fmode) t.end() } @@ -955,20 +997,46 @@ t.test('.. paths', t => { t.test('fail all stats', t => { const poop = new Error('poop') poop.code = 'EPOOP' - let unmutate - const dir = path.join(unpackdir, 'stat-fail') + const dir = normPath(path.join(unpackdir, 'stat-fail')) + const { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + } = fs + const unmutate = () => Object.assign(fs, { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + }) + const mutate = () => { + fs.stat = fs.lstat = fs.fstat = (...args) => { + // don't fail statting the cwd, or we get different errors + if (normPath(args[0]) === dir) + return lstat(dir, args.pop()) + process.nextTick(() => args.pop()(poop)) + } + fs.statSync = fs.lstatSync = fs.fstatSync = (...args) => { + if (normPath(args[0]) === dir) + return lstatSync(dir) + throw poop + } + } const warnings = [] - t.beforeEach(cb => { + t.beforeEach(async () => { warnings.length = 0 - mkdirp.sync(dir) - unmutate = mutateFS.statFail(poop) - cb() + await mkdirp(dir) + mutate() }) - t.afterEach(cb => { + t.afterEach(async () => { unmutate() - rimraf.sync(dir) - cb() + await rimraf(dir) }) const data = makeTar([ @@ -1039,18 +1107,18 @@ t.test('fail all stats', t => { String, { code: 'EISDIR', - path: path.resolve(dir, 'd/i/r/file'), - syscall: 'open' - } + path: normPath(path.resolve(dir, 'd/i/r/file')), + syscall: 'open', + }, ], [ String, { - dest: path.resolve(dir, 'd/i/r/link'), - path: path.resolve(dir, 'd/i/r/file'), - syscall: 'link' - } - ] + dest: normPath(path.resolve(dir, 'd/i/r/link')), + path: normPath(path.resolve(dir, 'd/i/r/file')), + syscall: 'link', + }, + ], ] new UnpackSync({ cwd: dir, @@ -1067,14 +1135,16 @@ t.test('fail symlink', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('symlink', poop) const dir = path.join(unpackdir, 'symlink-fail') - t.teardown(_ => (unmutate(), rimraf.sync(dir))) + t.teardown(async _ => { + unmutate() + await rimraf(dir) + }) const warnings = [] - t.beforeEach(cb => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -1129,14 +1199,16 @@ t.test('fail chmod', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('chmod', poop) const dir = path.join(unpackdir, 'chmod-fail') - t.teardown(_ => (unmutate(), rimraf.sync(dir))) + t.teardown(async _ => { + unmutate() + await rimraf(dir) + }) const warnings = [] - t.beforeEach(cb => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) - cb() + await rimraf(dir) + await 
     mkdirp(dir)
   })
 
   const data = makeTar([
@@ -1190,19 +1262,17 @@ t.test('fail mkdir', t => {
   poop.code = 'EPOOP'
   let unmutate
   const dir = path.join(unpackdir, 'mkdir-fail')
-  t.teardown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
 
   const warnings = []
-  t.beforeEach(cb => {
+  t.beforeEach(async () => {
     warnings.length = 0
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
+    await rimraf(dir)
+    await mkdirp(dir)
     unmutate = mutateFS.fail('mkdir', poop)
-    cb()
   })
-  t.afterEach(cb => {
+  t.afterEach(async () => {
     unmutate()
-    cb()
   })
 
   const data = makeTar([
@@ -1218,15 +1288,14 @@
     ''
   ])
 
-  const expect = [ [
-    'ENOENT: no such file or directory, lstat \'' +
-    path.resolve(dir, 'dir') + '\'',
+  const expect = [[
+    'ENOENT: no such file or directory',
     {
       code: 'ENOENT',
       syscall: 'lstat',
-      path: path.resolve(dir, 'dir')
-    }
-  ] ]
+      path: normPath(path.resolve(dir, 'dir')),
+    },
+  ]]
 
   const check = t => {
     t.match(warnings, expect)
@@ -1234,13 +1303,6 @@
     t.end()
   }
 
-  t.test('async', t => {
-    new Unpack({
-      cwd: dir,
-      onwarn: (w, d) => warnings.push([w, d])
-    }).on('close', _ => check(t)).end(data)
-  })
-
   t.test('sync', t => {
     new UnpackSync({
       cwd: dir,
@@ -1249,6 +1311,13 @@
     check(t)
   })
 
+  t.test('async', t => {
+    new Unpack({
+      cwd: dir,
+      onwarn: (w, d) => warnings.push([w, d]),
+    }).on('close', _ => check(t)).end(data)
+  })
+
   t.end()
 })
 
@@ -1257,14 +1326,16 @@ t.test('fail write', t => {
   poop.code = 'EPOOP'
   const unmutate = mutateFS.fail('write', poop)
   const dir = path.join(unpackdir, 'write-fail')
-  t.teardown(_ => (unmutate(), rimraf.sync(dir)))
+  t.teardown(async _ => {
+    unmutate()
+    await rimraf(dir)
+  })
 
   const warnings = []
-  t.beforeEach(cb => {
+  t.beforeEach(async () => {
     warnings.length = 0
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
-    cb()
+    await rimraf(dir)
+    await mkdirp(dir)
   })
 
   const data = makeTar([
@@ -1308,15 +1379,14 @@ t.test('fail write', t => {
 
 t.test('skip existing', t => {
   const dir = path.join(unpackdir, 'skip-newer')
-  t.teardown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
 
   const date = new Date('2011-03-27T22:16:31.000Z')
-  t.beforeEach(cb => {
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
+  t.beforeEach(async () => {
+    await rimraf(dir)
+    await mkdirp(dir)
     fs.writeFileSync(dir + '/x', 'y')
     fs.utimesSync(dir + '/x', date, date)
-    cb()
   })
 
   const data = makeTar([
@@ -1361,15 +1431,14 @@ t.test('skip existing', t => {
 
 t.test('skip newer', t => {
   const dir = path.join(unpackdir, 'skip-newer')
-  t.teardown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
 
   const date = new Date('2013-12-19T17:00:00.000Z')
-  t.beforeEach(cb => {
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
+  t.beforeEach(async () => {
+    await rimraf(dir)
+    await mkdirp(dir)
     fs.writeFileSync(dir + '/x', 'y')
     fs.utimesSync(dir + '/x', date, date)
-    cb()
   })
 
   const data = makeTar([
@@ -1414,12 +1483,11 @@ t.test('skip newer', t => {
 
 t.test('no mtime', t => {
   const dir = path.join(unpackdir, 'skip-newer')
-  t.teardown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
 
-  t.beforeEach(cb => {
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
-    cb()
+  t.beforeEach(async () => {
+    await rimraf(dir)
+    await mkdirp(dir)
   })
 
   const date = new Date('2011-03-27T22:16:31.000Z')
@@ -1480,7 +1548,7 @@ t.test('no mtime', t => {
 t.test('unpack big enough to pause/drain', t => {
   const dir = path.resolve(unpackdir, 'drain-clog')
   mkdirp.sync(dir)
-  t.tearDown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
   const stream = fs.createReadStream(fixtures + '/parses.tar')
   const u = new Unpack({
     cwd: dir,
@@ -1603,11 +1671,16 @@ t.test('set owner', t => {
   const unl = mutateFS.fail('lchown', poop)
   const unf = mutateFS.fail('fchown', poop)
-  t.teardown(_ => (un(), unf(), unl()))
+  t.teardown(async () => {
+    un()
+    unf()
+    unl()
+    await rimraf(dir)
+  })
 
   t.test('sync', t => {
     mkdirp.sync(dir)
-    t.teardown(_ => rimraf.sync(dir))
+    t.teardown(_ => rimraf(dir))
     let warned = false
     const u = new Unpack.Sync({
       cwd: dir,
@@ -1625,7 +1698,7 @@ t.test('set owner', t => {
 
   t.test('async', t => {
     mkdirp.sync(dir)
-    t.teardown(_ => rimraf.sync(dir))
+    t.teardown(_ => rimraf(dir))
     let warned = false
     const u = new Unpack({
       cwd: dir,
@@ -1641,11 +1714,6 @@
     u.end(data)
   })
 
-  t.test('cleanup', t => {
-    rimraf.sync(dir)
-    t.end()
-  })
-
   t.end()
 })
 
@@ -1669,7 +1737,7 @@ t.test('set owner', t => {
 
   t.test('sync', t => {
     mkdirp.sync(dir)
-    t.teardown(_ => rimraf.sync(dir))
+    t.teardown(_ => rimraf(dir))
     called = 0
     const u = new Unpack.Sync({ cwd: dir, preserveOwner: true })
     u.end(data)
@@ -1679,7 +1747,7 @@ t.test('set owner', t => {
 
   t.test('async', t => {
     mkdirp.sync(dir)
-    t.teardown(_ => rimraf.sync(dir))
+    t.teardown(_ => rimraf(dir))
     called = 0
     const u = new Unpack({ cwd: dir, preserveOwner: true })
     u.end(data)
@@ -1698,15 +1766,15 @@
   const un = mutateFS.fail('chown', poop)
   const unf = mutateFS.fail('fchown', poop)
   const unl = mutateFS.fail('lchown', poop)
-  t.teardown(_ => {
-    rimraf.sync(dir)
+  t.teardown(async _ => {
     un()
     unf()
     unl()
+    await rimraf(dir)
   })
 
-  t.beforeEach(cb => mkdirp(dir, cb))
-  t.afterEach(cb => rimraf(dir, cb))
+  t.beforeEach(() => mkdirp(dir))
+  t.afterEach(() => rimraf(dir))
 
   const check = t => {
     const dirStat = fs.statSync(dir + '/foo')
@@ -1758,11 +1826,11 @@ t.test('unpack when dir is not writable', t => {
   ])
 
   const dir = path.resolve(unpackdir, 'nowrite-dir')
-  t.beforeEach(cb => mkdirp(dir, cb))
-  t.afterEach(cb => rimraf(dir, cb))
+  t.beforeEach(() => mkdirp(dir))
+  t.afterEach(() => rimraf(dir))
 
   const check = t => {
-    t.equal(fs.statSync(dir + '/a').mode & 0o7777, 0o744)
+    t.equal(fs.statSync(dir + '/a').mode & 0o7777, isWindows ? 0o666 : 0o744)
     t.equal(fs.readFileSync(dir + '/a/b', 'utf8'), 'a')
     t.end()
   }
@@ -1795,8 +1863,8 @@ t.test('transmute chars on windows', t => {
   ])
 
   const dir = path.resolve(unpackdir, 'winchars')
-  t.beforeEach(cb => mkdirp(dir, cb))
-  t.afterEach(cb => rimraf(dir, cb))
+  t.beforeEach(() => mkdirp(dir))
+  t.afterEach(() => rimraf(dir))
 
   const hex = 'ef80bcef81bcef80beef80bfef80ba2e747874'
   const uglyName = Buffer.from(hex, 'hex').toString()
@@ -1831,8 +1899,17 @@
 t.test('safely transmute chars on windows with absolutes', t => {
   // don't actually make the directory
-  const poop = new Error('poop')
-  t.teardown(mutateFS.fail('mkdir', poop))
+  const fsMock = {
+    ...fs,
+    mkdir: (path, mode, cb) => process.nextTick(cb),
+    mkdirSync: (path) => {},
+  }
+  const mkdirp = requireInject('mkdirp', { fs: fsMock })
+  const Unpack = requireInject('../lib/unpack.js', {
+    fs: fsMock,
+    mkdirp,
+    '../lib/mkdir.js': requireInject('../lib/mkdir.js', { fs: fsMock, mkdirp }),
+  })
 
   const data = makeTar([
     {
@@ -1855,8 +1932,11 @@
   })
   u.on('entry', entry => {
     t.equal(entry.path, uglyPath)
+    entry.skipped = true
+    entry.resume()
     t.end()
   })
+  u.on('error', () => {})
   u.end(data)
 })
 
@@ -1876,10 +1956,10 @@ t.test('use explicit chmod when required by umask', t => {
     ''
   ])
 
-  const check = t => {
+  const check = async t => {
     const st = fs.statSync(basedir + '/x/y/z')
-    t.equal(st.mode & 0o777, 0o775)
-    rimraf.sync(basedir)
+    t.equal(st.mode & 0o777, isWindows ? 0o666 : 0o775)
+    await rimraf(basedir)
     t.end()
   }
 
@@ -1964,11 +2044,11 @@ t.test('chown implicit dirs and also the entries', t => {
     ''
   ])
 
-  const check = t => {
+  const check = async t => {
     currentTest = null
     t.equal(chowns, 8)
     chowns = 0
-    rimraf.sync(basedir)
+    await rimraf(basedir)
     t.end()
   }
 
@@ -2018,7 +2098,7 @@ t.test('bad cwd setting', t => {
   const basedir = path.resolve(unpackdir, 'bad-cwd')
   mkdirp.sync(basedir)
-  t.teardown(_ => rimraf.sync(basedir))
+  t.teardown(_ => rimraf(basedir))
 
   const cases = [
     // the cwd itself
@@ -2060,17 +2140,17 @@
 
     t.throws(_ => new Unpack.Sync(opt).end(data), {
       name: 'CwdError',
-      message: 'ENOTDIR: Cannot cd into \'' + cwd + '\'',
-      path: cwd,
-      code: 'ENOTDIR'
+      message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'',
+      path: normPath(cwd),
+      code: 'ENOTDIR',
     })
 
     new Unpack(opt).on('error', er => {
      t.match(er, {
        name: 'CwdError',
-        message: 'ENOTDIR: Cannot cd into \'' + cwd + '\'',
-        path: cwd,
-        code: 'ENOTDIR'
+        message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'',
+        path: normPath(cwd),
+        code: 'ENOTDIR',
      })
      t.end()
    }).end(data)
@@ -2082,17 +2162,17 @@
 
    t.throws(_ => new Unpack.Sync(opt).end(data), {
      name: 'CwdError',
-      message: 'ENOENT: Cannot cd into \'' + cwd + '\'',
-      path: cwd,
-      code: 'ENOENT'
+      message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'',
+      path: normPath(cwd),
+      code: 'ENOENT',
    })
 
    new Unpack(opt).on('error', er => {
      t.match(er, {
        name: 'CwdError',
-        message: 'ENOENT: Cannot cd into \'' + cwd + '\'',
-        path: cwd,
-        code: 'ENOENT'
+        message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'',
+        path: normPath(cwd),
+        code: 'ENOENT',
      })
      t.end()
    }).end(data)
@@ -2104,7 +2184,7 @@
 
 t.test('transform', t => {
   const basedir = path.resolve(unpackdir, 'transform')
-  t.teardown(_ => rimraf.sync(basedir))
+  t.teardown(_ => rimraf(basedir))
 
   const cases = {
     'emptypax.tar': {
@@ -2152,10 +2232,9 @@
     t.test(tarfile, t => {
       const tf = path.resolve(tars, tarfile)
       const dir = path.resolve(basedir, tarfile)
-      t.beforeEach(cb => {
-        rimraf.sync(dir)
-        mkdirp.sync(dir)
-        cb()
+      t.beforeEach(async () => {
+        await rimraf(dir)
+        await mkdirp(dir)
       })
 
       const check = t => {
@@ -2203,7 +2282,7 @@
 t.test('transform error', t => {
   const dir = path.resolve(unpackdir, 'transform-error')
   mkdirp.sync(dir)
-  t.teardown(_ => rimraf.sync(dir))
+  t.teardown(_ => rimraf(dir))
 
   const tarfile = path.resolve(tars, 'body-byte-counts.tar')
   const tardata = fs.readFileSync(tarfile)
@@ -2276,13 +2355,12 @@ t.test('futimes/fchown failures', t => {
   const poop = new Error('poop')
   const second = new Error('second error')
 
-  const reset = cb => {
-    rimraf.sync(dir)
-    mkdirp.sync(dir)
-  }
+  t.beforeEach(async () => {
+    await rimraf(dir)
+    await mkdirp(dir)
+  })
 
-  reset()
-  t.teardown(() => rimraf.sync(dir))
+  t.teardown(() => rimraf(dir))
 
   const methods = ['utimes', 'chown']
   methods.forEach(method => {
@@ -2297,13 +2375,11 @@
       t.test('async unpack', t => {
         t.plan(2)
         t.test('strict', t => {
-          reset()
           const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc })
           unpack.on('finish', t.end)
           unpack.end(tardata)
         })
         t.test('loose', t => {
-          reset()
           const unpack = new Unpack({ cwd: dir, forceChown: fc })
           unpack.on('finish', t.end)
           unpack.on('warn', t.fail)
@@ -2313,13 +2389,11 @@
       t.test('sync unpack', t => {
         t.plan(2)
         t.test('strict', t => {
-          reset()
           const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc })
           unpack.end(tardata)
           t.end()
         })
        t.test('loose', t => {
-          reset()
          const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc })
          unpack.on('warn', t.fail)
          unpack.end(tardata)
@@ -2339,14 +2413,12 @@
       t.test('async unpack', t => {
         t.plan(2)
         t.test('strict', t => {
-          reset()
           const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc })
           t.plan(3)
           unpack.on('error', er => t.equal(er, poop))
           unpack.end(tardata)
         })
         t.test('loose', t => {
-          reset()
           const unpack = new Unpack({ cwd: dir, forceChown: fc })
           t.plan(3)
           unpack.on('warn', (m, er) => t.equal(er, poop))
@@ -2356,14 +2428,12 @@
       t.test('sync unpack', t => {
         t.plan(2)
         t.test('strict', t => {
-          reset()
           const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc })
           t.plan(3)
           unpack.on('error', er => t.equal(er, poop))
           unpack.end(tardata)
         })
         t.test('loose', t => {
-          reset()
           const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc })
           t.plan(3)
           unpack.on('warn', (m, er) => t.equal(er, poop))
@@ -2379,7 +2449,7 @@ t.test('onentry option is preserved', t => {
   const basedir = path.resolve(unpackdir, 'onentry-method')
   mkdirp.sync(basedir)
-  t.teardown(() => rimraf.sync(basedir))
+  t.teardown(() => rimraf(basedir))
 
   let oecalls = 0
   const onentry = entry => oecalls++
@@ -2434,7 +2504,7 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => {
   const basedir = path.resolve(unpackdir, 'hardlink-reuse')
   mkdirp.sync(basedir)
-  t.teardown(() => rimraf.sync(basedir))
+  t.teardown(() => rimraf(basedir))
 
   const now = new Date('2018-04-30T18:30:39.025Z')
@@ -2494,7 +2564,39 @@
   t.end()
 })
 
-t.test('drop entry from dirCache if no longer a directory', t => {
+t.test('trying to unpack a javascript file should fail', t => {
+  const data = fs.readFileSync(__filename)
+  const dataGzip = Buffer.concat([Buffer.from([0x1f, 0x8b]), data])
+  const basedir = path.resolve(unpackdir, 'bad-archive')
+  t.test('abort if gzip has an error', t => {
+    t.plan(2)
+    const expect = {
+      message: /^zlib/,
+      errno: Number,
+      code: /^Z/,
+    }
+    const opts = {
+      cwd: basedir,
+      gzip: true,
+    }
+    new Unpack(opts)
+      .once('error', er => t.match(er, expect, 'async emits'))
+      .on('error', () => { /* zlib emits a few times here */ })
+      .end(dataGzip)
+    const skip = !/^v([0-9]|1[0-4])\./.test(process.version) ? false
+      : 'node prior to v14 did not raise sync zlib errors properly'
+    t.test('sync throws', { skip }, t => {
+      t.plan(1)
+      t.throws(() => new UnpackSync(opts).end(dataGzip), expect, 'sync throws')
+    })
+  })
+
+  t.end()
+})
+
+t.test('drop entry from dirCache if no longer a directory', {
+  skip: isWindows && 'symlinks not fully supported',
+}, t => {
   const dir = path.resolve(unpackdir, 'dir-cache-error')
   mkdirp.sync(dir + '/sync/y')
   mkdirp.sync(dir + '/async/y')
@@ -2566,13 +2668,24 @@ t.test('using strip option when top level file exists', t => {
       size: 'a'.length,
     },
     'a',
+    {
+      path: 'y',
+      type: 'GNUDumpDir',
+    },
+    {
+      path: 'y/b',
+      type: 'File',
+      size: 'b'.length,
+    },
+    'b',
     '',
     '',
   ])
   t.plan(2)
   const check = (t, path) => {
     t.equal(fs.statSync(path).isDirectory(), true)
-    t.equal(fs.lstatSync(path + '/a').isFile(), true)
+    t.equal(fs.readFileSync(path + '/a', 'utf8'), 'a')
+    t.equal(fs.readFileSync(path + '/b', 'utf8'), 'b')
     t.throws(() => fs.statSync(path + '/top'), { code: 'ENOENT' })
     t.end()
   }
diff --git a/test/update.js b/test/update.js
index 0d3394f7..70e9b8e1 100644
--- a/test/update.js
+++ b/test/update.js
@@ -4,58 +4,28 @@ const t = require('tap')
 const u = require('../lib/update.js')
 const path = require('path')
 const fs = require('fs')
-const mkdirp = require('mkdirp')
-const rimraf = require('rimraf')
 const mutateFS = require('mutate-fs')
+const {resolve} = require('path')
 
 const fixtures = path.resolve(__dirname, 'fixtures')
-const dir = path.resolve(fixtures, 'update')
 const tars = path.resolve(fixtures, 'tars')
-const file = dir + '/body-byte-counts.tar'
-const fileNoNulls = dir + '/no-null-eof.tar'
-const fileTruncHead = dir + '/truncated-head.tar'
-const fileTruncBody = dir + '/truncated-body.tar'
-const fileNonExistent = dir + '/does-not-exist.tar'
-const fileZeroByte = dir + '/zero.tar'
-const fileEmpty = dir + '/empty.tar'
-const fileCompressed = dir + '/compressed.tgz'
 const zlib = require('zlib')
 const spawn = require('child_process').spawn
 
-t.teardown(_ => rimraf.sync(dir))
-
-const reset = cb => {
-  rimraf.sync(dir)
-  mkdirp.sync(dir)
-  const data = fs.readFileSync(tars + '/body-byte-counts.tar')
-  fs.writeFileSync(file, data)
-
-  const dataNoNulls = data.slice(0, data.length - 1024)
-  fs.writeFileSync(fileNoNulls, dataNoNulls)
-
-  const dataTruncHead = Buffer.concat([dataNoNulls, data.slice(0, 500)])
-  fs.writeFileSync(fileTruncHead, dataTruncHead)
-
-  const dataTruncBody = Buffer.concat([dataNoNulls, data.slice(0, 700)])
-  fs.writeFileSync(fileTruncBody, dataTruncBody)
-
-  fs.writeFileSync(fileZeroByte, '')
-  fs.writeFileSync(fileEmpty, Buffer.alloc(1024))
-
-  fs.writeFileSync(fileCompressed, zlib.gzipSync(data))
-
-  if (cb)
-    cb()
+const data = fs.readFileSync(tars + '/body-byte-counts.tar')
+const dataNoNulls = data.slice(0, data.length - 1024)
+const fixtureDef = {
+  'body-byte-counts.tar': data,
+  'no-null-eof.tar': dataNoNulls,
+  'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]),
+  'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]),
+  'zero.tar': Buffer.from(''),
+  'empty.tar': Buffer.alloc(512),
+  'compressed.tgz': zlib.gzipSync(data),
 }
 
-t.test('setup', t => {
-  reset(t.end)
-})
-
 t.test('basic file add to archive (good or truncated)', t => {
-  t.beforeEach(reset)
-
   const check = (file, t) => {
     const c = spawn('tar', ['tf', file])
     const out = []
@@ -63,7 +33,7 @@ t.test('basic file add to archive (good or truncated)', t => {
     c.on('close', (code, signal) => {
       t.equal(code, 0)
       t.equal(signal, null)
-      const actual = Buffer.concat(out).toString().trim().split('\n')
+      const actual = Buffer.concat(out).toString().trim().split(/\r?\n/)
       t.same(actual, [
         '1024-bytes.txt',
         '512-bytes.txt',
@@ -75,50 +45,68 @@
       ])
     })
   }
 
-  ;[file,
-    fileNoNulls,
-    fileTruncHead,
-    fileTruncBody
-  ].forEach(file => {
-    t.test(path.basename(file), t => {
-      const fileList = [path.basename(__filename)]
-      t.test('sync', t => {
+  const files = [
+    'body-byte-counts.tar',
+    'no-null-eof.tar',
+    'truncated-head.tar',
+    'truncated-body.tar',
+  ]
+  const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => {
+    s[k] = v
+    return s
+  }, {})
+  const fileList = [path.basename(__filename)]
+  t.test('sync', t => {
+    t.plan(files.length)
+    const dir = t.testdir(td)
+    for (const file of files) {
+      t.test(file, t => {
         u({
           sync: true,
-          file: file,
-          cwd: __dirname
+          file: resolve(dir, file),
+          cwd: __dirname,
         }, fileList)
-        check(file, t)
+        check(resolve(dir, file), t)
       })
+    }
+  })
 
-      t.test('async cb', t => {
+  t.test('async cb', t => {
+    t.plan(files.length)
+    const dir = t.testdir(td)
+    for (const file of files) {
+      t.test(file, t => {
         u({
-          file: file,
-          cwd: __dirname
+          file: resolve(dir, file),
+          cwd: __dirname,
        }, fileList, er => {
          if (er)
            throw er
-          check(file, t)
+          check(resolve(dir, file), t)
        })
      })
+    }
+  })
 
-      t.test('async promise', t => {
+  t.test('async', t => {
+    t.plan(files.length)
+    const dir = t.testdir(td)
+    for (const file of files) {
+      t.test(file, t => {
         u({
-          file: file,
-          cwd: __dirname
-        }, fileList).then(_ => check(file, t))
+          file: resolve(dir, file),
+          cwd: __dirname,
+        }, fileList).then(() => {
+          check(resolve(dir, file), t)
+        })
       })
-
-      t.end()
-    })
+    }
   })
 
   t.end()
 })
 
 t.test('add to empty archive', t => {
-  t.beforeEach(reset)
-
   const check = (file, t) => {
     const c = spawn('tar', ['tf', file])
     const out = []
@@ -126,7 +114,7 @@ t.test('add to empty archive', t => {
     c.on('close', (code, signal) => {
       t.equal(code, 0)
       t.equal(signal, null)
-      const actual = Buffer.concat(out).toString().trim().split('\n')
+      const actual = Buffer.concat(out).toString().trim().split(/\r?\n/)
       t.same(actual, [
         path.basename(__filename)
       ])
@@ -134,66 +122,89 @@
   }
 
-  ;[fileNonExistent,
-    fileEmpty,
-    fileZeroByte
-  ].forEach(file => {
-    t.test(path.basename(file), t => {
-      const fileList = [path.basename(__filename)]
-      t.test('sync', t => {
+  const files = [
+    'empty.tar',
+    'zero.tar',
+  ]
+  const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => {
+    s[k] = v
+    return s
+  }, {})
+  files.push('not-existing.tar')
+
+  t.test('sync', t => {
+    const dir = t.testdir(td)
+    t.plan(files.length)
+    for (const file of files) {
+      t.test(file, t => {
         u({
           sync: true,
-          file: file,
-          cwd: __dirname
-        }, fileList)
-        check(file, t)
+          file: resolve(dir, file),
+          cwd: __dirname,
+        }, [path.basename(__filename)])
+        check(resolve(dir, file), t)
       })
+    }
+  })
 
-      t.test('async cb', t => {
+  t.test('async cb', t => {
+    const dir = t.testdir(td)
+    t.plan(files.length)
+    for (const file of files) {
+      t.test(file, t => {
         u({
-          file: file,
-          cwd: __dirname
-        }, fileList, er => {
+          file: resolve(dir, file),
+          cwd: __dirname,
+        }, [path.basename(__filename)], er => {
           if (er)
             throw er
-          check(file, t)
+          check(resolve(dir, file), t)
         })
       })
+    }
+  })
 
-      t.test('async promise', t => {
+  t.test('async', async t => {
+    const dir = t.testdir(td)
+    t.plan(files.length)
+    for (const file of files) {
+      t.test(file, t => {
         u({
-          file: file,
-          cwd: __dirname
-        }, fileList).then(_ => check(file, t))
+          file: resolve(dir, file),
+          cwd: __dirname,
+        }, [path.basename(__filename)]).then(() => {
+          check(resolve(dir, file), t)
+        })
       })
-
-      t.end()
-    })
+    }
   })
 
   t.end()
 })
 
 t.test('cannot append to gzipped archives', t => {
-  reset()
+  const dir = t.testdir({
+    'compressed.tgz': fixtureDef['compressed.tgz'],
+  })
+  const file = resolve(dir, 'compressed.tgz')
 
   const expect = new Error('cannot append to compressed archives')
   const expectT = new TypeError('cannot append to compressed archives')
 
   t.throws(_ => u({
-    file: fileCompressed,
+    file,
     cwd: __dirname,
     gzip: true
   }, [path.basename(__filename)]), expectT)
 
   t.throws(_ => u({
-    file: fileCompressed,
+    file,
     cwd: __dirname,
     sync: true
   }, [path.basename(__filename)]), expect)
 
   u({
-    file: fileCompressed,
+    file,
     cwd: __dirname,
   }, [path.basename(__filename)], er => {
     t.match(er, expect)
@@ -209,88 +220,150 @@ t.test('other throws', t => {
 })
 
 t.test('broken open', t => {
+  const dir = t.testdir({
+    'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'],
+  })
+  const file = resolve(dir, 'body-byte-counts.tar')
   const poop = new Error('poop')
   t.teardown(mutateFS.fail('open', poop))
   t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop)
-  u({ file: file }, ['README.md'], er => {
+  u({ file }, ['README.md'], er => {
     t.match(er, poop)
     t.end()
   })
 })
 
 t.test('broken fstat', t => {
+  const td = {
+    'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'],
+  }
   const poop = new Error('poop')
-  t.teardown(mutateFS.fail('fstat', poop))
-  t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop)
-  u({ file: file }, ['README.md'], er => {
-    t.match(er, poop)
+  t.test('sync', t => {
+    const dir = t.testdir(td)
+    const file = resolve(dir, 'body-byte-counts.tar')
+    t.teardown(mutateFS.fail('fstat', poop))
+    t.throws(_ => u({ sync: true, file }, ['README.md']), poop)
     t.end()
   })
+  t.test('async', t => {
+    const dir = t.testdir(td)
+    const file = resolve(dir, 'body-byte-counts.tar')
+    t.teardown(mutateFS.fail('fstat', poop))
+    u({ file }, ['README.md'], async er => {
+      t.match(er, poop)
+      t.end()
+    })
+  })
+  t.end()
 })
 
 t.test('broken read', t => {
+  const dir = t.testdir({
+    'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'],
+  })
+  const file = resolve(dir, 'body-byte-counts.tar')
   const poop = new Error('poop')
   t.teardown(mutateFS.fail('read', poop))
-  t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop)
-  u({ file: file }, ['README.md'], er => {
+  t.throws(_ => u({ sync: true, file }, ['README.md']), poop)
+  u({ file }, ['README.md'], er => {
     t.match(er, poop)
     t.end()
   })
 })
 
 t.test('do not add older file', t => {
-  reset()
+  const dir = t.testdir({
+    'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'],
+    '1024-bytes.txt': '.'.repeat(1024),
+    foo: 'foo',
+  })
+  const file = resolve(dir, 'body-byte-counts.tar')
 
-  const f = dir + '/1024-bytes.txt'
-  fs.writeFileSync(f, new Array(1025).join('.'))
+  const f = resolve(dir, '1024-bytes.txt')
   const oldDate = new Date('1997-04-10T16:57:47.000Z')
   fs.utimesSync(f, oldDate, oldDate)
 
+  // file size should not change
+  const expect = fixtureDef['body-byte-counts.tar'].length
   const check = t => {
-    t.equal(fs.statSync(file).size, 5120)
+    t.equal(fs.statSync(file).size, expect)
     t.end()
   }
 
   t.test('sync', t => {
-    u({ file: file, cwd: dir, sync: true }, ['1024-bytes.txt'])
+    u({
+      mtimeCache: new Map(),
+      file,
+      cwd: dir,
+      sync: true,
+      filter: path => path === '1024-bytes.txt',
+    }, ['1024-bytes.txt', 'foo'])
     check(t)
   })
 
   t.test('async', t => {
-    u({ file: file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t))
+    u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t))
+  })
+
+  t.test('async cb', t => {
+    u({ file, cwd: dir }, ['1024-bytes.txt'], er => {
+      if (er)
+        throw er
+      check(t)
+    })
   })
 
   t.end()
 })
 
 t.test('do add newer file', t => {
-  t.beforeEach(cb => {
-    reset()
-    const f = dir + '/1024-bytes.txt'
-    fs.writeFileSync(f, new Array(1025).join('.'))
-    const newDate = new Date('2017-05-01T22:06:43.736Z')
+  const setup = t => {
+    const dir = t.testdir({
+      'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'],
+      '1024-bytes.txt': '.'.repeat(1024),
+      foo: 'foo',
+    })
+
+    const f = resolve(dir, '1024-bytes.txt')
+    const newDate = new Date(Date.now() + 24 * 60 * 60 * 1000)
     fs.utimesSync(f, newDate, newDate)
-    cb()
-  })
+    return dir
+  }
 
-  const check = t => {
-    t.equal(fs.statSync(file).size, 6656)
+  // a chunk for the header, then 2 for the body
+  const expect = fixtureDef['body-byte-counts.tar'].length + 512 + 1024
+  const check = (file, t) => {
+    t.equal(fs.statSync(file).size, expect)
     t.end()
   }
 
   t.test('sync', t => {
+    const dir = setup(t)
+    const file = resolve(dir, 'body-byte-counts.tar')
     u({
       mtimeCache: new Map(),
-      file: file,
+      file,
       cwd: dir,
       sync: true,
-      filter: path => path === '1024-bytes.txt'
-    }, ['1024-bytes.txt', 'compressed.tgz'])
-    check(t)
+      filter: path => path === '1024-bytes.txt',
+    }, ['1024-bytes.txt', 'foo'])
+    check(file, t)
   })
 
   t.test('async', t => {
-    u({ file: file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t))
+    const dir = setup(t)
+    const file = resolve(dir, 'body-byte-counts.tar')
+    u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(file, t))
+  })
+
+  t.test('async cb', t => {
+    const dir = setup(t)
+    const file = resolve(dir, 'body-byte-counts.tar')
+    u({ file, cwd: dir }, ['1024-bytes.txt'], er => {
+      if (er)
+        throw er
+      check(file, t)
+    })
   })
 
   t.end()
diff --git a/test/write-entry.js b/test/write-entry.js
index a7e49166..ac8cf862 100644
--- a/test/write-entry.js
+++ b/test/write-entry.js
@@ -28,6 +28,7 @@ const Parser = require('../lib/parse.js')
 const rimraf = require('rimraf')
 const mkdirp = require('mkdirp')
 const isWindows = process.platform === 'win32'
+const normPath = require('../lib/normalize-windows-path.js')
 
 t.test('set up', t => {
   const one = fs.statSync(files + '/hardlink-1')
@@ -36,7 +37,7 @@ t.test('set up', t => {
     fs.unlinkSync(files + '/hardlink-2')
     fs.linkSync(files + '/hardlink-1', files + '/hardlink-2')
   }
-  chmodr.sync(files, 0o644)
+  chmodr.sync(files, isWindows ? 0o666 : 0o644)
   t.end()
 })
 
@@ -63,7 +64,7 @@ t.test('100 byte filename', t => {
     cksumValid: true,
     needPax: false,
     path: '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 100,
     linkpath: null,
     uname: 'isaacs',
@@ -91,7 +92,7 @@
     cksumValid: true,
     needPax: false,
     path: '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 100,
     linkpath: '',
     uname: 'isaacs',
@@ -139,7 +140,7 @@ t.test('directory', t => {
     cksumValid: true,
     needPax: false,
     path: 'dir/',
-    mode: 0o755,
+    mode: isWindows ? 0o777 : 0o755,
     size: 0,
     linkpath: null,
     uname: 'isaacs',
@@ -155,7 +156,7 @@
     cksumValid: true,
     needPax: false,
     path: 'dir/',
-    mode: 0o755,
+    mode: isWindows ? 0o777 : 0o755,
     size: 0,
     linkpath: null,
     uname: 'isaacs',
@@ -179,7 +180,7 @@ t.test('empty path for cwd', t => {
     cksumValid: true,
     needPax: false,
     path: './',
-    mode: fs.statSync('./').mode & 0o7777,
+    mode: isWindows ? 0o777 : fs.statSync('./').mode & 0o7777,
     size: 0,
     linkpath: null,
     uname: 'isaacs',
@@ -191,7 +192,9 @@
   })
 })
 
-t.test('symlink', t => {
+t.test('symlink', {
+  skip: isWindows && 'symlinks not fully supported',
+}, t => {
   const ws = new WriteEntry('symlink', { cwd: files })
   let out = []
   ws.on('data', c => out.push(c))
@@ -230,7 +233,7 @@ t.test('zero-byte file', t => {
     path: 'files/zero-byte.txt',
     cksumValid: true,
     needPax: false,
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 0,
     linkpath: null,
     uname: 'isaacs',
@@ -282,7 +285,7 @@ t.test('hardlinks', t => {
     path: 'files/hardlink-2',
     cksumValid: true,
     needPax: false,
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 0,
     linkpath: 'files/hardlink-1',
     uname: 'isaacs',
@@ -314,7 +317,7 @@ t.test('hardlinks far away', t => {
     path: 'files/hardlink-2',
     cksumValid: true,
     needPax: false,
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 26,
     linkpath: null,
     uname: 'isaacs',
@@ -338,7 +341,7 @@ t.test('really deep path', t => {
     cksumValid: true,
     needPax: true,
     path: 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 100,
     linkpath: null,
     uname: 'isaacs',
@@ -363,7 +366,7 @@ t.test('no pax', t => {
     cksumValid: true,
     needPax: true,
     path: 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 100,
     linkpath: null,
     uname: 'isaacs',
@@ -394,10 +397,12 @@ t.test('absolute path', t => {
   const absolute = path.resolve(files, '512-bytes.txt')
   const { root } = path.parse(absolute)
   const f = root + root + root + absolute
-  const warn = root + root + root + root
+  const warn = normPath(isWindows ? root : root + root + root + root)
 
   t.test('preservePaths=false strict=false', t => {
     const warnings = []
-    const ws = new WriteEntry(f, {
+    // on windows, c:\c:\c:\... is a valid path, so just use the
+    // single-root absolute version of it.
+    const ws = new WriteEntry(isWindows ? absolute : f, {
       cwd: files,
       onwarn: (m, p) => warnings.push([m, p])
     })
@@ -407,14 +412,15 @@
       out = Buffer.concat(out)
       t.equal(out.length, 1024)
       t.match(warnings, [[
-        'stripping ' + warn + ' from absolute path', f
+        `stripping ${warn} from absolute path`,
+        normPath(isWindows ? absolute : f),
       ]])
 
       t.match(ws.header, {
         cksumValid: true,
         needPax: false,
-        path: f.replace(/^(\/|[a-z]:\\\\){4}/, ''),
-        mode: 0o644,
+        path: normPath(absolute.replace(/^(\/|[a-z]:[/\\])*/i, '')),
+        mode: isWindows ? 0o666 : 0o644,
         size: 512,
         linkpath: null,
         uname: 'isaacs',
@@ -433,7 +439,7 @@
 
     t.test('strict=' + strict, t => {
       const warnings = []
-      const ws = new WriteEntry(f, {
+      const ws = new WriteEntry(isWindows ? absolute : f, {
         cwd: files,
         strict: strict,
         preservePaths: true,
@@ -448,8 +454,8 @@
       t.match(ws.header, {
         cksumValid: true,
         needPax: false,
-        path: f,
-        mode: 0o644,
+        path: normPath(isWindows ? absolute : f),
+        mode: isWindows ? 0o666 : 0o644,
         size: 512,
         linkpath: null,
         uname: 'isaacs',
@@ -465,11 +471,13 @@
 
   t.test('preservePaths=false strict=true', t => {
     t.throws(_ => {
-      new WriteEntry(f, {
+      new WriteEntry(isWindows ? absolute : f, {
         strict: true,
         cwd: files
       })
-    }, { message: /stripping .* from absolute path/, data: f })
+    }, {
+      message: /stripping .* from absolute path/
+    })
     t.end()
   })
 
@@ -490,7 +498,7 @@ t.test('no user environ, sets uname to empty string', t => {
     cksumValid: true,
     needPax: false,
     path: '512-bytes.txt',
-    mode: 0o644,
+    mode: isWindows ? 0o666 : 0o644,
     size: 512,
     uname: '',
     linkpath: null,
@@ -529,17 +537,17 @@ t.test('an unsuppored type', {
 
 t.test('readlink fail', t => {
   const expect = {
-    message: 'EINVAL: invalid argument, readlink \'' + __filename + '\'',
-    code: 'EINVAL',
     syscall: 'readlink',
-    path: __filename
+    path: String,
   }
   // pretend everything is a symbolic link, then read something that isn't
-  t.tearDown(mutateFS.statType('SymbolicLink'))
-  t.throws(_ => new WriteEntry.Sync('write-entry.js', { cwd: __dirname }),
-    expect)
+  t.teardown(mutateFS.statType('SymbolicLink'))
+  t.throws(_ => {
+    return new WriteEntry.Sync('write-entry.js', { cwd: __dirname })
+  }, expect)
   new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => {
     t.match(er, expect)
+    t.equal(normPath(er.path), normPath(__filename))
     t.end()
   })
 })
@@ -560,10 +568,16 @@ t.test('read fail', t => {
     code: 'EISDIR',
     syscall: 'read'
   }
-  // pretend everything is a symbolic link, then read something that isn't
-  t.tearDown(mutateFS.statType('File'))
-  t.throws(_ => new WriteEntry.Sync('fixtures', { cwd: __dirname }),
-    expect)
+  // pretend everything is a file, then read something that isn't
+  t.teardown(mutateFS.statMutate((er, st) => {
+    if (er)
+      return [er, st]
+    st.isFile = () => true
+    st.size = 123
+  }))
+  t.throws(_ => new WriteEntry.Sync('fixtures', {
+    cwd: __dirname,
+  }), expect)
   new WriteEntry('fixtures', { cwd: __dirname }).on('error', er => {
     t.match(er, expect)
     t.end()
@@ -574,7 +588,7 @@ t.test('read invalid EOF', t => {
   t.tearDown(mutateFS.mutate('read', (er, br) => [er, 0]))
   const expect = {
     message: 'encountered unexpected EOF',
-    path: __filename,
+    path: normPath(__filename),
     syscall: 'read',
     code: 'EOF'
   }
@@ -594,7 +608,7 @@ t.test('read overflow expectation', t => {
   const f = '512-bytes.txt'
   const expect = {
     message: 'did not encounter expected EOF',
-    path: path.resolve(files, f),
+    path: normPath(path.resolve(files, f)),
     syscall: 'read',
     code: 'EOF'
   }
@@ -952,7 +966,7 @@ t.test('write entry from read entry', t => {
     path: '$',
     type: 'File',
     size: 10,
-    mode: 0o755,
+    mode: isWindows ? 0o777 : 0o755,
     uid: 123,
     gid: 321,
     ctime: new Date('1979-07-01'),
@@ -993,7 +1007,7 @@ t.test('write entry from read entry', t => {
     path: '$',
     type: 'Directory',
     size: 0,
-    mode: 0o755,
+    mode: isWindows ? 0o777 : 0o755,
     uid: 123,
     gid: 321,
     ctime: new Date('1979-07-01'),
@@ -1675,3 +1689,15 @@ t.test('hard links from tar entries and no prefix', t => {
 
   t.end()
 })
+
+t.test('myuid set by getuid() if available, otherwise 0', t => {
+  const {getuid} = process
+  process.getuid = null
+  const noUid = new WriteEntry(__filename)
+  t.equal(noUid.myuid, 0, 'set to zero if no getuid function')
+  process.getuid = () => 123456789
+  const hasUid = new WriteEntry(__filename)
+  t.equal(hasUid.myuid, 123456789, 'set to process.getuid()')
+  process.getuid = getuid
+  t.end()
+})