This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

feat: adds support for -X symbolic mode and recursive chmod #73

Merged · 7 commits · Jan 15, 2020
8 changes: 5 additions & 3 deletions package.json
@@ -21,7 +21,7 @@
"release": "aegir release",
"release-minor": "aegir release --type minor",
"release-major": "aegir release --type major",
"coverage": "nyc --reporter=text --reporter=lcov npm run test:node",
"coverage": "nyc --reporter=text --reporter=lcov --reporter=html npm run test:node",
"dep-check": "aegir dep-check"
},
"repository": {
@@ -50,6 +50,7 @@
"detect-webworker": "^1.0.0",
"dirty-chai": "^2.0.1",
"form-data": "^3.0.0",
"ipfs-block": "^0.8.1",
"ipfs-block-service": "~0.16.0",
"ipfs-repo": "^0.30.1",
"ipld": "~0.25.0",
@@ -72,11 +73,12 @@
"interface-datastore": "^0.8.0",
"ipfs-multipart": "^0.3.0",
"ipfs-unixfs": "^0.3.0",
"ipfs-unixfs-exporter": "^0.40.0",
"ipfs-unixfs-importer": "^0.43.1",
"ipfs-unixfs-exporter": "^0.41.0",
"ipfs-unixfs-importer": "^0.44.0",
"ipfs-utils": "^0.4.2",
"ipld-dag-pb": "^0.18.0",
"it-last": "^1.0.1",
"it-pipe": "^1.0.1",
"joi-browser": "^13.4.0",
"mortice": "^2.0.0",
"multicodec": "^1.0.0",
87 changes: 71 additions & 16 deletions src/core/chmod.js
@@ -12,6 +12,13 @@ const updateMfsRoot = require('./utils/update-mfs-root')
const { DAGNode } = require('ipld-dag-pb')
const mc = require('multicodec')
const mh = require('multihashes')
const pipe = require('it-pipe')
const importer = require('ipfs-unixfs-importer')
const exporter = require('ipfs-unixfs-exporter')
const last = require('it-last')
const cp = require('./cp')
const rm = require('./rm')
const persist = require('ipfs-unixfs-importer/src/utils/persist')

const defaultOptions = {
flush: true,
@@ -21,10 +28,10 @@ const defaultOptions = {
recursive: false
}

function calculateModification (mode) {
function calculateModification (mode, originalMode, isDirectory) {
let modification = 0

if (mode.includes('x')) {
if (mode.includes('x') || (mode.includes('X') && (isDirectory || (originalMode & 0o1 || originalMode & 0o10 || originalMode & 0o100)))) {
modification += 1
}

@@ -76,7 +83,7 @@ function calculateSpecial (references, mode, modification) {
}

// https://en.wikipedia.org/wiki/Chmod#Symbolic_modes
function parseSymbolicMode (input, originalMode) {
function parseSymbolicMode (input, originalMode, isDirectory) {
if (!originalMode) {
originalMode = 0
}
@@ -98,7 +105,7 @@
references = 'ugo'
}

let modification = calculateModification(mode)
let modification = calculateModification(mode, originalMode, isDirectory)
modification = calculateUGO(references, modification)
modification = calculateSpecial(references, mode, modification)

@@ -139,6 +146,20 @@
}
}

function calculateMode (mode, metadata) {
if (typeof mode === 'string' || mode instanceof String) {
if (mode.match(/^\d+$/g)) {
mode = parseInt(mode, 8)
} else {
mode = mode.split(',').reduce((curr, acc) => {
return parseSymbolicMode(acc, curr, metadata.isDirectory())
}, metadata.mode)
}
}

return mode
}

module.exports = (context) => {
return async function mfsChmod (path, mode, options) {
options = applyDefaultOptions(options, defaultOptions)
@@ -155,20 +176,54 @@
throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS')
}

let node = await context.ipld.get(cid)
const metadata = UnixFS.unmarshal(node.Data)

if (typeof mode === 'string' || mode instanceof String) {
if (mode.match(/^\d+$/g)) {
mode = parseInt(mode, 8)
} else {
mode = mode.split(',').reduce((curr, acc) => {
return parseSymbolicMode(acc, curr)
}, metadata.mode)
}
if (options.recursive) {
// recursively export from root CID, change perms of each entry then reimport
// but do not reimport files, only manipulate dag-pb nodes
const root = await pipe(
async function * () {
for await (const entry of exporter.recursive(cid, context.ipld)) {
let node = await context.ipld.get(entry.cid)
entry.unixfs.mode = calculateMode(mode, entry.unixfs)
node = new DAGNode(entry.unixfs.marshal(), node.Links)

yield {
path: entry.path,
content: node
}
}
},
(source) => importer(source, context.ipld, {
...options,
dagBuilder: async function * (source, ipld, options) {
for await (const entry of source) {
yield async function () {
const cid = await persist(entry.content, ipld, options)

return {
cid,
path: entry.path,
unixfs: UnixFS.unmarshal(entry.content.Data),
node: entry.content
}
}
}
}
}),
(nodes) => last(nodes)
)

// remove old path from mfs
await rm(context)(path, options)

// add newly created tree to mfs at path
await cp(context)(`/ipfs/${root.cid}`, path, options)

return
}

metadata.mode = mode
let node = await context.ipld.get(cid)
const metadata = UnixFS.unmarshal(node.Data)
metadata.mode = calculateMode(mode, metadata)
node = new DAGNode(metadata.marshal(), node.Links)

const updatedCid = await context.ipld.put(node, mc.DAG_PB, {
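For context, the new behaviour is easiest to see from the call site. Below is a minimal usage sketch, assuming `mfs` is an instance wired up from this module's core API (construction elided); with `recursive: true` the code above exports the DAG beneath the path, rewrites the mode on each dag-pb node without re-importing file content, then swaps the rewritten tree back into MFS via `rm` and `cp`.

// minimal usage sketch - `mfs` is assumed to be an instance of this module's API
async function example (mfs) {
  // 'a+X' adds execute for directories, and for files that already have
  // at least one execute bit set, mirroring chmod's conditional +X
  await mfs.chmod('/data', 'a+X', { recursive: true })

  // octal strings and comma-separated symbolic clauses are also accepted
  await mfs.chmod('/data/readme.txt', '0644')
  await mfs.chmod('/data/bin/run.sh', 'u+x,go-w')
}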
4 changes: 0 additions & 4 deletions src/core/cp.js
@@ -31,10 +31,6 @@ module.exports = (context) => {
throw errCode(new Error('Please supply at least one source'), 'ERR_INVALID_PARAMS')
}

if (!destination) {
throw errCode(new Error('Please supply a destination'), 'ERR_INVALID_PARAMS')
}

options.parents = options.p || options.parents

// make sure all sources exist
2 changes: 0 additions & 2 deletions src/core/mkdir.js
@@ -117,8 +117,6 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options)
name: childName,
hashAlg: options.hashAlg,
cidVersion: options.cidVersion,
mode: options.mode,
mtime: options.mtime,
flush: options.flush
})

17 changes: 13 additions & 4 deletions src/core/stat.js
@@ -74,6 +74,7 @@ const statters = {
if (file.unixfs) {
output.size = file.unixfs.fileSize()
output.type = file.unixfs.type
output.mode = file.unixfs.mode

if (file.unixfs.isDirectory()) {
output.size = 0
@@ -87,10 +88,6 @@
if (file.unixfs.mtime) {
output.mtime = file.unixfs.mtime
}

if (file.unixfs.mode !== undefined && file.unixfs.mode !== null) {
output.mode = file.unixfs.mode
}
}

return output
@@ -102,5 +99,17 @@
sizeLocal: undefined,
withLocality: false
}
},
identity: (file) => {
return {
cid: file.cid,
size: file.node.digest.length,
cumulativeSize: file.node.digest.length,
blocks: 0,
type: 'file', // for go compatibility
local: undefined,
sizeLocal: undefined,
withLocality: false
}
}
}
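The new identity statter covers entries addressed with the identity hash, where the block's bytes are inlined in the multihash digest instead of being stored separately, which is why size and cumulativeSize come straight from file.node.digest.length. A minimal sketch of that relationship, assuming the multihashes package already used elsewhere in this module:

// with the identity hash function the "digest" is the content itself,
// so no block lookup is needed to know its size
const mh = require('multihashes')

const hash = mh.encode(Buffer.from('hello world'), 'identity')
const { digest } = mh.decode(hash)

console.log(digest.length) // 11 - reported by stat as size and cumulativeSize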
22 changes: 16 additions & 6 deletions src/core/utils/add-link.js
@@ -94,10 +94,14 @@ const addToDirectory = async (context, options) => {
options.parent.rmLink(options.name)
options.parent.addLink(new DAGLink(options.name, options.size, options.cid))

// Update mtime
const node = UnixFS.unmarshal(options.parent.Data)
node.mtime = new Date()
options.parent = new DAGNode(node.marshal(), options.parent.Links)

if (node.mtime) {
// Update mtime if previously set
node.mtime = new Date()

options.parent = new DAGNode(node.marshal(), options.parent.Links)
}

const hashAlg = mh.names[options.hashAlg]

@@ -110,7 +114,8 @@

return {
node: options.parent,
cid
cid,
size: options.parent.size
}
}

@@ -120,12 +125,13 @@
} = await addFileToShardedDirectory(context, options)

const result = await last(shard.flush('', context.ipld))
const node = await context.ipld.get(result.cid)

// we have written out the shard, but only one sub-shard will have been written so replace it in the original shard
const oldLink = options.parent.Links
.find(link => link.Name.substring(0, 2) === path[0].prefix)

const newLink = result.node.Links
const newLink = node.Links
.find(link => link.Name.substring(0, 2) === path[0].prefix)

if (oldLink) {
@@ -159,7 +165,11 @@
mode: node.mode
}, options)
shard._bucket = rootBucket
shard.mtime = new Date()

if (node.mtime) {
// update mtime if previously set
shard.mtime = new Date()
}

// load subshards until the bucket & position no longer changes
const position = await rootBucket._findNewBucketAndPos(file.name)
3 changes: 2 additions & 1 deletion src/core/utils/hamt-utils.js
@@ -34,7 +34,8 @@ const updateHamtDirectory = async (context, links, bucket, options) => {

return {
node: parent,
cid
cid,
size: parent.size
}
}

2 changes: 1 addition & 1 deletion src/core/utils/update-tree.js
@@ -47,7 +47,7 @@ const updateTree = async (context, trail, options) => {
child = {
cid: result.cid,
name,
size: result.node.size
size: result.size
}
}
