Commit
Merge branch 'master' into fix/dns-cache-and-http-throttling-in-browser
License: MIT
Signed-off-by: Marcin Rataj <lidel@lidel.org>
Showing 22 changed files with 533 additions and 729 deletions.
src/core/components/files-regular/add-async-iterator.js (148 additions, 0 deletions)
@@ -0,0 +1,148 @@
'use strict'

const importer = require('ipfs-unixfs-importer')
const normaliseAddInput = require('ipfs-utils/src/files/normalise-input')
const { parseChunkerString } = require('./utils')
const pipe = require('it-pipe')
const log = require('debug')('ipfs:add')
log.error = require('debug')('ipfs:add:error')

function noop () {}

module.exports = function (self) {
  // Internal add func that gets used by all add funcs
  return async function * addAsyncIterator (source, options) {
    options = options || {}

    const chunkerOptions = parseChunkerString(options.chunker)

    const opts = Object.assign({}, {
      shardSplitThreshold: self._options.EXPERIMENTAL.sharding
        ? 1000
        : Infinity
    }, options, {
      chunker: chunkerOptions.chunker,
      chunkerOptions: chunkerOptions.chunkerOptions
    })

    // CID v0 is for multihashes encoded with sha2-256
    if (opts.hashAlg && opts.cidVersion !== 1) {
      opts.cidVersion = 1
    }

    let total = 0

    // Wrap any user-supplied progress callback so it receives the
    // cumulative byte count rather than per-chunk sizes
    const prog = opts.progress || noop
    const progress = (bytes) => {
      total += bytes
      prog(total)
    }

    opts.progress = progress

    // Assemble the add pipeline: normalise the input, import it into
    // IPLD, transform the results, then preload and pin root entries
    const iterator = pipe(
      normaliseAddInput(source),
      doImport(self, opts),
      transformFile(self, opts),
      preloadFile(self, opts),
      pinFile(self, opts)
    )

    // Hold the GC read lock for the duration of the add so newly
    // imported blocks cannot be garbage collected mid-import
    const releaseLock = await self._gcLock.readLock()

    try {
      yield * iterator
    } finally {
      releaseLock()
    }
  }
}

function doImport (ipfs, opts) {
  return async function * (source) { // eslint-disable-line require-await
    yield * importer(source, ipfs._ipld, opts)
  }
}

function transformFile (ipfs, opts) {
  return async function * (source) {
    for await (const file of source) {
      let cid = file.cid
      const hash = cid.toBaseEncodedString()
      let path = file.path ? file.path : hash

      if (opts.wrapWithDirectory && !file.path) {
        path = ''
      }

      if (opts.onlyHash) {
        yield {
          path,
          hash,
          size: file.unixfs.fileSize()
        }

        return
      }

      const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false }))

      if (opts.cidVersion === 1) {
        cid = cid.toV1()
      }

      let size = node.size

      if (Buffer.isBuffer(node)) {
        size = node.length
      }

      yield {
        path,
        hash,
        size
      }
    }
  }
}

function preloadFile (ipfs, opts) {
  return async function * (source) {
    for await (const file of source) {
      const isRootFile = !file.path || opts.wrapWithDirectory
        ? file.path === ''
        : !file.path.includes('/')

      const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false

      if (shouldPreload) {
        ipfs._preload(file.hash)
      }

      yield file
    }
  }
}

function pinFile (ipfs, opts) {
  return async function * (source) {
    for await (const file of source) {
      // Pin a file if it is the root dir of a recursive add or the single file
      // of a direct add.
      const pin = 'pin' in opts ? opts.pin : true
      const isRootDir = !file.path.includes('/')
      const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg

      if (shouldPin) {
        // Note: addAsyncIterator() has already taken a GC lock, so tell
        // pin.add() not to take a (second) GC lock
        await ipfs.pin.add(file.hash, {
          preload: false,
          lock: false
        })
      }

      yield file
    }
  }
}
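
For orientation, here is a minimal sketch of how the exported factory might be consumed. The wiring is an assumption for illustration: `ipfs` stands in for a js-ipfs core instance, and `example` and its input are hypothetical, not part of this commit.

// Sketch only: `ipfs` is assumed to provide _options, _ipld, _gcLock,
// _preload, object.get and pin.add, as the module above requires.
const createAddAsyncIterator = require('./add-async-iterator')

async function example (ipfs) {
  const addAsyncIterator = createAddAsyncIterator(ipfs)

  const source = [{ path: 'hello.txt', content: Buffer.from('hello world') }]

  // progress receives the cumulative byte total (see the wrapper above)
  const opts = { pin: true, progress: (total) => console.log(`${total} bytes`) }

  for await (const file of addAsyncIterator(source, opts)) {
    console.log(file.path, file.hash, file.size)
  }
}

Because each stage is an async generator transform composed with it-pipe, results stream out as each file's root node becomes ready, and the finally block guarantees the GC read lock is released even if the consumer stops iterating early.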