-
Notifications
You must be signed in to change notification settings - Fork 167
/
nfts-store.js
218 lines (197 loc) · 5.77 KB
/
nfts-store.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
import last from 'it-last'
import * as CBOR from '@ipld/dag-cbor'
import { CID } from 'multiformats'
import { sha256 } from 'multiformats/hashes/sha2'
import { CarWriter } from '@ipld/car/writer'
import { MemoryBlockStore } from 'ipfs-car/blockstore/memory'
import { createCarCid } from '../utils/car.js'
import { importer as unixFsImporter } from 'ipfs-unixfs-importer'
import { checkAuth } from '../utils/auth.js'
import { setIn } from '../utils/utils.js'
import { JSONResponse } from '../utils/json-response.js'
import { HTTPError } from '../errors.js'
import { uploadCarWithStat } from './nfts-upload.js'
/**
* @typedef {import('../bindings').NFT} NFT
* @typedef {import('ipfs-unixfs-importer').UserImporterOptions} UserImporterOptions
* @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate
*/
/** @type {import('../bindings').Handler} */
export async function nftStore(event, ctx) {
  const { user, key } = checkAuth(ctx)
  const { headers } = event.request
  const contentType = headers.get('content-type') || ''
  if (!contentType.includes('multipart/form-data')) {
    throw new HTTPError('content-type should be `multipart/form-data`', 400)
  }
  const form = await event.request.formData()
  const meta = /** @type {string} */ (form.get('meta'))
  if (!meta || typeof meta !== 'string') {
    throw new HTTPError('missing `meta` in multipart', 400)
  }
  // Parse twice on purpose to get two independent deep copies that diverge
  // below: `data` gets ipfs:// URL strings (returned to the client), `dag`
  // gets CID links (embedded in the root CBOR block).
  // `meta` is untrusted client input, so surface bad JSON as a 400 rather
  // than letting SyntaxError escape as an unhandled (500) error.
  let data
  let dag
  try {
    data = JSON.parse(meta)
    dag = JSON.parse(meta)
  } catch {
    throw new HTTPError('invalid JSON in `meta` multipart', 400)
  }
  // accumlate blocks here, until we are ready to export it as a CAR
  const bs = new MemoryBlockStore()
  // Every non-`meta` part is a File; its form-field name is a dotted path
  // into the metadata (e.g. `properties.image`) telling us where to splice
  // the link to the uploaded content.
  for (const [name, file] of form.entries()) {
    if (name === 'meta') {
      continue
    }
    if (typeof file === 'string') {
      throw new HTTPError('expected File part', 400)
    }
    const cid = await unixFsEncodeDir([file], bs)
    const href = `ipfs://${cid}/${file.name}`
    const path = name.split('.')
    setIn(data, path, href)
    setIn(dag, path, cid)
  }
  // metadata.json holds the URL-flavoured copy; the CBOR root links to it
  // plus the raw CID links so the whole NFT is one verifiable DAG.
  const metadataCid = await unixFsEncodeString(JSON.stringify(data), bs)
  const rootCid = await cborEncode(
    {
      ...dag,
      'metadata.json': metadataCid,
      type: 'nft',
    },
    bs
  )
  // Size must be computed before exportToCar, which drains the blockstore.
  const size = totalSize(bs)
  const structure = 'Complete'
  const car = await exportToCar(rootCid, bs)
  /** @type {import('./nfts-upload.js').CarStat} */
  const carStat = {
    rootCid,
    structure,
    size,
    cid: await createCarCid(new Uint8Array(await car.arrayBuffer())),
  }
  const upload = await uploadCarWithStat(
    {
      event,
      ctx,
      user,
      key,
      uploadType: 'Nft',
      files: [],
      structure,
      car,
    },
    carStat
  )
  const result = {
    ok: true,
    value: {
      ipnft: upload.source_cid,
      url: `ipfs://${upload.source_cid}/metadata.json`,
      data,
    },
  }
  return new JSONResponse(result)
}
/**
 * Encode a value as a single dag-cbor block, store it in the blockstore,
 * and return the block's CID (CIDv1, dag-cbor codec, sha2-256 hash).
 * @param {unknown} value
 * @param {import('ipfs-car/blockstore').Blockstore} bs
 */
async function cborEncode(value, bs) {
  const encoded = CBOR.encode(value)
  const multihash = await sha256.digest(encoded)
  const cid = CID.createV1(CBOR.code, multihash)
  // @ts-expect-error different CID versions
  await bs.put(cid, encoded)
  return cid
}
// TODO: expose from ipfs-car
// Match ipfs-car defaults used in nft-upload
// These must stay in sync with nfts-upload.js so that identical content
// produces identical CIDs whichever endpoint it was uploaded through.
/** @type {import('ipfs-unixfs-importer').UserImporterOptions} */
const unixfsImporterOptionsDefault = {
  cidVersion: 1,
  chunker: 'fixed',
  maxChunkSize: 262144, // 256 KiB chunks
  hasher: sha256,
  rawLeaves: true,
  wrapWithDirectory: true,
  maxChildrenPerNode: 174,
}
/**
 * Encode an ImportCandidate as UnixFS blocks and add them to the BlockStore.
 * Returns the CID of the final (root) entry produced by the importer.
 * @param {ImportCandidate | ImportCandidate[]} ic
 * @param {MemoryBlockStore} bs
 * @param {UserImporterOptions} opts
 */
async function unixFsEncode(ic, bs, opts = {}) {
  const importerOptions = { ...unixfsImporterOptionsDefault, ...opts }
  const lastEntry = await last(unixFsImporter(ic, bs, importerOptions))
  if (lastEntry === undefined) {
    throw new Error('Failed to unixfs encode')
  }
  return lastEntry.cid
}
/**
 * Encode the File, wrapped with a directory, as UnixFS blocks
 * and add them to the BlockStore.
 * @param {File[]} files
 * @param {MemoryBlockStore} bs
 * @return {Promise<CID>} the root CID for the directory wrapping the files
 */
async function unixFsEncodeDir(files, bs) {
  // Read all file contents in parallel — each read is independent, so there
  // is no reason to await them one at a time.
  const input = await Promise.all(
    files.map(async (f) => ({
      path: f.name,
      content: new Uint8Array(await f.arrayBuffer()),
    }))
  )
  // @ts-expect-error different CID versions
  return unixFsEncode(input, bs, {
    wrapWithDirectory: true,
  })
}
/**
 * Encode the string as UnixFS blocks and add them to the BlockStore.
 * The string is stored as a single unwrapped file (no directory).
 * @param {string} str
 * @param {MemoryBlockStore} bs
 * @return {Promise<CID>} the root CID for the file
 */
async function unixFsEncodeString(str, bs) {
  const bytes = new TextEncoder().encode(str)
  // @ts-expect-error different CID versions
  return unixFsEncode({ path: '', content: bytes }, bs, {
    wrapWithDirectory: false,
  })
}
/**
 * Consume the blockstore, encoding it as a CAR file.
 * Warning! Deletes blocks from blockstore as it consumes them.
 * @param {CID} rootCid
 * @param {MemoryBlockStore} bs
 * @return {Promise<Blob>} the encoded CAR
 */
async function exportToCar(rootCid, bs) {
  const { out, writer } = CarWriter.create([rootCid])
  // Start draining the output stream *before* writing: CarWriter's `out`
  // must be consumed concurrently, otherwise `writer.put` can block forever
  // waiting on backpressure once its internal buffer fills up.
  const collected = (async () => {
    const parts = []
    for await (const part of out) {
      parts.push(part)
    }
    return parts
  })()
  for (const [cidStr, bytes] of bs.store.entries()) {
    // Await each put (they return promises) rather than leaving them floating.
    await writer.put({ cid: CID.parse(cidStr), bytes })
    // try and reduce the mem usage by clearing up as we go...
    bs.store.delete(cidStr)
  }
  await writer.close()
  return new Blob(await collected)
}
/**
 * Count the total bytes of all blocks in the block store.
 * Reads the internal `store` Map directly so no async API is needed.
 * @param {MemoryBlockStore} bs
 * @return {number} total byte length of all stored blocks
 */
function totalSize(bs) {
  return [...bs.store.values()].reduce(
    (total, bytes) => total + bytes.length,
    0
  )
}