Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: this param for fetch calls #2428

Merged
merged 11 commits into from
Jul 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 9 additions & 3 deletions .github/workflows/client.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '16'
node-version: 18
- uses: bahmutov/npm-install@v1
- name: Typecheck
uses: gozala/typescript-error-reporter-action@v1.0.8
Expand All @@ -33,11 +33,17 @@ jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
matrix:
node_version:
- 16
- 18
- 20
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '16'
node-version: ${{ matrix.node_version }}
- uses: bahmutov/npm-install@v1
- name: Test (ES)
run: yarn --cwd packages/client test:es
Expand Down Expand Up @@ -69,7 +75,7 @@ jobs:
- uses: actions/setup-node@v2
if: ${{ steps.tag-release.outputs.releases_created }}
with:
node-version: '16'
node-version: 18
registry-url: https://registry.npmjs.org/
- uses: bahmutov/npm-install@v1
if: ${{ steps.tag-release.outputs.releases_created }}
Expand Down
2 changes: 1 addition & 1 deletion packages/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@
"git-rev-sync": "^3.0.1",
"ipfs-unixfs-importer": "^9.0.3",
"miniflare": "^2.10.0",
"minio": "^7.0.28",
"minio": "^7.1.1",
"npm-run-all": "^4.1.5",
"openapi-typescript": "^4.0.2",
"pg": "^8.7.1",
Expand Down
2 changes: 1 addition & 1 deletion packages/client/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@
"@ipld/car": "^3.2.3",
"@ipld/dag-cbor": "^6.0.13",
"@web-std/blob": "^3.0.1",
"@web-std/fetch": "^3.0.3",
"@web-std/fetch": "^4.1.2",
"@web-std/file": "^3.0.0",
"@web-std/form-data": "^3.0.0",
"carbites": "^1.0.6",
Expand Down
16 changes: 14 additions & 2 deletions packages/client/src/lib.js
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,10 @@ class NFTStorage {
{ onStoredChunk, maxRetries, maxChunkSize, decoders, signal } = {}
) {
const url = new URL('upload/', endpoint)
const headers = NFTStorage.auth(token)
const headers = {
...NFTStorage.auth(token),
'Content-Type': 'application/car',
}
const targetSize = maxChunkSize || MAX_CHUNK_SIZE
const splitter =
car instanceof Blob
Expand All @@ -187,6 +190,15 @@ class NFTStorage {
carParts.push(part)
}
const carFile = new Blob(carParts, { type: 'application/car' })
/** @type {Blob|ArrayBuffer} */
let body = carFile
// FIXME: should not be necessary to await arrayBuffer()!
// Node.js 20 hangs reading the stream (it never ends) but in
// older node versions and the browser it is fine to pass a blob.
/* c8 ignore next 3 */
if (parseInt(globalThis.process?.versions?.node) > 18) {
body = await body.arrayBuffer()
}
const cid = await pRetry(
async () => {
await rateLimiter()
Expand All @@ -196,7 +208,7 @@ class NFTStorage {
response = await fetch(url.toString(), {
method: 'POST',
headers,
body: carFile,
body,
signal,
})
} catch (/** @type {any} */ err) {
Expand Down
11 changes: 10 additions & 1 deletion packages/client/src/token.js
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,16 @@ export class Token {
// @ts-ignore blob may be a File!
const name = blob.name || 'blob'
/** @type {import('./platform.js').ReadableStream} */
const content = blob.stream()
let content
// FIXME: should not be necessary to await arrayBuffer()!
// Node.js 20 hangs reading the stream (it never ends) but in
// older node versions and the browser it is fine to use blob.stream().
/* c8 ignore next 5 */
if (parseInt(globalThis.process?.versions?.node) > 18) {
content = new Uint8Array(await blob.arrayBuffer())
} else {
content = blob.stream()
}
const { root: cid } = await pack({
input: [{ path: name, content }],
blockstore,
Expand Down
2 changes: 1 addition & 1 deletion packages/client/test/lib.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ describe('client', () => {

it('upload CAR with custom chunk size', async function () {
// @ts-ignore
this.timeout(10_000)
this.timeout(20_000)

let uploadedChunks = 0

Expand Down
36 changes: 19 additions & 17 deletions packages/client/test/mock-server.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,21 +21,15 @@ export { fetch, Headers }
const toReadableStream = (source) =>
new ReadableStream({
async pull(controller) {
try {
while (controller.desiredSize || 0 > 0) {
const chunk = await source.next()
if (chunk.done) {
controller.close()
} else {
const bytes =
typeof chunk.value === 'string'
? encoder.encode(chunk.value)
: chunk.value
controller.enqueue(bytes)
}
}
} catch (error) {
controller.error(error)
const chunk = await source.next()
if (chunk.done) {
controller.close()
} else {
const bytes =
typeof chunk.value === 'string'
? encoder.encode(chunk.value)
: chunk.value
controller.enqueue(bytes)
}
},
cancel(reason) {
Expand Down Expand Up @@ -272,6 +266,8 @@ export class Service {
// @ts-ignore - headers don't have right type
headers: new Headers({ ...incoming.headers }),
body: toBody(incoming),
// @ts-expect-error TypeError: RequestInit: duplex option is required when sending a body.
duplex: 'half',
})

const response = await this.handler(request, this.state)
Expand All @@ -286,7 +282,7 @@ export class Service {
outgoing.end()
} catch (err) {
const error = /**@type {Error & {status: number}} */ (err)
if (!outgoing.hasHeader) {
if (!outgoing.headersSent) {
outgoing.writeHead(error.status || 500)
}
outgoing.write(error.stack)
Expand Down Expand Up @@ -326,7 +322,13 @@ export const listen = (service, port = 0) =>
* @param {(request:Request, state:State) => Promise<Response>} handler
*/
export const activate = async (state, handler) => {
const service = new Service(new http.Server(), state, handler)
const server = new http.Server()
// Server keepalive timeout is 5s by default but undici sets to 60s in
// Node.js 18 & 20. So when the client tries to reuse a connection after 5s it
// gets ECONNRESET from the server.
// https://connectreport.com/blog/tuning-http-keep-alive-in-node-js/
server.keepAliveTimeout = 60_000
const service = new Service(server, state, handler)
await listen(service)
return service
}
Expand Down
Loading
Loading