@uppy/aws-s3-multipart: fix Golden Retriever integration (#4526)
aduh95 authored Jun 29, 2023
1 parent 35b4b0e commit d1697b0
Showing 3 changed files with 33 additions and 7 deletions.
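
For context, Golden Retriever (@uppy/golden-retriever) persists upload state so an interrupted upload can be restored after a page reload or crash; this commit makes the multipart uploader reuse the persisted S3 uploadId and key instead of starting a brand-new multipart upload. A minimal sketch of the setup this fix targets (the companionUrl value is a placeholder):

    import Uppy from '@uppy/core'
    import AwsS3Multipart from '@uppy/aws-s3-multipart'
    import GoldenRetriever from '@uppy/golden-retriever'

    const uppy = new Uppy()
      // Companion (or your own signing endpoints) handles the S3 multipart API calls.
      .use(AwsS3Multipart, { companionUrl: 'https://companion.example.com' })
      // Golden Retriever persists upload state (including the S3 uploadId and key)
      // so an interrupted upload can be restored after a page reload.
      .use(GoldenRetriever)
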
27 changes: 23 additions & 4 deletions packages/@uppy/aws-s3-multipart/src/MultipartUploader.js
@@ -60,9 +60,12 @@ class MultipartUploader {
   /** @type {() => void} */
   #onSuccess
 
-  /** @type {typeof import('../types/index').AwsS3MultipartOptions["shouldUseMultipart"]} */
+  /** @type {import('../types/index').AwsS3MultipartOptions["shouldUseMultipart"]} */
   #shouldUseMultipart
 
+  /** @type {boolean} */
+  #isRestoring
+
   #onReject = (err) => (err?.cause === pausingUploadReason ? null : this.#onError(err))
 
   #maxMultipartParts = 10_000
@@ -83,6 +86,11 @@ class MultipartUploader {
     this.#onError = this.options.onError
     this.#shouldUseMultipart = this.options.shouldUseMultipart
 
+    // When we are restoring an upload, we already have an uploadId. Otherwise
+    // we need to call `createMultipartUpload` to get an `uploadId`.
+    // Non-multipart uploads are not restorable.
+    this.#isRestoring = 'uploadId' in options
+
     this.#initChunks()
   }
 
@@ -108,12 +116,12 @@
       }
       this.#chunks = Array(arraySize)
 
-      for (let i = 0, j = 0; i < fileSize; i += chunkSize, j++) {
-        const end = Math.min(fileSize, i + chunkSize)
+      for (let offset = 0, j = 0; offset < fileSize; offset += chunkSize, j++) {
+        const end = Math.min(fileSize, offset + chunkSize)
 
         // Defer data fetching/slicing until we actually need the data, because it's slow if we have a lot of files
         const getData = () => {
-          const i2 = i
+          const i2 = offset
           return this.#data.slice(i2, end)
         }
 
@@ -123,6 +131,14 @@
           onComplete: this.#onPartComplete(j),
           shouldUseMultipart,
         }
+        if (this.#isRestoring) {
+          const size = offset + chunkSize > fileSize ? fileSize - offset : chunkSize
+          // setAsUploaded is called by listPart, to keep up-to-date the
+          // quantity of data that is left to actually upload.
+          this.#chunks[j].setAsUploaded = () => {
+            this.#chunkState[j].uploaded = size
+          }
+        }
       }
     } else {
       this.#chunks = [{
@@ -181,6 +197,9 @@
       if (!this.#abortController.signal.aborted) this.#abortController.abort(pausingUploadReason)
       this.#abortController = new AbortController()
       this.#resumeUpload()
+    } else if (this.#isRestoring) {
+      this.options.companionComm.restoreUploadFile(this.#file, { uploadId: this.options.uploadId, key: this.options.key })
+      this.#resumeUpload()
     } else {
       this.#createUpload()
     }
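
Taken together, the MultipartUploader changes mean a restored file is recognized by the presence of uploadId in its options, and start() then re-seeds the HTTPCommunicationQueue (via restoreUploadFile, added in index.js below) and resumes rather than calling createMultipartUpload again. A hedged sketch of the restore path, assuming the option names shown in the hunks above; restoredState and its values are illustrative:

    // Illustrative only: `restoredState` stands for what Golden Retriever persisted
    // when the multipart upload was first created.
    const restoredState = { uploadId: 'example-upload-id', key: 'uploads/example.mp4' }

    // The plugin passes these through as uploader options; the presence of `uploadId`
    // is what flips #isRestoring to true in the constructor above.
    const uploaderOptions = {
      // companionComm: <the plugin's HTTPCommunicationQueue instance>,
      // file, onProgress, onSuccess, onError, ... (elided),
      shouldUseMultipart: true,
      uploadId: restoredState.uploadId,
      key: restoredState.key,
    }

    // new MultipartUploader(file.data, uploaderOptions).start() then seeds the queue's
    // cache via restoreUploadFile(file, { uploadId, key }) and resumes, listing the
    // parts S3 already has instead of calling createMultipartUpload again.
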
12 changes: 9 additions & 3 deletions packages/@uppy/aws-s3-multipart/src/index.js
@@ -264,6 +264,10 @@ class HTTPCommunicationQueue {
     }
   }
 
+  restoreUploadFile (file, uploadIdAndKey) {
+    this.#cache.set(file.data, uploadIdAndKey)
+  }
+
   async resumeUploadFile (file, chunks, signal) {
     throwIfAborted(signal)
     if (chunks.length === 1 && !chunks[0].shouldUseMultipart) {
@@ -278,9 +282,11 @@
       .map((chunk, i) => {
         const partNumber = i + 1
         const alreadyUploadedInfo = alreadyUploadedParts.find(({ PartNumber }) => PartNumber === partNumber)
-        return alreadyUploadedInfo == null
-          ? this.uploadChunk(file, partNumber, chunk, signal)
-          : { PartNumber: partNumber, ETag: alreadyUploadedInfo.ETag }
+        if (alreadyUploadedInfo == null) {
+          return this.uploadChunk(file, partNumber, chunk, signal)
+        }
+        chunk.setAsUploaded?.()
+        return { PartNumber: partNumber, ETag: alreadyUploadedInfo.ETag }
       }),
     )
     throwIfAborted(signal)
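
On resume, resumeUploadFile matches S3's ListParts response against the local chunks: parts S3 already stores are returned directly as { PartNumber, ETag } entries for CompleteMultipartUpload and, new in this commit, are also marked locally via setAsUploaded so the remaining-bytes accounting only counts data still to be sent. A hedged sketch with an illustrative ListParts payload (field names follow the S3 API; the values are made up):

    // Example shape of `alreadyUploadedParts` as returned by S3 ListParts:
    const alreadyUploadedParts = [
      { PartNumber: 1, ETag: '"3858f62230ac3c915f300c664312c63f"', Size: 5 * 1024 * 1024 },
      { PartNumber: 2, ETag: '"e10adc3949ba59abbe56e057f20f883e"', Size: 5 * 1024 * 1024 },
    ]

    // For a file split into four chunks, parts 1 and 2 hit the `alreadyUploadedInfo`
    // branch: chunk.setAsUploaded?.() records their size as already uploaded and their
    // { PartNumber, ETag } pairs are kept for CompleteMultipartUpload, while parts 3
    // and 4 go through this.uploadChunk(...) as usual.
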
1 change: 1 addition & 0 deletions packages/@uppy/aws-s3-multipart/types/chunk.d.ts
@@ -3,4 +3,5 @@ export interface Chunk {
   onProgress: (ev: ProgressEvent) => void
   onComplete: (etag: string) => void
   shouldUseMultipart: boolean
+  setAsUploaded?: () => void
 }
