From 18968316fa8ef85bf67c3ac5220daad930d1b090 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Mon, 21 Jun 2021 06:32:17 -0700 Subject: [PATCH 1/2] test: add WPT streams tests Signed-off-by: James M Snell --- test/fixtures/wpt/README.md | 5 +- test/fixtures/wpt/interfaces/dom.idl | 19 + test/fixtures/wpt/interfaces/html.idl | 36 +- test/fixtures/wpt/interfaces/streams.idl | 226 ++ test/fixtures/wpt/streams/META.yml | 7 + test/fixtures/wpt/streams/README.md | 3 + test/fixtures/wpt/streams/idlharness.any.js | 79 + test/fixtures/wpt/streams/piping/abort.any.js | 375 +++ .../piping/close-propagation-backward.any.js | 153 ++ .../piping/close-propagation-forward.any.js | 589 +++++ .../piping/error-propagation-backward.any.js | 630 +++++ .../piping/error-propagation-forward.any.js | 569 ++++ .../wpt/streams/piping/flow-control.any.js | 297 +++ .../wpt/streams/piping/general.any.js | 211 ++ .../piping/multiple-propagation.any.js | 227 ++ .../wpt/streams/piping/pipe-through.any.js | 268 ++ .../streams/piping/then-interception.any.js | 68 + .../streams/piping/throwing-options.any.js | 65 + .../streams/piping/transform-streams.any.js | 22 + ...ategies-size-function-per-global.window.js | 14 + .../wpt/streams/queuing-strategies.any.js | 135 + .../bad-buffers-and-views.any.js | 349 +++ .../construct-byob-request.any.js | 53 + .../enqueue-with-detached-buffer.window.js | 19 + .../readable-byte-streams/general.any.js | 2329 +++++++++++++++++ .../non-transferable-buffers.any.js | 58 + .../readable-streams/async-iterator.any.js | 650 +++++ .../readable-streams/bad-strategies.any.js | 159 ++ .../bad-underlying-sources.any.js | 400 +++ .../streams/readable-streams/cancel.any.js | 236 ++ .../readable-streams/constructor.any.js | 17 + .../count-queuing-strategy-integration.any.js | 208 ++ .../readable-streams/default-reader.any.js | 514 ++++ .../floating-point-total-queue-size.any.js | 116 + .../garbage-collection.any.js | 70 + .../streams/readable-streams/general.any.js | 840 ++++++ 
.../readable-streams/patched-global.any.js | 142 + .../reentrant-strategies.any.js | 264 ++ .../wpt/streams/readable-streams/tee.any.js | 541 ++++ .../streams/readable-streams/templated.any.js | 143 + .../streams/resources/recording-streams.js | 130 + .../streams/resources/rs-test-templates.js | 638 +++++ .../wpt/streams/resources/rs-utils.js | 197 ++ .../wpt/streams/resources/test-utils.js | 74 + .../transferable/deserialize-error.window.js | 39 + .../streams/transferable/readable-stream.html | 255 ++ .../wpt/streams/transferable/reason.html | 132 + .../resources/create-wasm-module.js | 11 + .../resources/deserialize-error-frame.html | 39 + .../transferable/resources/echo-iframe.html | 7 + .../transferable/resources/echo-worker.js | 2 + .../streams/transferable/resources/helpers.js | 132 + .../resources/receiving-shared-worker.js | 11 + .../resources/receiving-worker.js | 7 + .../resources/sending-shared-worker.js | 12 + .../transferable/resources/sending-worker.js | 5 + .../resources/service-worker-iframe.html | 39 + .../transferable/resources/service-worker.js | 30 + .../transferable/service-worker.https.html | 28 + .../streams/transferable/shared-worker.html | 25 + .../transferable/transform-stream.html | 104 + .../wpt/streams/transferable/window.html | 60 + .../wpt/streams/transferable/worker.html | 76 + .../streams/transferable/writable-stream.html | 136 + .../transform-streams/backpressure.any.js | 195 ++ .../streams/transform-streams/errors.any.js | 341 +++ .../streams/transform-streams/flush.any.js | 131 + .../streams/transform-streams/general.any.js | 437 ++++ .../streams/transform-streams/lipfuzz.any.js | 163 ++ .../transform-streams/patched-global.any.js | 53 + .../transform-streams/properties.any.js | 49 + .../reentrant-strategies.any.js | 319 +++ .../transform-streams/strategies.any.js | 150 ++ .../transform-streams/terminate.any.js | 100 + .../streams/writable-streams/aborting.any.js | 1378 ++++++++++ .../writable-streams/bad-strategies.any.js | 95 + 
.../bad-underlying-sinks.any.js | 204 ++ .../byte-length-queuing-strategy.any.js | 28 + .../wpt/streams/writable-streams/close.any.js | 470 ++++ .../writable-streams/constructor.any.js | 155 ++ .../count-queuing-strategy.any.js | 124 + .../wpt/streams/writable-streams/error.any.js | 64 + .../floating-point-total-queue-size.any.js | 87 + .../streams/writable-streams/general.any.js | 277 ++ .../writable-streams/properties.any.js | 53 + .../reentrant-strategy.any.js | 174 ++ .../wpt/streams/writable-streams/start.any.js | 163 ++ .../wpt/streams/writable-streams/write.any.js | 284 ++ test/fixtures/wpt/versions.json | 6 +- test/wpt/status/streams.json | 1 + test/wpt/test-streams.js | 47 + 91 files changed, 18833 insertions(+), 10 deletions(-) create mode 100644 test/fixtures/wpt/interfaces/streams.idl create mode 100644 test/fixtures/wpt/streams/META.yml create mode 100644 test/fixtures/wpt/streams/README.md create mode 100644 test/fixtures/wpt/streams/idlharness.any.js create mode 100644 test/fixtures/wpt/streams/piping/abort.any.js create mode 100644 test/fixtures/wpt/streams/piping/close-propagation-backward.any.js create mode 100644 test/fixtures/wpt/streams/piping/close-propagation-forward.any.js create mode 100644 test/fixtures/wpt/streams/piping/error-propagation-backward.any.js create mode 100644 test/fixtures/wpt/streams/piping/error-propagation-forward.any.js create mode 100644 test/fixtures/wpt/streams/piping/flow-control.any.js create mode 100644 test/fixtures/wpt/streams/piping/general.any.js create mode 100644 test/fixtures/wpt/streams/piping/multiple-propagation.any.js create mode 100644 test/fixtures/wpt/streams/piping/pipe-through.any.js create mode 100644 test/fixtures/wpt/streams/piping/then-interception.any.js create mode 100644 test/fixtures/wpt/streams/piping/throwing-options.any.js create mode 100644 test/fixtures/wpt/streams/piping/transform-streams.any.js create mode 100644 
test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js create mode 100644 test/fixtures/wpt/streams/queuing-strategies.any.js create mode 100644 test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js create mode 100644 test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js create mode 100644 test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js create mode 100644 test/fixtures/wpt/streams/readable-byte-streams/general.any.js create mode 100644 test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/async-iterator.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/cancel.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/constructor.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/default-reader.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/general.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/patched-global.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/tee.any.js create mode 100644 test/fixtures/wpt/streams/readable-streams/templated.any.js create mode 100644 test/fixtures/wpt/streams/resources/recording-streams.js create mode 100644 test/fixtures/wpt/streams/resources/rs-test-templates.js create mode 100644 
test/fixtures/wpt/streams/resources/rs-utils.js create mode 100644 test/fixtures/wpt/streams/resources/test-utils.js create mode 100644 test/fixtures/wpt/streams/transferable/deserialize-error.window.js create mode 100644 test/fixtures/wpt/streams/transferable/readable-stream.html create mode 100644 test/fixtures/wpt/streams/transferable/reason.html create mode 100644 test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html create mode 100644 test/fixtures/wpt/streams/transferable/resources/echo-iframe.html create mode 100644 test/fixtures/wpt/streams/transferable/resources/echo-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/helpers.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/receiving-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/sending-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html create mode 100644 test/fixtures/wpt/streams/transferable/resources/service-worker.js create mode 100644 test/fixtures/wpt/streams/transferable/service-worker.https.html create mode 100644 test/fixtures/wpt/streams/transferable/shared-worker.html create mode 100644 test/fixtures/wpt/streams/transferable/transform-stream.html create mode 100644 test/fixtures/wpt/streams/transferable/window.html create mode 100644 test/fixtures/wpt/streams/transferable/worker.html create mode 100644 test/fixtures/wpt/streams/transferable/writable-stream.html create mode 100644 test/fixtures/wpt/streams/transform-streams/backpressure.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/errors.any.js create mode 100644 
test/fixtures/wpt/streams/transform-streams/flush.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/general.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/patched-global.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/properties.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/strategies.any.js create mode 100644 test/fixtures/wpt/streams/transform-streams/terminate.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/aborting.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/close.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/constructor.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/error.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/general.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/properties.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/start.any.js create mode 100644 test/fixtures/wpt/streams/writable-streams/write.any.js create mode 100644 test/wpt/status/streams.json create mode 100644 test/wpt/test-streams.js diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index b68466264ebdf8..86f2eaada97b3b 
100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -19,9 +19,10 @@ Last update: - html/webappapis/atob: https://github.com/web-platform-tests/wpt/tree/f267e1dca6/html/webappapis/atob - html/webappapis/microtask-queuing: https://github.com/web-platform-tests/wpt/tree/2c5c3c4c27/html/webappapis/microtask-queuing - html/webappapis/timers: https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers -- interfaces: https://github.com/web-platform-tests/wpt/tree/79fa4cf76e/interfaces +- interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources +- streams: https://github.com/web-platform-tests/wpt/tree/b869e60df1/streams - url: https://github.com/web-platform-tests/wpt/tree/1fcb39223d/url [Web Platform Tests]: https://github.com/web-platform-tests/wpt -[`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/master/docs/git-node.md#git-node-wpt +[`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-wpt diff --git a/test/fixtures/wpt/interfaces/dom.idl b/test/fixtures/wpt/interfaces/dom.idl index bd8a17a379311b..4a57b2e1eb4bce 100644 --- a/test/fixtures/wpt/interfaces/dom.idl +++ b/test/fixtures/wpt/interfaces/dom.idl @@ -93,6 +93,8 @@ interface AbortController { [Exposed=(Window,Worker)] interface AbortSignal : EventTarget { + [NewObject] static AbortSignal abort(); + readonly attribute boolean aborted; attribute EventHandler onabort; @@ -331,11 +333,14 @@ interface DocumentFragment : Node { [Exposed=Window] interface ShadowRoot : DocumentFragment { readonly attribute ShadowRootMode mode; + readonly attribute boolean delegatesFocus; + readonly attribute SlotAssignmentMode slotAssignment; readonly attribute Element host; attribute EventHandler onslotchange; }; enum ShadowRootMode { "open", "closed" }; +enum SlotAssignmentMode { "manual", "named" }; [Exposed=Window] interface 
Element : Node { @@ -386,6 +391,7 @@ interface Element : Node { dictionary ShadowRootInit { required ShadowRootMode mode; boolean delegatesFocus = false; + SlotAssignmentMode slotAssignment = "named"; }; [Exposed=Window, @@ -621,3 +627,16 @@ interface XPathEvaluator { }; XPathEvaluator includes XPathEvaluatorBase; + +[Exposed=Window] +interface XSLTProcessor { + constructor(); + undefined importStylesheet(Node style); + [CEReactions] DocumentFragment transformToFragment(Node source, Document output); + [CEReactions] Document transformToDocument(Node source); + undefined setParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName, any value); + any getParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName); + undefined removeParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName); + undefined clearParameters(); + undefined reset(); +}; diff --git a/test/fixtures/wpt/interfaces/html.idl b/test/fixtures/wpt/interfaces/html.idl index f6789644339b4c..d878cba3367cc6 100644 --- a/test/fixtures/wpt/interfaces/html.idl +++ b/test/fixtures/wpt/interfaces/html.idl @@ -120,6 +120,7 @@ interface HTMLElement : Element { [CEReactions] attribute DOMString autocapitalize; [CEReactions] attribute [LegacyNullToEmptyString] DOMString innerText; + [CEReactions] attribute [LegacyNullToEmptyString] DOMString outerText; ElementInternals attachInternals(); }; @@ -695,7 +696,7 @@ interface TextTrackCue : EventTarget { attribute DOMString id; attribute double startTime; - attribute double endTime; + attribute unrestricted double endTime; attribute boolean pauseOnExit; attribute EventHandler onenter; @@ -1231,6 +1232,7 @@ interface HTMLSlotElement : HTMLElement { [CEReactions] attribute DOMString name; sequence assignedNodes(optional AssignedNodesOptions options = {}); sequence assignedElements(optional AssignedNodesOptions options = {}); + undefined assign((Element or Text)... 
nodes); }; dictionary AssignedNodesOptions { @@ -1264,11 +1266,14 @@ typedef (HTMLOrSVGImageElement or ImageBitmap or OffscreenCanvas) CanvasImageSource; +enum PredefinedColorSpace { "srgb", "display-p3" }; + enum CanvasFillRule { "nonzero", "evenodd" }; dictionary CanvasRenderingContext2DSettings { boolean alpha = true; boolean desynchronized = false; + PredefinedColorSpace colorSpace = "srgb"; }; enum ImageSmoothingQuality { "low", "medium", "high" }; @@ -1301,6 +1306,7 @@ interface mixin CanvasState { // state undefined save(); // push state on state stack undefined restore(); // pop state stack and restore state + undefined reset(); // reset the rendering context to its default state }; interface mixin CanvasTransform { @@ -1336,6 +1342,7 @@ interface mixin CanvasFillStrokeStyles { attribute (DOMString or CanvasGradient or CanvasPattern) fillStyle; // (default black) CanvasGradient createLinearGradient(double x0, double y0, double x1, double y1); CanvasGradient createRadialGradient(double x0, double y0, double r0, double x1, double y1, double r1); + CanvasGradient createConicGradient(double startAngle, double x, double y); CanvasPattern? 
createPattern(CanvasImageSource image, [LegacyNullToEmptyString] DOMString repetition); }; @@ -1398,9 +1405,9 @@ interface mixin CanvasDrawImage { interface mixin CanvasImageData { // pixel manipulation - ImageData createImageData([EnforceRange] long sw, [EnforceRange] long sh); + ImageData createImageData([EnforceRange] long sw, [EnforceRange] long sh, optional ImageDataSettings settings = {}); ImageData createImageData(ImageData imagedata); - ImageData getImageData([EnforceRange] long sx, [EnforceRange] long sy, [EnforceRange] long sw, [EnforceRange] long sh); + ImageData getImageData([EnforceRange] long sx, [EnforceRange] long sy, [EnforceRange] long sw, [EnforceRange] long sh, optional ImageDataSettings settings = {}); undefined putImageData(ImageData imagedata, [EnforceRange] long dx, [EnforceRange] long dy); undefined putImageData(ImageData imagedata, [EnforceRange] long dx, [EnforceRange] long dy, [EnforceRange] long dirtyX, [EnforceRange] long dirtyY, [EnforceRange] long dirtyWidth, [EnforceRange] long dirtyHeight); }; @@ -1410,6 +1417,10 @@ enum CanvasLineJoin { "round", "bevel", "miter" }; enum CanvasTextAlign { "start", "end", "left", "right", "center" }; enum CanvasTextBaseline { "top", "hanging", "middle", "alphabetic", "ideographic", "bottom" }; enum CanvasDirection { "ltr", "rtl", "inherit" }; +enum CanvasFontKerning { "auto", "normal", "none" }; +enum CanvasFontStretch { "ultra-condensed", "extra-condensed", "condensed", "semi-condensed", "normal", "semi-expanded", "expanded", "extra-expanded", "ultra-expanded" }; +enum CanvasFontVariantCaps { "normal", "small-caps", "all-small-caps", "petite-caps", "all-petite-caps", "unicase", "titling-caps" }; +enum CanvasTextRendering { "auto", "optimizeSpeed", "optimizeLegibility", "geometricPrecision" }; interface mixin CanvasPathDrawingStyles { // line caps/joins @@ -1430,6 +1441,12 @@ interface mixin CanvasTextDrawingStyles { attribute CanvasTextAlign textAlign; // (default: "start") attribute 
CanvasTextBaseline textBaseline; // (default: "alphabetic") attribute CanvasDirection direction; // (default: "inherit") + attribute double textLetterSpacing; // (default: 0) + attribute double textWordSpacing; // (default: 0) + attribute CanvasFontKerning fontKerning; // (default: "auto") + attribute CanvasFontStretch fontStretch; // (default: "normal") + attribute CanvasFontVariantCaps fontVariantCaps; // (default: "normal") + attribute CanvasTextRendering textRendering; // (default: "normal") }; interface mixin CanvasPath { @@ -1476,15 +1493,20 @@ interface TextMetrics { readonly attribute double ideographicBaseline; }; +dictionary ImageDataSettings { + PredefinedColorSpace colorSpace; +}; + [Exposed=(Window,Worker), Serializable] interface ImageData { - constructor(unsigned long sw, unsigned long sh); - constructor(Uint8ClampedArray data, unsigned long sw, optional unsigned long sh); + constructor(unsigned long sw, unsigned long sh, optional ImageDataSettings settings = {}); + constructor(Uint8ClampedArray data, unsigned long sw, optional unsigned long sh, optional ImageDataSettings settings = {}); readonly attribute unsigned long width; readonly attribute unsigned long height; readonly attribute Uint8ClampedArray data; + readonly attribute PredefinedColorSpace colorSpace; }; [Exposed=(Window,Worker)] @@ -1738,8 +1760,8 @@ interface History { undefined go(optional long delta = 0); undefined back(); undefined forward(); - undefined pushState(any data, DOMString title, optional USVString? url = null); - undefined replaceState(any data, DOMString title, optional USVString? url = null); + undefined pushState(any data, DOMString unused, optional USVString? url = null); + undefined replaceState(any data, DOMString unused, optional USVString? 
url = null); }; [Exposed=Window] diff --git a/test/fixtures/wpt/interfaces/streams.idl b/test/fixtures/wpt/interfaces/streams.idl new file mode 100644 index 00000000000000..99c3a5dee91f69 --- /dev/null +++ b/test/fixtures/wpt/interfaces/streams.idl @@ -0,0 +1,226 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Streams Standard (https://streams.spec.whatwg.org/) + +[Exposed=(Window,Worker,Worklet), Transferable] +interface ReadableStream { + constructor(optional object underlyingSource, optional QueuingStrategy strategy = {}); + + readonly attribute boolean locked; + + Promise cancel(optional any reason); + ReadableStreamReader getReader(optional ReadableStreamGetReaderOptions options = {}); + ReadableStream pipeThrough(ReadableWritablePair transform, optional StreamPipeOptions options = {}); + Promise pipeTo(WritableStream destination, optional StreamPipeOptions options = {}); + sequence tee(); + + async iterable(optional ReadableStreamIteratorOptions options = {}); +}; + +typedef (ReadableStreamDefaultReader or ReadableStreamBYOBReader) ReadableStreamReader; + +enum ReadableStreamReaderMode { "byob" }; + +dictionary ReadableStreamGetReaderOptions { + ReadableStreamReaderMode mode; +}; + +dictionary ReadableStreamIteratorOptions { + boolean preventCancel = false; +}; + +dictionary ReadableWritablePair { + required ReadableStream readable; + required WritableStream writable; +}; + +dictionary StreamPipeOptions { + boolean preventClose = false; + boolean preventAbort = false; + boolean preventCancel = false; + AbortSignal signal; +}; + +dictionary UnderlyingSource { + UnderlyingSourceStartCallback start; + UnderlyingSourcePullCallback pull; + UnderlyingSourceCancelCallback cancel; + ReadableStreamType type; + [EnforceRange] unsigned long long autoAllocateChunkSize; +}; + +typedef (ReadableStreamDefaultController or ReadableByteStreamController) 
ReadableStreamController; + +callback UnderlyingSourceStartCallback = any (ReadableStreamController controller); +callback UnderlyingSourcePullCallback = Promise (ReadableStreamController controller); +callback UnderlyingSourceCancelCallback = Promise (optional any reason); + +enum ReadableStreamType { "bytes" }; + +interface mixin ReadableStreamGenericReader { + readonly attribute Promise closed; + + Promise cancel(optional any reason); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamDefaultReader { + constructor(ReadableStream stream); + + Promise read(); + undefined releaseLock(); +}; +ReadableStreamDefaultReader includes ReadableStreamGenericReader; + +dictionary ReadableStreamDefaultReadResult { + any value; + boolean done; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamBYOBReader { + constructor(ReadableStream stream); + + Promise read(ArrayBufferView view); + undefined releaseLock(); +}; +ReadableStreamBYOBReader includes ReadableStreamGenericReader; + +dictionary ReadableStreamBYOBReadResult { + ArrayBufferView value; + boolean done; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamDefaultController { + readonly attribute unrestricted double? desiredSize; + + undefined close(); + undefined enqueue(optional any chunk); + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableByteStreamController { + readonly attribute ReadableStreamBYOBRequest? byobRequest; + readonly attribute unrestricted double? desiredSize; + + undefined close(); + undefined enqueue(ArrayBufferView chunk); + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamBYOBRequest { + readonly attribute ArrayBufferView? 
view; + + undefined respond([EnforceRange] unsigned long long bytesWritten); + undefined respondWithNewView(ArrayBufferView view); +}; + +[Exposed=(Window,Worker,Worklet), Transferable] +interface WritableStream { + constructor(optional object underlyingSink, optional QueuingStrategy strategy = {}); + + readonly attribute boolean locked; + + Promise abort(optional any reason); + Promise close(); + WritableStreamDefaultWriter getWriter(); +}; + +dictionary UnderlyingSink { + UnderlyingSinkStartCallback start; + UnderlyingSinkWriteCallback write; + UnderlyingSinkCloseCallback close; + UnderlyingSinkAbortCallback abort; + any type; +}; + +callback UnderlyingSinkStartCallback = any (WritableStreamDefaultController controller); +callback UnderlyingSinkWriteCallback = Promise (any chunk, WritableStreamDefaultController controller); +callback UnderlyingSinkCloseCallback = Promise (); +callback UnderlyingSinkAbortCallback = Promise (optional any reason); + +[Exposed=(Window,Worker,Worklet)] +interface WritableStreamDefaultWriter { + constructor(WritableStream stream); + + readonly attribute Promise closed; + readonly attribute unrestricted double? 
desiredSize; + readonly attribute Promise ready; + + Promise abort(optional any reason); + Promise close(); + undefined releaseLock(); + Promise write(optional any chunk); +}; + +[Exposed=(Window,Worker,Worklet)] +interface WritableStreamDefaultController { + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet), Transferable] +interface TransformStream { + constructor(optional object transformer, + optional QueuingStrategy writableStrategy = {}, + optional QueuingStrategy readableStrategy = {}); + + readonly attribute ReadableStream readable; + readonly attribute WritableStream writable; +}; + +dictionary Transformer { + TransformerStartCallback start; + TransformerTransformCallback transform; + TransformerFlushCallback flush; + any readableType; + any writableType; +}; + +callback TransformerStartCallback = any (TransformStreamDefaultController controller); +callback TransformerFlushCallback = Promise (TransformStreamDefaultController controller); +callback TransformerTransformCallback = Promise (any chunk, TransformStreamDefaultController controller); + +[Exposed=(Window,Worker,Worklet)] +interface TransformStreamDefaultController { + readonly attribute unrestricted double? 
desiredSize; + + undefined enqueue(optional any chunk); + undefined error(optional any reason); + undefined terminate(); +}; + +dictionary QueuingStrategy { + unrestricted double highWaterMark; + QueuingStrategySize size; +}; + +callback QueuingStrategySize = unrestricted double (optional any chunk); + +dictionary QueuingStrategyInit { + required unrestricted double highWaterMark; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ByteLengthQueuingStrategy { + constructor(QueuingStrategyInit init); + + readonly attribute unrestricted double highWaterMark; + readonly attribute Function size; +}; + +[Exposed=(Window,Worker,Worklet)] +interface CountQueuingStrategy { + constructor(QueuingStrategyInit init); + + readonly attribute unrestricted double highWaterMark; + readonly attribute Function size; +}; + +interface mixin GenericTransformStream { + readonly attribute ReadableStream readable; + readonly attribute WritableStream writable; +}; diff --git a/test/fixtures/wpt/streams/META.yml b/test/fixtures/wpt/streams/META.yml new file mode 100644 index 00000000000000..1259a55cb5a99e --- /dev/null +++ b/test/fixtures/wpt/streams/META.yml @@ -0,0 +1,7 @@ +spec: https://streams.spec.whatwg.org/ +suggested_reviewers: + - domenic + - yutakahirano + - youennf + - wanderview + - ricea diff --git a/test/fixtures/wpt/streams/README.md b/test/fixtures/wpt/streams/README.md new file mode 100644 index 00000000000000..9ab6e1284ad50d --- /dev/null +++ b/test/fixtures/wpt/streams/README.md @@ -0,0 +1,3 @@ +# Streams Tests + +The work on the streams tests is closely tracked by the specification authors, who maintain a reference implementation intended to match the spec line-by-line while passing all of these tests. See [the whatwg/streams repository for details](https://github.com/whatwg/streams/tree/main/reference-implementation). Some tests may be in that repository while the spec sections they test are still undergoing heavy churn. 
diff --git a/test/fixtures/wpt/streams/idlharness.any.js b/test/fixtures/wpt/streams/idlharness.any.js new file mode 100644 index 00000000000000..42a17da58c5ae3 --- /dev/null +++ b/test/fixtures/wpt/streams/idlharness.any.js @@ -0,0 +1,79 @@ +// META: global=window,worker +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js +// META: timeout=long + +idl_test( + ['streams'], + ['dom'], // for AbortSignal + async idl_array => { + // Empty try/catches ensure that if something isn't implemented (e.g., readable byte streams, or writable streams) + // the harness still sets things up correctly. Note that the corresponding interface tests will still fail. + + try { + new ReadableStream({ + start(c) { + self.readableStreamDefaultController = c; + } + }); + } catch {} + + try { + new ReadableStream({ + start(c) { + self.readableByteStreamController = c; + }, + type: 'bytes' + }); + } catch {} + + try { + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + pull(c) { + self.readableStreamByobRequest = c.byobRequest; + resolvePullCalledPromise(); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + reader.read(new Uint8Array(1)); + await pullCalledPromise; + } catch {} + + try { + new WritableStream({ + start(c) { + self.writableStreamDefaultController = c; + } + }); + } catch {} + + try { + new TransformStream({ + start(c) { + self.transformStreamDefaultController = c; + } + }); + } catch {} + + idl_array.add_objects({ + ReadableStream: ["new ReadableStream()"], + ReadableStreamDefaultReader: ["(new ReadableStream()).getReader()"], + ReadableStreamBYOBReader: ["(new ReadableStream({ type: 'bytes' })).getReader({ mode: 'byob' })"], + ReadableStreamDefaultController: ["self.readableStreamDefaultController"], + ReadableByteStreamController: ["self.readableByteStreamController"], + 
ReadableStreamBYOBRequest: ["self.readableStreamByobRequest"], + WritableStream: ["new WritableStream()"], + WritableStreamDefaultWriter: ["(new WritableStream()).getWriter()"], + WritableStreamDefaultController: ["self.writableStreamDefaultController"], + TransformStream: ["new TransformStream()"], + TransformStreamDefaultController: ["self.transformStreamDefaultController"], + ByteLengthQueuingStrategy: ["new ByteLengthQueuingStrategy({ highWaterMark: 5 })"], + CountQueuingStrategy: ["new CountQueuingStrategy({ highWaterMark: 5 })"] + }); + } +); diff --git a/test/fixtures/wpt/streams/piping/abort.any.js b/test/fixtures/wpt/streams/piping/abort.any.js new file mode 100644 index 00000000000000..3fe029de95a1b8 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/abort.any.js @@ -0,0 +1,375 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +// Tests for the use of pipeTo with AbortSignal. +// There is some extra complexity to avoid timeouts in environments where abort is not implemented. + +const error1 = new Error('error1'); +error1.name = 'error1'; +const error2 = new Error('error2'); +error2.name = 'error2'; + +const errorOnPull = { + pull(controller) { + // This will cause the test to error if pipeTo abort is not implemented. + controller.error('failed to abort'); + } +}; + +// To stop pull() being called immediately when the stream is created, we need to set highWaterMark to 0. 
+const hwm0 = { highWaterMark: 0 }; + +for (const invalidSignal of [null, 'AbortSignal', true, -1, Object.create(AbortSignal.prototype)]) { + promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { signal: invalidSignal }), 'pipeTo should reject') + .then(() => { + assert_equals(rs.events.length, 0, 'no ReadableStream methods should have been called'); + assert_equals(ws.events.length, 0, 'no WritableStream methods should have been called'); + }); + }, `a signal argument '${invalidSignal}' should cause pipeTo() to reject`); +} + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() => Promise.all([ + rs.getReader().closed, + promise_rejects_dom(t, 'AbortError', ws.getWriter().closed, 'writer.closed should reject') + ])) + .then(() => { + assert_equals(rs.events.length, 2, 'cancel should have been called'); + assert_equals(rs.events[0], 'cancel', 'first event should be cancel'); + assert_equals(rs.events[1].name, 'AbortError', 'the argument to cancel should be an AbortError'); + assert_equals(rs.events[1].constructor.name, 'DOMException', + 'the argument to cancel should be a DOMException'); + }); +}, 'an aborted signal should cause the writable stream to reject with an AbortError'); + +promise_test(() => { + let error; + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return rs.pipeTo(ws, { signal }) + .catch(e => { + error = e; + }) + .then(() => Promise.all([ + rs.getReader().closed, + 
ws.getWriter().closed.catch(e => { + assert_equals(e, error, 'the writable should be errored with the same object'); + }) + ])) + .then(() => { + assert_equals(rs.events.length, 2, 'cancel should have been called'); + assert_equals(rs.events[0], 'cancel', 'first event should be cancel'); + assert_equals(rs.events[1], error, 'the readable should be canceled with the same object'); + }); +}, 'all the AbortError objects should be the same object'); + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true }), 'pipeTo should reject') + .then(() => assert_equals(rs.events.length, 0, 'cancel should not be called')); +}, 'preventCancel should prevent canceling the readable'); + +promise_test(t => { + const rs = new ReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventAbort: true }), 'pipeTo should reject') + .then(() => { + assert_equals(ws.events.length, 0, 'writable should not have been aborted'); + return ws.getWriter().ready; + }); +}, 'preventAbort should prevent aborting the readable'); + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true, preventAbort: true }), + 'pipeTo should reject') + .then(() => { + assert_equals(rs.events.length, 0, 'cancel should not be called'); + assert_equals(ws.events.length, 0, 'writable should 
not have been aborted'); + return ws.getWriter().ready; + }); +}, 'preventCancel and preventAbort should prevent canceling the readable and aborting the readable'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const ws = recordingWritableStream({ + write() { + abortController.abort(); + } + }); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() => { + assert_equals(ws.events.length, 4, 'only chunk "a" should have been written'); + assert_array_equals(ws.events.slice(0, 3), ['write', 'a', 'abort'], 'events should match'); + assert_equals(ws.events[3].name, 'AbortError', 'abort reason should be an AbortError'); + }); +}, 'abort should prevent further reads'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + c.enqueue('a'); + c.enqueue('b'); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + let resolveWrite; + const writePromise = new Promise(resolve => { + resolveWrite = resolve; + }); + const ws = recordingWritableStream({ + write() { + return writePromise; + } + }, new CountQueuingStrategy({ highWaterMark: Infinity })); + const pipeToPromise = rs.pipeTo(ws, { signal }); + return delay(0).then(() => { + abortController.abort(); + readController.close(); // Make sure the test terminates when signal is not implemented. 
+ resolveWrite(); + return promise_rejects_dom(t, 'AbortError', pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_equals(ws.events.length, 6, 'chunks "a" and "b" should have been written'); + assert_array_equals(ws.events.slice(0, 5), ['write', 'a', 'write', 'b', 'abort'], 'events should match'); + assert_equals(ws.events[5].name, 'AbortError', 'abort reason should be an AbortError'); + }); +}, 'all pending writes should complete on abort'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + }, + cancel() { + return Promise.reject(error1); + } + }, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'a rejection from underlyingSource.cancel() should be returned by pipeTo()'); + +promise_test(t => { + const rs = new ReadableStream(errorOnPull, hwm0); + const ws = new WritableStream({ + abort() { + return Promise.reject(error1); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'a rejection from underlyingSink.abort() should be returned by pipeTo()'); + +promise_test(t => { + const events = []; + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + }, + cancel() { + events.push('cancel'); + return Promise.reject(error1); + } + }, hwm0); + const ws = new WritableStream({ + abort() { + events.push('abort'); + return Promise.reject(error2); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error2, rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() 
=> assert_array_equals(events, ['abort', 'cancel'], 'abort() should be called before cancel()')); +}, 'a rejection from underlyingSink.abort() should be preferred to one from underlyingSource.cancel()'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'abort signal takes priority over closed readable'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.error(error1); + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'abort signal takes priority over errored readable'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + } + }, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + const writer = ws.getWriter(); + return writer.close().then(() => { + writer.releaseLock(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); + }); +}, 'abort signal takes priority over closed writable'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + } + }, hwm0); + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal 
}), 'pipeTo should reject'); +}, 'abort signal takes priority over errored writable'); + +promise_test(() => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventClose: true }); + readController.close(); + return Promise.resolve().then(() => { + abortController.abort(); + return pipeToPromise; + }).then(() => ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is closed'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true }); + readController.error(error1); + return Promise.resolve().then(() => { + abortController.abort(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is errored'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + let resolveWrite; + const writePromise = new Promise(resolve => { + resolveWrite = resolve; + }); + const ws = new WritableStream({ + write() { + readController.error(error1); + return writePromise; + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true }); + readController.enqueue('a'); + return delay(0).then(() => { + abortController.abort(); + resolveWrite(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => 
ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is errored, even with pending writes'); + +promise_test(t => { + const rs = recordingReadableStream({ + pull(controller) { + return delay(0).then(() => controller.close()); + } + }); + let writeController; + const ws = new WritableStream({ + start(c) { + writeController = c; + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventCancel: true }); + return Promise.resolve().then(() => { + writeController.error(error1); + return Promise.resolve(); + }).then(() => { + abortController.abort(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_array_equals(rs.events, ['pull'], 'cancel should not have been called'); + }); +}, 'abort should do nothing after the writable is errored'); diff --git a/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js b/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js new file mode 100644 index 00000000000000..bd1e9cb92657b1 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js @@ -0,0 +1,153 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + } + ); + +}, 
'Closing must be propagated backward: starts closed; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + // Our recording streams do not deal well with errors generated by the system, so give them some help + let recordedError; + const rs = recordingReadableStream({ + cancel(cancelErr) { + recordedError = cancelErr; + throw error1; + } + }); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_equals(recordedError.name, 'TypeError', 'the cancel reason must be a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', recordedError]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel omitted; rejected cancel promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? 
'-0' : String(falsy); + + promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws, { preventCancel: falsy }).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + } + ); + + }, `Closing must be propagated backward: starts closed; preventCancel = ${stringVersion} (falsy); fulfilled cancel ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: truthy })).then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + + }, `Closing must be propagated backward: starts closed; preventCancel = ${String(truthy)} (truthy)`); +} + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: true, preventAbort: true })) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer 
= ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, + rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: true })) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true, preventClose ' + + '= true'); diff --git a/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js b/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js new file mode 100644 index 00000000000000..fc3282eea74b40 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js @@ -0,0 +1,589 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: starts closed; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + 
rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: starts closed; preventClose omitted; rejected close promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy); + + promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: falsy }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + + }, `Closing must be propagated forward: starts closed; preventClose = ${stringVersion} (falsy); fulfilled close ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: truthy }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + + }, `Closing must be propagated forward: starts closed; preventClose = ${String(truthy)} (truthy)`); +} + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: true, preventAbort: true }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + 
+ return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true'); + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: true, preventAbort: true, preventCancel: true }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true, preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: becomes 
closed asynchronously; preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 
'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(() => { + 
assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose = true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.close(); + + // Flush async events and verify that no shutdown occurs. 
+ return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'close' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'close']); + }); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.close(); + + // Flush async events and verify that no shutdown occurs. + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the chunk must have been written, but close must not have happened'); + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the chunk must have been written, but close must not have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; preventClose = true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws).then(() => { + pipeComplete = 
true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but close must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.close(); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but close must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close'], + 'all chunks must have been written and close must have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but close must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + 
rs.controller.close(); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but close must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'all chunks must have been written, but close must not have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write; preventClose = true'); + + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + let rejectWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWritePromise = reject; + }); + } + }, { highWaterMark: 3 }); + const pipeToPromise = rs.pipeTo(ws); + return delay(0).then(() => { + rejectWritePromise(error1); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['write', 'a']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed, 'ws should be errored') + ]); + }); +}, 'Closing must be propagated forward: erroring the writable while flushing pending writes should error pipeTo'); diff --git a/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js b/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js new file mode 100644 index 00000000000000..6dc203066e3d7e --- /dev/null +++ b/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js @@ -0,0 +1,630 @@ +// META: global=window,worker,jsshell +// META: 
script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: starts errored; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; ' + + 'fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, 
error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; rejected ' + + 'cancel promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy); + + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: falsy }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + + }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` + + `${stringVersion} (falsy); fulfilled cancel promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, 
writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: truthy }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + + }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` + + `${String(truthy)} (truthy)`); +} + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write, preventCancel = true; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: 
true }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel = true, ' + + 'preventAbort = true, preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; fulfilled ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + }, + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; rejected ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, 
['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' + + 'false; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' + + 'false; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } 
+ return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + 
}); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + } + }); + + const ws = recordingWritableStream({ + write(chunk) { + if (chunk === 'c') { + return Promise.reject(error1); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']); + }); + +}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' + + 'preventCancel omitted (but cancel is never called)'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + } + }); + + const ws = recordingWritableStream({ + write(chunk) { + if (chunk === 'c') { + return Promise.reject(error1); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']); + }); + +}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' + + 'preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return 
pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'false; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'false; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return rs.pipeTo(ws).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err, error1, 'the promise must reject with error1'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + 
assert_array_equals(ws.events, ['abort', error1]); + } + ); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; fulfilled ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error') + .then(() => { + return ws.getWriter().closed.then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err, error1, 'the promise must reject with error1'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['abort', error1]); + } + ); + }); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; rejected ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true })).then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + return flushAsyncEvents(); + } + }); + + const pipePromise = rs.pipeTo(ws); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + ws.controller.error(error1); + + return promise_rejects_exactly(t, error1, pipePromise); + }).then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + 
assert_array_equals(ws.events, ['write', 'a']); + }); + +}, 'Errors must be propagated backward: erroring via the controller errors once pending write completes'); diff --git a/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js b/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js new file mode 100644 index 00000000000000..f35ec665eec22f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js @@ -0,0 +1,569 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = false; rejected abort promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? 
'-0' : String(falsy); + + promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: falsy }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + + }, `Errors must be propagated forward: starts errored; preventAbort = ${stringVersion} (falsy); fulfilled abort ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: truthy }), + 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + + }, `Errors must be propagated forward: starts errored; preventAbort = ${String(truthy)} (truthy)`); +} + + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true }), + 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true, preventClose: true }), + 'pipeTo must reject with the same error') + .then(() => { + 
assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true, preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new 
CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo 
must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must 
reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const 
writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + rs.controller.error(error1); + + // Flush async events and verify that no shutdown occurs. + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', error1]); + }); + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + rs.controller.error(error1); + + // Flush async events and verify that no shutdown occurs. 
+ return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but abort must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.error(error1); + resolveWritePromise(); + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but abort must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'abort', error1], + 'all chunks must have been written and abort must have happened'); + }); + +}, 'Errors must be 
propagated forward: shutdown must not occur until the final write completes; becomes errored after first write'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but abort must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.error(error1); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but abort must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'all chunks must have been written, but abort must not have happened'); + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; becomes errored after first write; preventAbort = true'); diff --git a/test/fixtures/wpt/streams/piping/flow-control.any.js b/test/fixtures/wpt/streams/piping/flow-control.any.js new file mode 100644 index 00000000000000..db83c011f4a718 --- /dev/null 
+++ b/test/fixtures/wpt/streams/piping/flow-control.any.js @@ -0,0 +1,297 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws, { preventCancel: true }); + + // Wait and make sure it doesn't do any reading. + return flushAsyncEvents().then(() => { + ws.controller.error(error1); + }) + .then(() => promise_rejects_exactly(t, error1, pipePromise, 'pipeTo must reject with the same error')) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }) + .then(() => readableStreamToArray(rs)) + .then(chunksNotPreviouslyRead => { + assert_array_equals(chunksNotPreviouslyRead, ['a', 'b']); + }); + +}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks'); + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('b'); + controller.close(); + } + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + }); + + const writer = ws.getWriter(); + const firstWritePromise = writer.write('a'); + assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0'); + writer.releaseLock(); + + // firstWritePromise won't settle until we call resolveWritePromise. 
+ + const pipePromise = rs.pipeTo(ws); + + return flushAsyncEvents().then(() => resolveWritePromise()) + .then(() => Promise.all([firstWritePromise, pipePromise])) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']); + }); + +}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks, but then does'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + }); + + const writer = ws.getWriter(); + writer.write('a'); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); + assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0'); + writer.releaseLock(); + + const pipePromise = rs.pipeTo(ws); + + rs.controller.enqueue('b'); + resolveWritePromise(); + rs.controller.close(); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']); + }); + }); + +}, 'Piping from an empty ReadableStream into a WritableStream that does not desire chunks, but then the readable ' + + 'stream becomes non-empty and the writable stream starts desiring chunks'); + +promise_test(() => { + const unreadChunks = ['b', 'c', 'd']; + + const rs = recordingReadableStream({ + pull(controller) { + controller.enqueue(unreadChunks.shift()); + if (unreadChunks.length === 0) { + controller.close(); + } + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + 
}, new CountQueuingStrategy({ highWaterMark: 3 })); + + const writer = ws.getWriter(); + const firstWritePromise = writer.write('a'); + assert_equals(writer.desiredSize, 2, 'after writing the writer\'s desiredSize must be 2'); + writer.releaseLock(); + + // firstWritePromise won't settle until we call resolveWritePromise. + + const pipePromise = rs.pipeTo(ws); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); + assert_equals(unreadChunks.length, 1, 'chunks should continue to be enqueued until the HWM is reached'); + }).then(() => resolveWritePromise()) + .then(() => Promise.all([firstWritePromise, pipePromise])) + .then(() => { + assert_array_equals(rs.events, ['pull', 'pull', 'pull']); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b','write', 'c','write', 'd', 'close']); + }); + +}, 'Piping from a ReadableStream to a WritableStream that desires more chunks before finishing with previous ones'); + +class StepTracker { + constructor() { + this.waiters = []; + this.wakers = []; + } + + // Returns promise which resolves when step `n` is reached. Also schedules step n + 1 to happen shortly after the + // promise is resolved. 
+ waitThenAdvance(n) { + if (this.waiters[n] === undefined) { + this.waiters[n] = new Promise(resolve => { + this.wakers[n] = resolve; + }); + this.waiters[n] + .then(() => flushAsyncEvents()) + .then(() => { + if (this.wakers[n + 1] !== undefined) { + this.wakers[n + 1](); + } + }); + } + if (n == 0) { + this.wakers[0](); + } + return this.waiters[n]; + } +} + +promise_test(() => { + const steps = new StepTracker(); + const desiredSizes = []; + const rs = recordingReadableStream({ + start(controller) { + steps.waitThenAdvance(1).then(() => enqueue('a')); + steps.waitThenAdvance(3).then(() => enqueue('b')); + steps.waitThenAdvance(5).then(() => enqueue('c')); + steps.waitThenAdvance(7).then(() => enqueue('d')); + steps.waitThenAdvance(11).then(() => controller.close()); + + function enqueue(chunk) { + controller.enqueue(chunk); + desiredSizes.push(controller.desiredSize); + } + } + }); + + const chunksFinishedWriting = []; + const writableStartPromise = Promise.resolve(); + let writeCalled = false; + const ws = recordingWritableStream({ + start() { + return writableStartPromise; + }, + write(chunk) { + const waitForStep = writeCalled ? 12 : 9; + writeCalled = true; + return steps.waitThenAdvance(waitForStep).then(() => { + chunksFinishedWriting.push(chunk); + }); + } + }); + + return writableStartPromise.then(() => { + const pipePromise = rs.pipeTo(ws); + steps.waitThenAdvance(0); + + return Promise.all([ + steps.waitThenAdvance(2).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 2, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 2, one chunk must have been written'); + + // When 'a' (the very first chunk) was enqueued, it was immediately used to fulfill the outstanding read request + // promise, leaving the queue empty. 
+ assert_array_equals(desiredSizes, [1], + 'at step 2, the desiredSize at the last enqueue (step 1) must have been 1'); + assert_equals(rs.controller.desiredSize, 1, 'at step 2, the current desiredSize must be 1'); + }), + + steps.waitThenAdvance(4).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 4, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 4, one chunk must have been written'); + + // When 'b' was enqueued at step 3, the queue was also empty, since immediately after enqueuing 'a' at + // step 1, it was dequeued in order to fulfill the read() call that was made at step 0. Thus the queue + // had size 1 (thus desiredSize of 0). + assert_array_equals(desiredSizes, [1, 0], + 'at step 4, the desiredSize at the last enqueue (step 3) must have been 0'); + assert_equals(rs.controller.desiredSize, 0, 'at step 4, the current desiredSize must be 0'); + }), + + steps.waitThenAdvance(6).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 6, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 6, one chunk must have been written'); + + // When 'c' was enqueued at step 5, the queue was not empty; it had 'b' in it, since 'b' will not be read until + // the first write completes at step 9. Thus, the queue size is 2 after enqueuing 'c', giving a desiredSize of + // -1. 
+ assert_array_equals(desiredSizes, [1, 0, -1], + 'at step 6, the desiredSize at the last enqueue (step 5) must have been -1'); + assert_equals(rs.controller.desiredSize, -1, 'at step 6, the current desiredSize must be -1'); + }), + + steps.waitThenAdvance(8).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 8, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 8, one chunk must have been written'); + + // When 'd' was enqueued at step 7, the situation is the same as before, leading to a queue containing 'b', 'c', + // and 'd'. + assert_array_equals(desiredSizes, [1, 0, -1, -2], + 'at step 8, the desiredSize at the last enqueue (step 7) must have been -2'); + assert_equals(rs.controller.desiredSize, -2, 'at step 8, the current desiredSize must be -2'); + }), + + steps.waitThenAdvance(10).then(() => { + assert_array_equals(chunksFinishedWriting, ['a'], 'at step 10, one chunk must have finished writing'); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'at step 10, two chunks must have been written'); + + assert_equals(rs.controller.desiredSize, -1, 'at step 10, the current desiredSize must be -1'); + }), + + pipePromise.then(() => { + assert_array_equals(desiredSizes, [1, 0, -1, -2], 'backpressure must have been exerted at the source'); + assert_array_equals(chunksFinishedWriting, ['a', 'b', 'c', 'd'], 'all chunks finished writing'); + + assert_array_equals(rs.eventsWithoutPulls, [], 'nothing unexpected should happen to the ReadableStream'); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c', 'write', 'd', 'close'], + 'all chunks were written (and the WritableStream closed)'); + }) + ]); + }); +}, 'Piping to a WritableStream that does not consume the writes fast enough exerts backpressure on the ReadableStream'); diff --git a/test/fixtures/wpt/streams/piping/general.any.js b/test/fixtures/wpt/streams/piping/general.any.js new file mode 100644 index 
00000000000000..2e02dfad78a0fa --- /dev/null +++ b/test/fixtures/wpt/streams/piping/general.any.js @@ -0,0 +1,211 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +test(() => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + assert_false(rs.locked, 'sanity check: the ReadableStream must not start locked'); + assert_false(ws.locked, 'sanity check: the WritableStream must not start locked'); + + rs.pipeTo(ws); + + assert_true(rs.locked, 'the ReadableStream must become locked'); + assert_true(ws.locked, 'the WritableStream must become locked'); + +}, 'Piping must lock both the ReadableStream and WritableStream'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + + return rs.pipeTo(ws).then(() => { + assert_false(rs.locked, 'the ReadableStream must become unlocked'); + assert_false(ws.locked, 'the WritableStream must become unlocked'); + }); + +}, 'Piping finishing must unlock both the ReadableStream and WritableStream'); + +promise_test(t => { + + const fakeRS = Object.create(ReadableStream.prototype); + const ws = new WritableStream(); + + return methodRejects(t, ReadableStream.prototype, 'pipeTo', fakeRS, [ws]); + +}, 'pipeTo must check the brand of its ReadableStream this value'); + +promise_test(t => { + + const rs = new ReadableStream(); + const fakeWS = Object.create(WritableStream.prototype); + + return methodRejects(t, ReadableStream.prototype, 'pipeTo', rs, [fakeWS]); + +}, 'pipeTo must check the brand of its WritableStream argument'); + +promise_test(t => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + rs.getReader(); + + assert_true(rs.locked, 'sanity check: the ReadableStream starts locked'); + assert_false(ws.locked, 'sanity check: the WritableStream does not start locked'); + + return 
promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => { + assert_false(ws.locked, 'the WritableStream must still be unlocked'); + }); + +}, 'pipeTo must fail if the ReadableStream is locked, and not lock the WritableStream'); + +promise_test(t => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + ws.getWriter(); + + assert_false(rs.locked, 'sanity check: the ReadableStream does not start locked'); + assert_true(ws.locked, 'sanity check: the WritableStream starts locked'); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => { + assert_false(rs.locked, 'the ReadableStream must still be unlocked'); + }); + +}, 'pipeTo must fail if the WritableStream is locked, and not lock the ReadableStream'); + +promise_test(() => { + + const CHUNKS = 10; + + const rs = new ReadableStream({ + start(c) { + for (let i = 0; i < CHUNKS; ++i) { + c.enqueue(i); + } + c.close(); + } + }); + + const written = []; + const ws = new WritableStream({ + write(chunk) { + written.push(chunk); + }, + close() { + written.push('closed'); + } + }, new CountQueuingStrategy({ highWaterMark: CHUNKS })); + + return rs.pipeTo(ws).then(() => { + const targetValues = []; + for (let i = 0; i < CHUNKS; ++i) { + targetValues.push(i); + } + targetValues.push('closed'); + + assert_array_equals(written, targetValues, 'the correct values must be written'); + + // Ensure both readable and writable are closed by the time the pipe finishes. + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + + // NOTE: no requirement on *when* the pipe finishes; that is left to implementations. 
+ +}, 'Piping from a ReadableStream from which lots of chunks are synchronously readable'); + +promise_test(t => { + + let controller; + const rs = recordingReadableStream({ + start(c) { + controller = c; + } + }); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws).then(() => { + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + }); + + t.step_timeout(() => { + controller.enqueue('Hello'); + t.step_timeout(() => controller.close(), 10); + }, 10); + + return pipePromise; + +}, 'Piping from a ReadableStream for which a chunk becomes asynchronously readable after the pipeTo'); + +for (const preventAbort of [true, false]) { + promise_test(() => { + + const rs = new ReadableStream({ + pull() { + return Promise.reject(undefined); + } + }); + + return rs.pipeTo(new WritableStream(), { preventAbort }).then( + () => assert_unreached('pipeTo promise should be rejected'), + value => assert_equals(value, undefined, 'rejection value should be undefined')); + + }, `an undefined rejection from pull should cause pipeTo() to reject when preventAbort is ${preventAbort}`); +} + +for (const preventCancel of [true, false]) { + promise_test(() => { + + const rs = new ReadableStream({ + pull(controller) { + controller.enqueue(0); + } + }); + + const ws = new WritableStream({ + write() { + return Promise.reject(undefined); + } + }); + + return rs.pipeTo(ws, { preventCancel }).then( + () => assert_unreached('pipeTo promise should be rejected'), + value => assert_equals(value, undefined, 'rejection value should be undefined')); + + }, `an undefined rejection from write should cause pipeTo() to reject when preventCancel is ${preventCancel}`); +} + +promise_test(t => { + const rs = new ReadableStream(); + const ws = new WritableStream(); + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { + get preventAbort() { + ws.getWriter(); + } + }), 'pipeTo should reject'); +}, 'pipeTo() should reject if an option getter grabs a writer'); + +promise_test(t 
=> { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + + return rs.pipeTo(ws, null); +}, 'pipeTo() promise should resolve if null is passed'); diff --git a/test/fixtures/wpt/streams/piping/multiple-propagation.any.js b/test/fixtures/wpt/streams/piping/multiple-propagation.any.js new file mode 100644 index 00000000000000..c9a486f3f9ac2f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/multiple-propagation.any.js @@ -0,0 +1,227 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +function createErroredWritableStream(t) { + return Promise.resolve().then(() => { + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + const writer = ws.getWriter(); + return promise_rejects_exactly(t, error2, writer.closed, 'the writable stream must be errored with error2') + .then(() => { + writer.releaseLock(); + assert_array_equals(ws.events, []); + return ws; + }); + }); +} + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + // Trying to abort a stream that is erroring will give the writable's error + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2') + ]); + }); + +}, 'Piping from an errored readable stream to an erroring 
writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'); + }); +}, 'Piping from an errored readable stream to an errored writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the readable stream\'s error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2') + ]); + }); + +}, 'Piping from an errored readable stream to an erroring writable stream; preventAbort = true'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the readable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'); + }); + +}, 'Piping from an errored readable stream to an errored writable stream; preventAbort = true'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + 
}); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error1, ws.getWriter().closed, + 'closed must reject with error1'), + promise_rejects_exactly(t, error1, closePromise, + 'close() must reject with error1') + ]); + }); + +}, 'Piping from an errored readable stream to a closing writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + + return flushAsyncEvents().then(() => { + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + ws.getWriter().closed, + closePromise + ]); + }); + }); + +}, 'Piping from an errored readable stream to a closed writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error1); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + rs.getReader().closed, + 
promise_rejects_exactly(t, error1, ws.getWriter().closed, 'the writable stream must be errored with error1') + ]); + }); + +}, 'Piping from a closed readable stream to an erroring writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return rs.getReader().closed; + }); + +}, 'Piping from a closed readable stream to an errored writable stream'); + +promise_test(() => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws).then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Piping from a closed readable stream to a closed writable stream'); diff --git a/test/fixtures/wpt/streams/piping/pipe-through.any.js b/test/fixtures/wpt/streams/piping/pipe-through.any.js new file mode 100644 index 00000000000000..35dbb456b3e2c6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/pipe-through.any.js @@ -0,0 +1,268 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +function duckTypedPassThroughTransform() { + let enqueueInReadable; + let closeReadable; + + return { + writable: new WritableStream({ + write(chunk) { + enqueueInReadable(chunk); + }, + + close() { + closeReadable(); + } + }), + + readable: new ReadableStream({ + start(c) { + enqueueInReadable = c.enqueue.bind(c); + closeReadable = c.close.bind(c); + } + }) + }; +} + +function 
uninterestingReadableWritablePair() { + return { writable: new WritableStream(), readable: new ReadableStream() }; +} + +promise_test(() => { + const readableEnd = sequentialReadableStream(5).pipeThrough(duckTypedPassThroughTransform()); + + return readableStreamToArray(readableEnd).then(chunks => + assert_array_equals(chunks, [1, 2, 3, 4, 5]), 'chunks should match'); +}, 'Piping through a duck-typed pass-through transform stream should work'); + +promise_test(() => { + const transform = { + writable: new WritableStream({ + start(c) { + c.error(new Error('this rejection should not be reported as unhandled')); + } + }), + readable: new ReadableStream() + }; + + sequentialReadableStream(5).pipeThrough(transform); + + // The test harness should complain about unhandled rejections by then. + return flushAsyncEvents(); + +}, 'Piping through a transform errored on the writable end does not cause an unhandled promise rejection'); + +test(() => { + let calledPipeTo = false; + class BadReadableStream extends ReadableStream { + pipeTo() { + calledPipeTo = true; + } + } + + const brs = new BadReadableStream({ + start(controller) { + controller.close(); + } + }); + const readable = new ReadableStream(); + const writable = new WritableStream(); + const result = brs.pipeThrough({ readable, writable }); + + assert_false(calledPipeTo, 'the overridden pipeTo should not have been called'); + assert_equals(result, readable, 'return value should be the passed readable property'); +}, 'pipeThrough should not call pipeTo on this'); + +test(t => { + let calledFakePipeTo = false; + const realPipeTo = ReadableStream.prototype.pipeTo; + t.add_cleanup(() => { + ReadableStream.prototype.pipeTo = realPipeTo; + }); + ReadableStream.prototype.pipeTo = () => { + calledFakePipeTo = true; + }; + const rs = new ReadableStream(); + const readable = new ReadableStream(); + const writable = new WritableStream(); + const result = rs.pipeThrough({ readable, writable }); + + assert_false(calledFakePipeTo, 
'the monkey-patched pipeTo should not have been called'); + assert_equals(result, readable, 'return value should be the passed readable property'); + +}, 'pipeThrough should not call pipeTo on the ReadableStream prototype'); + +const badReadables = [null, undefined, 0, NaN, true, 'ReadableStream', Object.create(ReadableStream.prototype)]; +for (const readable of badReadables) { + test(() => { + assert_throws_js(TypeError, + ReadableStream.prototype.pipeThrough.bind(readable, uninterestingReadableWritablePair()), + 'pipeThrough should throw'); + }, `pipeThrough should brand-check this and not allow '${readable}'`); + + test(() => { + const rs = new ReadableStream(); + let writableGetterCalled = false; + assert_throws_js( + TypeError, + () => rs.pipeThrough({ + get writable() { + writableGetterCalled = true; + return new WritableStream(); + }, + readable + }), + 'pipeThrough should brand-check readable' + ); + assert_false(writableGetterCalled, 'writable should not have been accessed'); + }, `pipeThrough should brand-check readable and not allow '${readable}'`); +} + +const badWritables = [null, undefined, 0, NaN, true, 'WritableStream', Object.create(WritableStream.prototype)]; +for (const writable of badWritables) { + test(() => { + const rs = new ReadableStream({ + start(c) { + c.close(); + } + }); + let readableGetterCalled = false; + assert_throws_js(TypeError, () => rs.pipeThrough({ + get readable() { + readableGetterCalled = true; + return new ReadableStream(); + }, + writable + }), + 'pipeThrough should brand-check writable'); + assert_true(readableGetterCalled, 'readable should have been accessed'); + }, `pipeThrough should brand-check writable and not allow '${writable}'`); +} + +test(t => { + const error = new Error(); + error.name = 'custom'; + + const rs = new ReadableStream({ + pull: t.unreached_func('pull should not be called') + }, { highWaterMark: 0 }); + + const throwingWritable = { + readable: rs, + get writable() { + throw error; + } + }; + 
assert_throws_exactly(error, + () => ReadableStream.prototype.pipeThrough.call(rs, throwingWritable, {}), + 'pipeThrough should rethrow the error thrown by the writable getter'); + + const throwingReadable = { + get readable() { + throw error; + }, + writable: {} + }; + assert_throws_exactly(error, + () => ReadableStream.prototype.pipeThrough.call(rs, throwingReadable, {}), + 'pipeThrough should rethrow the error thrown by the readable getter'); + +}, 'pipeThrough should rethrow errors from accessing readable or writable'); + +const badSignals = [null, 0, NaN, true, 'AbortSignal', Object.create(AbortSignal.prototype)]; +for (const signal of badSignals) { + test(() => { + const rs = new ReadableStream(); + assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair(), { signal }), + 'pipeThrough should throw'); + }, `invalid values of signal should throw; specifically '${signal}'`); +} + +test(() => { + const rs = new ReadableStream(); + const controller = new AbortController(); + const signal = controller.signal; + rs.pipeThrough(uninterestingReadableWritablePair(), { signal }); +}, 'pipeThrough should accept a real AbortSignal'); + +test(() => { + const rs = new ReadableStream(); + rs.getReader(); + assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair()), + 'pipeThrough should throw'); +}, 'pipeThrough should throw if this is locked'); + +test(() => { + const rs = new ReadableStream(); + const writable = new WritableStream(); + const readable = new ReadableStream(); + writable.getWriter(); + assert_throws_js(TypeError, () => rs.pipeThrough({writable, readable}), + 'pipeThrough should throw'); +}, 'pipeThrough should throw if writable is locked'); + +test(() => { + const rs = new ReadableStream(); + const writable = new WritableStream(); + const readable = new ReadableStream(); + readable.getReader(); + assert_equals(rs.pipeThrough({ writable, readable }), readable, + 'pipeThrough should not throw'); +}, 
'pipeThrough should not care if readable is locked'); + +promise_test(() => { + const rs = recordingReadableStream(); + const writable = new WritableStream({ + start(controller) { + controller.error(); + } + }); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventCancel: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(rs.events, ['pull'], 'cancel should not have been called'); + }); +}, 'preventCancel should work'); + +promise_test(() => { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const writable = recordingWritableStream(); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventClose: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(writable.events, [], 'writable should not be closed'); + }); +}, 'preventClose should work'); + +promise_test(() => { + const rs = new ReadableStream({ + start(controller) { + controller.error(); + } + }); + const writable = recordingWritableStream(); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventAbort: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(writable.events, [], 'writable should not be aborted'); + }); +}, 'preventAbort should work'); + +test(() => { + const rs = new ReadableStream(); + const readable = new ReadableStream(); + const writable = new WritableStream(); + assert_throws_js(TypeError, () => rs.pipeThrough({readable, writable}, { + get preventAbort() { + writable.getWriter(); + } + }), 'pipeThrough should throw'); +}, 'pipeThrough() should throw if an option getter grabs a writer'); diff --git a/test/fixtures/wpt/streams/piping/then-interception.any.js b/test/fixtures/wpt/streams/piping/then-interception.any.js new file mode 100644 index 00000000000000..9f772ea5841d8f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/then-interception.any.js @@ -0,0 +1,68 @@ +// META: 
global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +function interceptThen() { + const intercepted = []; + let callCount = 0; + Object.prototype.then = function(resolver) { + if (!this.done) { + intercepted.push(this.value); + } + const retval = Object.create(null); + retval.done = ++callCount === 3; + retval.value = callCount; + resolver(retval); + if (retval.done) { + delete Object.prototype.then; + } + } + return intercepted; +} + +promise_test(async t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + const ws = recordingWritableStream(); + + const intercepted = interceptThen(); + t.add_cleanup(() => { + delete Object.prototype.then; + }); + + await rs.pipeTo(ws); + delete Object.prototype.then; + + + assert_array_equals(intercepted, [], 'nothing should have been intercepted'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"'); +}, 'piping should not be observable'); + +promise_test(async t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + const ws = recordingWritableStream(); + + const [ branch1, branch2 ] = rs.tee(); + + const intercepted = interceptThen(); + t.add_cleanup(() => { + delete Object.prototype.then; + }); + + await branch1.pipeTo(ws); + delete Object.prototype.then; + branch2.cancel(); + + assert_array_equals(intercepted, [], 'nothing should have been intercepted'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"'); +}, 'tee should not be observable'); diff --git a/test/fixtures/wpt/streams/piping/throwing-options.any.js b/test/fixtures/wpt/streams/piping/throwing-options.any.js new file mode 100644 index 00000000000000..bc1cf328da61e6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/throwing-options.any.js @@ -0,0 +1,65 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +class ThrowingOptions { + constructor(whatShouldThrow) { + this.whatShouldThrow = whatShouldThrow; + this.touched = []; + } + + get preventClose() { + this.maybeThrow('preventClose'); + return false; + } + + get preventAbort() { + this.maybeThrow('preventAbort'); + return false; + } + + get preventCancel() { + this.maybeThrow('preventCancel'); + return false; + } + + get signal() { + this.maybeThrow('signal'); + return undefined; + } + + maybeThrow(forWhat) { + this.touched.push(forWhat); + if (this.whatShouldThrow === forWhat) { + throw new Error(this.whatShouldThrow); + } + } +} + +const checkOrder = ['preventAbort', 'preventCancel', 'preventClose', 'signal']; + +for (let i = 0; i < checkOrder.length; ++i) { + const whatShouldThrow = checkOrder[i]; + const whatShouldBeTouched = checkOrder.slice(0, i + 1); + + promise_test(t => { + const options = new ThrowingOptions(whatShouldThrow); + return promise_rejects_js( + t, Error, + new ReadableStream().pipeTo(new WritableStream(), options), + 'pipeTo should reject') + .then(() => assert_array_equals( + options.touched, whatShouldBeTouched, + 'options should be touched in the right order')); + }, `pipeTo should stop after getting ${whatShouldThrow} throws`); + + test(() => { + const options = new ThrowingOptions(whatShouldThrow); + assert_throws_js( + Error, + () => new ReadableStream().pipeThrough(new TransformStream(), options), + 'pipeThrough should throw'); + assert_array_equals( + options.touched, whatShouldBeTouched, + 'options should be touched in the right order'); + }, `pipeThrough should stop after getting ${whatShouldThrow} throws`); +} diff --git a/test/fixtures/wpt/streams/piping/transform-streams.any.js b/test/fixtures/wpt/streams/piping/transform-streams.any.js new file mode 100644 index 00000000000000..a368fecd6f00d6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/transform-streams.any.js @@ -0,0 +1,22 @@ +// META: global=window,worker,jsshell +'use 
strict'; + +promise_test(() => { + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + c.close(); + } + }); + + const ts = new TransformStream(); + + const ws = new WritableStream(); + + return rs.pipeThrough(ts).pipeTo(ws).then(() => { + const writer = ws.getWriter(); + return writer.closed; + }); +}, 'Piping through an identity transform stream should close the destination when the source closes'); diff --git a/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js b/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js new file mode 100644 index 00000000000000..0f869f13b348da --- /dev/null +++ b/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js @@ -0,0 +1,14 @@ +const iframe = document.createElement('iframe'); +document.body.appendChild(iframe); + +for (const type of ['CountQueuingStrategy', 'ByteLengthQueuingStrategy']) { + test(() => { + const myQs = new window[type]({ highWaterMark: 1 }); + const yourQs = new iframe.contentWindow[type]({ highWaterMark: 1 }); + assert_not_equals(myQs.size, yourQs.size, + 'size should not be the same object'); + }, `${type} size should be different for objects in different realms`); +} + +// Cleanup the document to avoid messing up the result page. 
+iframe.remove(); diff --git a/test/fixtures/wpt/streams/queuing-strategies.any.js b/test/fixtures/wpt/streams/queuing-strategies.any.js new file mode 100644 index 00000000000000..1846ea63e35459 --- /dev/null +++ b/test/fixtures/wpt/streams/queuing-strategies.any.js @@ -0,0 +1,135 @@ +// META: global=window,worker,jsshell +'use strict'; + +const highWaterMarkConversions = new Map([ + [-Infinity, -Infinity], + [-5, -5], + [false, 0], + [true, 1], + [NaN, NaN], + ['foo', NaN], + ['0', 0], + [{}, NaN], + [() => {}, NaN] +]); + +for (const QueuingStrategy of [CountQueuingStrategy, ByteLengthQueuingStrategy]) { + test(() => { + new QueuingStrategy({ highWaterMark: 4 }); + }, `${QueuingStrategy.name}: Can construct a with a valid high water mark`); + + test(() => { + const highWaterMark = 1; + const highWaterMarkObjectGetter = { + get highWaterMark() { return highWaterMark; } + }; + const error = new Error('wow!'); + const highWaterMarkObjectGetterThrowing = { + get highWaterMark() { throw error; } + }; + + assert_throws_js(TypeError, () => new QueuingStrategy(), 'construction fails with undefined'); + assert_throws_js(TypeError, () => new QueuingStrategy(null), 'construction fails with null'); + assert_throws_js(TypeError, () => new QueuingStrategy(true), 'construction fails with true'); + assert_throws_js(TypeError, () => new QueuingStrategy(5), 'construction fails with 5'); + assert_throws_js(TypeError, () => new QueuingStrategy({}), 'construction fails with {}'); + assert_throws_exactly(error, () => new QueuingStrategy(highWaterMarkObjectGetterThrowing), + 'construction fails with an object with a throwing highWaterMark getter'); + + assert_equals((new QueuingStrategy(highWaterMarkObjectGetter)).highWaterMark, highWaterMark); + }, `${QueuingStrategy.name}: Constructor behaves as expected with strange arguments`); + + test(() => { + for (const [input, output] of highWaterMarkConversions.entries()) { + const strategy = new QueuingStrategy({ highWaterMark: input }); + 
assert_equals(strategy.highWaterMark, output, `${input} gets set correctly`); + } + }, `${QueuingStrategy.name}: highWaterMark constructor values are converted per the unrestricted double rules`); + + test(() => { + const size1 = (new QueuingStrategy({ highWaterMark: 5 })).size; + const size2 = (new QueuingStrategy({ highWaterMark: 10 })).size; + + assert_equals(size1, size2); + }, `${QueuingStrategy.name}: size is the same function across all instances`); + + test(() => { + const size = (new QueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.name, 'size'); + }, `${QueuingStrategy.name}: size should have the right name`); + + test(() => { + class SubClass extends QueuingStrategy { + size() { + return 2; + } + + subClassMethod() { + return true; + } + } + + const sc = new SubClass({ highWaterMark: 77 }); + assert_equals(sc.constructor.name, 'SubClass', 'constructor.name should be correct'); + assert_equals(sc.highWaterMark, 77, 'highWaterMark should come from the parent class'); + assert_equals(sc.size(), 2, 'size() on the subclass should override the parent'); + assert_true(sc.subClassMethod(), 'subClassMethod() should work'); + }, `${QueuingStrategy.name}: subclassing should work correctly`); +} + +test(() => { + const size = (new CountQueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.length, 0); +}, 'CountQueuingStrategy: size should have the right length'); + +test(() => { + const size = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.length, 1); +}, 'ByteLengthQueuingStrategy: size should have the right length'); + +test(() => { + const size = 1024; + const chunk = { byteLength: size }; + const chunkGetter = { + get byteLength() { return size; } + }; + const error = new Error('wow!'); + const chunkGetterThrowing = { + get byteLength() { throw error; } + }; + + const sizeFunction = (new CountQueuingStrategy({ highWaterMark: 5 })).size; + + assert_equals(sizeFunction(), 1, 'size returns 1 with 
undefined'); + assert_equals(sizeFunction(null), 1, 'size returns 1 with null'); + assert_equals(sizeFunction('potato'), 1, 'size returns 1 with non-object type'); + assert_equals(sizeFunction({}), 1, 'size returns 1 with empty object'); + assert_equals(sizeFunction(chunk), 1, 'size returns 1 with a chunk'); + assert_equals(sizeFunction(chunkGetter), 1, 'size returns 1 with chunk getter'); + assert_equals(sizeFunction(chunkGetterThrowing), 1, + 'size returns 1 with chunk getter that throws'); +}, 'CountQueuingStrategy: size behaves as expected with strange arguments'); + +test(() => { + const size = 1024; + const chunk = { byteLength: size }; + const chunkGetter = { + get byteLength() { return size; } + }; + const error = new Error('wow!'); + const chunkGetterThrowing = { + get byteLength() { throw error; } + }; + + const sizeFunction = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size; + + assert_throws_js(TypeError, () => sizeFunction(), 'size fails with undefined'); + assert_throws_js(TypeError, () => sizeFunction(null), 'size fails with null'); + assert_equals(sizeFunction('potato'), undefined, 'size succeeds with undefined with a random non-object type'); + assert_equals(sizeFunction({}), undefined, 'size succeeds with undefined with an object without hwm property'); + assert_equals(sizeFunction(chunk), size, 'size succeeds with the right amount with an object with a hwm'); + assert_equals(sizeFunction(chunkGetter), size, + 'size succeeds with the right amount with an object with a hwm getter'); + assert_throws_exactly(error, () => sizeFunction(chunkGetterThrowing), + 'size fails with the error thrown by the getter'); +}, 'ByteLengthQueuingStrategy: size behaves as expected with strange arguments'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js b/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js new file mode 100644 index 00000000000000..eed3a5ed4f8d02 --- /dev/null +++ 
b/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js @@ -0,0 +1,349 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const view = new Uint8Array([1, 2, 3]); + return reader.read(view).then(({ value, done }) => { + // Sanity checks + assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array'); + assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array'); + assert_array_equals(value, [], 'The value read must be an empty Uint8Array, since the stream is closed'); + assert_true(done, 'done must be true, since the stream is closed'); + + // The important assertions + assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value'); + assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached'); + }); +}, 'ReadableStream with byte source: read()ing from a closed stream still transfers the buffer'); + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const view = new Uint8Array([4, 5, 6]); + return reader.read(view).then(({ value, done }) => { + // Sanity checks + assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array'); + assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array'); + assert_array_equals(value, [1, 2, 3], 'The value read must be the enqueued Uint8Array, not the original values'); + assert_false(done, 'done must be false, since the stream is not closed'); + + // The important assertions + assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value'); + assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached'); + }); 
+}, 'ReadableStream with byte source: read()ing from a stream with queued chunks still transfers the buffer'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array([1, 2, 3]); + c.enqueue(view); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing an already-detached buffer throws'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array([]); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing a zero-length buffer throws'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array(new ArrayBuffer(10), 0, 0); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing a zero-length view on a non-zero-length buffer throws'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array([4, 5, 6]); + return reader.read(view).then(() => { + // view is now detached + return promise_rejects_js(t, TypeError, reader.read(view)); + }); +}, 'ReadableStream with byte source: reading into an already-detached buffer rejects'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array(); + return promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: reading into a zero-length buffer rejects'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array(new 
ArrayBuffer(10), 0, 0); + return promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: reading into a zero-length view on a non-zero-length buffer rejects'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.byobRequest.respond(1), + 'respond() must throw if the corresponding view has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' + + 'readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.byobRequest.respond(0), + 'respond() must throw if the corresponding view has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' + + 'closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + const view = new Uint8Array([1, 2, 3]); + reader.read(view); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(); + + 
assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is zero-length on a ' + + 'non-zero-length buffer (in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(new ArrayBuffer(10), 0, 3); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' + + 'different length (in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 4); + view[0] = 20; + view[1] = 21; + view[2] = 22; + view[3] = 23; + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const buffer = new ArrayBuffer(10); + const view = new Uint8Array(buffer, 0, 3); + view[0] = 10; + view[1] = 11; + view[2] = 12; + reader.read(view); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view 
has a larger length ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + const view = new Uint8Array([1, 2, 3]); + reader.read(view); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(); + + c.close(); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1); + + c.close(); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is non-zero-length ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(new ArrayBuffer(10), 0, 0); + + c.close(); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream 
with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' + + 'different length (in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])), + 'enqueue() must throw if the BYOB request\'s buffer has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' + + 'readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])), + 'enqueue() must throw if the BYOB request\'s buffer has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' + + 'closed state)'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js b/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js new file mode 100644 index 00000000000000..1386d84599a4cd --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js @@ -0,0 +1,53 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +'use strict'; + +// Prior to whatwg/stream#870 it was possible to construct a ReadableStreamBYOBRequest directly. This made it possible +// to construct requests that were out-of-sync with the state of the ReadableStream. 
They could then be used to call +// internal operations, resulting in asserts or bad behaviour. This file contains regression tests for the change. + +function getRealByteStreamController() { + let controller; + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + return controller; +} + +// Create an object pretending to have prototype |prototype|, of type |type|. |type| is one of "undefined", "null", +// "fake", or "real". "real" will call the realObjectCreator function to get a real instance of the object. +function createDummyObject(prototype, type, realObjectCreator) { + switch (type) { + case 'undefined': + return undefined; + + case 'null': + return null; + + case 'fake': + return Object.create(prototype); + + case 'real': + return realObjectCreator(); + } + + throw new Error('not reached'); +} + +const dummyTypes = ['undefined', 'null', 'fake', 'real']; + +for (const controllerType of dummyTypes) { + const controller = createDummyObject(ReadableByteStreamController.prototype, controllerType, + getRealByteStreamController); + for (const viewType of dummyTypes) { + const view = createDummyObject(Uint8Array.prototype, viewType, () => new Uint8Array(16)); + test(() => { + assert_throws_js(TypeError, () => new ReadableStreamBYOBRequest(controller, view), + 'constructor should throw'); + }, `ReadableStreamBYOBRequest constructor should throw when passed a ${controllerType} ` + + `ReadableByteStreamController and a ${viewType} view`); + } +} diff --git a/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js b/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js new file mode 100644 index 00000000000000..15400f69340451 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js @@ -0,0 +1,19 @@ +promise_test(async t => { + const error = new Error('cannot proceed'); + const rs = new ReadableStream({ + type: 'bytes', + pull: 
t.step_func((controller) => { + const buffer = controller.byobRequest.view.buffer; + // Detach the buffer. + postMessage(buffer, '*', [buffer]); + + // Try to enqueue with a new buffer. + assert_throws_js(TypeError, () => controller.enqueue(new Uint8Array([42]))); + + // If we got here the test passed. + controller.error(error); + }) + }); + const reader = rs.getReader({ mode: 'byob' }); + await promise_rejects_exactly(t, error, reader.read(new Uint8Array(1))); +}, 'enqueue after detaching byobRequest.view.buffer should throw'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js new file mode 100644 index 00000000000000..db8ac3a39983fd --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js @@ -0,0 +1,2329 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream().getReader({ mode: 'byob' })); +}, 'getReader({mode: "byob"}) throws on non-bytes streams'); + + +test(() => { + // Constructing ReadableStream with an empty underlying byte source object as parameter shouldn't throw. + new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }); + // Constructor must perform ToString(type). 
+ new ReadableStream({ type: { toString() {return 'bytes';} } }) + .getReader({ mode: 'byob' }); + new ReadableStream({ type: { toString: null, valueOf() {return 'bytes';} } }) + .getReader({ mode: 'byob' }); +}, 'ReadableStream with byte source can be constructed with no errors'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const rs = new ReadableStream({ type: 'bytes' }); + + let reader = rs.getReader({ mode: { toString() { return 'byob'; } } }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); + reader.releaseLock(); + + reader = rs.getReader({ mode: { toString: null, valueOf() {return 'byob';} } }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); + reader.releaseLock(); + + reader = rs.getReader({ mode: 'byob', notmode: 'ignored' }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); +}, 'getReader({mode}) must perform ToString()'); + +promise_test(() => { + let startCalled = false; + let startCalledBeforePull = false; + let desiredSize; + let controller; + + let resolveTestPromise; + const testPromise = new Promise(resolve => { + resolveTestPromise = resolve; + }); + + new ReadableStream({ + start(c) { + controller = c; + startCalled = true; + }, + pull() { + startCalledBeforePull = startCalled; + desiredSize = controller.desiredSize; + resolveTestPromise(); + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + return testPromise.then(() => { + assert_true(startCalledBeforePull, 'start should be called before pull'); + assert_equals(desiredSize, 256, 'desiredSize should equal highWaterMark'); + }); + +}, 'ReadableStream with byte source: Construct and expect start and pull being called'); + +promise_test(() => { + let pullCount = 0; + let checkedNoPull = false; + + let resolveTestPromise; + const testPromise = new Promise(resolve => { + resolveTestPromise = 
resolve; + }); + let resolveStartPromise; + + new ReadableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + }, + pull() { + if (checkedNoPull) { + resolveTestPromise(); + } + + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + Promise.resolve().then(() => { + assert_equals(pullCount, 0); + checkedNoPull = true; + resolveStartPromise(); + }); + + return testPromise; + +}, 'ReadableStream with byte source: No automatic pull call if start doesn\'t finish'); + +test(() => { + assert_throws_js(Error, () => new ReadableStream({ start() { throw new Error(); }, type:'bytes' }), + 'start() can throw an exception with type: bytes'); +}, 'ReadableStream with byte source: start() throws an exception'); + +promise_test(t => { + new ReadableStream({ + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }, { + highWaterMark: 0 + }); + + return Promise.resolve(); +}, 'ReadableStream with byte source: Construct with highWaterMark of 0'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark'); + c.close(); + assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0'); + }, + type: 'bytes' + }, { + highWaterMark: 10 + }); +}, 'ReadableStream with byte source: desiredSize when closed'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark'); + c.error(); + assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null'); + }, + type: 'bytes' + }, { + highWaterMark: 10 + }); +}, 'ReadableStream with byte source: desiredSize when errored'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader(); + reader.releaseLock(); + + return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject'); +}, 'ReadableStream with byte source: getReader(), then 
releaseLock()'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + reader.releaseLock(); + + return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject'); +}, 'ReadableStream with byte source: getReader() with mode set to byob, then releaseLock()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.closed.then(() => { + assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that closing a stream does not release a reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.closed.then(() => { + assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that closing a stream does not release a BYOB reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => { + assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that erroring a stream does not release a reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = 
stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => { + assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that erroring a stream does not release a BYOB reader automatically'); + +test(() => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader(); + reader.read(); + assert_throws_js(TypeError, () => reader.releaseLock(), 'reader.releaseLock() must throw'); +}, 'ReadableStream with byte source: releaseLock() on ReadableStreamDefaultReader with pending read() must throw'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 8 + }); + + stream.getReader(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start()'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + reader.read(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start() and read()'); + +// View buffers are detached after pull() returns, so record the information at the time that pull() was called. 
+function extractViewInfo(view) { + return { + constructor: view.constructor, + bufferByteLength: view.buffer.byteLength, + byteOffset: view.byteOffset, + byteLength: view.byteLength + }; +} + +promise_test(() => { + let pullCount = 0; + let controller; + const byobRequests = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + const byobRequest = controller.byobRequest; + const view = byobRequest.view; + byobRequests[pullCount] = { + nonNull: byobRequest !== null, + viewNonNull: view !== null, + viewInfo: extractViewInfo(view) + }; + if (pullCount === 0) { + view[0] = 0x01; + byobRequest.respond(1); + } else if (pullCount === 1) { + view[0] = 0x02; + view[1] = 0x03; + byobRequest.respond(2); + } + + ++pullCount; + }, + type: 'bytes', + autoAllocateChunkSize: 16 + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + const p0 = reader.read(); + const p1 = reader.read(); + + assert_equals(pullCount, 0, 'No pull() as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull() must have been invoked once'); + const byobRequest = byobRequests[0]; + assert_true(byobRequest.nonNull, 'first byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16'); + + return p0; + }).then(result => { + assert_equals(pullCount, 2, 'pull() must have been invoked twice'); + const value = result.value; + assert_not_equals(value, undefined, 'first read should have a value'); + 
assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0'); + assert_equals(value.byteLength, 1, 'first value.byteLength should be 1'); + assert_equals(value[0], 0x01, 'first value[0] should be 0x01'); + const byobRequest = byobRequests[1]; + assert_true(byobRequest.nonNull, 'second byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'second view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'second view.byteLength should be 16'); + + return p1; + }).then(result => { + assert_equals(pullCount, 2, 'pull() should only be invoked twice'); + const value = result.value; + assert_not_equals(value, undefined, 'second read should have a value'); + assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'second value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0'); + assert_equals(value.byteLength, 2, 'second value.byteLength should be 2'); + assert_equals(value[0], 0x02, 'second value[0] should be 0x02'); + assert_equals(value[1], 0x03, 'second value[1] should be 0x03'); + }); +}, 'ReadableStream with byte source: autoAllocateChunkSize'); + +promise_test(() => { + let pullCount = 0; + let controller; + const byobRequests = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + const byobRequest = controller.byobRequest; + const view = byobRequest.view; + 
byobRequests[pullCount] = { + nonNull: byobRequest !== null, + viewNonNull: view !== null, + viewInfo: extractViewInfo(view) + }; + if (pullCount === 0) { + view[0] = 0x01; + byobRequest.respond(1); + } else if (pullCount === 1) { + view[0] = 0x02; + view[1] = 0x03; + byobRequest.respond(2); + } + + ++pullCount; + }, + type: 'bytes', + autoAllocateChunkSize: 16 + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + return reader.read().then(result => { + const value = result.value; + assert_not_equals(value, undefined, 'first read should have a value'); + assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0'); + assert_equals(value.byteLength, 1, 'first value.byteLength should be 1'); + assert_equals(value[0], 0x01, 'first value[0] should be 0x01'); + const byobRequest = byobRequests[0]; + assert_true(byobRequest.nonNull, 'first byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16'); + + reader.releaseLock(); + const byobReader = stream.getReader({ mode: 'byob' }); + return byobReader.read(new Uint8Array(32)); + }).then(result => { + const value = result.value; + assert_not_equals(value, undefined, 'second read should have a value'); + assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 32, 'second value.buffer.byteLength should be 32'); + 
assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0'); + assert_equals(value.byteLength, 2, 'second value.byteLength should be 2'); + assert_equals(value[0], 0x02, 'second value[0] should be 0x02'); + assert_equals(value[1], 0x03, 'second value[1] should be 0x03'); + const byobRequest = byobRequests[1]; + assert_true(byobRequest.nonNull, 'second byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 32, 'second view.buffer.byteLength should be 32'); + assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 32, 'second view.byteLength should be 32'); + assert_equals(pullCount, 2, 'pullCount should be 2'); + }); +}, 'ReadableStream with byte source: Mix of auto allocate and BYOB'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + reader.read(new Uint8Array(8)); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start() and read(view)'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let desiredSizeInStart; + let desiredSizeInPull; + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + desiredSizeInStart = c.desiredSize; + controller = c; + }, + pull() { + ++pullCount; + + if (pullCount === 1) { + desiredSizeInPull = controller.desiredSize; + } + }, + type: 'bytes' + }, { + highWaterMark: 8 + }); + + return 
Promise.resolve().then(() => { + assert_equals(pullCount, 0, 'No pull as the queue was filled by start()'); + assert_equals(desiredSizeInStart, -8, 'desiredSize after enqueue() in start()'); + + const reader = stream.getReader(); + + const promise = reader.read(); + assert_equals(pullCount, 1, 'The first pull() should be made on read()'); + assert_equals(desiredSizeInPull, 8, 'desiredSize in pull()'); + + return promise.then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'view.constructor'); + assert_equals(view.buffer.byteLength, 16, 'view.buffer'); + assert_equals(view.byteOffset, 0, 'view.byteOffset'); + assert_equals(view.byteLength, 16, 'view.byteLength'); + }); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read()'); + +promise_test(() => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = reader.read().then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 1); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 1); + }); + + controller.enqueue(new Uint8Array(1)); + + return promise; +}, 'ReadableStream with byte source: Push source that doesn\'t understand pull signal'); + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream({ + pull: 'foo', + type: 'bytes' + }), 'constructor should throw'); +}, 'ReadableStream with byte source: pull() function is not callable'); + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint16Array(16)); + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + 
assert_equals(view.buffer.byteLength, 32); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 32); + }); +}, 'ReadableStream with byte source: enqueue() with Uint16Array, getReader(), then read()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[0] = 0x01; + view[8] = 0x02; + c.enqueue(view); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const byobReader = stream.getReader({ mode: 'byob' }); + + return byobReader.read(new Uint8Array(8)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'value.constructor'); + assert_equals(view.buffer.byteLength, 8, 'value.buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'value.byteOffset'); + assert_equals(view.byteLength, 8, 'value.byteLength'); + assert_equals(view[0], 0x01); + + byobReader.releaseLock(); + + const reader = stream.getReader(); + + return reader.read(); + }).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'value.constructor'); + assert_equals(view.buffer.byteLength, 16, 'value.buffer.byteLength'); + assert_equals(view.byteOffset, 8, 'value.byteOffset'); + assert_equals(view.byteLength, 8, 'value.byteLength'); + assert_equals(view[0], 0x02); + }); +}, 'ReadableStream with byte source: enqueue(), read(view) partially, then read()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + controller.enqueue(new Uint8Array(16)); + controller.close(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 
16, 'byteLength'); + + return reader.read(); + }).then(result => { + assert_true(result.done, 'done'); + assert_equals(result.value, undefined, 'value'); + }); +}, 'ReadableStream with byte source: getReader(), enqueue(), close(), then read()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 16, 'byteLength'); + + return reader.read(); + }).then(result => { + assert_true(result.done, 'done'); + assert_equals(result.value, undefined, 'value'); + }); +}, 'ReadableStream with byte source: enqueue(), close(), getReader(), then read()'); + +promise_test(() => { + let controller; + let byobRequest; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + controller.enqueue(new Uint8Array(16)); + byobRequest = controller.byobRequest; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + assert_equals(result.value.byteLength, 16, 'byteLength'); + assert_equals(byobRequest, null, 'byobRequest must be null'); + }); +}, 'ReadableStream with byte source: Respond to pull() by enqueue()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + const desiredSizes = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + desiredSizes.push(controller.desiredSize); + controller.enqueue(new Uint8Array(1)); + desiredSizes.push(controller.desiredSize); + controller.enqueue(new Uint8Array(1)); + desiredSizes.push(controller.desiredSize); + + ++pullCount; + }, + type: 'bytes' 
+ }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + + const p0 = reader.read(); + const p1 = reader.read(); + const p2 = reader.read(); + + // Respond to the first pull call. + controller.enqueue(new Uint8Array(1)); + + assert_equals(pullCount, 0, 'pullCount after the enqueue() outside pull'); + + return Promise.all([p0, p1, p2]).then(result => { + assert_equals(pullCount, 1, 'pullCount after completion of all read()s'); + + assert_equals(result[0].done, false, 'result[0].done'); + assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength'); + assert_equals(result[1].done, false, 'result[1].done'); + assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength'); + assert_equals(result[2].done, false, 'result[2].done'); + assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength'); + assert_equals(byobRequest, null, 'byobRequest should be null'); + assert_equals(desiredSizes[0], 0, 'desiredSize on pull should be 0'); + assert_equals(desiredSizes[1], 0, 'desiredSize after 1st enqueue() should be 0'); + assert_equals(desiredSizes[2], 0, 'desiredSize after 2nd enqueue() should be 0'); + assert_equals(pullCount, 1, 'pull() should only be called once'); + }); +}, 'ReadableStream with byte source: Respond to pull() by enqueue() asynchronously'); + +promise_test(() => { + let pullCount = 0; + + let byobRequest; + const desiredSizes = []; + + const stream = new ReadableStream({ + pull(c) { + byobRequest = c.byobRequest; + desiredSizes.push(c.desiredSize); + + if (pullCount < 3) { + c.enqueue(new Uint8Array(1)); + } else { + c.close(); + } + + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + const reader = stream.getReader(); + + const p0 = reader.read(); + const p1 = reader.read(); + const p2 = reader.read(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.all([p0, p1, p2]).then(result => { + 
assert_equals(pullCount, 4, 'pullCount after completion of all read()s'); + + assert_equals(result[0].done, false, 'result[0].done'); + assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength'); + assert_equals(result[1].done, false, 'result[1].done'); + assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength'); + assert_equals(result[2].done, false, 'result[2].done'); + assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength'); + assert_equals(byobRequest, null, 'byobRequest should be null'); + assert_equals(desiredSizes[0], 256, 'desiredSize on pull should be 256'); + assert_equals(desiredSizes[1], 256, 'desiredSize after 1st enqueue() should be 256'); + assert_equals(desiredSizes[2], 256, 'desiredSize after 2nd enqueue() should be 256'); + assert_equals(desiredSizes[3], 256, 'desiredSize after 3rd enqueue() should be 256'); + }); +}, 'ReadableStream with byte source: Respond to multiple pull() by separate enqueue()'); + +promise_test(() => { + let controller; + + let pullCount = 0; + const byobRequestDefined = []; + let byobRequestViewDefined; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequestDefined.push(controller.byobRequest !== null); + const initialByobRequest = controller.byobRequest; + + const view = controller.byobRequest.view; + view[0] = 0x01; + controller.byobRequest.respond(1); + + byobRequestDefined.push(controller.byobRequest !== null); + byobRequestViewDefined = initialByobRequest.view !== null; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(result => { + assert_false(result.done, 'result.done'); + assert_equals(result.value.byteLength, 1, 'result.value.byteLength'); + assert_equals(result.value[0], 0x01, 'result.value[0]'); + assert_equals(pullCount, 1, 'pull() should be called only once'); + assert_true(byobRequestDefined[0], 'byobRequest must not 
be null before respond()'); + assert_false(byobRequestDefined[1], 'byobRequest must be null after respond()'); + assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respond()'); + }); +}, 'ReadableStream with byte source: read(view), then respond()'); + +promise_test(() => { + let controller; + + let pullCount = 0; + const byobRequestDefined = []; + let byobRequestViewDefined; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + async pull() { + byobRequestDefined.push(controller.byobRequest !== null); + const initialByobRequest = controller.byobRequest; + + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + transferredView[0] = 0x01; + controller.byobRequest.respondWithNewView(transferredView); + + byobRequestDefined.push(controller.byobRequest !== null); + byobRequestViewDefined = initialByobRequest.view !== null; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(result => { + assert_false(result.done, 'result.done'); + assert_equals(result.value.byteLength, 1, 'result.value.byteLength'); + assert_equals(result.value[0], 0x01, 'result.value[0]'); + assert_equals(pullCount, 1, 'pull() should be called only once'); + assert_true(byobRequestDefined[0], 'byobRequest must not be null before respondWithNewView()'); + assert_false(byobRequestDefined[1], 'byobRequest must be null after respondWithNewView()'); + assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respondWithNewView()'); + }); +}, 'ReadableStream with byte source: read(view), then respondWithNewView() with a transferred ArrayBuffer'); + +promise_test(() => { + let controller; + let byobRequestWasDefined; + let incorrectRespondException; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequestWasDefined = controller.byobRequest !== null; + + try 
{ + controller.byobRequest.respond(2); + } catch (e) { + incorrectRespondException = e; + } + + controller.byobRequest.respond(1); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(() => { + assert_true(byobRequestWasDefined, 'byobRequest should be non-null'); + assert_not_equals(incorrectRespondException, undefined, 'respond() must throw'); + assert_equals(incorrectRespondException.name, 'RangeError', 'respond() must throw a RangeError'); + }); +}, 'ReadableStream with byte source: read(view), then respond() with too big value'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + + byobRequest = controller.byobRequest; + const view = byobRequest.view; + viewInfo = extractViewInfo(view); + + view[0] = 0x01; + view[1] = 0x02; + view[2] = 0x03; + + controller.byobRequest.respond(3); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint16Array(2)).then(result => { + assert_equals(pullCount, 1); + + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + + assert_equals(view[0], 0x0201); + + return reader.read(new Uint8Array(1)); + }).then(result => { + assert_equals(pullCount, 1); + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 4, 'view.byteLength should be 4'); + + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 
'byteOffset'); + assert_equals(view.byteLength, 1, 'byteLength'); + + assert_equals(view[0], 0x03); + }); +}, 'ReadableStream with byte source: respond(3) to read(view) with 2 element Uint16Array enqueues the 1 byte ' + + 'remainder'); + +promise_test(t => { + const stream = new ReadableStream({ + start(controller) { + const view = new Uint8Array(16); + view[15] = 0x01; + controller.enqueue(view); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.cancel(passedReason).then(result => { + assert_equals(result, undefined); + assert_equals(cancelCount, 1); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = not BYOB)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return 
reader.cancel(passedReason).then(result => { + assert_equals(result, undefined); + assert_equals(cancelCount, 1); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = BYOB)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + + return 'bar'; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const readPromise = reader.read(new Uint8Array(1)).then(result => { + assert_true(result.done, 'result.done'); + assert_equals(result.value, undefined, 'result.value'); + }); + + const cancelPromise = reader.cancel(passedReason).then(result => { + assert_equals(result, undefined, 'cancel() return value should be fulfilled with undefined'); + assert_equals(cancelCount, 1, 'cancel() should be called only once'); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); + + return Promise.all([readPromise, cancelPromise]); +}, 'ReadableStream with byte source: getReader(), read(view), then cancel()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + const viewInfos = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + viewInfos.push(extractViewInfo(controller.byobRequest.view)); + controller.enqueue(new Uint8Array(1)); + viewInfos.push(extractViewInfo(controller.byobRequest.view)); + + ++pullCount; + }, + type: 'bytes' + }); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 0, 'No pull() as no read(view) yet'); + + const reader = stream.getReader({ mode: 'byob' }); + + const promise 
= reader.read(new Uint16Array(1)).then(result => { + assert_true(result.done, 'result.done'); + assert_equals(result.value, undefined, 'result.value'); + }); + + assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()'); + assert_not_equals(byobRequest, null, 'byobRequest should not be null'); + assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() shouild be 2'); + assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1'); + + reader.cancel(); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + return promise; + }); +}, 'ReadableStream with byte source: cancel() with partially filled pending pull() request'); + +promise_test(() => { + let controller; + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(8); + view[7] = 0x01; + c.enqueue(view); + + controller = c; + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const buffer = new ArrayBuffer(16); + + return reader.read(new Uint8Array(buffer, 8, 8)).then(result => { + assert_false(result.done); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 16); + assert_equals(view.byteOffset, 8); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) where view.buffer is not fully ' + + 'covered by view'); + +promise_test(() => { + let controller; + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + let view; + + view = new Uint8Array(16); + view[15] = 123; + c.enqueue(view); + + view = new Uint8Array(8); + view[7] = 111; + c.enqueue(view); + + controller = c; + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader 
= stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(24)).then(result => { + assert_false(result.done, 'done'); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 24, 'byteLength'); + assert_equals(view[15], 123, 'Contents are set from the first chunk'); + assert_equals(view[23], 111, 'Contents are set from the second chunk'); + }); +}, 'ReadableStream with byte source: Multiple enqueue(), getReader(), then read(view)'); + +promise_test(() => { + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[15] = 0x01; + c.enqueue(view); + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(24)).then(result => { + assert_false(result.done); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with a bigger view'); + +promise_test(() => { + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[7] = 0x01; + view[15] = 0x02; + c.enqueue(view); + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(8)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x01); + + return reader.read(new Uint8Array(8)); + }).then(result => { + assert_false(result.done, 'done'); + + assert_false(pullCalled, 'pull() must not have been called'); + + const 
view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x02); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with smaller views'); + +promise_test(() => { + let controller; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + + controller = c; + }, + pull() { + if (controller.byobRequest === null) { + return; + } + + const view = controller.byobRequest.view; + viewInfo = extractViewInfo(view); + + view[0] = 0xaa; + controller.byobRequest.respond(1); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint16Array(1)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 2); + assert_equals(view[0], 0xaaff); + + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); + assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1'); + assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1'); + }); +}, 'ReadableStream with byte source: enqueue() 1 byte, getReader(), then read(view) with Uint16Array'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + let viewInfo; + let desiredSize; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(3); + view[0] = 0x01; + view[2] = 0x02; + c.enqueue(view); + + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + const view = controller.byobRequest.view; + + viewInfo = extractViewInfo(view); + + view[0] = 0x03; + controller.byobRequest.respond(1); + + desiredSize = controller.desiredSize; + + ++pullCount; + }, + type: 'bytes' + }); + + // Wait for completion of the start method 
to be reflected. + return Promise.resolve().then(() => { + const reader = stream.getReader({ mode: 'byob' }); + + const promise = reader.read(new Uint16Array(2)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint16Array, 'constructor'); + assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + assert_equals(view[0], 0x0001, 'Contents are set'); + + const p = reader.read(new Uint16Array(1)); + + assert_equals(pullCount, 1); + + return p; + }).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + assert_equals(view[0], 0x0302, 'Contents are set'); + + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); + assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1'); + assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1'); + assert_equals(desiredSize, 0, 'desiredSize should be zero'); + }); + + assert_equals(pullCount, 0); + + return promise; + }); +}, 'ReadableStream with byte source: enqueue() 3 byte, getReader(), then read(view) with 2-element Uint16Array'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + + return promise_rejects_js(t, TypeError, reader.read(new Uint16Array(1)), 'read(view) must fail') + .then(() => 
promise_rejects_js(t, TypeError, reader.closed, 'reader.closed should reject')); +}, 'ReadableStream with byte source: read(view) with Uint16Array on close()-d stream with 1 byte enqueue()-d must ' + + 'fail'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const readPromise = reader.read(new Uint16Array(1)); + + assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw'); + + return promise_rejects_js(t, TypeError, readPromise, 'read(view) must fail') + .then(() => promise_rejects_js(t, TypeError, reader.closed, 'reader.closed must reject')); +}, 'ReadableStream with byte source: A stream must be errored if close()-d before fulfilling read(view) with ' + + 'Uint16Array'); + +test(() => { + let controller; + + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + // Enqueue a chunk so that the stream doesn't get closed. This is to check duplicate close() calls are rejected + // even if the stream has not yet entered the closed state. + const view = new Uint8Array(1); + controller.enqueue(view); + controller.close(); + + assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw'); +}, 'ReadableStream with byte source: Throw if close()-ed more than once'); + +test(() => { + let controller; + + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + // Enqueue a chunk so that the stream doesn't get closed. This is to check enqueue() after close() is rejected + // even if the stream has not yet entered the closed state. 
+ const view = new Uint8Array(1); + controller.enqueue(view); + controller.close(); + + assert_throws_js(TypeError, () => controller.enqueue(view), 'controller.close() must throw'); +}, 'ReadableStream with byte source: Throw on enqueue() after close()'); + +promise_test(() => { + let controller; + let byobRequest; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + const view = controller.byobRequest.view; + viewInfo = extractViewInfo(view); + + view[15] = 0x01; + controller.byobRequest.respond(16); + controller.close(); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + + return reader.read(new Uint8Array(16)); + }).then(result => { + assert_true(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 0); + + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'view.byteLength should be 16'); + }); +}, 'ReadableStream with byte source: read(view), then respond() and close() in pull()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + const viewInfos = []; + const viewInfosAfterRespond = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + if (controller.byobRequest === null) { + return; + } + + for (let i = 0; i < 4; ++i) { + const view = controller.byobRequest.view; + 
viewInfos.push(extractViewInfo(view)); + + view[0] = 0x01; + controller.byobRequest.respond(1); + viewInfosAfterRespond.push(extractViewInfo(view)); + } + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint32Array(1)).then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 4, 'result.value.byteLength'); + assert_equals(view[0], 0x01010101, 'result.value[0]'); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + + for (let i = 0; i < 4; ++i) { + assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + + assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i'); + assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i'); + + assert_equals(viewInfosAfterRespond[i].bufferByteLength, 0, 'view.buffer should be transferred after respond()'); + } + }); +}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple respond() calls'); + +promise_test(() => { + let pullCount = 0; + + let controller; + const viewInfos = []; + const viewInfosAfterEnqueue = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + if (controller.byobRequest === null) { + return; + } + + for (let i = 0; i < 4; ++i) { + const view = controller.byobRequest.view; + viewInfos.push(extractViewInfo(view)); + + controller.enqueue(new Uint8Array([0x01])); + viewInfosAfterEnqueue.push(extractViewInfo(view)); + } + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint32Array(1)).then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + 
assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 4, 'result.value.byteLength'); + assert_equals(view[0], 0x01010101, 'result.value[0]'); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + + for (let i = 0; i < 4; ++i) { + assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + + assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i'); + assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i'); + + assert_equals(viewInfosAfterEnqueue[i].bufferByteLength, 0, 'view.buffer should be transferred after enqueue()'); + } + }); +}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple enqueue() calls'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const p0 = reader.read().then(result => { + assert_equals(pullCount, 1); + + controller.enqueue(new Uint8Array(2)); + + // Since the queue has data no less than HWM, no more pull. 
+ assert_equals(pullCount, 1); + + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 1); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 1); + }); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + const p1 = reader.read().then(result => { + assert_equals(pullCount, 1); + + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 2); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 2); + + assert_equals(byobRequest, null, 'byobRequest must be null'); + }); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + controller.enqueue(new Uint8Array(1)); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: read() twice, then enqueue() twice'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const p0 = reader.read(new Uint8Array(16)).then(result => { + assert_true(result.done, '1st read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '1st read: byteOffset'); + assert_equals(view.byteLength, 0, '1st read: byteLength'); + }); + + const p1 = reader.read(new Uint8Array(32)).then(result => { + assert_true(result.done, '2nd read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 32, '2nd read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '2nd read: 
byteOffset'); + assert_equals(view.byteLength, 0, '2nd read: byteLength'); + }); + + controller.close(); + controller.byobRequest.respond(0); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: Multiple read(view), close() and respond()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const p0 = reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done, '1st read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '1st read: byteOffset'); + assert_equals(view.byteLength, 16, '1st read: byteLength'); + }); + + const p1 = reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done, '2nd read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '2nd read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '2nd read: byteOffset'); + assert_equals(view.byteLength, 8, '2nd read: byteLength'); + }); + + controller.enqueue(new Uint8Array(24)); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: Multiple read(view), big enqueue()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + let bytesRead = 0; + + function pump() { + return reader.read(new Uint8Array(7)).then(result => { + if (result.done) { + assert_equals(bytesRead, 1024); + return undefined; + } + + bytesRead += result.value.byteLength; + + return pump(); + }); + } + const promise = pump(); + + controller.enqueue(new Uint8Array(512)); + controller.enqueue(new Uint8Array(512)); + controller.close(); + 
+ return promise; +}, 'ReadableStream with byte source: Multiple read(view) and multiple enqueue()'); + +promise_test(t => { + let pullCalled = false; + const stream = new ReadableStream({ + pull(controller) { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, reader.read(), 'read() must fail') + .then(() => assert_false(pullCalled, 'pull() must not have been called')); +}, 'ReadableStream with byte source: read(view) with passing undefined as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, reader.read({}), 'read(view) must fail'); +}, 'ReadableStream with byte source: read(view) with passing an empty object as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, + reader.read({ buffer: new ArrayBuffer(10), byteOffset: 0, byteLength: 10 }), + 'read(view) must fail'); +}, 'ReadableStream with byte source: Even read(view) with passing ArrayBufferView like object as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail'); +}, 'ReadableStream with byte source: read() on an errored stream'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = promise_rejects_exactly(t, error1, reader.read(), 'read() must fail'); + + controller.error(error1); + + return promise; +}, 'ReadableStream with 
byte source: read(), then error()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail'); +}, 'ReadableStream with byte source: read(view) on an errored stream'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const promise = promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail'); + + controller.error(error1); + + return promise; +}, 'ReadableStream with byte source: read(view), then error()'); + +promise_test(t => { + let controller; + let byobRequest; + + const testError = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + throw testError; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = promise_rejects_exactly(t, testError, reader.read(), 'read() must fail'); + return promise_rejects_exactly(t, testError, promise.then(() => reader.closed)) + .then(() => assert_equals(byobRequest, null, 'byobRequest must be null')); +}, 'ReadableStream with byte source: Throwing in pull function must error the stream'); + +promise_test(t => { + let byobRequest; + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + controller.error(error1); + throw new TypeError('foo'); + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail') + .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')) + .then(() => assert_equals(byobRequest, null, 
'byobRequest must be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read() must be ignored if the stream is ' + + 'errored in it'); + +promise_test(t => { + let byobRequest; + + const testError = new TypeError('foo'); + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + throw testError; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, testError, reader.read(new Uint8Array(1)), 'read(view) must fail') + .then(() => promise_rejects_exactly(t, testError, reader.closed, 'reader.closed must reject')) + .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read(view) function must error the stream'); + +promise_test(t => { + let byobRequest; + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + controller.error(error1); + throw new TypeError('foo'); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read(view) must fail') + .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')) + .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read(view) must be ignored if the stream is ' + + 'errored in it'); + +promise_test(() => { + let byobRequest; + const rs = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + byobRequest.respond(4); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const view = new Uint8Array(16); + return reader.read(view).then(() => { + assert_throws_js(TypeError, () => byobRequest.respond(4), 'respond() should throw a TypeError'); + }); +}, 'calling respond() twice on 
the same byobRequest should throw'); + +promise_test(() => { + let byobRequest; + const newView = () => new Uint8Array(16); + const rs = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + byobRequest.respondWithNewView(newView()); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + return reader.read(newView()).then(() => { + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView()), + 'respondWithNewView() should throw a TypeError'); + }); +}, 'calling respondWithNewView() twice on the same byobRequest should throw'); + +promise_test(() => { + let controller; + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull(c) { + byobRequest = c.byobRequest; + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + controller.close(); + byobRequest.respond(0); + resolvePull(); + return readPromise.then(() => { + assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw'); + }); + }); +}, 'calling respond(0) twice on the same byobRequest should throw even when closed'); + +promise_test(() => { + let controller; + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull(c) { + byobRequest = c.byobRequest; + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const readPromise = 
reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + const cancelPromise = reader.cancel('meh'); + assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw'); + resolvePull(); + return Promise.all([readPromise, cancelPromise]); + }); +}, 'calling respond() should throw when canceled'); + +promise_test(() => { + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + pull() { + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + resolvePull(); + return delay(0).then(() => { + assert_throws_js(TypeError, () => reader.releaseLock(), 'releaseLock() should throw'); + }); + }); +}, 'pull() resolving should not make releaseLock() possible'); + +promise_test(() => { + // Tests https://github.com/whatwg/streams/issues/686 + + let controller; + const rs = new ReadableStream({ + autoAllocateChunkSize: 128, + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const readPromise = rs.getReader().read(); + + const br = controller.byobRequest; + controller.close(); + + br.respond(0); + + return readPromise; +}, 'ReadableStream with byte source: default reader + autoAllocateChunkSize + byobRequest interaction'); + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream({ autoAllocateChunkSize: 0, type: 'bytes' }), + 'controller cannot be setup with autoAllocateChunkSize = 0'); +}, 'ReadableStream with byte source: autoAllocateChunkSize cannot be 0'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream({ type: 'bytes' }); + new ReadableStreamBYOBReader(stream); +}, 
'ReadableStreamBYOBReader can be constructed directly'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader({}), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires a ReadableStream argument'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream({ type: 'bytes' }); + stream.getReader(); + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader(stream), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires an unlocked ReadableStream'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream(); + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader(stream), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires a ReadableStream with type "bytes"'); + +test(() => { + assert_throws_js(RangeError, () => new ReadableStream({ type: 'bytes' }, { + size() { + return 1; + } + }), 'constructor should throw for size function'); + + assert_throws_js(RangeError, + () => new ReadableStream({ type: 'bytes' }, new CountQueuingStrategy({ highWaterMark: 1 })), + 'constructor should throw when strategy is CountQueuingStrategy'); + + assert_throws_js(RangeError, + () => new ReadableStream({ type: 'bytes' }, new ByteLengthQueuingStrategy({ highWaterMark: 512 })), + 'constructor should throw when strategy is ByteLengthQueuingStrategy'); + + class HasSizeMethod { + size() {} + } + + assert_throws_js(RangeError, () => new ReadableStream({ type: 'bytes' }, new HasSizeMethod()), + 'constructor should throw when size on the prototype chain'); +}, 'ReadableStream constructor should not accept a strategy with a size defined if type is "bytes"'); 
+ +promise_test(async t => { + const stream = new ReadableStream({ + pull: t.step_func(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1); + view[0] = 1; + + c.byobRequest.respondWithNewView(view); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const result = await reader.read(new Uint8Array([4, 5, 6])); + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 1, 'result.value.byteLength'); + assert_equals(view[0], 1, 'result.value[0]'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [1, 5, 6], 'result.value.buffer'); +}, 'ReadableStream with byte source: respondWithNewView() with a smaller view'); + +promise_test(async t => { + const stream = new ReadableStream({ + pull: t.step_func(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0); + + c.close(); + + c.byobRequest.respondWithNewView(view); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const result = await reader.read(new Uint8Array([4, 5, 6])); + assert_true(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 0, 'result.value.byteLength'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer'); +}, 'ReadableStream with byte source: respondWithNewView() with a zero-length view (in the closed state)'); + +promise_test(async t => { + let controller; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + start: t.step_func((c) => { + controller = c; + }), + pull: t.step_func(() 
=> { + resolvePullCalledPromise(); + }), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array([4, 5, 6])); + await pullCalledPromise; + + // Transfer the original BYOB request's buffer, and respond with a new view on that buffer + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + const newView = transferredView.subarray(0, 1); + newView[0] = 42; + + controller.byobRequest.respondWithNewView(newView); + + const result = await readPromise; + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 1, 'result.value.byteLength'); + assert_equals(view[0], 42, 'result.value[0]'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [42, 5, 6], 'result.value.buffer'); + +}, 'ReadableStream with byte source: respondWithNewView() with a transferred non-zero-length view ' + + '(in the readable state)'); + +promise_test(async t => { + let controller; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + start: t.step_func((c) => { + controller = c; + }), + pull: t.step_func(() => { + resolvePullCalledPromise(); + }), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array([4, 5, 6])); + await pullCalledPromise; + + // Transfer the original BYOB request's buffer, and respond with an empty view on that buffer + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + const newView = transferredView.subarray(0, 0); + + controller.close(); + controller.byobRequest.respondWithNewView(newView); + + const result = await readPromise; + assert_true(result.done, 'result.done'); 
+ + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 0, 'result.value.byteLength'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer'); + +}, 'ReadableStream with byte source: respondWithNewView() with a transferred zero-length view ' + + '(in the closed state)'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js new file mode 100644 index 00000000000000..2dcab69f42db0d --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js @@ -0,0 +1,58 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(async t => { + const rs = new ReadableStream({ + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = rs.getReader({ mode: 'byob' }); + const memory = new WebAssembly.Memory({ initial: 1 }); + const view = new Uint8Array(memory.buffer, 0, 1); + await promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: read() with a non-transferable buffer'); + +test(t => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const memory = new WebAssembly.Memory({ initial: 1 }); + const view = new Uint8Array(memory.buffer, 0, 1); + assert_throws_js(t, TypeError, controller.enqueue(view)); +}, 'ReadableStream with byte source: enqueue() with a non-transferable buffer'); + +promise_test(async t => { + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const rs = new ReadableStream({ + pull(controller) { + byobRequest = 
controller.byobRequest; + resolvePullCalledPromise(); + }, + type: 'bytes' + }); + + const memory = new WebAssembly.Memory({ initial: 1 }); + // Make sure the backing buffers of both views have the same length + const byobView = new Uint8Array(new ArrayBuffer(memory.buffer.byteLength), 0, 1); + const newView = new Uint8Array(memory.buffer, byobView.byteOffset, byobView.byteLength); + + const reader = rs.getReader({ mode: 'byob' }); + reader.read(byobView).then( + t.unreached_func('read() should not resolve'), + t.unreached_func('read() should not reject') + ); + await pullCalledPromise; + + assert_throws_js(t, TypeError, byobRequest.respondWithNewView(newView)); +}, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer'); diff --git a/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js b/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js new file mode 100644 index 00000000000000..7669a35d9a7a0c --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js @@ -0,0 +1,650 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); + +function assert_iter_result(iterResult, value, done, message) { + const prefix = message === undefined ? 
'' : `${message} `; + assert_equals(typeof iterResult, 'object', `${prefix}type is object`); + assert_equals(Object.getPrototypeOf(iterResult), Object.prototype, `${prefix}[[Prototype]]`); + assert_array_equals(Object.getOwnPropertyNames(iterResult).sort(), ['done', 'value'], `${prefix}property names`); + assert_equals(iterResult.value, value, `${prefix}value`); + assert_equals(iterResult.done, done, `${prefix}done`); +} + +test(() => { + const s = new ReadableStream(); + const it = s.values(); + const proto = Object.getPrototypeOf(it); + + const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () {}).prototype); + assert_equals(Object.getPrototypeOf(proto), AsyncIteratorPrototype, 'prototype should extend AsyncIteratorPrototype'); + + const methods = ['next', 'return'].sort(); + assert_array_equals(Object.getOwnPropertyNames(proto).sort(), methods, 'should have all the correct methods'); + + for (const m of methods) { + const propDesc = Object.getOwnPropertyDescriptor(proto, m); + assert_true(propDesc.enumerable, 'method should be enumerable'); + assert_true(propDesc.configurable, 'method should be configurable'); + assert_true(propDesc.writable, 'method should be writable'); + assert_equals(typeof it[m], 'function', 'method should be a function'); + assert_equals(it[m].name, m, 'method should have the correct name'); + } + + assert_equals(it.next.length, 0, 'next should have no parameters'); + assert_equals(it.return.length, 1, 'return should have 1 parameter'); + assert_equals(typeof it.throw, 'undefined', 'throw should not exist'); +}, 'Async iterator instances should have the correct list of properties'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); +}, 'Async-iterating a push source'); + 
+promise_test(async () => { + let i = 1; + const s = new ReadableStream({ + pull(c) { + c.enqueue(i); + if (i >= 3) { + c.close(); + } + i += 1; + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); +}, 'Async-iterating a pull source'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(undefined); + c.enqueue(undefined); + c.enqueue(undefined); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [undefined, undefined, undefined]); +}, 'Async-iterating a push source with undefined values'); + +promise_test(async () => { + let i = 1; + const s = new ReadableStream({ + pull(c) { + c.enqueue(undefined); + if (i >= 3) { + c.close(); + } + i += 1; + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [undefined, undefined, undefined]); +}, 'Async-iterating a pull source with undefined values'); + +promise_test(async () => { + let i = 1; + const s = recordingReadableStream({ + pull(c) { + c.enqueue(i); + if (i >= 3) { + c.close(); + } + i += 1; + }, + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const it = s.values(); + assert_array_equals(s.events, []); + + const read1 = await it.next(); + assert_iter_result(read1, 1, false); + assert_array_equals(s.events, ['pull']); + + const read2 = await it.next(); + assert_iter_result(read2, 2, false); + assert_array_equals(s.events, ['pull', 'pull']); + + const read3 = await it.next(); + assert_iter_result(read3, 3, false); + assert_array_equals(s.events, ['pull', 'pull', 'pull']); + + const read4 = await it.next(); + assert_iter_result(read4, undefined, true); + assert_array_equals(s.events, ['pull', 'pull', 'pull']); +}, 'Async-iterating a pull source manually'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.error('e'); + 
}, + }); + + try { + for await (const chunk of s) {} + assert_unreached(); + } catch (e) { + assert_equals(e, 'e'); + } +}, 'Async-iterating an errored stream throws'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.close(); + } + }); + + for await (const chunk of s) { + assert_unreached(); + } +}, 'Async-iterating a closed stream never executes the loop body, but works fine'); + +promise_test(async () => { + const s = new ReadableStream(); + + const loop = async () => { + for await (const chunk of s) { + assert_unreached(); + } + assert_unreached(); + }; + + await Promise.race([ + loop(), + flushAsyncEvents() + ]); +}, 'Async-iterating an empty but not closed/errored stream never executes the loop body and stalls the async function'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + const reader = s.getReader(); + const readResult = await reader.read(); + assert_iter_result(readResult, 1, false); + reader.releaseLock(); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [2, 3]); +}, 'Async-iterating a partially consumed stream'); + +for (const type of ['throw', 'break', 'return']) { + for (const preventCancel of [false, true]) { + promise_test(async () => { + const s = recordingReadableStream({ + start(c) { + c.enqueue(0); + } + }); + + // use a separate function for the loop body so return does not stop the test + const loop = async () => { + for await (const c of s.values({ preventCancel })) { + if (type === 'throw') { + throw new Error(); + } else if (type === 'break') { + break; + } else if (type === 'return') { + return; + } + } + }; + + try { + await loop(); + } catch (e) {} + + if (preventCancel) { + assert_array_equals(s.events, ['pull'], `cancel() should not be called`); + } else { + assert_array_equals(s.events, ['pull', 'cancel', undefined], `cancel() should 
be called`); + } + }, `Cancellation behavior when ${type}ing inside loop body; preventCancel = ${preventCancel}`); + } +} + +for (const preventCancel of [false, true]) { + promise_test(async () => { + const s = recordingReadableStream({ + start(c) { + c.enqueue(0); + } + }); + + const it = s.values({ preventCancel }); + await it.return(); + + if (preventCancel) { + assert_array_equals(s.events, [], `cancel() should not be called`); + } else { + assert_array_equals(s.events, ['cancel', undefined], `cancel() should be called`); + } + }, `Cancellation behavior when manually calling return(); preventCancel = ${preventCancel}`); +} + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); +}, 'next() rejects if the stream errors'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult = await it.return('return value'); + assert_iter_result(iterResult, 'return value', true); +}, 'return() does not rejects if the stream has not errored yet'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + // Do not error in start() because doing so would prevent acquiring a reader/async iterator. 
+ c.error(error1); + } + }); + + const it = s[Symbol.asyncIterator](); + + await flushAsyncEvents(); + await promise_rejects_exactly(t, error1, it.return('return value')); +}, 'return() rejects if the stream has errored'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult3 = await it.next(); + assert_iter_result(iterResult3, undefined, true, '3rd next()'); +}, 'next() that succeeds; next() that reports an error; next()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.next(), it.next()]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, '1st next()'); + + assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status'); + assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason'); + + assert_equals(iterResults[2].status, 'fulfilled', '3rd next() promise status'); + assert_iter_result(iterResults[2].value, undefined, true, '3rd next()'); +}, 'next() that succeeds; next() that reports an error(); next() [no awaiting]'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + 
assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult3 = await it.return('return value'); + assert_iter_result(iterResult3, 'return value', true, 'return()'); +}, 'next() that succeeds; next() that reports an error(); return()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.next(), it.return('return value')]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, '1st next()'); + + assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status'); + assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason'); + + assert_equals(iterResults[2].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[2].value, 'return value', true, 'return()'); +}, 'next() that succeeds; next() that reports an error(); return() [no awaiting]'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + c.enqueue(timesPulled); + ++timesPulled; + } + }); + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, 'next()'); + + const iterResult2 = await it.return('return value'); + assert_iter_result(iterResult2, 'return value', true, 'return()'); + + assert_equals(timesPulled, 2); +}, 'next() that succeeds; return()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + c.enqueue(timesPulled); + ++timesPulled; + } + }); + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.return('return value')]); + 
+ assert_equals(iterResults[0].status, 'fulfilled', 'next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, 'next()'); + + assert_equals(iterResults[1].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[1].value, 'return value', true, 'return()'); + + assert_equals(timesPulled, 2); +}, 'next() that succeeds; return() [no awaiting]'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResult1 = await it.return('return value'); + assert_iter_result(iterResult1, 'return value', true, 'return()'); + + const iterResult2 = await it.next(); + assert_iter_result(iterResult2, undefined, true, 'next()'); +}, 'return(); next()'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResults = await Promise.allSettled([it.return('return value'), it.next()]); + + assert_equals(iterResults[0].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[0].value, 'return value', true, 'return()'); + + assert_equals(iterResults[1].status, 'fulfilled', 'next() promise status'); + assert_iter_result(iterResults[1].value, undefined, true, 'next()'); +}, 'return(); next() [no awaiting]'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResult1 = await it.return('return value 1'); + assert_iter_result(iterResult1, 'return value 1', true, '1st return()'); + + const iterResult2 = await it.return('return value 2'); + assert_iter_result(iterResult2, 'return value 2', true, '1st return()'); +}, 'return(); return()'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResults = await Promise.allSettled([it.return('return value 1'), it.return('return value 2')]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st return() promise status'); + assert_iter_result(iterResults[0].value, 'return value 1', 
true, '1st return()'); + + assert_equals(iterResults[1].status, 'fulfilled', '2nd return() promise status'); + assert_iter_result(iterResults[1].value, 'return value 2', true, '1st return()'); +}, 'return(); return() [no awaiting]'); + +test(() => { + const s = new ReadableStream({ + start(c) { + c.enqueue(0); + c.close(); + }, + }); + s.values(); + assert_throws_js(TypeError, () => s.values(), 'values() should throw'); +}, 'values() throws if there\'s already a lock'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); + + const reader = s.getReader(); + await reader.closed; +}, 'Acquiring a reader after exhaustively async-iterating a stream'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator]({ preventCancel: true }); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult2 = await it.return('return value'); + assert_iter_result(iterResult2, 'return value', true, 'return()'); + + // i.e. it should not reject with a generic "this stream is locked" TypeError. 
+ const reader = s.getReader(); + await promise_rejects_exactly(t, error1, reader.closed, 'closed on the new reader should reject with the error'); +}, 'Acquiring a reader after return()ing from a stream that errors'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + // read the first two chunks, then cancel + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + if (chunk >= 2) { + break; + } + } + assert_array_equals(chunks, [1, 2]); + + const reader = s.getReader(); + await reader.closed; +}, 'Acquiring a reader after partially async-iterating a stream'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + // read the first two chunks, then release lock + const chunks = []; + for await (const chunk of s.values({preventCancel: true})) { + chunks.push(chunk); + if (chunk >= 2) { + break; + } + } + assert_array_equals(chunks, [1, 2]); + + const reader = s.getReader(); + const readResult = await reader.read(); + assert_iter_result(readResult, 3, false); + await reader.closed; +}, 'Acquiring a reader and reading the remaining chunks after partially async-iterating a stream with preventCancel = true'); + +for (const preventCancel of [false, true]) { + test(() => { + const rs = new ReadableStream(); + rs.values({ preventCancel }).return(); + // The test passes if this line doesn't throw. + rs.getReader(); + }, `return() should unlock the stream synchronously when preventCancel = ${preventCancel}`); +} + +promise_test(async () => { + const rs = new ReadableStream({ + async start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + await flushAsyncEvents(); + // At this point, the async iterator has a read request in the stream's queue for its pending next() promise. 
+ // Closing the stream now causes two things to happen *synchronously*: + // 1. ReadableStreamClose resolves reader.[[closedPromise]] with undefined. + // 2. ReadableStreamClose calls the read request's close steps, which calls ReadableStreamReaderGenericRelease, + // which replaces reader.[[closedPromise]] with a rejected promise. + c.close(); + } + }); + + const chunks = []; + for await (const chunk of rs) { + chunks.push(chunk); + } + assert_array_equals(chunks, ['a', 'b', 'c']); +}, 'close() while next() is pending'); diff --git a/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js b/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js new file mode 100644 index 00000000000000..b795360fb7c7bb --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js @@ -0,0 +1,159 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({}, { + get size() { + throw theError; + }, + highWaterMark: 5 + }); + }, 'construction should re-throw the error'); + +}, 'Readable stream: throwing strategy.size getter'); + +promise_test(t => { + + const controllerError = { name: 'controller error' }; + const thrownError = { name: 'thrown error' }; + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + { + size() { + controller.error(controllerError); + throw thrownError; + }, + highWaterMark: 5 + } + ); + + assert_throws_exactly(thrownError, () => controller.enqueue('a'), 'enqueue should re-throw the error'); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'Readable stream: strategy.size errors the stream and then throws'); + +promise_test(t => { + + const theError = { name: 'my error' }; + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + { + size() { + controller.error(theError); + 
return Infinity; + }, + highWaterMark: 5 + } + ); + + assert_throws_js(RangeError, () => controller.enqueue('a'), 'enqueue should throw a RangeError'); + + return promise_rejects_exactly(t, theError, rs.getReader().closed, 'closed should reject with the error'); + +}, 'Readable stream: strategy.size errors the stream and then returns Infinity'); + +promise_test(() => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream( + { + start(c) { + assert_throws_exactly(theError, () => c.enqueue('a'), 'enqueue should throw the error'); + } + }, + { + size() { + throw theError; + }, + highWaterMark: 5 + } + ); + + return rs.getReader().closed.catch(e => { + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Readable stream: throwing strategy.size method'); + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({}, { + size() { + return 1; + }, + get highWaterMark() { + throw theError; + } + }); + }, 'construction should re-throw the error'); + +}, 'Readable stream: throwing strategy.highWaterMark getter'); + +test(() => { + + for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) { + assert_throws_js(RangeError, () => { + new ReadableStream({}, { + size() { + return 1; + }, + highWaterMark + }); + }, 'construction should throw a RangeError for ' + highWaterMark); + } + +}, 'Readable stream: invalid strategy.highWaterMark'); + +promise_test(() => { + + const promises = []; + for (const size of [NaN, -Infinity, Infinity, -1]) { + let theError; + const rs = new ReadableStream( + { + start(c) { + try { + c.enqueue('hi'); + assert_unreached('enqueue didn\'t throw'); + } catch (error) { + assert_equals(error.name, 'RangeError', 'enqueue should throw a RangeError for ' + size); + theError = error; + } + } + }, + { + size() { + return size; + }, + highWaterMark: 5 + } + ); + + promises.push(rs.getReader().closed.catch(e => { + 
assert_equals(e, theError, 'closed should reject with the error for ' + size); + })); + } + + return Promise.all(promises); + +}, 'Readable stream: invalid strategy.size return value'); diff --git a/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js b/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js new file mode 100644 index 00000000000000..6f59197a49b18b --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js @@ -0,0 +1,400 @@ +// META: global=window,worker,jsshell +'use strict'; + + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({ + get start() { + throw theError; + } + }); + }, 'constructing the stream should re-throw the error'); + +}, 'Underlying source start: throwing getter'); + + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({ + start() { + throw theError; + } + }); + }, 'constructing the stream should re-throw the error'); + +}, 'Underlying source start: throwing method'); + + +test(() => { + + const theError = new Error('a unique string'); + assert_throws_exactly(theError, () => new ReadableStream({ + get pull() { + throw theError; + } + }), 'constructor should throw'); + +}, 'Underlying source: throwing pull getter (initial pull)'); + + +promise_test(t => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream({ + pull() { + throw theError; + } + }); + + return promise_rejects_exactly(t, theError, rs.getReader().closed); + +}, 'Underlying source: throwing pull method (initial pull)'); + + +promise_test(t => { + + const theError = new Error('a unique string'); + + let counter = 0; + const rs = new ReadableStream({ + get pull() { + ++counter; + if (counter === 1) { + return c => c.enqueue('a'); + } + + throw theError; + } + }); + const reader = rs.getReader(); + + return 
Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the first chunk read should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the second chunk read should be correct'); + assert_equals(counter, 1, 'counter should be 1'); + }) + ]); + +}, 'Underlying source pull: throwing getter (second pull does not result in a second get)'); + +promise_test(t => { + + const theError = new Error('a unique string'); + + let counter = 0; + const rs = new ReadableStream({ + pull(c) { + ++counter; + if (counter === 1) { + c.enqueue('a'); + return; + } + + throw theError; + } + }); + const reader = rs.getReader(); + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the chunk read should be correct'); + }), + promise_rejects_exactly(t, theError, reader.closed) + ]); + +}, 'Underlying source pull: throwing method (second pull)'); + +test(() => { + + const theError = new Error('a unique string'); + assert_throws_exactly(theError, () => new ReadableStream({ + get cancel() { + throw theError; + } + }), 'constructor should throw'); + +}, 'Underlying source cancel: throwing getter'); + +promise_test(t => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + return promise_rejects_exactly(t, theError, rs.cancel()); + +}, 'Underlying source cancel: throwing method'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.enqueue('a'), 'Calling enqueue after canceling should throw'); + + return rs.getReader().closed; + +}, 'Underlying source: calling enqueue on an empty canceled stream should throw'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + 
controller = c; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.enqueue('c'), 'Calling enqueue after canceling should throw'); + + return rs.getReader().closed; + +}, 'Underlying source: calling enqueue on a non-empty canceled stream should throw'); + +promise_test(() => { + + return new ReadableStream({ + start(c) { + c.close(); + assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw a TypeError'); + } + }).getReader().closed; + +}, 'Underlying source: calling enqueue on a closed stream should throw'); + +promise_test(t => { + + const theError = new Error('boo'); + const closed = new ReadableStream({ + start(c) { + c.error(theError); + assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw the error'); + } + }).getReader().closed; + + return promise_rejects_exactly(t, theError, closed); + +}, 'Underlying source: calling enqueue on an errored stream should throw'); + +promise_test(() => { + + return new ReadableStream({ + start(c) { + c.close(); + assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError'); + } + }).getReader().closed; + +}, 'Underlying source: calling close twice on an empty stream should throw the second time'); + +promise_test(() => { + + let startCalled = false; + let readCalled = false; + const reader = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError'); + startCalled = true; + } + }).getReader(); + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'read() should read the enqueued chunk'); + readCalled = true; + }), + reader.closed.then(() => { + assert_true(startCalled); + assert_true(readCalled); + }) + ]); + +}, 'Underlying source: calling close twice on a non-empty stream should throw the second time'); + +promise_test(() => { + + let controller; + let startCalled = 
false; + const rs = new ReadableStream({ + start(c) { + controller = c; + startCalled = true; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw'); + + return rs.getReader().closed.then(() => { + assert_true(startCalled); + }); + +}, 'Underlying source: calling close on an empty canceled stream should throw'); + +promise_test(() => { + + let controller; + let startCalled = false; + const rs = new ReadableStream({ + start(c) { + controller = c; + c.enqueue('a'); + startCalled = true; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw'); + + return rs.getReader().closed.then(() => { + assert_true(startCalled); + }); + +}, 'Underlying source: calling close on a non-empty canceled stream should throw'); + +promise_test(() => { + + const theError = new Error('boo'); + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.error(theError); + assert_throws_js(TypeError, () => c.close(), 'call to close should throw a TypeError'); + startCalled = true; + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Underlying source: calling close after error should throw'); + +promise_test(() => { + + const theError = new Error('boo'); + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.error(theError); + c.error(); + startCalled = true; + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Underlying source: calling error twice should not throw'); + +promise_test(() => { + + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.close(); + c.error(); + startCalled = true; + } + }).getReader().closed; + + return 
closed.then(() => assert_true(startCalled)); + +}, 'Underlying source: calling error after close should not throw'); + +promise_test(() => { + + let startCalled = false; + const firstError = new Error('1'); + const secondError = new Error('2'); + + const closed = new ReadableStream({ + start(c) { + c.error(firstError); + startCalled = true; + return Promise.reject(secondError); + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, firstError, 'closed should reject with the first error'); + }); + +}, 'Underlying source: calling error and returning a rejected promise from start should cause the stream to error ' + + 'with the first error'); + +promise_test(() => { + + let startCalled = false; + const firstError = new Error('1'); + const secondError = new Error('2'); + + const closed = new ReadableStream({ + pull(c) { + c.error(firstError); + startCalled = true; + return Promise.reject(secondError); + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, firstError, 'closed should reject with the first error'); + }); + +}, 'Underlying source: calling error and returning a rejected promise from pull should cause the stream to error ' + + 'with the first error'); + +const error1 = { name: 'error1' }; + +promise_test(t => { + + let pullShouldThrow = false; + const rs = new ReadableStream({ + pull(controller) { + if (pullShouldThrow) { + throw error1; + } + controller.enqueue(0); + } + }, new CountQueuingStrategy({highWaterMark: 1})); + const reader = rs.getReader(); + return Promise.resolve().then(() => { + pullShouldThrow = true; + return Promise.all([ + reader.read(), + promise_rejects_exactly(t, error1, reader.closed, '.closed promise should reject') + ]); + }); + +}, 'read should not error if it dequeues and pull() throws'); diff --git a/test/fixtures/wpt/streams/readable-streams/cancel.any.js b/test/fixtures/wpt/streams/readable-streams/cancel.any.js new file mode 
100644 index 00000000000000..c3723a465c9988 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/cancel.any.js @@ -0,0 +1,236 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use strict'; + +promise_test(t => { + + const randomSource = new RandomPushSource(); + + let cancellationFinished = false; + const rs = new ReadableStream({ + start(c) { + randomSource.ondata = c.enqueue.bind(c); + randomSource.onend = c.close.bind(c); + randomSource.onerror = c.error.bind(c); + }, + + pull() { + randomSource.readStart(); + }, + + cancel() { + randomSource.readStop(); + + return new Promise(resolve => { + t.step_timeout(() => { + cancellationFinished = true; + resolve(); + }, 1); + }); + } + }); + + const reader = rs.getReader(); + + // We call delay multiple times to avoid cancelling too early for the + // source to enqueue at least one chunk. + const cancel = delay(5).then(() => delay(5)).then(() => delay(5)).then(() => { + const cancelPromise = reader.cancel(); + assert_false(cancellationFinished, 'cancellation in source should happen later'); + return cancelPromise; + }); + + return readableStreamToArray(rs, reader).then(chunks => { + assert_greater_than(chunks.length, 0, 'at least one chunk should be read'); + for (let i = 0; i < chunks.length; i++) { + assert_equals(chunks[i].length, 128, 'chunk ' + i + ' should have 128 bytes'); + } + return cancel; + }).then(() => { + assert_true(cancellationFinished, 'it returns a promise that is fulfilled when the cancellation finishes'); + }); + +}, 'ReadableStream cancellation: integration test on an infinite stream derived from a random push source'); + +test(() => { + + let recordedReason; + const rs = new ReadableStream({ + cancel(reason) { + recordedReason = reason; + } + }); + + const passedReason = new Error('Sorry, it just wasn\'t meant to be.'); + rs.cancel(passedReason); + + assert_equals(recordedReason, passedReason, + 'the error 
passed to the underlying source\'s cancel method should equal the one passed to the stream\'s cancel'); + +}, 'ReadableStream cancellation: cancel(reason) should pass through the given reason to the underlying source'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + }, + cancel() { + assert_unreached('underlying source cancel() should not have been called'); + } + }); + + const reader = rs.getReader(); + + return rs.cancel().then(() => { + assert_unreached('cancel() should be rejected'); + }, e => { + assert_equals(e.name, 'TypeError', 'cancel() should be rejected with a TypeError'); + }).then(() => { + return reader.read(); + }).then(result => { + assert_object_equals(result, { value: 'a', done: false }, 'read() should still work after the attempted cancel'); + return reader.closed; + }); + +}, 'ReadableStream cancellation: cancel() on a locked stream should fail and not call the underlying source cancel'); + +promise_test(() => { + + let cancelReceived = false; + const cancelReason = new Error('I am tired of this stream, I prefer to cancel it'); + const rs = new ReadableStream({ + cancel(reason) { + cancelReceived = true; + assert_equals(reason, cancelReason, 'cancellation reason given to the underlying source should be equal to the one passed'); + } + }); + + return rs.cancel(cancelReason).then(() => { + assert_true(cancelReceived); + }); + +}, 'ReadableStream cancellation: should fulfill promise when cancel callback went fine'); + +promise_test(() => { + + const rs = new ReadableStream({ + cancel() { + return 'Hello'; + } + }); + + return rs.cancel().then(v => { + assert_equals(v, undefined, 'cancel() return value should be fulfilled with undefined'); + }); + +}, 'ReadableStream cancellation: returning a value from the underlying source\'s cancel should not affect the fulfillment value of the promise returned by the stream\'s cancel'); + +promise_test(() => { + + const thrownError = new Error('test'); + 
let cancelCalled = false; + + const rs = new ReadableStream({ + cancel() { + cancelCalled = true; + throw thrownError; + } + }); + + return rs.cancel('test').then(() => { + assert_unreached('cancel should reject'); + }, e => { + assert_true(cancelCalled); + assert_equals(e, thrownError); + }); + +}, 'ReadableStream cancellation: should reject promise when cancel callback raises an exception'); + +promise_test(() => { + + const cancelReason = new Error('test'); + + const rs = new ReadableStream({ + cancel(error) { + assert_equals(error, cancelReason); + return delay(1); + } + }); + + return rs.cancel(cancelReason); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (1)'); + +promise_test(t => { + + let resolveSourceCancelPromise; + let sourceCancelPromiseHasFulfilled = false; + + const rs = new ReadableStream({ + cancel() { + const sourceCancelPromise = new Promise(resolve => resolveSourceCancelPromise = resolve); + + sourceCancelPromise.then(() => { + sourceCancelPromiseHasFulfilled = true; + }); + + return sourceCancelPromise; + } + }); + + t.step_timeout(() => resolveSourceCancelPromise('Hello'), 1); + + return rs.cancel().then(value => { + assert_true(sourceCancelPromiseHasFulfilled, 'cancel() return value should be fulfilled only after the promise returned by the underlying source\'s cancel'); + assert_equals(value, undefined, 'cancel() return value should be fulfilled with undefined'); + }); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (2)'); + +promise_test(t => { + + let rejectSourceCancelPromise; + let sourceCancelPromiseHasRejected = false; + + const rs = new ReadableStream({ + cancel() { + const sourceCancelPromise = new Promise((resolve, reject) => rejectSourceCancelPromise = reject); + + 
sourceCancelPromise.catch(() => { + sourceCancelPromiseHasRejected = true; + }); + + return sourceCancelPromise; + } + }); + + const errorInCancel = new Error('Sorry, it just wasn\'t meant to be.'); + + t.step_timeout(() => rejectSourceCancelPromise(errorInCancel), 1); + + return rs.cancel().then(() => { + assert_unreached('cancel() return value should be rejected'); + }, r => { + assert_true(sourceCancelPromiseHasRejected, 'cancel() return value should be rejected only after the promise returned by the underlying source\'s cancel'); + assert_equals(r, errorInCancel, 'cancel() return value should be rejected with the underlying source\'s rejection reason'); + }); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should reject when that one does'); + +promise_test(() => { + + const rs = new ReadableStream({ + start() { + return new Promise(() => {}); + }, + pull() { + assert_unreached('pull should not have been called'); + } + }); + + return Promise.all([rs.cancel(), rs.getReader().closed]); + +}, 'ReadableStream cancellation: cancelling before start finishes should prevent pull() from being called'); diff --git a/test/fixtures/wpt/streams/readable-streams/constructor.any.js b/test/fixtures/wpt/streams/readable-streams/constructor.any.js new file mode 100644 index 00000000000000..dcfd9e9c33861f --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/constructor.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +test(() => { + const underlyingSource = { get start() { throw error1; } }; + const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } }; + + // underlyingSource is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer. 
+ // So the queuingStrategy exception should be encountered first. + assert_throws_exactly(error2, () => new ReadableStream(underlyingSource, queuingStrategy)); +}, 'underlyingSource argument should be converted after queuingStrategy argument'); diff --git a/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js b/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js new file mode 100644 index 00000000000000..78a25318b2dd5a --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js @@ -0,0 +1,208 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 4 })); + +}, 'Can construct a readable stream with a valid CountQueuingStrategy'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 0 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 0, '0 reads, 0 enqueues: desiredSize should be 0'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, -1, '0 reads, 1 enqueue: desiredSize should be -1'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, -2, '0 reads, 2 enqueues: desiredSize should be -2'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, -3, '0 reads, 3 enqueues: desiredSize should be -3'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, -4, '0 reads, 4 enqueues: desiredSize should be -4'); + + return reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)'); + return 
reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)'); + + assert_equals(controller.desiredSize, -1, '3 reads, 4 enqueues: desiredSize should be -1'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -2, '3 reads, 5 enqueues: desiredSize should be -2'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)'); + return reader.read(); + + }).then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 0, '5 reads, 5 enqueues: desiredSize should be 0'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, -1, '5 reads, 6 enqueues: desiredSize should be -1'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -2, '5 reads, 7 enqueues: desiredSize should be -2'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 0)'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 1 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 1, '0 reads, 0 enqueues: desiredSize should be 1'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 0, '0 reads, 1 enqueue: desiredSize should be 0'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, -1, '0 reads, 2 enqueues: desiredSize should be -1'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, -2, '0 reads, 3 enqueues: desiredSize should be -2'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, -3, '0 reads, 4 enqueues: desiredSize should be -3'); + + return 
reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)'); + + assert_equals(controller.desiredSize, 0, '3 reads, 4 enqueues: desiredSize should be 0'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -1, '3 reads, 5 enqueues: desiredSize should be -1'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 1, '5 reads, 5 enqueues: desiredSize should be 1'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, 0, '5 reads, 6 enqueues: desiredSize should be 0'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -1, '5 reads, 7 enqueues: desiredSize should be -1'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 1)'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 4 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 4, '0 reads, 0 enqueues: desiredSize should be 4'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 3, '0 reads, 1 enqueue: desiredSize should be 3'); + 
controller.enqueue('b'); + assert_equals(controller.desiredSize, 2, '0 reads, 2 enqueues: desiredSize should be 2'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, 1, '0 reads, 3 enqueues: desiredSize should be 1'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, 0, '0 reads, 4 enqueues: desiredSize should be 0'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -1, '0 reads, 5 enqueues: desiredSize should be -1'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, -2, '0 reads, 6 enqueues: desiredSize should be -2'); + + + return reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 5 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 4 chunks)'); + + assert_equals(controller.desiredSize, 0, '2 reads, 6 enqueues: desiredSize should be 0'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -1, '2 reads, 7 enqueues: desiredSize should be -1'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 4 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 2 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'f', done: false }, + '6th read gives back the 6th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 3, '6 reads, 7 
enqueues: desiredSize should be 3'); + controller.enqueue('h'); + assert_equals(controller.desiredSize, 2, '6 reads, 8 enqueues: desiredSize should be 2'); + controller.enqueue('i'); + assert_equals(controller.desiredSize, 1, '6 reads, 9 enqueues: desiredSize should be 1'); + controller.enqueue('j'); + assert_equals(controller.desiredSize, 0, '6 reads, 10 enqueues: desiredSize should be 0'); + controller.enqueue('k'); + assert_equals(controller.desiredSize, -1, '6 reads, 11 enqueues: desiredSize should be -1'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 4)'); diff --git a/test/fixtures/wpt/streams/readable-streams/default-reader.any.js b/test/fixtures/wpt/streams/readable-streams/default-reader.any.js new file mode 100644 index 00000000000000..60c740a8288631 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/default-reader.any.js @@ -0,0 +1,514 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +'use strict'; + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader('potato')); + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader({})); + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader()); + +}, 'ReadableStreamDefaultReader constructor should get a ReadableStream object as argument'); + +test(() => { + + const rsReader = new ReadableStreamDefaultReader(new ReadableStream()); + assert_equals(rsReader.closed, rsReader.closed, 'closed should return the same promise'); + +}, 'ReadableStreamDefaultReader closed should always return the same promise object'); + +test(() => { + + const rs = new ReadableStream(); + new ReadableStreamDefaultReader(rs); // Constructing directly the first time should be fine. 
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs), + 'constructing directly the second time should fail'); + +}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via direct ' + + 'construction)'); + +test(() => { + + const rs = new ReadableStream(); + new ReadableStreamDefaultReader(rs); // Constructing directly should be fine. + assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail'); + +}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via direct ' + + 'construction)'); + +test(() => { + + const rs = new ReadableStream(); + rs.getReader(); // getReader() should be fine. + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs), 'constructing directly should fail'); + +}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via getReader)'); + +test(() => { + + const rs = new ReadableStream(); + rs.getReader(); // getReader() should be fine. + assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail'); + +}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via getReader)'); + +test(() => { + + const rs = new ReadableStream({ + start(c) { + c.close(); + } + }); + + new ReadableStreamDefaultReader(rs); // Constructing directly should not throw. + +}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is closed'); + +test(() => { + + const theError = new Error('don\'t say i didn\'t warn ya'); + const rs = new ReadableStream({ + start(c) { + c.error(theError); + } + }); + + new ReadableStreamDefaultReader(rs); // Constructing directly should not throw. 
+ +}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is errored'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + const promise = reader.read().then(result => { + assert_object_equals(result, { value: 'a', done: false }, 'read() should fulfill with the enqueued chunk'); + }); + + controller.enqueue('a'); + return promise; + +}, 'Reading from a reader for an empty stream will wait until a chunk is available'); + +promise_test(() => { + + let cancelCalled = false; + const passedReason = new Error('it wasn\'t the right time, sorry'); + const rs = new ReadableStream({ + cancel(reason) { + assert_true(rs.locked, 'the stream should still be locked'); + assert_throws_js(TypeError, () => rs.getReader(), 'should not be able to get another reader'); + assert_equals(reason, passedReason, 'the cancellation reason is passed through to the underlying source'); + cancelCalled = true; + } + }); + + const reader = rs.getReader(); + return reader.cancel(passedReason).then(() => assert_true(cancelCalled)); + +}, 'cancel() on a reader does not release the reader'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise = reader.closed; + + controller.close(); + return promise; + +}, 'closed should be fulfilled after stream is closed (.closed access before acquiring)'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader1 = rs.getReader(); + + reader1.releaseLock(); + + const reader2 = rs.getReader(); + controller.close(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader1.closed), + reader2.closed + ]); + +}, 'closed should be rejected after reader releases its lock (multiple stream locks)'); + +promise_test(t => { + + let controller; 
+ const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise1 = reader.closed; + + controller.close(); + + reader.releaseLock(); + const promise2 = reader.closed; + + assert_not_equals(promise1, promise2, '.closed should be replaced'); + return Promise.all([ + promise1, + promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock'), + ]); + +}, 'closed is replaced when stream closes and reader releases its lock'); + +promise_test(t => { + + const theError = { name: 'unique error' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise1 = reader.closed; + + controller.error(theError); + + reader.releaseLock(); + const promise2 = reader.closed; + + assert_not_equals(promise1, promise2, '.closed should be replaced'); + return Promise.all([ + promise_rejects_exactly(t, theError, promise1, '.closed before releasing lock'), + promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock') + ]); + +}, 'closed is replaced when stream errors and reader releases its lock'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const reader1 = rs.getReader(); + const promise1 = reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'reading the first chunk from reader1 works'); + }); + reader1.releaseLock(); + + const reader2 = rs.getReader(); + const promise2 = reader2.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'reading the second chunk from reader2 works'); + }); + reader2.releaseLock(); + + return Promise.all([promise1, promise2]); + +}, 'Multiple readers can access the stream in sequence'); + +promise_test(() => { + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + } + }); + + const reader1 = rs.getReader(); + reader1.releaseLock(); + + const 
reader2 = rs.getReader(); + + // Should be a no-op + reader1.releaseLock(); + + return reader2.read().then(result => { + assert_object_equals(result, { value: 'a', done: false }, + 'read() should still work on reader2 even after reader1 is released'); + }); + +}, 'Cannot use an already-released reader to unlock a stream again'); + +promise_test(t => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + cancel() { + assert_unreached('underlying source cancel should not be called'); + } + }); + + const reader = rs.getReader(); + reader.releaseLock(); + const cancelPromise = reader.cancel(); + + const reader2 = rs.getReader(); + const readPromise = reader2.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'a new reader should be able to read a chunk'); + }); + + return Promise.all([ + promise_rejects_js(t, TypeError, cancelPromise), + readPromise + ]); + +}, 'cancel() on a released reader is a no-op and does not pass through'); + +promise_test(t => { + + const promiseAsserts = []; + + let controller; + const theError = { name: 'unique error' }; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader1 = rs.getReader(); + + promiseAsserts.push( + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader1.read()) + ); + + assert_throws_js(TypeError, () => rs.getReader(), 'trying to get another reader before erroring should throw'); + + controller.error(theError); + + reader1.releaseLock(); + + const reader2 = rs.getReader(); + + promiseAsserts.push( + promise_rejects_exactly(t, theError, reader2.closed), + promise_rejects_exactly(t, theError, reader2.read()) + ); + + return Promise.all(promiseAsserts); + +}, 'Getting a second reader after erroring the stream and releasing the reader should succeed'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const promise = 
rs.getReader().closed.then( + t.unreached_func('closed promise should not be fulfilled when stream is errored'), + err => { + assert_equals(err, undefined, 'passed error should be undefined as it was'); + } + ); + + controller.error(); + return promise; + +}, 'ReadableStreamDefaultReader closed promise should be rejected with undefined if that is the error'); + + +promise_test(t => { + + const rs = new ReadableStream({ + start() { + return Promise.reject(); + } + }); + + return rs.getReader().read().then( + t.unreached_func('read promise should not be fulfilled when stream is errored'), + err => { + assert_equals(err, undefined, 'passed error should be undefined as it was'); + } + ); + +}, 'ReadableStreamDefaultReader: if start rejects with no parameter, it should error the stream with an undefined ' + + 'error'); + +promise_test(t => { + + const theError = { name: 'unique string' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const promise = promise_rejects_exactly(t, theError, rs.getReader().closed); + + controller.error(theError); + return promise; + +}, 'Erroring a ReadableStream after checking closed should reject ReadableStreamDefaultReader closed promise'); + +promise_test(t => { + + const theError = { name: 'unique string' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + controller.error(theError); + + // Let's call getReader twice for extra test coverage of this code path. 
+ rs.getReader().releaseLock(); + + return promise_rejects_exactly(t, theError, rs.getReader().closed); + +}, 'Erroring a ReadableStream before checking closed should reject ReadableStreamDefaultReader closed promise'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + const promise = Promise.all([ + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)'); + }), + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)'); + }), + reader.closed + ]); + + controller.close(); + return promise; + +}, 'Reading twice on a stream that gets closed'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + controller.close(); + const reader = rs.getReader(); + + return Promise.all([ + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)'); + }), + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)'); + }), + reader.closed + ]); + +}, 'Reading twice on a closed stream'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const myError = { name: 'mashed potatoes' }; + controller.error(myError); + + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.closed) + ]); + +}, 'Reading twice on an errored stream'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const myError = { name: 'mashed 
potatoes' }; + const reader = rs.getReader(); + + const promise = Promise.all([ + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.closed) + ]); + + controller.error(myError); + return promise; + +}, 'Reading twice on a stream that gets errored'); + +test(() => { + const rs = new ReadableStream(); + let toStringCalled = false; + const mode = { + toString() { + toStringCalled = true; + return ''; + } + }; + assert_throws_js(TypeError, () => rs.getReader({ mode }), 'getReader() should throw'); + assert_true(toStringCalled, 'toString() should be called'); +}, 'getReader() should call ToString() on mode'); + +promise_test(() => { + const rs = new ReadableStream({ + pull(controller) { + controller.close(); + } + }); + + const reader = rs.getReader(); + return reader.read().then(() => { + // The test passes if releaseLock() does not throw. + reader.releaseLock(); + }); +}, 'controller.close() should clear the list of pending read requests'); diff --git a/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js b/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js new file mode 100644 index 00000000000000..400482a450cad2 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js @@ -0,0 +1,116 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers +// than adding up the items in the queue would. It is important that implementations give the same result in these edge +// cases so that developers do not come to depend on non-standard behaviour. See +// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion. 
+ +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(2); + assert_equals(controller.desiredSize, 0 - 2, 'desiredSize must be -2 after enqueueing such a chunk'); + + controller.enqueue(Number.MAX_SAFE_INTEGER); + assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2 + 2, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near NUMBER.MAX_SAFE_INTEGER (total ends up positive)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(1e-16); + assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -1e16 after enqueueing such a chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 + 1e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(1e-16); + assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -2e16 after enqueueing such a 
chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + controller.enqueue(2e-16); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a third chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a second chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a third chunk)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(2e-16); + assert_equals(controller.desiredSize, 0 - 2e-16, 'desiredSize must be -2e-16 after enqueueing such a chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 2e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 2e-16 - 1 + 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, + 'desiredSize must be calculated using double-precision floating-point arithmetic
(subtracting a second chunk)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up zero)'); + +function setupTestStream() { + const strategy = { + size(x) { + return x; + }, + highWaterMark: 0 + }; + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, strategy); + + return { reader: rs.getReader(), controller }; +} diff --git a/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js b/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js new file mode 100644 index 00000000000000..dad0ad1535a5bf --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js @@ -0,0 +1,70 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(() => { + + let controller; + new ReadableStream({ + start(c) { + controller = c; + } + }); + + garbageCollect(); + + return delay(50).then(() => { + controller.close(); + assert_throws_js(TypeError, () => controller.close(), 'close should throw a TypeError the second time'); + controller.error(); + }); + +}, 'ReadableStreamController methods should continue working properly when scripts lose their reference to the ' + + 'readable stream'); + +promise_test(() => { + + let controller; + + const closedPromise = new ReadableStream({ + start(c) { + controller = c; + } + }).getReader().closed; + + garbageCollect(); + + return delay(50).then(() => controller.close()).then(() => closedPromise); + +}, 'ReadableStream closed promise should fulfill even if the stream and reader JS references are lost'); + +promise_test(t => { + + const theError = new Error('boo'); + let controller; + + const closedPromise = new ReadableStream({ + start(c) { + controller = c; + } + }).getReader().closed; + + garbageCollect(); + + return delay(50).then(() => controller.error(theError)) + .then(() => promise_rejects_exactly(t, theError, closedPromise)); + +}, 'ReadableStream closed promise should 
reject even if stream and reader JS references are lost'); + +promise_test(() => { + + const rs = new ReadableStream({}); + + rs.getReader(); + + garbageCollect(); + + return delay(50).then(() => assert_throws_js(TypeError, () => rs.getReader(), + 'old reader should still be locking the stream even after garbage collection')); + +}, 'Garbage-collecting a ReadableStreamDefaultReader should not unlock its stream'); diff --git a/test/fixtures/wpt/streams/readable-streams/general.any.js b/test/fixtures/wpt/streams/readable-streams/general.any.js new file mode 100644 index 00000000000000..4e54990823ab01 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/general.any.js @@ -0,0 +1,840 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + + new ReadableStream(); // ReadableStream constructed with no parameters + new ReadableStream({ }); // ReadableStream constructed with an empty object as parameter + new ReadableStream({ type: undefined }); // ReadableStream constructed with undefined type + new ReadableStream(undefined); // ReadableStream constructed with undefined as parameter + + let x; + new ReadableStream(x); // ReadableStream constructed with an undefined variable as parameter + +}, 'ReadableStream can be constructed with no errors'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream(null), 'constructor should throw when the source is null'); + +}, 'ReadableStream can\'t be constructed with garbage'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ type: null }), + 'constructor should throw when the type is null'); + assert_throws_js(TypeError, () => new ReadableStream({ type: '' }), + 'constructor should throw when the type is empty string'); + assert_throws_js(TypeError, () => new ReadableStream({ type: 'asdf' }), + 'constructor should 
throw when the type is asdf'); + assert_throws_exactly( + error1, + () => new ReadableStream({ type: { get toString() { throw error1; } } }), + 'constructor should throw when ToString() throws' + ); + assert_throws_exactly( + error1, + () => new ReadableStream({ type: { toString() { throw error1; } } }), + 'constructor should throw when ToString() throws' + ); + +}, 'ReadableStream can\'t be constructed with an invalid type'); + +test(() => { + + assert_throws_js(TypeError, () => { + new ReadableStream({ start: 'potato' }); + }, 'constructor should throw when start is not a function'); + +}, 'ReadableStream constructor should throw for non-function start arguments'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ cancel: '2' }), 'constructor should throw'); + +}, 'ReadableStream constructor will not tolerate initial garbage as cancel argument'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ pull: { } }), 'constructor should throw'); + +}, 'ReadableStream constructor will not tolerate initial garbage as pull argument'); + +test(() => { + + let startCalled = false; + + const source = { + start() { + assert_equals(this, source, 'source is this during start'); + startCalled = true; + } + }; + + new ReadableStream(source); + assert_true(startCalled); + +}, 'ReadableStream start should be called with the proper thisArg'); + +test(() => { + + let startCalled = false; + const source = { + start(controller) { + const properties = ['close', 'constructor', 'desiredSize', 'enqueue', 'error']; + assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties, + 'prototype should have the right properties'); + + controller.test = ''; + assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties, + 'prototype should still have the right properties'); + assert_not_equals(Object.getOwnPropertyNames(controller).indexOf('test'), -1, + '"test" should 
be a property of the controller'); + + startCalled = true; + } + }; + + new ReadableStream(source); + assert_true(startCalled); + +}, 'ReadableStream start controller parameter should be extensible'); + +test(() => { + (new ReadableStream()).getReader(undefined); + (new ReadableStream()).getReader({}); + (new ReadableStream()).getReader({ mode: undefined, notmode: 'ignored' }); + assert_throws_js(TypeError, () => (new ReadableStream()).getReader({ mode: 'potato' })); +}, 'default ReadableStream getReader() should only accept mode:undefined'); + +promise_test(() => { + + function SimpleStreamSource() {} + let resolve; + const promise = new Promise(r => resolve = r); + SimpleStreamSource.prototype = { + start: resolve + }; + + new ReadableStream(new SimpleStreamSource()); + return promise; + +}, 'ReadableStream should be able to call start method within prototype chain of its source'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + return delay(5).then(() => { + c.enqueue('a'); + c.close(); + }); + } + }); + + const reader = rs.getReader(); + return reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'value read should be the one enqueued'); + return reader.closed; + }); + +}, 'ReadableStream start should be able to return a promise'); + +promise_test(() => { + + const theError = new Error('rejected!'); + const rs = new ReadableStream({ + start() { + return delay(1).then(() => { + throw theError; + }); + } + }); + + return rs.getReader().closed.then(() => { + assert_unreached('closed promise should be rejected'); + }, e => { + assert_equals(e, theError, 'promise should be rejected with the same error'); + }); + +}, 'ReadableStream start should be able to return a promise and reject it'); + +promise_test(() => { + + const objects = [ + { potato: 'Give me more!' 
}, + 'test', + 1 + ]; + + const rs = new ReadableStream({ + start(c) { + for (const o of objects) { + c.enqueue(o); + } + c.close(); + } + }); + + const reader = rs.getReader(); + + return Promise.all([reader.read(), reader.read(), reader.read(), reader.closed]).then(r => { + assert_object_equals(r[0], { value: objects[0], done: false }, 'value read should be the one enqueued'); + assert_object_equals(r[1], { value: objects[1], done: false }, 'value read should be the one enqueued'); + assert_object_equals(r[2], { value: objects[2], done: false }, 'value read should be the one enqueued'); + }); + +}, 'ReadableStream should be able to enqueue different objects.'); + +promise_test(() => { + + const error = new Error('pull failure'); + const rs = new ReadableStream({ + pull() { + return Promise.reject(error); + } + }); + + const reader = rs.getReader(); + + let closed = false; + let read = false; + + return Promise.all([ + reader.closed.then(() => { + assert_unreached('closed should be rejected'); + }, e => { + closed = true; + assert_false(read); + assert_equals(e, error, 'closed should be rejected with the thrown error'); + }), + reader.read().then(() => { + assert_unreached('read() should be rejected'); + }, e => { + read = true; + assert_true(closed); + assert_equals(e, error, 'read() should be rejected with the thrown error'); + }) + ]); + +}, 'ReadableStream: if pull rejects, it should error the stream'); + +promise_test(() => { + + let pullCount = 0; + + new ReadableStream({ + pull() { + pullCount++; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once upon starting the stream'); + +promise_test(() => { + + let pullCount = 0; + + const rs = new ReadableStream({ + pull(c) { + // Don't enqueue immediately after start. 
We want the stream to be empty when we call .read() on it. + if (pullCount > 0) { + c.enqueue(pullCount); + } + ++pullCount; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + }).then(() => { + const reader = rs.getReader(); + const read = reader.read(); + assert_equals(pullCount, 2, 'pull should be called when read is called'); + return read; + }).then(result => { + assert_equals(pullCount, 3, 'pull should be called again in reaction to calling read'); + assert_object_equals(result, { value: 1, done: false }, 'the result read should be the one enqueued'); + }); + +}, 'ReadableStream: should call pull when trying to read from a started, empty stream'); + +promise_test(() => { + + let pullCount = 0; + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + pull() { + pullCount++; + } + }); + + const read = rs.getReader().read(); + assert_equals(pullCount, 0, 'calling read() should not cause pull to be called yet'); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + return read; + }).then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk'); + assert_equals(pullCount, 1, 'pull should not have been called again'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once on a non-empty stream read from before start fulfills'); + +promise_test(() => { + + let pullCount = 0; + const startPromise = Promise.resolve(); + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + pull() { + pullCount++; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 0, 'pull should not be called once start finishes, since the queue is full'); + + const read = rs.getReader().read(); + assert_equals(pullCount, 1, 'calling read() 
should cause pull to be called immediately'); + return read; + }).then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once on a non-empty stream read from after start fulfills'); + +promise_test(() => { + + let pullCount = 0; + let controller; + + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + } + }); + + const reader = rs.getReader(); + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts'); + + controller.enqueue('a'); + assert_equals(pullCount, 1, 'pull should not have been called again after enqueue'); + + return reader.read(); + }).then(() => { + assert_equals(pullCount, 2, 'pull should have been called again after read'); + + return delay(10); + }).then(() => { + assert_equals(pullCount, 2, 'pull should be called exactly twice'); + }); +}, 'ReadableStream: should call pull in reaction to read()ing the last chunk, if not draining'); + +promise_test(() => { + + let pullCount = 0; + let controller; + + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + } + }); + + const reader = rs.getReader(); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts'); + + controller.enqueue('a'); + assert_equals(pullCount, 1, 'pull should not have been called again after enqueue'); + + controller.close(); + + return reader.read(); + }).then(() => { + assert_equals(pullCount, 1, 'pull should not have been called a second time after read'); + + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should not call pull() in reaction to 
read()ing the last chunk, if draining'); + +promise_test(() => { + + let resolve; + let returnedPromise; + let timesCalled = 0; + + const rs = new ReadableStream({ + pull(c) { + c.enqueue(++timesCalled); + returnedPromise = new Promise(r => resolve = r); + return returnedPromise; + } + }); + const reader = rs.getReader(); + + return reader.read() + .then(result1 => { + assert_equals(timesCalled, 1, + 'pull should have been called once after start, but not yet have been called a second time'); + assert_object_equals(result1, { value: 1, done: false }, 'read() should fulfill with the enqueued value'); + + return delay(10); + }).then(() => { + assert_equals(timesCalled, 1, 'after 10 ms, pull should still only have been called once'); + + resolve(); + return returnedPromise; + }).then(() => { + assert_equals(timesCalled, 2, + 'after the promise returned by pull is fulfilled, pull should be called a second time'); + }); + +}, 'ReadableStream: should not call pull until the previous pull call\'s promise fulfills'); + +promise_test(() => { + + let timesCalled = 0; + + const rs = new ReadableStream( + { + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + }, + pull() { + ++timesCalled; + } + }, + { + size() { + return 1; + }, + highWaterMark: Infinity + } + ); + const reader = rs.getReader(); + + return flushAsyncEvents().then(() => { + return reader.read(); + }).then(result1 => { + assert_object_equals(result1, { value: 'a', done: false }, 'first chunk should be as expected'); + + return reader.read(); + }).then(result2 => { + assert_object_equals(result2, { value: 'b', done: false }, 'second chunk should be as expected'); + + return reader.read(); + }).then(result3 => { + assert_object_equals(result3, { value: 'c', done: false }, 'third chunk should be as expected'); + + return delay(10); + }).then(() => { + // Once for after start, and once for every read. 
+ assert_equals(timesCalled, 4, 'pull() should be called exactly four times'); + }); + +}, 'ReadableStream: should pull after start, and after every read'); + +promise_test(() => { + + let timesCalled = 0; + const startPromise = Promise.resolve(); + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + return startPromise; + }, + pull() { + ++timesCalled; + } + }); + + const reader = rs.getReader(); + return startPromise.then(() => { + assert_equals(timesCalled, 0, 'after start finishes, pull should not have been called'); + + return reader.read(); + }).then(() => { + assert_equals(timesCalled, 0, 'reading should not have triggered a pull call'); + + return reader.closed; + }).then(() => { + assert_equals(timesCalled, 0, 'stream should have closed with still no calls to pull'); + }); + +}, 'ReadableStream: should not call pull after start if the stream is now closed'); + +promise_test(() => { + + let timesCalled = 0; + let resolve; + const ready = new Promise(r => resolve = r); + + new ReadableStream( + { + start() {}, + pull(c) { + c.enqueue(++timesCalled); + + if (timesCalled === 4) { + resolve(); + } + } + }, + { + size() { + return 1; + }, + highWaterMark: 4 + } + ); + + return ready.then(() => { + // after start: size = 0, pull() + // after enqueue(1): size = 1, pull() + // after enqueue(2): size = 2, pull() + // after enqueue(3): size = 3, pull() + // after enqueue(4): size = 4, do not pull + assert_equals(timesCalled, 4, 'pull() should have been called four times'); + }); + +}, 'ReadableStream: should call pull after enqueueing from inside pull (with no read requests), if strategy allows'); + +promise_test(() => { + + let pullCalled = false; + + const rs = new ReadableStream({ + pull(c) { + pullCalled = true; + c.close(); + } + }); + + const reader = rs.getReader(); + return reader.closed.then(() => { + assert_true(pullCalled); + }); + +}, 'ReadableStream pull should be able to close a stream.'); + +promise_test(t => { + + const 
controllerError = { name: 'controller error' }; + + const rs = new ReadableStream({ + pull(c) { + c.error(controllerError); + } + }); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'ReadableStream pull should be able to error a stream.'); + +promise_test(t => { + + const controllerError = { name: 'controller error' }; + const thrownError = { name: 'thrown error' }; + + const rs = new ReadableStream({ + pull(c) { + c.error(controllerError); + throw thrownError; + } + }); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'ReadableStream pull should be able to error a stream and throw.'); + +test(() => { + + let startCalled = false; + + new ReadableStream({ + start(c) { + assert_equals(c.enqueue('a'), undefined, 'the first enqueue should return undefined'); + c.close(); + + assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue after close should throw a TypeError'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream: enqueue should throw when the stream is readable but draining'); + +test(() => { + + let startCalled = false; + + new ReadableStream({ + start(c) { + c.close(); + + assert_throws_js(TypeError, () => c.enqueue('a'), 'enqueue after close should throw a TypeError'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream: enqueue should throw when the stream is closed'); + +promise_test(() => { + + let startCalled = 0; + let pullCalled = 0; + let cancelCalled = 0; + + /* eslint-disable no-use-before-define */ + class Source { + start(c) { + startCalled++; + assert_equals(this, theSource, 'start() should be called with the correct this'); + c.enqueue('a'); + } + + pull() { + pullCalled++; + assert_equals(this, theSource, 'pull() should be called with the correct this'); + } + + cancel() { + cancelCalled++; + assert_equals(this, theSource, 'cancel() should be called with the correct this'); + } + } + /* eslint-enable 
no-use-before-define */ + + const theSource = new Source(); + theSource.debugName = 'the source object passed to the constructor'; // makes test failures easier to diagnose + + const rs = new ReadableStream(theSource); + const reader = rs.getReader(); + + return reader.read().then(() => { + reader.releaseLock(); + rs.cancel(); + assert_equals(startCalled, 1); + assert_equals(pullCalled, 1); + assert_equals(cancelCalled, 1); + return rs.getReader().closed; + }); + +}, 'ReadableStream: should call underlying source methods as methods'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark'); + c.close(); + assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0'); + } + }, { + highWaterMark: 10 + }); +}, 'ReadableStream: desiredSize when closed'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark'); + c.error(); + assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null'); + } + }, { + highWaterMark: 10 + }); +}, 'ReadableStream: desiredSize when errored'); + +test(() => { + class Subclass extends ReadableStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), ReadableStream.prototype, + 'Subclass.prototype\'s prototype should be ReadableStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), ReadableStream, + 'Subclass\'s prototype should be ReadableStream'); + const sub = new Subclass(); + assert_true(sub instanceof ReadableStream, + 'Subclass object should be an instance of ReadableStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const lockedGetter = Object.getOwnPropertyDescriptor( + ReadableStream.prototype, 'locked').get; + assert_equals(lockedGetter.call(sub), sub.locked, + 'Subclass object should pass brand check'); + 
assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing ReadableStream should work'); + +test(() => { + + let startCalled = false; + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 1); + c.enqueue('a'); + assert_equals(c.desiredSize, 0); + c.enqueue('b'); + assert_equals(c.desiredSize, -1); + c.enqueue('c'); + assert_equals(c.desiredSize, -2); + c.enqueue('d'); + assert_equals(c.desiredSize, -3); + c.enqueue('e'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream strategies: the default strategy should give desiredSize of 1 to start, decreasing by 1 per enqueue'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 1, 'desiredSize should start at 1'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 0, 'desiredSize should decrease to 0 after first enqueue'); + + return reader.read().then(result1 => { + assert_object_equals(result1, { value: 'a', done: false }, 'first chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the first read'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the second enqueue'); + + return reader.read(); + }).then(result2 => { + assert_object_equals(result2, { value: 'b', done: false }, 'second chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the second read'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the third enqueue'); + + return reader.read(); + }).then(result3 => { + assert_object_equals(result3, { value: 'c', done: false }, 'third chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize 
should go up to 1 after the third read'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the fourth enqueue'); + }); + +}, 'ReadableStream strategies: the default strategy should continue giving desiredSize of 1 if the chunks are read immediately'); + +promise_test(t => { + + const randomSource = new RandomPushSource(8); + + const rs = new ReadableStream({ + start(c) { + assert_equals(typeof c, 'object', 'c should be an object in start'); + assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in start'); + assert_equals(typeof c.close, 'function', 'close should be a function in start'); + assert_equals(typeof c.error, 'function', 'error should be a function in start'); + + randomSource.ondata = t.step_func(chunk => { + if (!c.enqueue(chunk) <= 0) { + randomSource.readStop(); + } + }); + + randomSource.onend = c.close.bind(c); + randomSource.onerror = c.error.bind(c); + }, + + pull(c) { + assert_equals(typeof c, 'object', 'c should be an object in pull'); + assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in pull'); + assert_equals(typeof c.close, 'function', 'close should be a function in pull'); + + randomSource.readStart(); + } + }); + + return readableStreamToArray(rs).then(chunks => { + assert_equals(chunks.length, 8, '8 chunks should be read'); + for (const chunk of chunks) { + assert_equals(chunk.length, 128, 'chunk should have 128 bytes'); + } + }); + +}, 'ReadableStream integration test: adapting a random push source'); + +promise_test(() => { + + const rs = sequentialReadableStream(10); + + return readableStreamToArray(rs).then(chunks => { + assert_true(rs.source.closed, 'source should be closed after all chunks are read'); + assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read'); + }); + +}, 'ReadableStream integration test: adapting a sync pull source'); + +promise_test(() => { + + const rs = 
sequentialReadableStream(10, { async: true }); + + return readableStreamToArray(rs).then(chunks => { + assert_true(rs.source.closed, 'source should be closed after all chunks are read'); + assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read'); + }); + +}, 'ReadableStream integration test: adapting an async pull source'); diff --git a/test/fixtures/wpt/streams/readable-streams/patched-global.any.js b/test/fixtures/wpt/streams/readable-streams/patched-global.any.js new file mode 100644 index 00000000000000..576a39f6777e2b --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/patched-global.any.js @@ -0,0 +1,142 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Tests which patch the global environment are kept separate to avoid +// interfering with other tests. + +const ReadableStream_prototype_locked_get = + Object.getOwnPropertyDescriptor(ReadableStream.prototype, 'locked').get; + +// Verify that |rs| passes the brand check as a readable stream. 
+function isReadableStream(rs) { + try { + ReadableStream_prototype_locked_get.call(rs); + return true; + } catch (e) { + return false; + } +} + +test(t => { + const rs = new ReadableStream(); + + const trappedProperties = ['highWaterMark', 'size', 'start', 'type', 'mode']; + for (const property of trappedProperties) { + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, property, { + get() { throw new Error(`${property} getter called`); }, + configurable: true + }); + } + t.add_cleanup(() => { + for (const property of trappedProperties) { + delete Object.prototype[property]; + } + }); + + const [branch1, branch2] = rs.tee(); + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'ReadableStream tee() should not touch Object.prototype properties'); + +test(t => { + const rs = new ReadableStream(); + + const oldReadableStream = self.ReadableStream; + + self.ReadableStream = function() { + throw new Error('ReadableStream called on global object'); + }; + + t.add_cleanup(() => { + self.ReadableStream = oldReadableStream; + }); + + const [branch1, branch2] = rs.tee(); + + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'ReadableStream tee() should not call the global ReadableStream'); + +promise_test(async t => { + const rs = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const oldReadableStreamGetReader = ReadableStream.prototype.getReader; + + const ReadableStreamDefaultReader = (new ReadableStream()).getReader().constructor; + const oldDefaultReaderRead = ReadableStreamDefaultReader.prototype.read; + const oldDefaultReaderCancel = ReadableStreamDefaultReader.prototype.cancel; + const oldDefaultReaderReleaseLock = 
ReadableStreamDefaultReader.prototype.releaseLock; + + self.ReadableStream.prototype.getReader = function() { + throw new Error('patched getReader() called'); + }; + + ReadableStreamDefaultReader.prototype.read = function() { + throw new Error('patched read() called'); + }; + ReadableStreamDefaultReader.prototype.cancel = function() { + throw new Error('patched cancel() called'); + }; + ReadableStreamDefaultReader.prototype.releaseLock = function() { + throw new Error('patched releaseLock() called'); + }; + + t.add_cleanup(() => { + self.ReadableStream.prototype.getReader = oldReadableStreamGetReader; + + ReadableStreamDefaultReader.prototype.read = oldDefaultReaderRead; + ReadableStreamDefaultReader.prototype.cancel = oldDefaultReaderCancel; + ReadableStreamDefaultReader.prototype.releaseLock = oldDefaultReaderReleaseLock; + }); + + // read the first chunk, then cancel + for await (const chunk of rs) { + break; + } + + // should be able to acquire a new reader + const reader = oldReadableStreamGetReader.call(rs); + // stream should be cancelled + await reader.closed; +}, 'ReadableStream async iterator should use the original values of getReader() and ReadableStreamDefaultReader ' + + 'methods'); + +test(t => { + const oldPromiseThen = Promise.prototype.then; + Promise.prototype.then = () => { + throw new Error('patched then() called'); + }; + t.add_cleanup(() => { + Promise.prototype.then = oldPromiseThen; + }); + const [branch1, branch2] = new ReadableStream().tee(); + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'tee() should not call Promise.prototype.then()'); + +test(t => { + const oldPromiseThen = Promise.prototype.then; + Promise.prototype.then = () => { + throw new Error('patched then() called'); + }; + t.add_cleanup(() => { + Promise.prototype.then = oldPromiseThen; + }); + let readableController; + const rs = new ReadableStream({ + 
start(c) { + readableController = c; + } + }); + const ws = new WritableStream(); + rs.pipeTo(ws); + readableController.close(); +}, 'pipeTo() should not call Promise.prototype.then()'); diff --git a/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js b/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js new file mode 100644 index 00000000000000..a02d08b0acc50a --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js @@ -0,0 +1,264 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +// The size() function of the readable strategy can re-entrantly call back into the ReadableStream implementation. This +// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch +// such errors. They are separated from the other strategy tests because no real user code should ever do anything like +// this. + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(() => { + let controller; + let calls = 0; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + ++calls; + if (calls < 2) { + controller.enqueue('b'); + } + return 1; + } + }); + controller.enqueue('a'); + controller.close(); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks')); +}, 'enqueue() inside size() should work'); + +promise_test(() => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + // The queue is empty. + controller.close(); + // The state has gone from "readable" to "closed". + return 1; + // This chunk will be enqueued, but will be impossible to read because the state is already "closed". 
+ } + }); + controller.enqueue('a'); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, [], 'array should contain no chunks')); + // The chunk 'a' is still in rs's queue. It is closed so 'a' cannot be read. +}, 'close() inside size() should not crash'); + +promise_test(() => { + let controller; + let calls = 0; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + ++calls; + if (calls === 2) { + // The queue contains one chunk. + controller.close(); + // The state is still "readable", but closeRequest is now true. + } + return 1; + } + }); + controller.enqueue('a'); + controller.enqueue('b'); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['a', 'b'], 'array should contain two chunks')); +}, 'close request inside size() should work'); + +promise_test(t => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + controller.error(error1); + return 1; + } + }); + controller.enqueue('a'); + return promise_rejects_exactly(t, error1, rs.getReader().read(), 'read() should reject'); +}, 'error() inside size() should work'); + +promise_test(() => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + assert_equals(controller.desiredSize, 1, 'desiredSize should be 1'); + return 1; + }, + highWaterMark: 1 + }); + controller.enqueue('a'); + controller.close(); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk')); +}, 'desiredSize inside size() should work'); + +promise_test(t => { + let cancelPromise; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + cancel: t.step_func(reason => { + assert_equals(reason, error1, 'reason should be error1'); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue() should throw'); + }) + }, { + size() { + cancelPromise = 
rs.cancel(error1); + return 1; + }, + highWaterMark: Infinity + }); + controller.enqueue('a'); + const reader = rs.getReader(); + return Promise.all([ + reader.closed, + cancelPromise + ]); +}, 'cancel() inside size() should work'); + +promise_test(() => { + let controller; + let pipeToPromise; + const ws = recordingWritableStream(); + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + if (!pipeToPromise) { + pipeToPromise = rs.pipeTo(ws); + } + return 1; + }, + highWaterMark: 1 + }); + controller.enqueue('a'); + assert_not_equals(pipeToPromise, undefined); + + // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See + // https://github.com/whatwg/streams/issues/794 for background. + controller.enqueue('a'); + + // Give pipeTo() a chance to process the queued chunks. + return delay(0).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks'); + controller.close(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed'); + }); +}, 'pipeTo() inside size() should behave as expected'); + +promise_test(() => { + let controller; + let readPromise; + let calls = 0; + let readResolved = false; + let reader; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. This read is + // added to the list of pending reads. 
+ readPromise = reader.read(); + ++calls; + return 1; + }, + highWaterMark: 0 + }); + reader = rs.getReader(); + controller.enqueue('a'); + readPromise.then(() => { + readResolved = true; + }); + return flushAsyncEvents().then(() => { + assert_false(readResolved); + controller.enqueue('b'); + assert_equals(calls, 1, 'size() should have been called once'); + return delay(0); + }).then(() => { + assert_true(readResolved); + assert_equals(calls, 1, 'size() should only be called once'); + return readPromise; + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'. + assert_equals(value, 'b', 'chunk should have been read'); + assert_equals(calls, 1, 'calls should still be 1'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should be false again'); + assert_equals(value, 'a', 'chunk a should come after b'); + }); +}, 'read() inside of size() should behave as expected'); + +promise_test(() => { + let controller; + let reader; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + reader = rs.getReader(); + return 1; + } + }); + controller.enqueue('a'); + return reader.read().then(({ value, done }) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be a'); + }); +}, 'getReader() inside size() should work'); + +promise_test(() => { + let controller; + let branch1; + let branch2; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + [branch1, branch2] = rs.tee(); + return 1; + } + }); + controller.enqueue('a'); + assert_true(rs.locked, 'rs should be locked'); + controller.close(); + return Promise.all([ + readableStreamToArray(branch1).then(array => assert_array_equals(array, ['a'], 'branch1 should have one chunk')), + readableStreamToArray(branch2).then(array => assert_array_equals(array, ['a'], 'branch2 should have one 
chunk')) + ]); +}, 'tee() inside size() should work'); diff --git a/test/fixtures/wpt/streams/readable-streams/tee.any.js b/test/fixtures/wpt/streams/readable-streams/tee.any.js new file mode 100644 index 00000000000000..761f6e9c3599c1 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/tee.any.js @@ -0,0 +1,541 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +test(() => { + + const rs = new ReadableStream(); + const result = rs.tee(); + + assert_true(Array.isArray(result), 'return value should be an array'); + assert_equals(result.length, 2, 'array should have length 2'); + assert_equals(result[0].constructor, ReadableStream, '0th element should be a ReadableStream'); + assert_equals(result[1].constructor, ReadableStream, '1st element should be a ReadableStream'); + +}, 'ReadableStream teeing: rs.tee() returns an array of two ReadableStreams'); + +promise_test(t => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + reader2.closed.then(t.unreached_func('branch2 should not be closed')); + + return Promise.all([ + reader1.closed, + reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch1 should be correct'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'second chunk from branch1 should be correct'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'third read() from branch1 should be done'); + }), + reader2.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch2 should be correct'); + }) + ]); + +}, 
'ReadableStream teeing: should be able to read one branch to the end without affecting the other'); + +promise_test(() => { + + const theObject = { the: 'test object' }; + const rs = new ReadableStream({ + start(c) { + c.enqueue(theObject); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + return Promise.all([reader1.read(), reader2.read()]).then(values => { + assert_object_equals(values[0], values[1], 'the values should be equal'); + }); + +}, 'ReadableStream teeing: values should be equal across each branch'); + +promise_test(t => { + + const theError = { name: 'boo!' }; + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + }, + pull() { + throw theError; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + reader1.label = 'reader1'; + reader2.label = 'reader2'; + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed), + reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'should be able to read the first chunk in branch1'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'should be able to read the second chunk in branch1'); + + return promise_rejects_exactly(t, theError, reader2.read()); + }) + .then(() => promise_rejects_exactly(t, theError, reader1.read())) + ]); + +}, 'ReadableStream teeing: errors in the source should propagate to both branches'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branches = rs.tee(); + const branch1 = branches[0]; + const branch2 = branches[1]; + branch1.cancel(); + + return Promise.all([ + readableStreamToArray(branch1).then(chunks => { + 
assert_array_equals(chunks, [], 'branch1 should have no chunks'); + }), + readableStreamToArray(branch2).then(chunks => { + assert_array_equals(chunks, ['a', 'b'], 'branch2 should have two chunks'); + }) + ]); + +}, 'ReadableStream teeing: canceling branch1 should not impact branch2'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branches = rs.tee(); + const branch1 = branches[0]; + const branch2 = branches[1]; + branch2.cancel(); + + return Promise.all([ + readableStreamToArray(branch1).then(chunks => { + assert_array_equals(chunks, ['a', 'b'], 'branch1 should have two chunks'); + }), + readableStreamToArray(branch2).then(chunks => { + assert_array_equals(chunks, [], 'branch2 should have no chunks'); + }) + ]); + +}, 'ReadableStream teeing: canceling branch2 should not impact branch1'); + +promise_test(() => { + + const reason1 = new Error('We\'re wanted men.'); + const reason2 = new Error('I have the death sentence on twelve systems.'); + + let resolve; + const promise = new Promise(r => resolve = r); + const rs = new ReadableStream({ + cancel(reason) { + assert_array_equals(reason, [reason1, reason2], + 'the cancel reason should be an array containing those from the branches'); + resolve(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + branch1.cancel(reason1); + branch2.cancel(reason2); + + return promise; + +}, 'ReadableStream teeing: canceling both branches should aggregate the cancel reasons into an array'); + +promise_test(() => { + + const reason1 = new Error('This little one\'s not worth the effort.'); + const reason2 = new Error('Come, let me get you something.'); + + let resolve; + const promise = new Promise(r => resolve = r); + const rs = new ReadableStream({ + cancel(reason) { + assert_array_equals(reason, [reason1, reason2], + 'the cancel reason should be an array containing those from the branches'); + 
resolve(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + return Promise.all([ + branch2.cancel(reason2), + branch1.cancel(reason1), + promise + ]); + +}, 'ReadableStream teeing: canceling both branches in reverse order should aggregate the cancel reasons into an array'); + +promise_test(t => { + + const theError = { name: 'I\'ll be careful.' }; + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + + return Promise.all([ + promise_rejects_exactly(t, theError, branch1.cancel()), + promise_rejects_exactly(t, theError, branch2.cancel()) + ]); + +}, 'ReadableStream teeing: failing to cancel the original stream should cause cancel() to reject on branches'); + +promise_test(t => { + + const theError = { name: 'You just watch yourself!' }; + let controller; + const stream = new ReadableStream({ start(c) { controller = c; } }); + const [branch1, branch2] = stream.tee(); + + controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, branch1.cancel()), + promise_rejects_exactly(t, theError, branch2.cancel()) + ]); + +}, 'ReadableStream teeing: erroring a teed stream should properly handle canceled branches'); + +promise_test(t => { + + let controller; + const stream = new ReadableStream({ start(c) { controller = c; } }); + const [branch1, branch2] = stream.tee(); + + const error = new Error(); + error.name = 'distinctive'; + + // Ensure neither branch is waiting in ReadableStreamDefaultReaderRead(). + controller.enqueue(); + controller.enqueue(); + + return delay(0).then(() => { + // This error will have to be detected via [[closedPromise]]. 
+ controller.error(error); + + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader1.closed, 'reader1.closed should reject'), + promise_rejects_exactly(t, error, reader2.closed, 'reader2.closed should reject') + ]); + }); + +}, 'ReadableStream teeing: erroring a teed stream should error both branches'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + const promise = Promise.all([reader1.closed, reader2.closed]); + + controller.close(); + return promise; + +}, 'ReadableStream teeing: closing the original should immediately close the branches'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + const theError = { name: 'boo!' 
}; + const promise = Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + + controller.error(theError); + return promise; + +}, 'ReadableStream teeing: erroring the original should immediately error the branches'); + +promise_test(async t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + const cancelPromise = reader2.cancel(); + + controller.enqueue('a'); + + const read1 = await reader1.read(); + assert_object_equals(read1, { value: 'a', done: false }, 'first read() from branch1 should fulfill with the chunk'); + + controller.close(); + + const read2 = await reader1.read(); + assert_object_equals(read2, { value: undefined, done: true }, 'second read() from branch1 should be done'); + + await Promise.all([ + reader1.closed, + cancelPromise + ]); + +}, 'ReadableStream teeing: canceling branch1 should finish when branch2 reads until end of stream'); + +promise_test(async t => { + + let controller; + const theError = { name: 'boo!' }; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + const cancelPromise = reader2.cancel(); + + controller.error(theError); + + await Promise.all([ + promise_rejects_exactly(t, theError, reader1.read()), + cancelPromise + ]); + +}, 'ReadableStream teeing: canceling branch1 should finish when original stream errors'); + +promise_test(async () => { + + const rs = new ReadableStream({}); + + const [branch1, branch2] = rs.tee(); + + const cancel1 = branch1.cancel(); + await flushAsyncEvents(); + const cancel2 = branch2.cancel(); + + await Promise.all([cancel1, cancel2]); + +}, 'ReadableStream teeing: canceling both branches in sequence with delay'); + +promise_test(async t => { + + const theError = { name: 'boo!' 
}; + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + const [branch1, branch2] = rs.tee(); + + const cancel1 = branch1.cancel(); + await flushAsyncEvents(); + const cancel2 = branch2.cancel(); + + await Promise.all([ + promise_rejects_exactly(t, theError, cancel1), + promise_rejects_exactly(t, theError, cancel2) + ]); + +}, 'ReadableStream teeing: failing to cancel when canceling both branches in sequence with delay'); + +test(t => { + + // Copy original global. + const oldReadableStream = ReadableStream; + const getReader = ReadableStream.prototype.getReader; + + const origRS = new ReadableStream(); + + // Replace the global ReadableStream constructor with one that doesn't work. + ReadableStream = function() { + throw new Error('global ReadableStream constructor called'); + }; + t.add_cleanup(() => { + ReadableStream = oldReadableStream; + }); + + // This will probably fail if the global ReadableStream constructor was used. + const [rs1, rs2] = origRS.tee(); + + // These will definitely fail if the global ReadableStream constructor was used. + assert_not_equals(getReader.call(rs1), undefined, 'getReader should work on rs1'); + assert_not_equals(getReader.call(rs2), undefined, 'getReader should work on rs2'); + +}, 'ReadableStreamTee should not use a modified ReadableStream constructor from the global object'); + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + + // Create two branches, each with a HWM of 1. This should result in one + // chunk being pulled, not two. 
+ rs.tee(); + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should only be called once'); + }); + +}, 'ReadableStreamTee should not pull more chunks than can fit in the branch queue'); + +promise_test(t => { + + const rs = recordingReadableStream({ + pull(controller) { + controller.enqueue('a'); + } + }, { highWaterMark: 0 }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + return Promise.all([reader1.read(), reader2.read()]) + .then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + +}, 'ReadableStreamTee should only pull enough to fill the emptiest queue'); + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + rs.controller.error(theError); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, [], 'pull should not be called'); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }); + +}, 'ReadableStreamTee should not pull when original is already errored'); + +for (const branch of [1, 2]) { + promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should be called once'); + + rs.controller.enqueue('a'); + + const reader = (branch === 1) ? 
reader1 : reader2; + return reader.read(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + + rs.controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + + }, `ReadableStreamTee stops pulling when original stream errors while branch ${branch} is reading`); +} + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should be called once'); + + rs.controller.enqueue('a'); + + return Promise.all([reader1.read(), reader2.read()]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + + rs.controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + +}, 'ReadableStreamTee stops pulling when original stream errors while both branches are reading'); diff --git a/test/fixtures/wpt/streams/readable-streams/templated.any.js b/test/fixtures/wpt/streams/readable-streams/templated.any.js new file mode 100644 index 00000000000000..4d524e69fee19e --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/templated.any.js @@ -0,0 +1,143 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-test-templates.js +'use strict'; + 
+// Run the readable stream test templates against readable streams created directly using the constructor + +const theError = { name: 'boo!' }; +const chunks = ['a', 'b']; + +templatedRSEmpty('ReadableStream (empty)', () => { + return new ReadableStream(); +}); + +templatedRSEmptyReader('ReadableStream (empty) reader', () => { + return streamAndDefaultReader(new ReadableStream()); +}); + +templatedRSClosed('ReadableStream (closed via call in start)', () => { + return new ReadableStream({ + start(c) { + c.close(); + } + }); +}); + +templatedRSClosedReader('ReadableStream reader (closed before getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + controller.close(); + const result = streamAndDefaultReader(stream); + return result; +}); + +templatedRSClosedReader('ReadableStream reader (closed after getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + const result = streamAndDefaultReader(stream); + controller.close(); + return result; +}); + +templatedRSClosed('ReadableStream (closed via cancel)', () => { + const stream = new ReadableStream(); + stream.cancel(); + return stream; +}); + +templatedRSClosedReader('ReadableStream reader (closed via cancel after getting reader)', () => { + const stream = new ReadableStream(); + const result = streamAndDefaultReader(stream); + result.reader.cancel(); + return result; +}); + +templatedRSErrored('ReadableStream (errored via call in start)', () => { + return new ReadableStream({ + start(c) { + c.error(theError); + } + }); +}, theError); + +templatedRSErroredSyncOnly('ReadableStream (errored via call in start)', () => { + return new ReadableStream({ + start(c) { + c.error(theError); + } + }); +}, theError); + +templatedRSErrored('ReadableStream (errored via returning a rejected promise in start)', () => { + return new ReadableStream({ + start() { + return Promise.reject(theError); + } 
+ }); +}, theError); + +templatedRSErroredReader('ReadableStream (errored via returning a rejected promise in start) reader', () => { + return streamAndDefaultReader(new ReadableStream({ + start() { + return Promise.reject(theError); + } + })); +}, theError); + +templatedRSErroredReader('ReadableStream reader (errored before getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + controller.error(theError); + return streamAndDefaultReader(stream); +}, theError); + +templatedRSErroredReader('ReadableStream reader (errored after getting reader)', () => { + let controller; + const result = streamAndDefaultReader(new ReadableStream({ + start(c) { + controller = c; + } + })); + controller.error(theError); + return result; +}, theError); + +templatedRSTwoChunksOpenReader('ReadableStream (two chunks enqueued, still open) reader', () => { + return streamAndDefaultReader(new ReadableStream({ + start(c) { + c.enqueue(chunks[0]); + c.enqueue(chunks[1]); + } + })); +}, chunks); + +templatedRSTwoChunksClosedReader('ReadableStream (two chunks enqueued, then closed) reader', () => { + let doClose; + const stream = new ReadableStream({ + start(c) { + c.enqueue(chunks[0]); + c.enqueue(chunks[1]); + doClose = c.close.bind(c); + } + }); + const result = streamAndDefaultReader(stream); + doClose(); + return result; +}, chunks); + +function streamAndDefaultReader(stream) { + return { stream, reader: stream.getReader() }; +} diff --git a/test/fixtures/wpt/streams/resources/recording-streams.js b/test/fixtures/wpt/streams/resources/recording-streams.js new file mode 100644 index 00000000000000..34d02a143dccdb --- /dev/null +++ b/test/fixtures/wpt/streams/resources/recording-streams.js @@ -0,0 +1,130 @@ +'use strict'; + +self.recordingReadableStream = (extras = {}, strategy) => { + let controllerToCopyOver; + const stream = new ReadableStream({ + start(controller) { + controllerToCopyOver = controller; + + if 
(extras.start) { + return extras.start(controller); + } + + return undefined; + }, + pull(controller) { + stream.events.push('pull'); + + if (extras.pull) { + return extras.pull(controller); + } + + return undefined; + }, + cancel(reason) { + stream.events.push('cancel', reason); + stream.eventsWithoutPulls.push('cancel', reason); + + if (extras.cancel) { + return extras.cancel(reason); + } + + return undefined; + } + }, strategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + stream.eventsWithoutPulls = []; + + return stream; +}; + +self.recordingWritableStream = (extras = {}, strategy) => { + let controllerToCopyOver; + const stream = new WritableStream({ + start(controller) { + controllerToCopyOver = controller; + + if (extras.start) { + return extras.start(controller); + } + + return undefined; + }, + write(chunk, controller) { + stream.events.push('write', chunk); + + if (extras.write) { + return extras.write(chunk, controller); + } + + return undefined; + }, + close() { + stream.events.push('close'); + + if (extras.close) { + return extras.close(); + } + + return undefined; + }, + abort(e) { + stream.events.push('abort', e); + + if (extras.abort) { + return extras.abort(e); + } + + return undefined; + } + }, strategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + + return stream; +}; + +self.recordingTransformStream = (extras = {}, writableStrategy, readableStrategy) => { + let controllerToCopyOver; + const stream = new TransformStream({ + start(controller) { + controllerToCopyOver = controller; + + if (extras.start) { + return extras.start(controller); + } + + return undefined; + }, + + transform(chunk, controller) { + stream.events.push('transform', chunk); + + if (extras.transform) { + return extras.transform(chunk, controller); + } + + controller.enqueue(chunk); + + return undefined; + }, + + flush(controller) { + stream.events.push('flush'); + + if (extras.flush) { + return extras.flush(controller); + } + 
+ return undefined; + } + }, writableStrategy, readableStrategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + + return stream; +}; diff --git a/test/fixtures/wpt/streams/resources/rs-test-templates.js b/test/fixtures/wpt/streams/resources/rs-test-templates.js new file mode 100644 index 00000000000000..700bd9c3ca9f9e --- /dev/null +++ b/test/fixtures/wpt/streams/resources/rs-test-templates.js @@ -0,0 +1,638 @@ +'use strict'; + +// These tests can be run against any readable stream produced by the web platform that meets the given descriptions. +// For readable stream tests, the factory should return the stream. For reader tests, the factory should return a +// { stream, reader } object. (You can use this to vary the time at which you acquire a reader.) + +self.templatedRSEmpty = (label, factory) => { + test(() => {}, 'Running templatedRSEmpty with ' + label); + + test(() => { + + const rs = factory(); + + assert_equals(typeof rs.locked, 'boolean', 'has a boolean locked getter'); + assert_equals(typeof rs.cancel, 'function', 'has a cancel method'); + assert_equals(typeof rs.getReader, 'function', 'has a getReader method'); + assert_equals(typeof rs.pipeThrough, 'function', 'has a pipeThrough method'); + assert_equals(typeof rs.pipeTo, 'function', 'has a pipeTo method'); + assert_equals(typeof rs.tee, 'function', 'has a tee method'); + + }, label + ': instances have the correct methods and properties'); + + test(() => { + const rs = factory(); + + assert_throws_js(TypeError, () => rs.getReader({ mode: '' }), 'empty string mode should throw'); + assert_throws_js(TypeError, () => rs.getReader({ mode: null }), 'null mode should throw'); + assert_throws_js(TypeError, () => rs.getReader({ mode: 'asdf' }), 'asdf mode should throw'); + assert_throws_js(TypeError, () => rs.getReader(5), '5 should throw'); + + // Should not throw + rs.getReader(null); + + }, label + ': calling getReader with invalid arguments should throw appropriate errors'); +}; + 
+self.templatedRSClosed = (label, factory) => { + test(() => {}, 'Running templatedRSClosed with ' + label); + + promise_test(() => { + + const rs = factory(); + const cancelPromise1 = rs.cancel(); + const cancelPromise2 = rs.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() call should fulfill with undefined')), + cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() call should fulfill with undefined')) + ]); + + }, label + ': cancel() should return a distinct fulfilled promise each time'); + + test(() => { + + const rs = factory(); + assert_false(rs.locked, 'locked getter should return false'); + + }, label + ': locked should be false'); + + test(() => { + + const rs = factory(); + rs.getReader(); // getReader() should not throw. + + }, label + ': getReader() should be OK'); + + test(() => { + + const rs = factory(); + + const reader = rs.getReader(); + reader.releaseLock(); + + const reader2 = rs.getReader(); // Getting a second reader should not throw. + reader2.releaseLock(); + + rs.getReader(); // Getting a third reader should not throw. 
+ + }, label + ': should be able to acquire multiple readers if they are released in succession'); + + test(() => { + + const rs = factory(); + + rs.getReader(); + + assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw'); + assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw'); + + }, label + ': should not be able to acquire a second reader if we don\'t release the first one'); +}; + +self.templatedRSErrored = (label, factory, error) => { + test(() => {}, 'Running templatedRSErrored with ' + label); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader.closed), + promise_rejects_exactly(t, error, reader.read()) + ]); + + }, label + ': getReader() should return a reader that acts errored'); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader.read()), + promise_rejects_exactly(t, error, reader.read()), + promise_rejects_exactly(t, error, reader.closed) + ]); + + }, label + ': read() twice should give the error each time'); + + test(() => { + const rs = factory(); + + assert_false(rs.locked, 'locked getter should return false'); + }, label + ': locked should be false'); +}; + +self.templatedRSErroredSyncOnly = (label, factory, error) => { + test(() => {}, 'Running templatedRSErroredSyncOnly with ' + label); + + promise_test(t => { + + const rs = factory(); + rs.getReader().releaseLock(); + const reader = rs.getReader(); // Calling getReader() twice does not throw (the stream is not locked). 
+ + return promise_rejects_exactly(t, error, reader.closed); + + }, label + ': should be able to obtain a second reader, with the correct closed promise'); + + test(() => { + + const rs = factory(); + rs.getReader(); + + assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw a TypeError'); + assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw a TypeError'); + + }, label + ': should not be able to obtain additional readers if we don\'t release the first lock'); + + promise_test(t => { + + const rs = factory(); + const cancelPromise1 = rs.cancel(); + const cancelPromise2 = rs.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + promise_rejects_exactly(t, error, cancelPromise1), + promise_rejects_exactly(t, error, cancelPromise2) + ]); + + }, label + ': cancel() should return a distinct rejected promise each time'); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + const cancelPromise1 = reader.cancel(); + const cancelPromise2 = reader.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + promise_rejects_exactly(t, error, cancelPromise1), + promise_rejects_exactly(t, error, cancelPromise2) + ]); + + }, label + ': reader cancel() should return a distinct rejected promise each time'); +}; + +self.templatedRSEmptyReader = (label, factory) => { + test(() => {}, 'Running templatedRSEmptyReader with ' + label); + + test(() => { + + const reader = factory().reader; + + assert_true('closed' in reader, 'has a closed property'); + assert_equals(typeof reader.closed.then, 'function', 'closed property is thenable'); + + assert_equals(typeof reader.cancel, 'function', 'has a cancel method'); + assert_equals(typeof reader.read, 'function', 'has a read method'); + assert_equals(typeof reader.releaseLock, 
'function', 'has a releaseLock method'); + + }, label + ': instances have the correct methods and properties'); + + test(() => { + + const stream = factory().stream; + + assert_true(stream.locked, 'locked getter should return true'); + + }, label + ': locked should be true'); + + promise_test(t => { + + const reader = factory().reader; + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + return delay(500); + + }, label + ': read() should never settle'); + + promise_test(t => { + + const reader = factory().reader; + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + return delay(500); + + }, label + ': two read()s should both never settle'); + + test(() => { + + const reader = factory().reader; + assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct'); + + }, label + ': read() should return distinct promises each time'); + + test(() => { + + const stream = factory().stream; + assert_throws_js(TypeError, () => stream.getReader(), 'stream.getReader() should throw a TypeError'); + + }, label + ': getReader() again on the stream should fail'); + + promise_test(t => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + reader.read().then( + t.unreached_func('first read() should not fulfill'), + t.unreached_func('first read() should not reject') + ); + + reader.read().then( + t.unreached_func('second read() should not fulfill'), + t.unreached_func('second read() should not reject') + ); + + reader.closed.then( + t.unreached_func('closed should not fulfill'), + t.unreached_func('closed should not reject') + ); + + assert_throws_js(TypeError, () => reader.releaseLock(), 'releaseLock should throw 
a TypeError'); + + assert_true(stream.locked, 'the stream should still be locked'); + + return delay(500); + + }, label + ': releasing the lock with pending read requests should throw but the read requests should stay pending'); + + promise_test(t => { + + const reader = factory().reader; + reader.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()) + ]); + + }, label + ': releasing the lock should cause further read() calls to reject with a TypeError'); + + promise_test(t => { + + const reader = factory().reader; + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + + assert_equals(closedBefore, closedAfter, 'the closed promise should not change identity'); + + return promise_rejects_js(t, TypeError, closedBefore); + + }, label + ': releasing the lock should cause closed calls to reject with a TypeError'); + + test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + reader.releaseLock(); + assert_false(stream.locked, 'locked getter should return false'); + + }, label + ': releasing the lock should cause locked to become false'); + + promise_test(() => { + + const reader = factory().reader; + reader.cancel(); + + return reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'read()ing from the reader should give a done result'); + }); + + }, label + ': canceling via the reader should cause the reader to act closed'); + + promise_test(t => { + + const stream = factory().stream; + return promise_rejects_js(t, TypeError, stream.cancel()); + + }, label + ': canceling via the stream should fail'); +}; + +self.templatedRSClosedReader = (label, factory) => { + test(() => {}, 'Running templatedRSClosedReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(v => { + 
assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }); + + }, label + ': read() should fulfill with { value: undefined, done: true }'); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }), + reader.read().then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }) + ]); + + }, label + ': read() multiple times should fulfill with { value: undefined, done: true }'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(() => reader.read()).then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }); + + }, label + ': read() should work when used within another read() fulfill callback'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.closed.then(v => assert_equals(v, undefined, 'reader closed should fulfill with undefined')); + + }, label + ': closed should fulfill with undefined'); + + promise_test(t => { + + const reader = factory().reader; + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + + assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity'); + + return Promise.all([ + closedBefore.then(v => assert_equals(v, undefined, 'reader.closed acquired before release should fulfill')), + promise_rejects_js(t, TypeError, closedAfter) + ]); + + }, label + ': releasing the lock should cause closed to reject and change identity'); + + promise_test(() => { + + const reader = factory().reader; + const cancelPromise1 = reader.cancel(); + const cancelPromise2 = reader.cancel(); + const closedReaderPromise = reader.closed; + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct 
promises'); + assert_not_equals(cancelPromise1, closedReaderPromise, 'cancel() promise 1 should be distinct from reader.closed'); + assert_not_equals(cancelPromise2, closedReaderPromise, 'cancel() promise 2 should be distinct from reader.closed'); + + return Promise.all([ + cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() should fulfill with undefined')), + cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() should fulfill with undefined')) + ]); + + }, label + ': cancel() should return a distinct fulfilled promise each time'); +}; + +self.templatedRSErroredReader = (label, factory, error) => { + test(() => {}, 'Running templatedRSErroredReader with ' + label); + + promise_test(t => { + + const reader = factory().reader; + return promise_rejects_exactly(t, error, reader.closed); + + }, label + ': closed should reject with the error'); + + promise_test(t => { + + const reader = factory().reader; + const closedBefore = reader.closed; + + return promise_rejects_exactly(t, error, closedBefore).then(() => { + reader.releaseLock(); + + const closedAfter = reader.closed; + assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity'); + + return promise_rejects_js(t, TypeError, closedAfter); + }); + + }, label + ': releasing the lock should cause closed to reject and change identity'); + + promise_test(t => { + + const reader = factory().reader; + return promise_rejects_exactly(t, error, reader.read()); + + }, label + ': read() should reject with the error'); +}; + +self.templatedRSTwoChunksOpenReader = (label, factory, chunks) => { + test(() => {}, 'Running templatedRSTwoChunksOpenReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: chunks[1], done: false }, 
'second result should be correct'); + }) + ]); + + }, label + ': calling read() twice without waiting will eventually give both chunks (sequential)'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + + return reader.read().then(r2 => { + assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct'); + }); + }); + + }, label + ': calling read() twice without waiting will eventually give both chunks (nested)'); + + test(() => { + + const reader = factory().reader; + assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct'); + + }, label + ': read() should return distinct promises each time'); + + promise_test(() => { + + const reader = factory().reader; + + const promise1 = reader.closed.then(v => { + assert_equals(v, undefined, 'reader closed should fulfill with undefined'); + }); + + const promise2 = reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, + 'promise returned before cancellation should fulfill with a chunk'); + }); + + reader.cancel(); + + const promise3 = reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, + 'promise returned after cancellation should fulfill with an end-of-stream signal'); + }); + + return Promise.all([promise1, promise2, promise3]); + + }, label + ': cancel() after a read() should still give that single read result'); +}; + +self.templatedRSTwoChunksClosedReader = function (label, factory, chunks) { + test(() => {}, 'Running templatedRSTwoChunksClosedReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: 
chunks[1], done: false }, 'second result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'third result should be correct'); + }) + ]); + + }, label + ': third read(), without waiting, should give { value: undefined, done: true } (sequential)'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + + return reader.read().then(r2 => { + assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct'); + + return reader.read().then(r3 => { + assert_object_equals(r3, { value: undefined, done: true }, 'third result should be correct'); + }); + }); + }); + + }, label + ': third read(), without waiting, should give { value: undefined, done: true } (nested)'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + assert_true(stream.locked, 'stream should start locked'); + + const promise = reader.closed.then(v => { + assert_equals(v, undefined, 'reader closed should fulfill with undefined'); + assert_true(stream.locked, 'stream should remain locked'); + }); + + reader.read(); + reader.read(); + + return promise; + + }, label + + ': draining the stream via read() should cause the reader closed promise to fulfill, but locked stays true'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + const promise = reader.closed.then(() => { + assert_true(stream.locked, 'the stream should start locked'); + reader.releaseLock(); // Releasing the lock after reader closed should not throw. 
+ assert_false(stream.locked, 'the stream should end unlocked'); + }); + + reader.read(); + reader.read(); + + return promise; + + }, label + ': releasing the lock after the stream is closed should cause locked to become false'); + + promise_test(t => { + + const reader = factory().reader; + + reader.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()) + ]); + + }, label + ': releasing the lock should cause further read() calls to reject with a TypeError'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + const readerClosed = reader.closed; + + assert_equals(reader.closed, readerClosed, 'accessing reader.closed twice in succession gives the same value'); + + const promise = reader.read().then(() => { + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after read() fulfills'); + + reader.releaseLock(); + + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after releasing the lock'); + + const newReader = stream.getReader(); + return newReader.read(); + }); + + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after calling read()'); + + return promise; + + }, label + ': reader\'s closed property always returns the same promise'); +}; diff --git a/test/fixtures/wpt/streams/resources/rs-utils.js b/test/fixtures/wpt/streams/resources/rs-utils.js new file mode 100644 index 00000000000000..f1a014275a2fbc --- /dev/null +++ b/test/fixtures/wpt/streams/resources/rs-utils.js @@ -0,0 +1,197 @@ +'use strict'; +(function () { + + class RandomPushSource { + constructor(toPush) { + this.pushed = 0; + this.toPush = toPush; + this.started = false; + this.paused = false; + this.closed = false; + + this._intervalHandle = null; + } + + readStart() { + if (this.closed) { + return; + } + + if 
(!this.started) { + this._intervalHandle = setInterval(writeChunk, 2); + this.started = true; + } + + if (this.paused) { + this._intervalHandle = setInterval(writeChunk, 2); + this.paused = false; + } + + const source = this; + function writeChunk() { + if (source.paused) { + return; + } + + source.pushed++; + + if (source.toPush > 0 && source.pushed > source.toPush) { + if (source._intervalHandle) { + clearInterval(source._intervalHandle); + source._intervalHandle = undefined; + } + source.closed = true; + source.onend(); + } else { + source.ondata(randomChunk(128)); + } + } + } + + readStop() { + if (this.paused) { + return; + } + + if (this.started) { + this.paused = true; + clearInterval(this._intervalHandle); + this._intervalHandle = undefined; + } else { + throw new Error('Can\'t pause reading an unstarted source.'); + } + } + } + + function randomChunk(size) { + let chunk = ''; + + for (let i = 0; i < size; ++i) { + // Add a random character from the basic printable ASCII set. + chunk += String.fromCharCode(Math.round(Math.random() * 84) + 32); + } + + return chunk; + } + + function readableStreamToArray(readable, reader) { + if (reader === undefined) { + reader = readable.getReader(); + } + + const chunks = []; + + return pump(); + + function pump() { + return reader.read().then(result => { + if (result.done) { + return chunks; + } + + chunks.push(result.value); + return pump(); + }); + } + } + + class SequentialPullSource { + constructor(limit, options) { + const async = options && options.async; + + this.current = 0; + this.limit = limit; + this.opened = false; + this.closed = false; + + this._exec = f => f(); + if (async) { + this._exec = f => step_timeout(f, 0); + } + } + + open(cb) { + this._exec(() => { + this.opened = true; + cb(); + }); + } + + read(cb) { + this._exec(() => { + if (++this.current <= this.limit) { + cb(null, false, this.current); + } else { + cb(null, true, null); + } + }); + } + + close(cb) { + this._exec(() => { + this.closed = 
true; + cb(); + }); + } + } + + function sequentialReadableStream(limit, options) { + const sequentialSource = new SequentialPullSource(limit, options); + + const stream = new ReadableStream({ + start() { + return new Promise((resolve, reject) => { + sequentialSource.open(err => { + if (err) { + reject(err); + } + resolve(); + }); + }); + }, + + pull(c) { + return new Promise((resolve, reject) => { + sequentialSource.read((err, done, chunk) => { + if (err) { + reject(err); + } else if (done) { + sequentialSource.close(err2 => { + if (err2) { + reject(err2); + } + c.close(); + resolve(); + }); + } else { + c.enqueue(chunk); + resolve(); + } + }); + }); + } + }); + + stream.source = sequentialSource; + + return stream; + } + + function transferArrayBufferView(view) { + const noopByteStream = new ReadableStream({ + type: 'bytes', + pull(c) { + c.byobRequest.respond(c.byobRequest.view.byteLength); + c.close(); + } + }); + const reader = noopByteStream.getReader({ mode: 'byob' }); + return reader.read(view).then((result) => result.value); + } + + self.RandomPushSource = RandomPushSource; + self.readableStreamToArray = readableStreamToArray; + self.sequentialReadableStream = sequentialReadableStream; + self.transferArrayBufferView = transferArrayBufferView; + +}()); diff --git a/test/fixtures/wpt/streams/resources/test-utils.js b/test/fixtures/wpt/streams/resources/test-utils.js new file mode 100644 index 00000000000000..0593980e1055b5 --- /dev/null +++ b/test/fixtures/wpt/streams/resources/test-utils.js @@ -0,0 +1,74 @@ +'use strict'; + +self.getterRejects = (t, obj, getterName, target) => { + const getter = Object.getOwnPropertyDescriptor(obj, getterName).get; + + return promise_rejects_js(t, TypeError, getter.call(target), getterName + ' should reject with a TypeError'); +}; + +self.getterRejectsForAll = (t, obj, getterName, targets) => { + return Promise.all(targets.map(target => self.getterRejects(t, obj, getterName, target))); +}; + +self.methodRejects = (t, obj, 
methodName, target, args) => { + const method = obj[methodName]; + + return promise_rejects_js(t, TypeError, method.apply(target, args), + methodName + ' should reject with a TypeError'); +}; + +self.methodRejectsForAll = (t, obj, methodName, targets, args) => { + return Promise.all(targets.map(target => self.methodRejects(t, obj, methodName, target, args))); +}; + +self.getterThrows = (obj, getterName, target) => { + const getter = Object.getOwnPropertyDescriptor(obj, getterName).get; + + assert_throws_js(TypeError, () => getter.call(target), getterName + ' should throw a TypeError'); +}; + +self.getterThrowsForAll = (obj, getterName, targets) => { + targets.forEach(target => self.getterThrows(obj, getterName, target)); +}; + +self.methodThrows = (obj, methodName, target, args) => { + const method = obj[methodName]; + assert_equals(typeof method, 'function', methodName + ' should exist'); + + assert_throws_js(TypeError, () => method.apply(target, args), methodName + ' should throw a TypeError'); +}; + +self.methodThrowsForAll = (obj, methodName, targets, args) => { + targets.forEach(target => self.methodThrows(obj, methodName, target, args)); +}; + +self.constructorThrowsForAll = (constructor, firstArgs) => { + firstArgs.forEach(firstArg => assert_throws_js(TypeError, () => new constructor(firstArg), + 'constructor should throw a TypeError')); +}; + +self.garbageCollect = () => { + if (self.gc) { + // Use --expose_gc for V8 (and Node.js) + // to pass this flag at chrome launch use: --js-flags="--expose-gc" + // Exposed in SpiderMonkey shell as well + self.gc(); + } else if (self.GCController) { + // Present in some WebKit development environments + GCController.collect(); + } else { + /* eslint-disable no-console */ + console.warn('Tests are running without the ability to do manual garbage collection. 
They will still work, but ' + + 'coverage will be suboptimal.'); + /* eslint-enable no-console */ + } +}; + +self.delay = ms => new Promise(resolve => step_timeout(resolve, ms)); + +// For tests which verify that the implementation doesn't do something it shouldn't, it's better not to use a +// timeout. Instead, assume that any reasonable implementation is going to finish work after 2 times around the event +// loop, and use flushAsyncEvents().then(() => assert_array_equals(...)); +// Some tests include promise resolutions which may mean the test code takes a couple of event loop visits itself. So go +// around an extra 2 times to avoid complicating those tests. +self.flushAsyncEvents = () => delay(0).then(() => delay(0)).then(() => delay(0)).then(() => delay(0)); diff --git a/test/fixtures/wpt/streams/transferable/deserialize-error.window.js b/test/fixtures/wpt/streams/transferable/deserialize-error.window.js new file mode 100644 index 00000000000000..64cf2bbfb1293e --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/deserialize-error.window.js @@ -0,0 +1,39 @@ +// META: script=/common/get-host-info.sub.js +// META: script=resources/create-wasm-module.js +// META: timeout=long + +const { HTTPS_NOTSAMESITE_ORIGIN } = get_host_info(); +const iframe = document.createElement('iframe'); +iframe.src = `${HTTPS_NOTSAMESITE_ORIGIN}/streams/transferable/resources/deserialize-error-frame.html`; + +window.addEventListener('message', async evt => { + // Tests are serialized to make the results deterministic. 
+ switch (evt.data) { + case 'init done': { + const ws = new WritableStream(); + iframe.contentWindow.postMessage(ws, '*', [ws]); + return; + } + + case 'ws done': { + const module = await createWasmModule(); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(module); + } + }); + iframe.contentWindow.postMessage(rs, '*', [rs]); + return; + } + + case 'rs done': { + iframe.remove(); + } + } +}); + +// Need to do this after adding the listener to ensure we catch the first +// message. +document.body.appendChild(iframe); + +fetch_tests_from_window(iframe.contentWindow); diff --git a/test/fixtures/wpt/streams/transferable/readable-stream.html b/test/fixtures/wpt/streams/transferable/readable-stream.html new file mode 100644 index 00000000000000..59b57ce6723c10 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/readable-stream.html @@ -0,0 +1,255 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/reason.html b/test/fixtures/wpt/streams/transferable/reason.html new file mode 100644 index 00000000000000..4251aa85b816bb --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/reason.html @@ -0,0 +1,132 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js b/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js new file mode 100644 index 00000000000000..37064af95c55c0 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js @@ -0,0 +1,11 @@ +// There aren't many cloneable types that will cause an error on +// deserialization. WASM modules have the property that it's an error to +// deserialize them cross-site, which works for our purposes. +async function createWasmModule() { + // It doesn't matter what the module is, so we use one from another + // test. 
+ const response = + await fetch("/wasm/serialization/module/resources/incrementer.wasm"); + const ab = await response.arrayBuffer(); + return WebAssembly.compile(ab); +} diff --git a/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html b/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html new file mode 100644 index 00000000000000..5ec2fcda2cdd8d --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html @@ -0,0 +1,39 @@ + + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html b/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html new file mode 100644 index 00000000000000..68f68503439fdb --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html @@ -0,0 +1,7 @@ + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/echo-worker.js b/test/fixtures/wpt/streams/transferable/resources/echo-worker.js new file mode 100644 index 00000000000000..806c2371083399 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/echo-worker.js @@ -0,0 +1,2 @@ +// A worker that just transfers back any message that is sent to it. +onmessage = evt => postMessage(evt.data, [evt.data]); diff --git a/test/fixtures/wpt/streams/transferable/resources/helpers.js b/test/fixtures/wpt/streams/transferable/resources/helpers.js new file mode 100644 index 00000000000000..12504537f91eab --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/helpers.js @@ -0,0 +1,132 @@ +'use strict'; + +(() => { + // Create a ReadableStream that will pass the tests in + // testTransferredReadableStream(), below. + function createOriginalReadableStream() { + return new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + } + + // Common tests to roughly determine that |rs| is a correctly transferred + // version of a stream created by createOriginalReadableStream(). 
+ function testTransferredReadableStream(rs) { + assert_equals(rs.constructor, ReadableStream, + 'rs should be a ReadableStream in this realm'); + assert_true(rs instanceof ReadableStream, + 'instanceof check should pass'); + + // Perform a brand-check on |rs| in the process of calling getReader(). + const reader = ReadableStream.prototype.getReader.call(rs); + + return reader.read().then(({value, done}) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be "a"'); + return reader.read(); + }).then(({done}) => { + assert_true(done, 'done should be true'); + }); + } + + function testMessage(msg) { + assert_array_equals(msg.ports, [], 'there should be no ports in the event'); + return testTransferredReadableStream(msg.data); + } + + function testMessageEvent(target) { + return new Promise((resolve, reject) => { + target.addEventListener('message', ev => { + try { + resolve(testMessage(ev)); + } catch (e) { + reject(e); + } + }, {once: true}); + }); + } + + function testMessageEventOrErrorMessage(target) { + return new Promise((resolve, reject) => { + target.addEventListener('message', ev => { + if (typeof ev.data === 'string') { + // Assume it's an error message and reject with it. + reject(ev.data); + return; + } + + try { + resolve(testMessage(ev)); + } catch (e) { + reject(e); + } + }, {once: true}); + }); + } + + function checkTestResults(target) { + return new Promise((resolve, reject) => { + target.onmessage = msg => { + // testharness.js sends us objects which we need to ignore. + if (typeof msg.data !== 'string') + return; + + if (msg.data === 'OK') { + resolve(); + } else { + reject(msg.data); + } + }; + }); + } + + // These tests assume that a transferred ReadableStream will behave the same + // regardless of how it was transferred. This enables us to simply transfer the + // stream to ourselves. 
+ function createTransferredReadableStream(underlyingSource) { + const original = new ReadableStream(underlyingSource); + const promise = new Promise((resolve, reject) => { + addEventListener('message', msg => { + const rs = msg.data; + if (rs instanceof ReadableStream) { + resolve(rs); + } else { + reject(new Error(`what is this thing: "${rs}"?`)); + } + }, {once: true}); + }); + postMessage(original, '*', [original]); + return promise; + } + + function recordingTransferredReadableStream(underlyingSource, strategy) { + const original = recordingReadableStream(underlyingSource, strategy); + const promise = new Promise((resolve, reject) => { + addEventListener('message', msg => { + const rs = msg.data; + if (rs instanceof ReadableStream) { + rs.events = original.events; + rs.eventsWithoutPulls = original.eventsWithoutPulls; + rs.controller = original.controller; + resolve(rs); + } else { + reject(new Error(`what is this thing: "${rs}"?`)); + } + }, {once: true}); + }); + postMessage(original, '*', [original]); + return promise; + } + + self.createOriginalReadableStream = createOriginalReadableStream; + self.testMessage = testMessage; + self.testMessageEvent = testMessageEvent; + self.testMessageEventOrErrorMessage = testMessageEventOrErrorMessage; + self.checkTestResults = checkTestResults; + self.createTransferredReadableStream = createTransferredReadableStream; + self.recordingTransferredReadableStream = recordingTransferredReadableStream; + +})(); diff --git a/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js b/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js new file mode 100644 index 00000000000000..84f779c3db6e13 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js @@ -0,0 +1,11 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +onconnect = evt => { + const port = evt.source; + const promise = testMessageEvent(port); + port.start(); + 
promise + .then(() => port.postMessage('OK')) + .catch(err => port.postMessage(`BAD: ${err}`)); +}; diff --git a/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js b/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js new file mode 100644 index 00000000000000..4ebb9c5f8fcec2 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js @@ -0,0 +1,7 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +const promise = testMessageEvent(self); +promise + .then(() => postMessage('OK')) + .catch(err => postMessage(`BAD: ${err}`)); diff --git a/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js b/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js new file mode 100644 index 00000000000000..e579077894d5b9 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js @@ -0,0 +1,12 @@ +'use strict'; +importScripts('helpers.js'); + +onconnect = msg => { + const port = msg.source; + const orig = createOriginalReadableStream(); + try { + port.postMessage(orig, [orig]); + } catch (e) { + port.postMessage(e.message); + } +}; diff --git a/test/fixtures/wpt/streams/transferable/resources/sending-worker.js b/test/fixtures/wpt/streams/transferable/resources/sending-worker.js new file mode 100644 index 00000000000000..0b79733f74d97b --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/sending-worker.js @@ -0,0 +1,5 @@ +'use strict'; +importScripts('helpers.js'); + +const orig = createOriginalReadableStream(); +postMessage(orig, [orig]); diff --git a/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html b/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html new file mode 100644 index 00000000000000..348d067c926f58 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html @@ -0,0 +1,39 @@ + + + + + diff --git 
a/test/fixtures/wpt/streams/transferable/resources/service-worker.js b/test/fixtures/wpt/streams/transferable/resources/service-worker.js new file mode 100644 index 00000000000000..af76b6c11b4ed1 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/service-worker.js @@ -0,0 +1,30 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +onmessage = msg => { + const client = msg.source; + if (msg.data === 'SEND') { + sendingTest(client); + } else { + receivingTest(msg, client); + } +}; + +function sendingTest(client) { + const orig = createOriginalReadableStream(); + try { + client.postMessage(orig, [orig]); + } catch (e) { + client.postMessage(e.message); + } +} + +function receivingTest(msg, client) { + try { + msg.waitUntil(testMessage(msg) + .then(() => client.postMessage('OK')) + .catch(e => client.postMessage(`BAD: ${e}`))); + } catch (e) { + client.postMessage(`BAD: ${e}`); + } +} diff --git a/test/fixtures/wpt/streams/transferable/service-worker.https.html b/test/fixtures/wpt/streams/transferable/service-worker.https.html new file mode 100644 index 00000000000000..2ca7f19c910f76 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/service-worker.https.html @@ -0,0 +1,28 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/shared-worker.html b/test/fixtures/wpt/streams/transferable/shared-worker.html new file mode 100644 index 00000000000000..cd0415402d5018 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/shared-worker.html @@ -0,0 +1,25 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/transform-stream.html b/test/fixtures/wpt/streams/transferable/transform-stream.html new file mode 100644 index 00000000000000..fbfbfe8fc1347a --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/transform-stream.html @@ -0,0 +1,104 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/window.html b/test/fixtures/wpt/streams/transferable/window.html new file mode 100644 
index 00000000000000..beaf548fe641c5 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/window.html @@ -0,0 +1,60 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/worker.html b/test/fixtures/wpt/streams/transferable/worker.html new file mode 100644 index 00000000000000..c5dc9fc62f8cf2 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/worker.html @@ -0,0 +1,76 @@ + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/writable-stream.html b/test/fixtures/wpt/streams/transferable/writable-stream.html new file mode 100644 index 00000000000000..adc6f457c27e87 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/writable-stream.html @@ -0,0 +1,136 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transform-streams/backpressure.any.js b/test/fixtures/wpt/streams/transform-streams/backpressure.any.js new file mode 100644 index 00000000000000..64c9d0930ed2f2 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/backpressure.any.js @@ -0,0 +1,195 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +const error1 = new Error('error1 message'); +error1.name = 'error1'; + +promise_test(() => { + const ts = recordingTransformStream(); + const writer = ts.writable.getWriter(); + // This call never resolves. + writer.write('a'); + return flushAsyncEvents().then(() => { + assert_array_equals(ts.events, [], 'transform should not be called'); + }); +}, 'backpressure allows no transforms with a default identity transform and no reader'); + +promise_test(() => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + // This call to write() resolves asynchronously. + writer.write('a'); + // This call to write() waits for backpressure that is never relieved and never calls transform(). 
+ writer.write('b'); + return flushAsyncEvents().then(() => { + assert_array_equals(ts.events, ['transform', 'a'], 'transform should be called once'); + }); +}, 'backpressure only allows one transform() with a identity transform with a readable HWM of 1 and no reader'); + +promise_test(() => { + // Without a transform() implementation, recordingTransformStream() never enqueues anything. + const ts = recordingTransformStream({ + transform() { + // Discard all chunks. As a result, the readable side is never full enough to exert backpressure and transform() + // keeps being called. + } + }, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + const writePromises = []; + for (let i = 0; i < 4; ++i) { + writePromises.push(writer.write(i)); + } + return Promise.all(writePromises).then(() => { + assert_array_equals(ts.events, ['transform', 0, 'transform', 1, 'transform', 2, 'transform', 3], + 'all 4 events should be transformed'); + }); +}, 'transform() should keep being called as long as there is no backpressure'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const events = []; + const writerPromises = [ + writer.write('a').then(() => events.push('a')), + writer.write('b').then(() => events.push('b')), + writer.close().then(() => events.push('closed'))]; + return delay(0).then(() => { + assert_array_equals(events, ['a'], 'the first write should have resolved'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should not be true'); + assert_equals('a', value, 'value should be "a"'); + return delay(0); + }).then(() => { + assert_array_equals(events, ['a', 'b', 'closed'], 'both writes and close() should have resolved'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should still not be true'); + assert_equals('b', value, 'value should be "b"'); 
+ return reader.read(); + }).then(({ done }) => { + assert_true(done, 'done should be true'); + return writerPromises; + }); +}, 'writes should resolve as soon as transform completes'); + +promise_test(() => { + const ts = new TransformStream(undefined, undefined, { highWaterMark: 0 }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const readPromise = reader.read(); + writer.write('a'); + return readPromise.then(({ value, done }) => { + assert_false(done, 'not done'); + assert_equals(value, 'a', 'value should be "a"'); + }); +}, 'calling pull() before the first write() with backpressure should work'); + +promise_test(() => { + let reader; + const ts = recordingTransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk); + return reader.read(); + } + }, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + reader = ts.readable.getReader(); + return writer.write('a'); +}, 'transform() should be able to read the chunk it just enqueued'); + +promise_test(() => { + let resolveTransform; + const transformPromise = new Promise(resolve => { + resolveTransform = resolve; + }); + const ts = recordingTransformStream({ + transform() { + return transformPromise; + } + }, undefined, new CountQueuingStrategy({ highWaterMark: Infinity })); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + return delay(0).then(() => { + writer.write('a'); + assert_array_equals(ts.events, ['transform', 'a']); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + return flushAsyncEvents(); + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should still be 0'); + resolveTransform(); + return delay(0); + }).then(() => { + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + }); +}, 'blocking transform() should cause backpressure'); + +promise_test(t => { + const ts = new TransformStream(); + 
ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); +}, 'writer.closed should resolve after readable is canceled during start'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); + }); +}, 'writer.closed should resolve after readable is canceled with backpressure'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); + }); +}, 'writer.closed should resolve after readable is canceled with no backpressure'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + return delay(0).then(() => { + const writePromise = writer.write('a'); + ts.readable.cancel(error1); + return writePromise; + }); +}, 'cancelling the readable should cause a pending write to resolve'); + +promise_test(t => { + const rs = new ReadableStream(); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); +}, 'cancelling the readable side of a TransformStream should abort an empty pipe'); + +promise_test(t => { + const rs = new ReadableStream(); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); + }); +}, 'cancelling the readable side of a TransformStream should abort an empty 
pipe after startup'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + // Allow data to flow into the pipe. + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); + }); +}, 'cancelling the readable side of a TransformStream should abort a full pipe'); diff --git a/test/fixtures/wpt/streams/transform-streams/errors.any.js b/test/fixtures/wpt/streams/transform-streams/errors.any.js new file mode 100644 index 00000000000000..ba26b32b75a6a4 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/errors.any.js @@ -0,0 +1,341 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +const thrownError = new Error('bad things are happening!'); +thrownError.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + transform() { + throw thrownError; + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + + return Promise.all([ + promise_rejects_exactly(t, thrownError, writer.write('a'), + 'writable\'s write should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.read(), + 'readable\'s read should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.closed, + 'readable\'s closed should be rejected with the thrown error'), + promise_rejects_exactly(t, thrownError, writer.closed, + 'writable\'s closed should be rejected with the thrown error') + ]); +}, 'TransformStream errors thrown in transform put the writable and readable in an errored state'); + +promise_test(t => { + const ts = new TransformStream({ + transform() { + }, + flush() { + throw thrownError; + } + }); + + const reader = 
ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + + return Promise.all([ + writer.write('a'), + promise_rejects_exactly(t, thrownError, writer.close(), + 'writable\'s close should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.read(), + 'readable\'s read should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.closed, + 'readable\'s closed should be rejected with the thrown error'), + promise_rejects_exactly(t, thrownError, writer.closed, + 'writable\'s closed should be rejected with the thrown error') + ]); +}, 'TransformStream errors thrown in flush put the writable and readable in an errored state'); + +test(() => { + new TransformStream({ + start(c) { + c.enqueue('a'); + c.error(new Error('generic error')); + assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue() should throw'); + } + }); +}, 'errored TransformStream should not enqueue new chunks'); + +promise_test(t => { + const ts = new TransformStream({ + start() { + return flushAsyncEvents().then(() => { + throw thrownError; + }); + }, + transform: t.unreached_func('transform should not be called'), + flush: t.unreached_func('flush should not be called') + }); + + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + return Promise.all([ + promise_rejects_exactly(t, thrownError, writer.write('a'), 'writer should reject with thrownError'), + promise_rejects_exactly(t, thrownError, writer.close(), 'close() should reject with thrownError'), + promise_rejects_exactly(t, thrownError, reader.read(), 'reader should reject with thrownError') + ]); +}, 'TransformStream transformer.start() rejected promise should error the stream'); + +promise_test(t => { + const controllerError = new Error('start failure'); + controllerError.name = 'controllerError'; + const ts = new TransformStream({ + start(c) { + return flushAsyncEvents() + .then(() => { + c.error(controllerError); + throw new Error('ignored 
error'); + }); + }, + transform: t.unreached_func('transform should never be called if start() fails'), + flush: t.unreached_func('flush should never be called if start() fails') + }); + + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + return Promise.all([ + promise_rejects_exactly(t, controllerError, writer.write('a'), 'writer should reject with controllerError'), + promise_rejects_exactly(t, controllerError, writer.close(), 'close should reject with same error'), + promise_rejects_exactly(t, controllerError, reader.read(), 'reader should reject with same error') + ]); +}, 'when controller.error is followed by a rejection, the error reason should come from controller.error'); + +test(() => { + assert_throws_js(URIError, () => new TransformStream({ + start() { throw new URIError('start thrown error'); }, + transform() {} + }), 'constructor should throw'); +}, 'TransformStream constructor should throw when start does'); + +test(() => { + const strategy = { + size() { throw new URIError('size thrown error'); } + }; + + assert_throws_js(URIError, () => new TransformStream({ + start(c) { + c.enqueue('a'); + }, + transform() {} + }, undefined, strategy), 'constructor should throw the same error strategy.size throws'); +}, 'when strategy.size throws inside start(), the constructor should throw the same error'); + +test(() => { + const controllerError = new URIError('controller.error'); + + let controller; + const strategy = { + size() { + controller.error(controllerError); + throw new Error('redundant error'); + } + }; + + assert_throws_js(URIError, () => new TransformStream({ + start(c) { + controller = c; + c.enqueue('a'); + }, + transform() {} + }, undefined, strategy), 'the first error should be thrown'); +}, 'when strategy.size calls controller.error() then throws, the constructor should throw the first error'); + +promise_test(t => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + const 
closedPromise = writer.closed; + return Promise.all([ + ts.readable.cancel(thrownError), + promise_rejects_exactly(t, thrownError, closedPromise, 'closed should throw a TypeError') + ]); +}, 'cancelling the readable side should error the writable'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const writePromise = writer.write('a'); + const closePromise = writer.close(); + controller.error(thrownError); + return Promise.all([ + promise_rejects_exactly(t, thrownError, reader.closed, 'reader.closed should reject'), + promise_rejects_exactly(t, thrownError, writePromise, 'writePromise should reject'), + promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject')]); +}, 'it should be possible to error the readable between close requested and complete'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk); + controller.terminate(); + throw thrownError; + } + }, undefined, { highWaterMark: 1 }); + const writePromise = ts.writable.getWriter().write('a'); + const closedPromise = ts.readable.getReader().closed; + return Promise.all([ + promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'), + promise_rejects_exactly(t, thrownError, closedPromise, 'reader.closed should reject') + ]); +}, 'an exception from transform() should error the stream if terminate has been requested but not completed'); + +promise_test(t => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + // The microtask following transformer.start() hasn't completed yet, so the abort is queued and not notified to the + // TransformStream yet. 
+ const abortPromise = writer.abort(thrownError); + const cancelPromise = ts.readable.cancel(new Error('cancel reason')); + return Promise.all([ + abortPromise, + cancelPromise, + promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError')]); +}, 'abort should set the close reason for the writable when it happens before cancel during start, but cancel should ' + + 'still succeed'); + +promise_test(t => { + let resolveTransform; + const transformPromise = new Promise(resolve => { + resolveTransform = resolve; + }); + const ts = new TransformStream({ + transform() { + return transformPromise; + } + }, undefined, { highWaterMark: 2 }); + const writer = ts.writable.getWriter(); + return delay(0).then(() => { + const writePromise = writer.write(); + const abortPromise = writer.abort(thrownError); + const cancelPromise = ts.readable.cancel(new Error('cancel reason')); + resolveTransform(); + return Promise.all([ + writePromise, + abortPromise, + cancelPromise, + promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError')]); + }); +}, 'abort should set the close reason for the writable when it happens before cancel during underlying sink write, ' + + 'but cancel should still succeed'); + +const ignoredError = new Error('ignoredError'); +ignoredError.name = 'ignoredError'; + +promise_test(t => { + const ts = new TransformStream({ + start(controller) { + controller.error(thrownError); + controller.error(ignoredError); + } + }); + return promise_rejects_exactly(t, thrownError, ts.writable.abort(), 'abort() should reject with thrownError'); +}, 'controller.error() should do nothing the second time it is called'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelPromise = ts.readable.cancel(thrownError); + controller.error(ignoredError); + return Promise.all([ + cancelPromise, + promise_rejects_exactly(t, 
thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError') + ]); +}, 'controller.error() should do nothing after readable.cancel()'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + return ts.writable.abort(thrownError).then(() => { + controller.error(ignoredError); + return promise_rejects_exactly(t, thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError'); + }); +}, 'controller.error() should do nothing after writable.abort() has completed'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + }, + transform() { + throw thrownError; + } + }, undefined, { highWaterMark: Infinity }); + const writer = ts.writable.getWriter(); + return promise_rejects_exactly(t, thrownError, writer.write(), 'write() should reject').then(() => { + controller.error(); + return promise_rejects_exactly(t, thrownError, writer.closed, 'closed should reject with thrownError'); + }); +}, 'controller.error() should do nothing after a transformer method has thrown an exception'); + +promise_test(t => { + let controller; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + }, + transform() { + ++calls; + } + }, undefined, { highWaterMark: 1 }); + return delay(0).then(() => { + // Create backpressure. + controller.enqueue('a'); + const writer = ts.writable.getWriter(); + // transform() will not be called until backpressure is relieved. + const writePromise = writer.write('b'); + assert_equals(calls, 0, 'transform() should not have been called'); + controller.error(thrownError); + // Now backpressure has been relieved and the write can proceed. 
+ return promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject').then(() => { + assert_equals(calls, 0, 'transform() should not be called'); + }); + }); +}, 'erroring during write with backpressure should result in the write failing'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + return delay(0).then(() => { + const writer = ts.writable.getWriter(); + // write should start synchronously + const writePromise = writer.write(0); + // The underlying sink's abort() is not called until the write() completes. + const abortPromise = writer.abort(thrownError); + // Perform a read to relieve backpressure and permit the write() to complete. + const readPromise = ts.readable.getReader().read(); + return Promise.all([ + promise_rejects_exactly(t, thrownError, readPromise, 'read() should reject'), + promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'), + abortPromise + ]); + }); +}, 'a write() that was waiting for backpressure should reject if the writable is aborted'); + +promise_test(t => { + const ts = new TransformStream(); + ts.writable.abort(thrownError); + const reader = ts.readable.getReader(); + return promise_rejects_exactly(t, thrownError, reader.read(), 'read() should reject with thrownError'); +}, 'the readable should be errored with the reason passed to the writable abort() method'); diff --git a/test/fixtures/wpt/streams/transform-streams/flush.any.js b/test/fixtures/wpt/streams/transform-streams/flush.any.js new file mode 100644 index 00000000000000..dc40532957b14b --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/flush.any.js @@ -0,0 +1,131 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(() => { + let flushCalled = false; + const ts = new TransformStream({ + transform() { }, + flush() { + flushCalled = true; + } + }); + + return ts.writable.getWriter().close().then(() => { + return 
assert_true(flushCalled, 'closing the writable triggers the transform flush immediately'); + }); +}, 'TransformStream flush is called immediately when the writable is closed, if no writes are queued'); + +promise_test(() => { + let flushCalled = false; + let resolveTransform; + const ts = new TransformStream({ + transform() { + return new Promise(resolve => { + resolveTransform = resolve; + }); + }, + flush() { + flushCalled = true; + return new Promise(() => {}); // never resolves + } + }, undefined, { highWaterMark: 1 }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + assert_false(flushCalled, 'closing the writable does not immediately call flush if writes are not finished'); + + let rsClosed = false; + ts.readable.getReader().closed.then(() => { + rsClosed = true; + }); + + return delay(0).then(() => { + assert_false(flushCalled, 'closing the writable does not asynchronously call flush if writes are not finished'); + resolveTransform(); + return delay(0); + }).then(() => { + assert_true(flushCalled, 'flush is eventually called'); + assert_false(rsClosed, 'if flushPromise does not resolve, the readable does not become closed'); + }); +}, 'TransformStream flush is called after all queued writes finish, once the writable is closed'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + }, + flush() { + c.enqueue('x'); + c.enqueue('y'); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + return reader.read().then(result1 => { + assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush'); + assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush'); + 
assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush'); + }); + }); +}, 'TransformStream flush gets a chance to enqueue more into the readable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + }, + flush() { + c.enqueue('x'); + c.enqueue('y'); + return delay(0); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + return Promise.all([ + reader.read().then(result1 => { + assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush'); + assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush'); + assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush'); + }); + }), + reader.closed.then(() => { + assert_true(true, 'readable reader becomes closed'); + }) + ]); +}, 'TransformStream flush gets a chance to enqueue more into the readable, and can then async close'); + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + flush(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().close(), 'close() should reject'); +}, 'error() during flush should cause writer.close() to reject'); diff --git a/test/fixtures/wpt/streams/transform-streams/general.any.js b/test/fixtures/wpt/streams/transform-streams/general.any.js new file mode 100644 index 00000000000000..d4f2a1d5a29cf6 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/general.any.js @@ -0,0 +1,437 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use 
strict'; + +test(() => { + new TransformStream({ transform() { } }); +}, 'TransformStream can be constructed with a transform function'); + +test(() => { + new TransformStream(); + new TransformStream({}); +}, 'TransformStream can be constructed with no transform function'); + +test(() => { + const ts = new TransformStream({ transform() { } }); + + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'writer.desiredSize should be 1'); +}, 'TransformStream writable starts in the writable state'); + +promise_test(() => { + const ts = new TransformStream(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + assert_equals(writer.desiredSize, 0, 'writer.desiredSize should be 0 after write()'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'a', + 'result from reading the readable is the same as was written to writable'); + assert_false(result.done, 'stream should not be done'); + + return delay(0).then(() => assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again')); + }); +}, 'Identity TransformStream: can read from readable what is put into writable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + c.enqueue(chunk.toUpperCase()); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'A', + 'result from reading the readable is the transformation of what was written to writable'); + assert_false(result.done, 'stream should not be done'); + }); +}, 'Uppercaser sync TransformStream: can read from readable transformed version of what is put into writable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + c.enqueue(chunk.toUpperCase()); + c.enqueue(chunk.toUpperCase()); + } + }); + + const writer = 
ts.writable.getWriter(); + writer.write('a'); + + const reader = ts.readable.getReader(); + + return reader.read().then(result1 => { + assert_equals(result1.value, 'A', + 'the first chunk read is the transformation of the single chunk written'); + assert_false(result1.done, 'stream should not be done'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'A', + 'the second chunk read is also the transformation of the single chunk written'); + assert_false(result2.done, 'stream should not be done'); + }); + }); +}, 'Uppercaser-doubler sync TransformStream: can read both chunks put into the readable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + return delay(0).then(() => c.enqueue(chunk.toUpperCase())); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'A', + 'result from reading the readable is the transformation of what was written to writable'); + assert_false(result.done, 'stream should not be done'); + }); +}, 'Uppercaser async TransformStream: can read from readable transformed version of what is put into writable'); + +promise_test(() => { + let doSecondEnqueue; + let returnFromTransform; + const ts = new TransformStream({ + transform(chunk, controller) { + delay(0).then(() => controller.enqueue(chunk.toUpperCase())); + doSecondEnqueue = () => controller.enqueue(chunk.toUpperCase()); + return new Promise(resolve => { + returnFromTransform = resolve; + }); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return reader.read().then(result1 => { + assert_equals(result1.value, 'A', + 'the first chunk read is the transformation of the single chunk written'); + assert_false(result1.done, 'stream should not be done'); + doSecondEnqueue(); + + return 
reader.read().then(result2 => { + assert_equals(result2.value, 'A', + 'the second chunk read is also the transformation of the single chunk written'); + assert_false(result2.done, 'stream should not be done'); + returnFromTransform(); + }); + }); +}, 'Uppercaser-doubler async TransformStream: can read both chunks put into the readable'); + +promise_test(() => { + const ts = new TransformStream({ transform() { } }); + + const writer = ts.writable.getWriter(); + writer.close(); + + return Promise.all([writer.closed, ts.readable.getReader().closed]); +}, 'TransformStream: by default, closing the writable closes the readable (when there are no queued writes)'); + +promise_test(() => { + let transformResolve; + const transformPromise = new Promise(resolve => { + transformResolve = resolve; + }); + const ts = new TransformStream({ + transform() { + return transformPromise; + } + }, undefined, { highWaterMark: 1 }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + let rsClosed = false; + ts.readable.getReader().closed.then(() => { + rsClosed = true; + }); + + return delay(0).then(() => { + assert_equals(rsClosed, false, 'readable is not closed after a tick'); + transformResolve(); + + return writer.closed.then(() => { + // TODO: Is this expectation correct? 
+ assert_equals(rsClosed, true, 'readable is closed at that point'); + }); + }); +}, 'TransformStream: by default, closing the writable waits for transforms to finish before closing both'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + c.enqueue('x'); + c.enqueue('y'); + return delay(0); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => { + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable'); + }); + }); +}, 'TransformStream: by default, closing the writable closes the readable after sync enqueues and async done'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + return delay(0) + .then(() => c.enqueue('x')) + .then(() => c.enqueue('y')) + .then(() => delay(0)); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => { + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable'); + }); + }); +}, 'TransformStream: by default, closing the writable closes the readable after async enqueues and async done'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + suffix: '-suffix', + + start(controller) { + c = controller; + c.enqueue('start' + this.suffix); + }, + + transform(chunk) { + c.enqueue(chunk + this.suffix); + }, + + flush() { + c.enqueue('flushed' + this.suffix); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => 
{ + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['start-suffix', 'a-suffix', 'flushed-suffix'], 'all enqueued chunks have suffixes'); + }); + }); +}, 'Transform stream should call transformer methods as methods'); + +promise_test(() => { + function functionWithOverloads() {} + functionWithOverloads.apply = () => assert_unreached('apply() should not be called'); + functionWithOverloads.call = () => assert_unreached('call() should not be called'); + const ts = new TransformStream({ + start: functionWithOverloads, + transform: functionWithOverloads, + flush: functionWithOverloads + }); + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + return readableStreamToArray(ts.readable); +}, 'methods should not not have .apply() or .call() called'); + +promise_test(t => { + let startCalled = false; + let startDone = false; + let transformDone = false; + let flushDone = false; + const ts = new TransformStream({ + start() { + startCalled = true; + return flushAsyncEvents().then(() => { + startDone = true; + }); + }, + transform() { + return t.step(() => { + assert_true(startDone, 'transform() should not be called until the promise returned from start() has resolved'); + return flushAsyncEvents().then(() => { + transformDone = true; + }); + }); + }, + flush() { + return t.step(() => { + assert_true(transformDone, + 'flush() should not be called until the promise returned from transform() has resolved'); + return flushAsyncEvents().then(() => { + flushDone = true; + }); + }); + } + }, undefined, { highWaterMark: 1 }); + + assert_true(startCalled, 'start() should be called synchronously'); + + const writer = ts.writable.getWriter(); + const writePromise = writer.write('a'); + return writer.close().then(() => { + assert_true(flushDone, 'promise returned from flush() should have resolved'); + return writePromise; + }); +}, 'TransformStream start, transform, and flush should be strictly ordered'); + +promise_test(() => { + let 
transformCalled = false; + const ts = new TransformStream({ + transform() { + transformCalled = true; + } + }, undefined, { highWaterMark: Infinity }); + // transform() is only called synchronously when there is no backpressure and all microtasks have run. + return delay(0).then(() => { + const writePromise = ts.writable.getWriter().write(); + assert_true(transformCalled, 'transform() should have been called'); + return writePromise; + }); +}, 'it should be possible to call transform() synchronously'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + + const writer = ts.writable.getWriter(); + writer.close(); + + return Promise.all([writer.closed, ts.readable.getReader().closed]); +}, 'closing the writable should close the readable when there are no queued chunks, even with backpressure'); + +test(() => { + new TransformStream({ + start(controller) { + controller.terminate(); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw'); + } + }); +}, 'enqueue() should throw after controller.terminate()'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelPromise = ts.readable.cancel(); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw'); + return cancelPromise; +}, 'enqueue() should throw after readable.cancel()'); + +test(() => { + new TransformStream({ + start(controller) { + controller.terminate(); + controller.terminate(); + } + }); +}, 'controller.terminate() should do nothing the second time it is called'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelReason = { name: 'cancelReason' }; + const cancelPromise = ts.readable.cancel(cancelReason); + controller.terminate(); + return Promise.all([ + cancelPromise, + promise_rejects_exactly(t, cancelReason, ts.writable.getWriter().closed, 'closed should 
reject with cancelReason') + ]); +}, 'terminate() should do nothing after readable.cancel()'); + +promise_test(() => { + let calls = 0; + new TransformStream({ + start() { + ++calls; + } + }); + return flushAsyncEvents().then(() => { + assert_equals(calls, 1, 'start() should have been called exactly once'); + }); +}, 'start() should not be called twice'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream({ readableType: 'bytes' }), 'constructor should throw'); +}, 'specifying a defined readableType should throw'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream({ writableType: 'bytes' }), 'constructor should throw'); +}, 'specifying a defined writableType should throw'); + +test(() => { + class Subclass extends TransformStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), TransformStream.prototype, + 'Subclass.prototype\'s prototype should be TransformStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), TransformStream, + 'Subclass\'s prototype should be TransformStream'); + const sub = new Subclass(); + assert_true(sub instanceof TransformStream, + 'Subclass object should be an instance of TransformStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const readableGetter = Object.getOwnPropertyDescriptor( + TransformStream.prototype, 'readable').get; + assert_equals(readableGetter.call(sub), sub.readable, + 'Subclass object should pass brand check'); + assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing TransformStream should work'); diff --git a/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js b/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js new file mode 100644 index 00000000000000..c8c3803c6dfb4b --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js @@ -0,0 +1,163 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +class LipFuzzTransformer { + constructor(substitutions) { + this.substitutions = substitutions; + this.partialChunk = ''; + this.lastIndex = undefined; + } + + transform(chunk, controller) { + chunk = this.partialChunk + chunk; + this.partialChunk = ''; + // lastIndex is the index of the first character after the last substitution. + this.lastIndex = 0; + chunk = chunk.replace(/\{\{([a-zA-Z0-9_-]+)\}\}/g, this.replaceTag.bind(this)); + // Regular expression for an incomplete template at the end of a string. + const partialAtEndRegexp = /\{(\{([a-zA-Z0-9_-]+(\})?)?)?$/g; + // Avoid looking at any characters that have already been substituted. + partialAtEndRegexp.lastIndex = this.lastIndex; + this.lastIndex = undefined; + const match = partialAtEndRegexp.exec(chunk); + if (match) { + this.partialChunk = chunk.substring(match.index); + chunk = chunk.substring(0, match.index); + } + controller.enqueue(chunk); + } + + flush(controller) { + if (this.partialChunk.length > 0) { + controller.enqueue(this.partialChunk); + } + } + + replaceTag(match, p1, offset) { + let replacement = this.substitutions[p1]; + if (replacement === undefined) { + replacement = ''; + } + this.lastIndex = offset + replacement.length; + return replacement; + } +} + +const substitutions = { + in1: 'out1', + in2: 'out2', + quine: '{{quine}}', + bogusPartial: '{{incompleteResult}' +}; + +const cases = [ + { + input: [''], + output: [''] + }, + { + input: [], + output: [] + }, + { + input: ['{{in1}}'], + output: ['out1'] + }, + { + input: ['z{{in1}}'], + output: ['zout1'] + }, + { + input: ['{{in1}}q'], + output: ['out1q'] + }, + { + input: ['{{in1}}{{in1}'], + output: ['out1', '{{in1}'] + }, + { + input: ['{{in1}}{{in1}', '}'], + output: ['out1', 'out1'] + }, + { + input: ['{{in1', '}}'], + output: ['', 'out1'] + }, + { + input: ['{{', 'in1}}'], + output: ['', 'out1'] + }, + { + input: ['{', '{in1}}'], + output: ['', 'out1'] + }, + { + input: ['{{', 
'in1}'], + output: ['', '', '{{in1}'] + }, + { + input: ['{'], + output: ['', '{'] + }, + { + input: ['{', ''], + output: ['', '', '{'] + }, + { + input: ['{', '{', 'i', 'n', '1', '}', '}'], + output: ['', '', '', '', '', '', 'out1'] + }, + { + input: ['{{in1}}{{in2}}{{in1}}'], + output: ['out1out2out1'] + }, + { + input: ['{{wrong}}'], + output: [''] + }, + { + input: ['{{wron', 'g}}'], + output: ['', ''] + }, + { + input: ['{{quine}}'], + output: ['{{quine}}'] + }, + { + input: ['{{bogusPartial}}'], + output: ['{{incompleteResult}'] + }, + { + input: ['{{bogusPartial}}}'], + output: ['{{incompleteResult}}'] + } +]; + +for (const testCase of cases) { + const inputChunks = testCase.input; + const outputChunks = testCase.output; + promise_test(() => { + const lft = new TransformStream(new LipFuzzTransformer(substitutions)); + const writer = lft.writable.getWriter(); + const promises = []; + for (const inputChunk of inputChunks) { + promises.push(writer.write(inputChunk)); + } + promises.push(writer.close()); + const reader = lft.readable.getReader(); + let readerChain = Promise.resolve(); + for (const outputChunk of outputChunks) { + readerChain = readerChain.then(() => { + return reader.read().then(({ value, done }) => { + assert_false(done, `done should be false when reading ${outputChunk}`); + assert_equals(value, outputChunk, `value should match outputChunk`); + }); + }); + } + readerChain = readerChain.then(() => { + return reader.read().then(({ done }) => assert_true(done, `done should be true`)); + }); + promises.push(readerChain); + return Promise.all(promises); + }, `testing "${inputChunks}" (length ${inputChunks.length})`); +} diff --git a/test/fixtures/wpt/streams/transform-streams/patched-global.any.js b/test/fixtures/wpt/streams/transform-streams/patched-global.any.js new file mode 100644 index 00000000000000..5142d236fb65e8 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/patched-global.any.js @@ -0,0 +1,53 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +// Tests which patch the global environment are kept separate to avoid +// interfering with other tests. + +test(t => { + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, 'highWaterMark', { + set() { throw new Error('highWaterMark setter called'); }, + configurable: true + }); + + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, 'size', { + set() { throw new Error('size setter called'); }, + configurable: true + }); + + t.add_cleanup(() => { + delete Object.prototype.highWaterMark; + delete Object.prototype.size; + }); + + assert_not_equals(new TransformStream(), null, 'constructor should work'); +}, 'TransformStream constructor should not call setters for highWaterMark or size'); + +test(t => { + const oldReadableStream = ReadableStream; + const oldWritableStream = WritableStream; + const getReader = ReadableStream.prototype.getReader; + const getWriter = WritableStream.prototype.getWriter; + + // Replace ReadableStream and WritableStream with broken versions. + ReadableStream = function () { + throw new Error('Called the global ReadableStream constructor'); + }; + WritableStream = function () { + throw new Error('Called the global WritableStream constructor'); + }; + t.add_cleanup(() => { + ReadableStream = oldReadableStream; + WritableStream = oldWritableStream; + }); + + const ts = new TransformStream(); + + // Just to be sure, ensure the readable and writable pass brand checks. 
+ assert_not_equals(getReader.call(ts.readable), undefined, + 'getReader should work when called on ts.readable'); + assert_not_equals(getWriter.call(ts.writable), undefined, + 'getWriter should work when called on ts.writable'); +}, 'TransformStream should use the original value of ReadableStream and WritableStream'); diff --git a/test/fixtures/wpt/streams/transform-streams/properties.any.js b/test/fixtures/wpt/streams/transform-streams/properties.any.js new file mode 100644 index 00000000000000..f2ac482e0de223 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/properties.any.js @@ -0,0 +1,49 @@ +// META: global=window,worker,jsshell +'use strict'; + +const transformerMethods = { + start: { + length: 1, + trigger: () => Promise.resolve() + }, + transform: { + length: 2, + trigger: ts => ts.writable.getWriter().write() + }, + flush: { + length: 1, + trigger: ts => ts.writable.getWriter().close() + } +}; + +for (const method in transformerMethods) { + const { length, trigger } = transformerMethods[method]; + + // Some semantic tests of how transformer methods are called can be found in general.js, as well as in the test files + // specific to each method. 
+ promise_test(() => { + let argCount; + const ts = new TransformStream({ + [method](...args) { + argCount = args.length; + } + }, undefined, { highWaterMark: Infinity }); + return Promise.resolve(trigger(ts)).then(() => { + assert_equals(argCount, length, `${method} should be called with ${length} arguments`); + }); + }, `transformer method ${method} should be called with the right number of arguments`); + + promise_test(() => { + let methodWasCalled = false; + function Transformer() {} + Transformer.prototype = { + [method]() { + methodWasCalled = true; + } + }; + const ts = new TransformStream(new Transformer(), undefined, { highWaterMark: Infinity }); + return Promise.resolve(trigger(ts)).then(() => { + assert_true(methodWasCalled, `${method} should be called`); + }); + }, `transformer method ${method} should be called even when it's located on the prototype chain`); +} diff --git a/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js b/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js new file mode 100644 index 00000000000000..31e53949f3c26e --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js @@ -0,0 +1,319 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +// The size() function of readableStrategy can re-entrantly call back into the TransformStream implementation. This +// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch +// such errors. They are separated from the other strategy tests because no real user code should ever do anything like +// this. +// +// There is no such issue with writableStrategy size() because it is never called from within TransformStream +// algorithms. 
+ +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(() => { + let controller; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + ++calls; + if (calls < 2) { + controller.enqueue('b'); + } + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return Promise.all([writer.write('a'), writer.close()]) + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks')); +}, 'enqueue() inside size() should work'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + // The readable queue is empty. + controller.terminate(); + // The readable state has gone from "readable" to "closed". + return 1; + // This chunk will be enqueued, but will be impossible to read because the state is already "closed". + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, [], 'array should contain no chunks')); + // The chunk 'a' is still in readable's queue. readable is closed so 'a' cannot be read. writable's queue is empty and + // it is still writable. 
+}, 'terminate() inside size() should work'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + controller.error(error1); + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => promise_rejects_exactly(t, error1, ts.readable.getReader().read(), 'read() should reject')); +}, 'error() inside size() should work'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + assert_equals(controller.desiredSize, 1, 'desiredSize should be 1'); + return 1; + }, + highWaterMark: 1 + }); + const writer = ts.writable.getWriter(); + return Promise.all([writer.write('a'), writer.close()]) + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk')); +}, 'desiredSize inside size() should work'); + +promise_test(t => { + let cancelPromise; + const ts = new TransformStream({}, undefined, { + size() { + cancelPromise = ts.readable.cancel(error1); + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => { + promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject'); + return cancelPromise; + }); +}, 'readable cancel() inside size() should work'); + +promise_test(() => { + let controller; + let pipeToPromise; + const ws = recordingWritableStream(); + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + if (!pipeToPromise) { + pipeToPromise = ts.readable.pipeTo(ws); + } + return 1; + }, + highWaterMark: 1 + }); + // Allow promise returned by start() to resolve so that enqueue() will happen synchronously. 
+ return delay(0).then(() => { + controller.enqueue('a'); + assert_not_equals(pipeToPromise, undefined); + + // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See + // https://github.com/whatwg/streams/issues/794 for background. + controller.enqueue('a'); + + // Give pipeTo() a chance to process the queued chunks. + return delay(0); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks'); + controller.terminate(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed'); + }); +}, 'pipeTo() inside size() should work'); + +promise_test(() => { + let controller; + let readPromise; + let calls = 0; + let reader; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. pull() is called + // synchronously, allowing transform() to proceed asynchronously. This results in a second call to enqueue(), + // which resolves this pending read() without calling size() again. + readPromise = reader.read(); + ++calls; + return 1; + }, + highWaterMark: 0 + }); + reader = ts.readable.getReader(); + const writer = ts.writable.getWriter(); + let writeResolved = false; + const writePromise = writer.write('b').then(() => { + writeResolved = true; + }); + return flushAsyncEvents().then(() => { + assert_false(writeResolved); + controller.enqueue('a'); + assert_equals(calls, 1, 'size() should have been called once'); + return delay(0); + }).then(() => { + assert_true(writeResolved); + assert_equals(calls, 1, 'size() should only be called once'); + return readPromise; + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'. 
+ assert_equals(value, 'b', 'chunk should have been read'); + assert_equals(calls, 1, 'calls should still be 1'); + return writePromise; + }); +}, 'read() inside of size() should work'); + +promise_test(() => { + let writer; + let writePromise1; + let calls = 0; + const ts = new TransformStream({}, undefined, { + size() { + ++calls; + if (calls < 2) { + writePromise1 = writer.write('a'); + } + return 1; + }, + highWaterMark: Infinity + }); + writer = ts.writable.getWriter(); + // Give pull() a chance to be called. + return delay(0).then(() => { + // This write results in a synchronous call to transform(), enqueue(), and size(). + const writePromise2 = writer.write('b'); + assert_equals(calls, 1, 'size() should have been called once'); + return Promise.all([writePromise1, writePromise2, writer.close()]); + }).then(() => { + assert_equals(calls, 2, 'size() should have been called twice'); + return readableStreamToArray(ts.readable); + }).then(array => { + assert_array_equals(array, ['b', 'a'], 'both chunks should have been enqueued'); + assert_equals(calls, 2, 'calls should still be 2'); + }); +}, 'writer.write() inside size() should work'); + +promise_test(() => { + let controller; + let writer; + let writePromise; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + ++calls; + if (calls < 2) { + writePromise = writer.write('a'); + } + return 1; + }, + highWaterMark: Infinity + }); + writer = ts.writable.getWriter(); + // Give pull() a chance to be called. + return delay(0).then(() => { + // This enqueue results in synchronous calls to size(), write(), transform() and enqueue(). 
+ controller.enqueue('b'); + assert_equals(calls, 2, 'size() should have been called twice'); + return Promise.all([writePromise, writer.close()]); + }).then(() => { + return readableStreamToArray(ts.readable); + }).then(array => { + // Because one call to enqueue() is nested inside the other, they finish in the opposite order that they were + // called, so the chunks end up reverse order. + assert_array_equals(array, ['a', 'b'], 'both chunks should have been enqueued'); + assert_equals(calls, 2, 'calls should still be 2'); + }); +}, 'synchronous writer.write() inside size() should work'); + +promise_test(() => { + let writer; + let closePromise; + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + closePromise = writer.close(); + return 1; + }, + highWaterMark: 1 + }); + writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + // Wait for the promise returned by start() to be resolved so that the call to close() will result in a synchronous + // call to TransformStreamDefaultSink. + return delay(0).then(() => { + controller.enqueue('a'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be correct'); + return reader.read(); + }).then(({ done }) => { + assert_true(done, 'done should be true'); + return closePromise; + }); +}, 'writer.close() inside size() should work'); + +promise_test(t => { + let abortPromise; + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + abortPromise = ts.writable.abort(error1); + return 1; + }, + highWaterMark: 1 + }); + const reader = ts.readable.getReader(); + // Wait for the promise returned by start() to be resolved so that the call to abort() will result in a synchronous + // call to TransformStreamDefaultSink. 
+ return delay(0).then(() => { + controller.enqueue('a'); + return Promise.all([promise_rejects_exactly(t, error1, reader.read(), 'read() should reject'), abortPromise]); + }); +}, 'writer.abort() inside size() should work'); diff --git a/test/fixtures/wpt/streams/transform-streams/strategies.any.js b/test/fixtures/wpt/streams/transform-streams/strategies.any.js new file mode 100644 index 00000000000000..d465d31ab09736 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/strategies.any.js @@ -0,0 +1,150 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +// Here we just test that the strategies are correctly passed to the readable and writable sides. We assume that +// ReadableStream and WritableStream will correctly apply the strategies when they are being used by a TransformStream +// and so it isn't necessary to repeat their tests here. + +test(() => { + const ts = new TransformStream({}, { highWaterMark: 17 }); + assert_equals(ts.writable.getWriter().desiredSize, 17, 'desiredSize should be 17'); +}, 'writableStrategy highWaterMark should work'); + +promise_test(() => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 9 }); + const writer = ts.writable.getWriter(); + for (let i = 0; i < 10; ++i) { + writer.write(i); + } + return delay(0).then(() => { + assert_array_equals(ts.events, [ + 'transform', 0, 'transform', 1, 'transform', 2, 'transform', 3, 'transform', 4, + 'transform', 5, 'transform', 6, 'transform', 7, 'transform', 8], + 'transform() should have been called 9 times'); + }); +}, 'readableStrategy highWaterMark should work'); + +promise_test(t => { + let writableSizeCalled = false; + let readableSizeCalled = false; + let transformCalled = false; + const ts = new TransformStream( + { + transform(chunk, controller) { + t.step(() => { + transformCalled = true; + assert_true(writableSizeCalled, 'writableStrategy.size() 
should have been called'); + assert_false(readableSizeCalled, 'readableStrategy.size() should not have been called'); + controller.enqueue(chunk); + assert_true(readableSizeCalled, 'readableStrategy.size() should have been called'); + }); + } + }, + { + size() { + writableSizeCalled = true; + return 1; + } + }, + { + size() { + readableSizeCalled = true; + return 1; + }, + highWaterMark: Infinity + }); + return ts.writable.getWriter().write().then(() => { + assert_true(transformCalled, 'transform() should be called'); + }); +}, 'writable should have the correct size() function'); + +test(() => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'default writable HWM is 1'); + writer.write(undefined); + assert_equals(writer.desiredSize, 0, 'default chunk size is 1'); +}, 'default writable strategy should be equivalent to { highWaterMark: 1 }'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + return t.step(() => { + assert_equals(controller.desiredSize, 0, 'desiredSize should be 0'); + controller.enqueue(undefined); + // The first chunk enqueued is consumed by the pending read(). 
+ assert_equals(controller.desiredSize, 0, 'desiredSize should still be 0'); + controller.enqueue(undefined); + assert_equals(controller.desiredSize, -1, 'desiredSize should be -1'); + }); + } + }); + const writePromise = ts.writable.getWriter().write(); + return ts.readable.getReader().read().then(() => writePromise); +}, 'default readable strategy should be equivalent to { highWaterMark: 0 }'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: -1 }), + 'should throw RangeError for negative writableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: -1 }), + 'should throw RangeError for negative readableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: NaN }), + 'should throw RangeError for NaN writableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: NaN }), + 'should throw RangeError for NaN readableHighWaterMark'); +}, 'a RangeError should be thrown for an invalid highWaterMark'); + +const objectThatConvertsTo42 = { + toString() { + return '42'; + } +}; + +test(() => { + const ts = new TransformStream(undefined, { highWaterMark: objectThatConvertsTo42 }); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 42, 'writable HWM is 42'); +}, 'writableStrategy highWaterMark should be converted to a number'); + +test(() => { + const ts = new TransformStream({ + start(controller) { + assert_equals(controller.desiredSize, 42, 'desiredSize should be 42'); + } + }, undefined, { highWaterMark: objectThatConvertsTo42 }); +}, 'readableStrategy highWaterMark should be converted to a number'); + +promise_test(t => { + const ts = new TransformStream(undefined, undefined, { + size() { return NaN; }, + highWaterMark: 1 + }); + const writer = ts.writable.getWriter(); + return promise_rejects_js(t, RangeError, writer.write(), 'write 
should reject'); +}, 'a bad readableStrategy size function should cause writer.write() to reject on an identity transform'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + // This assert has the important side-effect of catching the error, so transform() does not throw. + assert_throws_js(RangeError, () => controller.enqueue(chunk), 'enqueue should throw'); + } + }, undefined, { + size() { + return -1; + }, + highWaterMark: 1 + }); + + const writer = ts.writable.getWriter(); + return writer.write().then(() => { + return Promise.all([ + promise_rejects_js(t, RangeError, writer.ready, 'ready should reject'), + promise_rejects_js(t, RangeError, writer.closed, 'closed should reject'), + promise_rejects_js(t, RangeError, ts.readable.getReader().closed, 'readable closed should reject') + ]); + }); +}, 'a bad readableStrategy size function should error the stream on enqueue even when transformer.transform() ' + + 'catches the exception'); diff --git a/test/fixtures/wpt/streams/transform-streams/terminate.any.js b/test/fixtures/wpt/streams/transform-streams/terminate.any.js new file mode 100644 index 00000000000000..8cb10679348b50 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/terminate.any.js @@ -0,0 +1,100 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(t => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 0 }); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(0); + } + }); + let pipeToRejected = false; + const pipeToPromise = promise_rejects_js(t, TypeError, rs.pipeTo(ts.writable), 'pipeTo should reject').then(() => { + pipeToRejected = true; + }); + return delay(0).then(() => { + assert_array_equals(ts.events, [], 'transform() should have seen no chunks'); + assert_false(pipeToRejected, 'pipeTo() should not have rejected yet'); + 
ts.controller.terminate(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ts.events, [], 'transform() should still have seen no chunks'); + assert_true(pipeToRejected, 'pipeToRejected must be true'); + }); +}, 'controller.terminate() should error pipeTo()'); + +promise_test(t => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 }); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(0); + controller.enqueue(1); + } + }); + const pipeToPromise = rs.pipeTo(ts.writable); + return delay(0).then(() => { + assert_array_equals(ts.events, ['transform', 0], 'transform() should have seen one chunk'); + ts.controller.terminate(); + return promise_rejects_js(t, TypeError, pipeToPromise, 'pipeTo() should reject'); + }).then(() => { + assert_array_equals(ts.events, ['transform', 0], 'transform() should still have seen only one chunk'); + }); +}, 'controller.terminate() should prevent remaining chunks from being processed'); + +test(() => { + new TransformStream({ + start(controller) { + controller.enqueue(0); + controller.terminate(); + assert_throws_js(TypeError, () => controller.enqueue(1), 'enqueue should throw'); + } + }); +}, 'controller.enqueue() should throw after controller.terminate()'); + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + start(controller) { + controller.enqueue(0); + controller.terminate(); + controller.error(error1); + } + }); + return Promise.all([ + promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'), + promise_rejects_exactly(t, error1, ts.readable.cancel(), 'cancel() should reject with error1'), + promise_rejects_exactly(t, error1, ts.readable.getReader().closed, 'closed should reject with error1') + ]); +}, 'controller.error() after controller.terminate() with queued chunk should error the readable'); + +promise_test(t => { + const ts = new TransformStream({ + 
start(controller) { + controller.terminate(); + controller.error(error1); + } + }); + return Promise.all([ + promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'), + ts.readable.cancel(), + ts.readable.getReader().closed + ]); +}, 'controller.error() after controller.terminate() without queued chunk should do nothing'); + +promise_test(() => { + const ts = new TransformStream({ + flush(controller) { + controller.terminate(); + } + }); + const writer = ts.writable.getWriter(); + return Promise.all([ + writer.close(), + writer.closed, + ts.readable.getReader().closed + ]); +}, 'controller.terminate() inside flush() should not prevent writer.close() from succeeding'); diff --git a/test/fixtures/wpt/streams/writable-streams/aborting.any.js b/test/fixtures/wpt/streams/writable-streams/aborting.any.js new file mode 100644 index 00000000000000..5c053bab915700 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/aborting.any.js @@ -0,0 +1,1378 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(t => { + const ws = new WritableStream({ + write: t.unreached_func('write() should not be called') + }); + + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + + const readyPromise = writer.ready; + + writer.abort(error1); + + assert_equals(writer.ready, readyPromise, 'the ready promise property should not change'); + + return Promise.all([ + promise_rejects_exactly(t, error1, readyPromise, 'the ready promise should reject with error1'), + promise_rejects_exactly(t, error1, writePromise, 'the write() promise should reject with error1') + ]); +}, 'Aborting a WritableStream before it starts should cause the writer\'s unsettled ready promise to reject'); + +promise_test(t 
=> { + const ws = new WritableStream(); + + const writer = ws.getWriter(); + writer.write('a'); + + const readyPromise = writer.ready; + + return readyPromise.then(() => { + writer.abort(error1); + + assert_not_equals(writer.ready, readyPromise, 'the ready promise property should change'); + return promise_rejects_exactly(t, error1, writer.ready, 'the ready promise should reject with error1'); + }); +}, 'Aborting a WritableStream should cause the writer\'s fulfilled ready promise to reset to a rejected one'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, writer.abort(), 'abort() should reject with a TypeError'); +}, 'abort() on a released writer rejects'); + +promise_test(t => { + const ws = recordingWritableStream(); + + return delay(0) + .then(() => { + const writer = ws.getWriter(); + + const abortPromise = writer.abort(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write(1), 'write(1) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'), + abortPromise + ]); + }) + .then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Aborting a WritableStream immediately prevents future writes'); + +promise_test(t => { + const ws = recordingWritableStream(); + const results = []; + + return delay(0) + .then(() => { + const writer = ws.getWriter(); + + results.push( + writer.write(1), + promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(3), 'write(3) must reject with error1') + ); + + const abortPromise = writer.abort(error1); + + results.push( + promise_rejects_exactly(t, error1, writer.write(4), 'write(4) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(5), 'write(5) must reject with error1') + ); + + return abortPromise; + 
}).then(() => { + assert_array_equals(ws.events, ['write', 1, 'abort', error1]); + + return Promise.all(results); + }); +}, 'Aborting a WritableStream prevents further writes after any that are in progress'); + +promise_test(() => { + const ws = new WritableStream({ + abort() { + return 'Hello'; + } + }); + const writer = ws.getWriter(); + + return writer.abort('a').then(value => { + assert_equals(value, undefined, 'fulfillment value must be undefined'); + }); +}, 'Fulfillment value of writer.abort() call must be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.abort(undefined), + 'rejection reason of abortPromise must be the error thrown by abort'); +}, 'WritableStream if sink\'s abort throws, the promise returned by writer.abort() rejects'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + const writer = ws.getWriter(); + + const abortPromise1 = writer.abort(undefined); + const abortPromise2 = writer.abort(undefined); + + assert_equals(abortPromise1, abortPromise2, 'the promises must be the same'); + + return promise_rejects_exactly(t, error1, abortPromise1, 'promise must have matching rejection'); +}, 'WritableStream if sink\'s abort throws, the promise returned by multiple writer.abort()s is the same and rejects'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, ws.abort(undefined), + 'rejection reason of abortPromise must be the error thrown by abort'); +}, 'WritableStream if sink\'s abort throws, the promise returned by ws.abort() rejects'); + +promise_test(t => { + let resolveWritePromise; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + }, + 
abort() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + writer.write().catch(() => {}); + return flushAsyncEvents().then(() => { + const abortPromise = writer.abort(undefined); + + resolveWritePromise(); + return promise_rejects_exactly(t, error1, abortPromise, + 'rejection reason of abortPromise must be the error thrown by abort'); + }); +}, 'WritableStream if sink\'s abort throws, for an abort performed during a write, the promise returned by ' + + 'ws.abort() rejects'); + +promise_test(() => { + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + + return writer.abort(error1).then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Aborting a WritableStream passes through the given reason'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + const abortPromise = writer.abort(error1); + + const events = []; + writer.ready.catch(() => { + events.push('ready'); + }); + writer.closed.catch(() => { + events.push('closed'); + }); + + return Promise.all([ + abortPromise, + promise_rejects_exactly(t, error1, writer.write(), 'writing should reject with error1'), + promise_rejects_exactly(t, error1, writer.close(), 'closing should reject with error1'), + promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1'), + promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1') + ]).then(() => { + assert_array_equals(['ready', 'closed'], events, 'ready should reject before closed'); + }); +}, 'Aborting a WritableStream puts it in an errored state with the error passed to abort()'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + const writePromise = promise_rejects_exactly(t, error1, writer.write('a'), + 'writing should reject with error1'); + + writer.abort(error1); + + return writePromise; +}, 'Aborting a WritableStream causes any outstanding write() promises to be rejected 
with the reason supplied'); + +promise_test(t => { + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1'), + promise_rejects_exactly(t, error1, closePromise, 'close() should reject with error1'), + abortPromise + ]).then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Closing but then immediately aborting a WritableStream causes the stream to error'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + const writer = ws.getWriter(); + + const closePromise = writer.close(); + + return delay(0).then(() => { + const abortPromise = writer.abort(error1); + resolveClose(); + return Promise.all([ + writer.closed, + abortPromise, + closePromise + ]); + }); +}, 'Closing a WritableStream and aborting it while it closes causes the stream to ignore the abort attempt'); + +promise_test(() => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + writer.close(); + + return delay(0).then(() => writer.abort()); +}, 'Aborting a WritableStream after it is closed is a no-op'); + +promise_test(t => { + // Testing that per https://github.com/whatwg/streams/issues/620#issuecomment-263483953 the fallback to close was + // removed. 
+ + // Cannot use recordingWritableStream since it always has an abort + let closeCalled = false; + const ws = new WritableStream({ + close() { + closeCalled = true; + } + }); + + const writer = ws.getWriter(); + + writer.abort(error1); + + return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1').then(() => { + assert_false(closeCalled, 'close must not have been called'); + }); +}, 'WritableStream should NOT call underlying sink\'s close if no abort is supplied (historical)'); + +promise_test(() => { + let thenCalled = false; + const ws = new WritableStream({ + abort() { + return { + then(onFulfilled) { + thenCalled = true; + onFulfilled(); + } + }; + } + }); + const writer = ws.getWriter(); + return writer.abort().then(() => assert_true(thenCalled, 'then() should be called')); +}, 'returning a thenable from abort() should work'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return flushAsyncEvents(); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + writer.abort(error1); + let closedRejected = false; + return Promise.all([ + writePromise.then(() => assert_false(closedRejected, '.closed should not resolve before write()')), + promise_rejects_exactly(t, error1, writer.closed, '.closed should reject').then(() => { + closedRejected = true; + }) + ]); + }); +}, '.closed should not resolve before fulfilled write()'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return Promise.reject(error1); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const abortPromise = writer.abort(error2); + let closedRejected = false; + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write() should reject') + .then(() => assert_false(closedRejected, '.closed should not resolve before write()')), + promise_rejects_exactly(t, error2, 
writer.closed, '.closed should reject') + .then(() => { + closedRejected = true; + }), + abortPromise + ]); + }); +}, '.closed should not resolve before rejected write(); write() error should not overwrite abort() error'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return flushAsyncEvents(); + } + }, new CountQueuingStrategy({ highWaterMark: 4 })); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const settlementOrder = []; + return Promise.all([ + writer.write('1').then(() => settlementOrder.push(1)), + promise_rejects_exactly(t, error1, writer.write('2'), 'first queued write should be rejected') + .then(() => settlementOrder.push(2)), + promise_rejects_exactly(t, error1, writer.write('3'), 'second queued write should be rejected') + .then(() => settlementOrder.push(3)), + writer.abort(error1) + ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order')); + }); +}, 'writes should be satisfied in order when aborting'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return Promise.reject(error1); + } + }, new CountQueuingStrategy({ highWaterMark: 4 })); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const settlementOrder = []; + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write('1'), 'in-flight write should be rejected') + .then(() => settlementOrder.push(1)), + promise_rejects_exactly(t, error2, writer.write('2'), 'first queued write should be rejected') + .then(() => settlementOrder.push(2)), + promise_rejects_exactly(t, error2, writer.write('3'), 'second queued write should be rejected') + .then(() => settlementOrder.push(3)), + writer.abort(error2) + ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order')); + }); +}, 'writes should be satisfied in order after rejected write when aborting'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + 
return Promise.reject(error1); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write('a'), 'writer.write() should reject with error from underlying write()'), + promise_rejects_exactly(t, error2, writer.close(), + 'writer.close() should reject with error from underlying write()'), + writer.abort(error2) + ]); + }); +}, 'close() should reject with abort reason why abort() is first error'); + +promise_test(() => { + let resolveWrite; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + const abortPromise = writer.abort('b'); + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight'); + resolveWrite(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', 'b'], 'abort should be called after the write finishes'); + }); + }); + }); +}, 'underlying abort() should not be called until underlying write() completes'); + +promise_test(() => { + let resolveClose; + const ws = recordingWritableStream({ + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.close(); + const abortPromise = writer.abort(); + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['close'], 'abort should not be called while close is in-flight'); + resolveClose(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['close'], 'abort should not be called'); + }); + }); + }); +}, 'underlying abort() should not be called if underlying close() has started'); + +promise_test(t => { + let rejectClose; + let abortCalled = false; + const ws = new WritableStream({ + close() { + return 
new Promise((resolve, reject) => { + rejectClose = reject; + }); + }, + abort() { + abortCalled = true; + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + return flushAsyncEvents().then(() => { + assert_false(abortCalled, 'underlying abort should not be called while close is in-flight'); + rejectClose(error1); + return promise_rejects_exactly(t, error1, abortPromise, 'abort should reject with the same reason').then(() => { + return promise_rejects_exactly(t, error1, closePromise, 'close should reject with the same reason'); + }).then(() => { + assert_false(abortCalled, 'underlying abort should not be called after close completes'); + }); + }); + }); +}, 'if underlying close() has started and then rejects, the abort() and close() promises should reject with the ' + + 'underlying close rejection reason'); + +promise_test(t => { + let resolveWrite; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight'); + resolveWrite(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', error1], 'abort should be called after write completes'); + return promise_rejects_exactly(t, error1, closePromise, 'promise returned by close() should be rejected'); + }); + }); + }); +}, 'an abort() that happens during a write() should trigger the underlying abort() even with a close() queued'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + return 
writer.ready.then(() => { + writer.write('a'); + writer.abort(error1); + writer.releaseLock(); + const writer2 = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer2.ready, + 'ready of the second writer should be rejected with error1'); + }); +}, 'if a writer is created for a stream with a pending abort, its ready should be rejected with the abort error'); + +promise_test(() => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + const events = []; + return Promise.all([ + closePromise.then(() => { events.push('close'); }), + abortPromise.then(() => { events.push('abort'); }) + ]).then(() => { + assert_array_equals(events, ['close', 'abort']); + }); + }); +}, 'writer close() promise should resolve before abort() promise'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + return new Promise(() => {}); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + return promise_rejects_exactly(t, error1, writer.ready, 'writer.ready should reject'); + }); +}, 'writer.ready should reject on controller error without waiting for underlying write'); + +promise_test(t => { + let rejectWrite; + const ws = new WritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWrite = reject; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.catch(() => { + events.push('writePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + 
promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be rejected yet'); + + rejectWrite(error2); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise, + 'writePromise must reject with the error returned from the sink\'s write method'), + abortPromise, + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must settle'); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise3, + 'writePromise3 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort() while there is an in-flight write, and then finish the write with rejection'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return 
flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.then(() => { + events.push('writePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet'); + + // This error is too late to change anything. abort() has already changed the stream state to 'erroring'. + controller.error(error2); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise3, + 'writePromise3 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet even after ' + + 'controller.error() call'); + + resolveWrite(); + + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must settle'); + + const writePromise4 = writer.write('a'); + + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, writePromise4, + 'writePromise4 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with 
the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort(), controller.error() while there is an in-flight write, and then finish the write'); + +promise_test(t => { + let resolveClose; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + let closePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.then(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + closePromise = writer.close(); + closePromise.then(() => { + events.push('closePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet'); + + controller.error(error2); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet 
even after ' + + 'controller.error() call'); + + resolveClose(); + + return Promise.all([ + closePromise, + abortPromise, + writer.closed, + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'closedPromise, abortPromise and writer.closed must fulfill'); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating release'), + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort(), controller.error() while there is an in-flight close, and then finish the close'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = recordingWritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.then(() => { + events.push('writePromise'); + }); + + controller.error(error2); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise2, + 'writePromise2 must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must 
reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise and writer.closed must not be fulfilled/rejected yet'); + + abortPromise = writer.abort(error1); + abortPromise.catch(() => { + events.push('abortPromise'); + }); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise3, + 'writePromise3 must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'writePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()'); + + resolveWrite(); + + return Promise.all([ + promise_rejects_exactly(t, error2, abortPromise, + 'abort() must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.closed, + 'writer.closed must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must fulfill/reject'); + assert_array_equals(ws.events, ['write', 'a'], 'sink abort() should not be called'); + + const writePromise4 = writer.write('a'); + + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error2, writePromise4, + 'writePromise4 must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must be still rejected with the error passed to the controller\'s error method') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + 
}); +}, 'controller.error(), writer.abort() while there is an in-flight write, and then finish the write'); + +promise_test(t => { + let resolveClose; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + let closePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.then(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + closePromise = writer.close(); + closePromise.then(() => { + events.push('closePromise'); + }); + + controller.error(error2); + + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(events, [], 'closePromise must not be fulfilled/rejected yet'); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + return Promise.all([ + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'closePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()'); + + resolveClose(); + + return Promise.all([ + closePromise, + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must be still rejected with the error passed to the controller\'s error method'), + writer.closed, + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'abortPromise, closePromise and writer.closed must fulfill/reject'); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + 
}); +}, 'controller.error(), writer.abort() while there is an in-flight close, and then finish the close'); + +promise_test(t => { + let resolveWrite; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const closed = writer.closed; + const abortPromise = writer.abort(); + writer.releaseLock(); + resolveWrite(); + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_js(t, TypeError, closed, 'closed should reject')]); + }); +}, 'releaseLock() while aborting should reject the original closed promise'); + +// TODO(ricea): Consider removing this test if it is no longer useful. +promise_test(t => { + let resolveWrite; + let resolveAbort; + let resolveAbortStarted; + const abortStarted = new Promise(resolve => { + resolveAbortStarted = resolve; + }); + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + }, + abort() { + resolveAbortStarted(); + return new Promise(resolve => { + resolveAbort = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const closed = writer.closed; + const abortPromise = writer.abort(); + resolveWrite(); + return abortStarted.then(() => { + writer.releaseLock(); + assert_equals(writer.closed, closed, 'closed promise should not have changed'); + resolveAbort(); + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_js(t, TypeError, closed, 'closed should reject')]); + }); + }); +}, 'releaseLock() during delayed async abort() should reject the writer.closed promise'); + +promise_test(() => { + let resolveStart; + const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const abortPromise = ws.abort('done'); + return 
flushAsyncEvents().then(() => { + assert_array_equals(ws.events, [], 'abort() should not be called during start()'); + resolveStart(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['abort', 'done'], 'abort() should be called after start() is done'); + }); + }); +}, 'sink abort() should not be called until sink start() is done'); + +promise_test(() => { + let resolveStart; + let controller; + const ws = recordingWritableStream({ + start(c) { + controller = c; + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const abortPromise = ws.abort('done'); + controller.error(error1); + resolveStart(); + return abortPromise.then(() => + assert_array_equals(ws.events, ['abort', 'done'], + 'abort() should still be called if start() errors the controller')); +}, 'if start attempts to error the controller after abort() has been called, then it should lose'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + return ws.abort('done').then(() => + assert_array_equals(ws.events, ['abort', 'done'], 'abort() should still be called if start() rejects')); +}, 'stream abort() promise should still resolve if sink start() rejects'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + const writerReady1 = writer.ready; + writer.abort(error1); + const writerReady2 = writer.ready; + assert_not_equals(writerReady1, writerReady2, 'abort() should replace the ready promise with a rejected one'); + return Promise.all([writerReady1, + promise_rejects_exactly(t, error1, writerReady2, 'writerReady2 should reject')]); +}, 'writer abort() during sink start() should replace the writer.ready promise synchronously'); + +promise_test(t => { + const events = []; + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const writePromise1 = writer.write(1); + const abortPromise = writer.abort(error1); + const writePromise2 = 
writer.write(2); + const closePromise = writer.close(); + writePromise1.catch(() => events.push('write1')); + abortPromise.then(() => events.push('abort')); + writePromise2.catch(() => events.push('write2')); + closePromise.catch(() => events.push('close')); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise1, 'first write() should reject'), + abortPromise, + promise_rejects_exactly(t, error1, writePromise2, 'second write() should reject'), + promise_rejects_exactly(t, error1, closePromise, 'close() should reject') + ]) + .then(() => { + assert_array_equals(events, ['write2', 'write1', 'abort', 'close'], + 'promises should resolve in the standard order'); + assert_array_equals(ws.events, ['abort', error1], 'underlying sink write() should not be called'); + }); +}, 'promises returned from other writer methods should be rejected when writer abort() happens during sink start()'); + +promise_test(t => { + let writeReject; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise((resolve, reject) => { + writeReject = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const abortPromise = writer.abort(); + controller.error(error1); + writeReject(error2); + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise, 'write() should reject with error2'), + abortPromise + ]); + }); +}, 'abort() should succeed despite rejection from write'); + +promise_test(t => { + let closeReject; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise((resolve, reject) => { + closeReject = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + controller.error(error1); + closeReject(error2); + return Promise.all([ + 
promise_rejects_exactly(t, error2, closePromise, 'close() should reject with error2'), + promise_rejects_exactly(t, error2, abortPromise, 'abort() should reject with error2') + ]); + }); +}, 'abort() should be rejected with the rejection returned from close()'); + +promise_test(t => { + let rejectWrite; + const ws = recordingWritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWrite = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('1'); + const abortPromise = writer.abort(error2); + rejectWrite(error1); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write should reject'), + abortPromise, + promise_rejects_exactly(t, error2, writer.closed, 'closed should reject with error2') + ]); + }).then(() => { + assert_array_equals(ws.events, ['write', '1', 'abort', error2], 'abort sink method should be called'); + }); +}, 'a rejecting sink.write() should not prevent sink.abort() from being called'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + return ws.abort(error2) + .then(() => { + assert_array_equals(ws.events, ['abort', error2]); + }); +}, 'when start errors after stream abort(), underlying sink abort() should be called anyway'); + +promise_test(() => { + const ws = new WritableStream(); + const abortPromise1 = ws.abort(); + const abortPromise2 = ws.abort(); + assert_equals(abortPromise1, abortPromise2, 'the promises must be the same'); + + return abortPromise1.then( + v => assert_equals(v, undefined, 'abort() should fulfill with undefined')); +}, 'when calling abort() twice on the same stream, both should give the same promise that fulfills with undefined'); + +promise_test(() => { + const ws = new WritableStream(); + const abortPromise1 = ws.abort(); + + return abortPromise1.then(v1 => { + assert_equals(v1, undefined, 'first abort() should fulfill with 
undefined'); + + const abortPromise2 = ws.abort(); + assert_not_equals(abortPromise2, abortPromise1, 'because we waited, the second promise should be a new promise'); + + return abortPromise2.then(v2 => { + assert_equals(v2, undefined, 'second abort() should fulfill with undefined'); + }); + }); +}, 'when calling abort() twice on the same stream, but sequentially so so there\'s no pending abort the second time, ' + + 'both should fulfill with undefined'); + +promise_test(t => { + const ws = new WritableStream({ + start(c) { + c.error(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject').then(() => { + return writer.abort().then( + v => assert_equals(v, undefined, 'abort() should fulfill with undefined')); + }); +}, 'calling abort() on an errored stream should fulfill with undefined'); + +promise_test(t => { + let controller; + let resolveWrite; + const ws = recordingWritableStream({ + start(c) { + controller = c; + }, + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('chunk'); + controller.error(error1); + const abortPromise = writer.abort(error2); + resolveWrite(); + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, abortPromise, 'abort() should reject') + ]).then(() => { + assert_array_equals(ws.events, ['write', 'chunk'], 'sink abort() should not be called'); + }); + }); +}, 'sink abort() should not be called if stream was erroring due to controller.error() before abort() was called'); + +promise_test(t => { + let resolveWrite; + let size = 1; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }, { + size() { + return size; + }, + highWaterMark: 1 + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const 
writePromise1 = writer.write('chunk1'); + size = NaN; + const writePromise2 = writer.write('chunk2'); + const abortPromise = writer.abort(error2); + resolveWrite(); + return Promise.all([ + writePromise1, + promise_rejects_js(t, RangeError, writePromise2, 'second write() should reject'), + promise_rejects_js(t, RangeError, abortPromise, 'abort() should reject') + ]).then(() => { + assert_array_equals(ws.events, ['write', 'chunk1'], 'sink abort() should not be called'); + }); + }); +}, 'sink abort() should not be called if stream was erroring due to bad strategy before abort() was called'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort().then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, undefined, 'e should be undefined')); + }); +}, 'abort with no arguments should set the stored error to undefined'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort(undefined).then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, undefined, 'e should be undefined')); + }); +}, 'abort with an undefined argument should set the stored error to undefined'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort('string argument').then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, 'string argument', 'e should be \'string argument\'')); + }); +}, 'abort with a string argument should set the stored error to that argument'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject') + .then(() => writer.ready); +}, 'abort on a locked stream should reject'); diff --git 
a/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js b/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js new file mode 100644 index 00000000000000..b180bae57c0585 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js @@ -0,0 +1,95 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('a unique string'); +error1.name = 'error1'; + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({}, { + get size() { + throw error1; + }, + highWaterMark: 5 + }); + }, 'construction should re-throw the error'); +}, 'Writable stream: throwing strategy.size getter'); + +test(() => { + assert_throws_js(TypeError, () => { + new WritableStream({}, { size: 'a string' }); + }); +}, 'reject any non-function value for strategy.size'); + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({}, { + size() { + return 1; + }, + get highWaterMark() { + throw error1; + } + }); + }, 'construction should re-throw the error'); +}, 'Writable stream: throwing strategy.highWaterMark getter'); + +test(() => { + + for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) { + assert_throws_js(RangeError, () => { + new WritableStream({}, { + size() { + return 1; + }, + highWaterMark + }); + }, `construction should throw a RangeError for ${highWaterMark}`); + } +}, 'Writable stream: invalid strategy.highWaterMark'); + +promise_test(t => { + const ws = new WritableStream({}, { + size() { + throw error1; + }, + highWaterMark: 5 + }); + + const writer = ws.getWriter(); + + const p1 = promise_rejects_exactly(t, error1, writer.write('a'), 'write should reject with the thrown error'); + + const p2 = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error'); + + return Promise.all([p1, p2]); +}, 'Writable stream: throwing strategy.size method'); + +promise_test(() => { + const sizes = [NaN, -Infinity, Infinity, -1]; + return 
Promise.all(sizes.map(size => { + const ws = new WritableStream({}, { + size() { + return size; + }, + highWaterMark: 5 + }); + + const writer = ws.getWriter(); + + return writer.write('a').then(() => assert_unreached('write must reject'), writeE => { + assert_equals(writeE.name, 'RangeError', `write must reject with a RangeError for ${size}`); + + return writer.closed.then(() => assert_unreached('write must reject'), closedE => { + assert_equals(closedE, writeE, `closed should reject with the same error as write`); + }); + }); + })); +}, 'Writable stream: invalid strategy.size return value'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream(undefined, { + size: 'not a function', + highWaterMark: NaN + }), 'WritableStream constructor should throw a TypeError'); +}, 'Writable stream: invalid size beats invalid highWaterMark'); diff --git a/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js b/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js new file mode 100644 index 00000000000000..0bfc036246a870 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js @@ -0,0 +1,204 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({ + get start() { + throw error1; + } + }); + }, 'constructor should throw same error as throwing start getter'); + + assert_throws_exactly(error1, () => { + new WritableStream({ + start() { + throw error1; + } + }); + }, 'constructor should throw same error as throwing start method'); + + assert_throws_js(TypeError, () => { + new WritableStream({ + start: 'not a function or undefined' + }); + }, 'constructor should throw TypeError when passed a non-function start property'); + + assert_throws_js(TypeError, () => { + 
new WritableStream({ + start: { apply() {} } + }); + }, 'constructor should throw TypeError when passed a non-function start property with an .apply method'); +}, 'start: errors in start cause WritableStream constructor to throw'); + +promise_test(t => { + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the thrown error') + .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the thrown error')) + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed promise must reject with the thrown error')) + .then(() => { + assert_array_equals(ws.events, ['close']); + }); + +}, 'close: throwing method should cause writer close() and ready to reject'); + +promise_test(t => { + + const ws = recordingWritableStream({ + close() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the same error') + .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error')) + .then(() => assert_array_equals(ws.events, ['close'])); + +}, 'close: returning a rejected promise should cause writer close() and ready to reject'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get close() { + throw error1; + } + }), 'constructor should throw'); +}, 'close: throwing getter should cause constructor to throw'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get write() { + throw error1; + } + }), 'constructor should throw'); +}, 'write: throwing getter should cause write() and closed to reject'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, 
error1, writer.write('a'), 'write should reject with the thrown error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error')); +}, 'write: throwing method should cause write() and closed to reject'); + +promise_test(t => { + + let rejectSinkWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise((r, reject) => { + rejectSinkWritePromise = reject; + }); + } + }); + + return flushAsyncEvents().then(() => { + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + rejectSinkWritePromise(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'writer write must reject with the same error'), + promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error') + ]); + }) + .then(() => { + assert_array_equals(ws.events, ['write', 'a']); + }); + +}, 'write: returning a promise that becomes rejected after the writer write() should cause writer write() and ready ' + + 'to reject'); + +promise_test(t => { + + const ws = recordingWritableStream({ + write() { + if (ws.events.length === 2) { + return delay(0); + } + + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + // Do not wait for this; we want to test the ready promise when the stream is "full" (desiredSize = 0), but if we wait + // then the stream will transition back to "empty" (desiredSize = 1) + writer.write('a'); + const readyPromise = writer.ready; + + return promise_rejects_exactly(t, error1, writer.write('b'), 'second write must reject with the same error').then(() => { + assert_equals(writer.ready, readyPromise, + 'the ready promise must not change, since the queue was full after the first write, so the pending one simply ' + + 'transitioned'); + return promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error'); + }) + .then(() => assert_array_equals(ws.events, ['write', 'a', 
'write', 'b'])); + +}, 'write: returning a rejected promise (second write) should cause writer write() and ready to reject'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + start: 'test' + }), 'constructor should throw'); +}, 'start: non-function start method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + write: 'test' + }), 'constructor should throw'); +}, 'write: non-function write method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + close: 'test' + }), 'constructor should throw'); +}, 'close: non-function close method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + abort: { apply() {} } + }), 'constructor should throw'); +}, 'abort: non-function abort method with .apply'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get abort() { + throw error1; + } + }), 'constructor should throw'); +}, 'abort: throwing getter should cause abort() and closed to reject'); + +promise_test(t => { + const abortReason = new Error('different string'); + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.abort(abortReason), 'abort should reject with the thrown error') + .then(() => promise_rejects_exactly(t, abortReason, writer.closed, 'closed should reject with abortReason')); +}, 'abort: throwing method should cause abort() and closed to reject'); diff --git a/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js new file mode 100644 index 00000000000000..9a61dd7cc69787 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js @@ -0,0 +1,28 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(t => { + let isDone = false; + const ws = new WritableStream( + { + 
write() { + return new Promise(resolve => { + t.step_timeout(() => { + isDone = true; + resolve(); + }, 200); + }); + }, + + close() { + assert_true(isDone, 'close is only called once the promise has been resolved'); + } + }, + new ByteLengthQueuingStrategy({ highWaterMark: 1024 * 16 }) + ); + + const writer = ws.getWriter(); + writer.write({ byteLength: 1024 }); + + return writer.close(); +}, 'Closing a writable stream with in-flight writes below the high water mark delays the close call properly'); diff --git a/test/fixtures/wpt/streams/writable-streams/close.any.js b/test/fixtures/wpt/streams/writable-streams/close.any.js new file mode 100644 index 00000000000000..cf997ed84cdcac --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/close.any.js @@ -0,0 +1,470 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(() => { + const ws = new WritableStream({ + close() { + return 'Hello'; + } + }); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + return closePromise.then(value => assert_equals(value, undefined, 'fulfillment value must be undefined')); +}, 'fulfillment value of writer.close() call must be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(() => { + let controller; + let resolveClose; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + return flushAsyncEvents().then(() => { + controller.error(error1); + return flushAsyncEvents(); + }).then(() => { + resolveClose(); + return Promise.all([ + closePromise, + writer.closed, + flushAsyncEvents().then(() => 
writer.closed)]); + }); +}, 'when sink calls error asynchronously while sink close is in-flight, the stream should not become errored'); + +promise_test(() => { + let controller; + const passedError = new Error('error me'); + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + controller.error(passedError); + } + }); + + const writer = ws.getWriter(); + + return writer.close().then(() => writer.closed); +}, 'when sink calls error synchronously while closing, the stream should not become errored'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return Promise.all([ + writer.write('y'), + promise_rejects_exactly(t, error1, writer.close(), 'close() must reject with the error'), + promise_rejects_exactly(t, error1, writer.closed, 'closed must reject with the error') + ]); +}, 'when the sink throws during close, and the close is requested while a write is still in-flight, the stream should ' + + 'become errored during the close'); + +promise_test(() => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.write('a'); + + return delay(0).then(() => { + writer.releaseLock(); + }); +}, 'releaseLock on a stream with a pending write in which the stream has been errored'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + controller.error(error1); + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.close(); + + return delay(0).then(() => { + writer.releaseLock(); + }); +}, 'releaseLock on a stream with a pending close in which controller.error() was called'); + +promise_test(() => { + const ws = recordingWritableStream(); + + const writer = ws.getWriter(); + + return writer.ready.then(() => { + assert_equals(writer.desiredSize, 1, 
'desiredSize should be 1'); + + writer.close(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be still 1'); + + return writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_array_equals(ws.events, ['close'], 'write and abort should not be called'); + }); + }); +}, 'when close is called on a WritableStream in writable state, ready should return a fulfilled promise'); + +promise_test(() => { + const ws = recordingWritableStream({ + write() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + + return writer.ready.then(() => { + writer.write('a'); + + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + + let calledClose = false; + return Promise.all([ + writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_true(calledClose, 'ready should not be fulfilled before writer.close() is called'); + assert_array_equals(ws.events, ['write', 'a'], 'sink abort() should not be called'); + }), + flushAsyncEvents().then(() => { + writer.close(); + calledClose = true; + }) + ]); + }); +}, 'when close is called on a WritableStream in waiting state, ready promise should be fulfilled'); + +promise_test(() => { + let asyncCloseFinished = false; + const ws = recordingWritableStream({ + close() { + return flushAsyncEvents().then(() => { + asyncCloseFinished = true; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + + writer.close(); + + return writer.ready.then(v => { + assert_false(asyncCloseFinished, 'ready promise should be fulfilled before async close completes'); + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'sink abort() should not be called'); + }); + }); +}, 'when close is called on a WritableStream in waiting state, ready should be fulfilled 
immediately even if close ' + + 'takes a long time'); + +promise_test(t => { + const rejection = { name: 'letter' }; + const ws = new WritableStream({ + close() { + return { + then(onFulfilled, onRejected) { onRejected(rejection); } + }; + } + }); + return promise_rejects_exactly(t, rejection, ws.getWriter().close(), 'close() should return a rejection'); +}, 'returning a thenable from close() should work'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const closedPromise = writer.closed; + writer.releaseLock(); + return Promise.all([ + closePromise, + promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected') + ]); + }); +}, 'releaseLock() should not change the result of sync close()'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return flushAsyncEvents(); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const closedPromise = writer.closed; + writer.releaseLock(); + return Promise.all([ + closePromise, + promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected') + ]); + }); +}, 'releaseLock() should not change the result of async close()'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + const promise = new Promise(resolve => { + resolveClose = resolve; + }); + return promise; + } + }); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + return delay(0).then(() => { + resolveClose(); + return closePromise.then(() => { + assert_equals(ws.getWriter().desiredSize, 0, 'desiredSize should be 0'); + }); + }); +}, 'close() should set state to CLOSED even if writer has detached'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + const promise = new Promise(resolve => { + 
resolveClose = resolve; + }); + return promise; + } + }); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + return delay(0).then(() => { + const abortingWriter = ws.getWriter(); + const abortPromise = abortingWriter.abort(); + abortingWriter.releaseLock(); + resolveClose(); + return abortPromise; + }); +}, 'the promise returned by async abort during close should resolve'); + +// Though the order in which the promises are fulfilled or rejected is arbitrary, we're checking it for +// interoperability. We can change the order as long as we file bugs on all implementers to update to the latest tests +// to keep them interoperable. + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + + const events = []; + return Promise.all([ + closePromise.then(() => { + events.push('closePromise'); + }), + writer.closed.then(() => { + events.push('closed'); + }) + ]).then(() => { + assert_array_equals(events, ['closePromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); +}, 'promises must fulfill/reject in the expected order on closure'); + +promise_test(() => { + const ws = new WritableStream({}); + + // Wait until the WritableStream starts so that the close() call gets processed. Otherwise, abort() will be + // processed without waiting for completion of the close(). 
+ return delay(0).then(() => { + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + const events = []; + return Promise.all([ + closePromise.then(() => { + events.push('closePromise'); + }), + abortPromise.then(() => { + events.push('abortPromise'); + }), + writer.closed.then(() => { + events.push('closed'); + }) + ]).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); + }); +}, 'promises must fulfill/reject in the expected order on aborted closure'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return Promise.reject(error1); + } + }); + + // Wait until the WritableStream starts so that the close() call gets processed. + return delay(0).then(() => { + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error2); + + const events = []; + closePromise.catch(() => events.push('closePromise')); + abortPromise.catch(() => events.push('abortPromise')); + writer.closed.catch(() => events.push('closed')); + return Promise.all([ + promise_rejects_exactly(t, error1, closePromise, + 'closePromise must reject with the error returned from the sink\'s close method'), + promise_rejects_exactly(t, error1, abortPromise, + 'abortPromise must reject with the error returned from the sink\'s close method'), + promise_rejects_exactly(t, error2, writer.closed, + 'writer.closed must reject with error2') + ]).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); + }); +}, 'promises must fulfill/reject in the expected order on aborted and errored closure'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); 
+ } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('c'); + controller.error(error1); + const closePromise = writer.close(); + let closeRejected = false; + closePromise.catch(() => { + closeRejected = true; + }); + return flushAsyncEvents().then(() => { + assert_false(closeRejected); + resolveWrite(); + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, closePromise, 'close() should reject') + ]).then(() => { + assert_true(closeRejected); + }); + }); + }); +}, 'close() should not reject until no sink methods are in flight'); + +promise_test(() => { + const ws = new WritableStream(); + const writer1 = ws.getWriter(); + return writer1.close().then(() => { + writer1.releaseLock(); + const writer2 = ws.getWriter(); + const ready = writer2.ready; + assert_equals(ready.constructor, Promise); + return ready; + }); +}, 'ready promise should be initialised as fulfilled for a writer on a closed stream'); + +promise_test(() => { + const ws = new WritableStream(); + ws.close(); + const writer = ws.getWriter(); + return writer.closed; +}, 'close() on a writable stream should work'); + +promise_test(t => { + const ws = new WritableStream(); + ws.getWriter(); + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); +}, 'close() on a locked stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.close(), 'close should reject with error1'); +}, 'close() on an erroring stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + const writer = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the error').then(() => { + writer.releaseLock(); + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); 
+ }); +}, 'close() on an errored stream should reject'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.close().then(() => { + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); + }); +}, 'close() on an closed stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); +}, 'close() on a stream with a pending close should reject'); diff --git a/test/fixtures/wpt/streams/writable-streams/constructor.any.js b/test/fixtures/wpt/streams/writable-streams/constructor.any.js new file mode 100644 index 00000000000000..75eed2a993fe5e --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/constructor.any.js @@ -0,0 +1,155 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + + // Now error the stream after its construction. 
+ controller.error(error1); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, null, 'desiredSize should be null'); + return writer.closed.catch(r => { + assert_equals(r, error1, 'ws should be errored by the passed error'); + }); +}, 'controller argument should be passed to start method'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + } + }); + + const writer = ws.getWriter(); + + return Promise.all([ + writer.write('a'), + promise_rejects_exactly(t, error1, writer.closed, 'controller.error() in write() should error the stream') + ]); +}, 'controller argument should be passed to write method'); + +// Older versions of the standard had the controller argument passed to close(). It wasn't useful, and so has been +// removed. This test remains to identify implementations that haven't been updated. +promise_test(t => { + const ws = new WritableStream({ + close(...args) { + t.step(() => { + assert_array_equals(args, [], 'no arguments should be passed to close'); + }); + } + }); + + return ws.getWriter().close(); +}, 'controller argument should not be passed to close method'); + +promise_test(() => { + const ws = new WritableStream({}, { + highWaterMark: 1000, + size() { return 1; } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1000, 'desiredSize should be 1000'); + return writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should fulfill with undefined'); + }); +}, 'highWaterMark should be reflected to desiredSize'); + +promise_test(() => { + const ws = new WritableStream({}, { + highWaterMark: Infinity, + size() { return 0; } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, Infinity, 'desiredSize should be Infinity'); + + return writer.ready; +}, 'WritableStream should be writable and ready should fulfill immediately if the strategy does not apply ' + + 'backpressure'); + +test(() => { + new 
WritableStream(); +}, 'WritableStream should be constructible with no arguments'); + +test(() => { + const underlyingSink = { get start() { throw error1; } }; + const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } }; + + // underlyingSink is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer. + // So the queuingStrategy exception should be encountered first. + assert_throws_exactly(error2, () => new WritableStream(underlyingSink, queuingStrategy)); +}, 'underlyingSink argument should be converted after queuingStrategy argument'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + assert_equals(typeof writer.write, 'function', 'writer should have a write method'); + assert_equals(typeof writer.abort, 'function', 'writer should have an abort method'); + assert_equals(typeof writer.close, 'function', 'writer should have a close method'); + + assert_equals(writer.desiredSize, 1, 'desiredSize should start at 1'); + + assert_not_equals(typeof writer.ready, 'undefined', 'writer should have a ready property'); + assert_equals(typeof writer.ready.then, 'function', 'ready property should be thenable'); + assert_not_equals(typeof writer.closed, 'undefined', 'writer should have a closed property'); + assert_equals(typeof writer.closed.then, 'function', 'closed property should be thenable'); +}, 'WritableStream instances should have standard methods and properties'); + +test(() => { + let WritableStreamDefaultController; + new WritableStream({ + start(c) { + WritableStreamDefaultController = c.constructor; + } + }); + + assert_throws_js(TypeError, () => new WritableStreamDefaultController({}), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultController constructor should throw'); + +test(() => { + let WritableStreamDefaultController; + const stream = new WritableStream({ + start(c) { + WritableStreamDefaultController = c.constructor; + } + }); + + 
assert_throws_js(TypeError, () => new WritableStreamDefaultController(stream), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultController constructor should throw when passed an initialised WritableStream'); + +test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + writer.releaseLock(); + assert_throws_js(TypeError, () => new WritableStreamDefaultWriter({}), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultWriter should throw unless passed a WritableStream'); + +test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(stream), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultWriter constructor should throw when stream argument is locked'); diff --git a/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js new file mode 100644 index 00000000000000..30edb3eb315c62 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js @@ -0,0 +1,124 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + new WritableStream({}, new CountQueuingStrategy({ highWaterMark: 4 })); +}, 'Can construct a writable stream with a valid CountQueuingStrategy'); + +promise_test(() => { + const dones = Object.create(null); + + const ws = new WritableStream( + { + write(chunk) { + return new Promise(resolve => { + dones[chunk] = resolve; + }); + } + }, + new CountQueuingStrategy({ highWaterMark: 0 }) + ); + + const writer = ws.getWriter(); + let writePromiseB; + let writePromiseC; + + return Promise.resolve().then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be initially 0'); + + const 
writePromiseA = writer.write('a'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 1st write()'); + + writePromiseB = writer.write('b'); + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 2nd write()'); + + dones.a(); + return writePromiseA; + }).then(() => { + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 1st write()'); + + dones.b(); + return writePromiseB; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 2nd write()'); + + writePromiseC = writer.write('c'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 3rd write()'); + + dones.c(); + return writePromiseC; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()'); + }); +}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 0)'); + +promise_test(() => { + const dones = Object.create(null); + + const ws = new WritableStream( + { + write(chunk) { + return new Promise(resolve => { + dones[chunk] = resolve; + }); + } + }, + new CountQueuingStrategy({ highWaterMark: 4 }) + ); + + const writer = ws.getWriter(); + let writePromiseB; + let writePromiseC; + let writePromiseD; + + return Promise.resolve().then(() => { + assert_equals(writer.desiredSize, 4, 'desiredSize should be initially 4'); + + const writePromiseA = writer.write('a'); + assert_equals(writer.desiredSize, 3, 'desiredSize should be 3 after 1st write()'); + + writePromiseB = writer.write('b'); + assert_equals(writer.desiredSize, 2, 'desiredSize should be 2 after 2nd write()'); + + writePromiseC = writer.write('c'); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 after 3rd write()'); + + writePromiseD = writer.write('d'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after 4th write()'); + + writer.write('e'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 5th write()'); 
+ + writer.write('f'); + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 6th write()'); + + writer.write('g'); + assert_equals(writer.desiredSize, -3, 'desiredSize should be -3 after 7th write()'); + + dones.a(); + return writePromiseA; + }).then(() => { + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after completing 1st write()'); + + dones.b(); + return writePromiseB; + }).then(() => { + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 2nd write()'); + + dones.c(); + return writePromiseC; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()'); + + writer.write('h'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 8th write()'); + + dones.d(); + return writePromiseD; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 4th write()'); + + writer.write('i'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 9th write()'); + }); +}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 4)'); diff --git a/test/fixtures/wpt/streams/writable-streams/error.any.js b/test/fixtures/wpt/streams/writable-streams/error.any.js new file mode 100644 index 00000000000000..be986fccc6eac6 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/error.any.js @@ -0,0 +1,64 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'stream should be errored'); +}, 'controller.error() should error the stream'); + +test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + ws.abort(); + 
controller.error(error1); +}, 'controller.error() on erroring stream should not throw'); + +promise_test(t => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + controller.error(error1); + controller.error(error2); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'first controller.error() should win'); +}, 'surplus calls to controller.error() should be a no-op'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + return ws.abort().then(() => { + controller.error(error1); + }); +}, 'controller.error() on errored stream should not throw'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + return ws.getWriter().close().then(() => { + controller.error(error1); + }); +}, 'controller.error() on closed stream should not throw'); diff --git a/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js b/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js new file mode 100644 index 00000000000000..8e77ba0bb31185 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js @@ -0,0 +1,87 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers +// than adding up the items in the queue would. It is important that implementations give the same result in these edge +// cases so that developers do not come to depend on non-standard behaviour. See +// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion. 
+ +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(2), + writer.write(Number.MAX_SAFE_INTEGER) + ]; + + assert_equals(writer.desiredSize, 0 - 2 - Number.MAX_SAFE_INTEGER, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near NUMBER.MAX_SAFE_INTEGER (total ends up positive)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(1e-16), + writer.write(1) + ]; + + assert_equals(writer.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(1e-16), + writer.write(1), + writer.write(2e-16) + ]; + + assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing three chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16, + 'desiredSize must be calculated using floating-point arithmetic (after the three chunks have finished writing)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(2e-16), + writer.write(1) + ]; + + 
assert_equals(writer.desiredSize, 0 - 2e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0 - 2e-16 - 1 + 2e-16 + 1, + 'desiredSize must be calculated using floating-point arithmetic (after the two chunks have finished writing)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up zero)'); + +function setupTestStream() { + const strategy = { + size(x) { + return x; + }, + highWaterMark: 0 + }; + + const ws = new WritableStream({}, strategy); + + return ws.getWriter(); +} diff --git a/test/fixtures/wpt/streams/writable-streams/general.any.js b/test/fixtures/wpt/streams/writable-streams/general.any.js new file mode 100644 index 00000000000000..fdd10b29aa0ebc --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/general.any.js @@ -0,0 +1,277 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + const ws = new WritableStream({}); + const writer = ws.getWriter(); + writer.releaseLock(); + + assert_throws_js(TypeError, () => writer.desiredSize, 'desiredSize should throw a TypeError'); +}, 'desiredSize on a released writer'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); +}, 'desiredSize initial value'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + writer.close(); + + return writer.closed.then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + }); +}, 'desiredSize on a writer for a closed stream'); + +test(() => { + const ws = new WritableStream({ + start(c) { + c.error(); + } + }); + + const writer = ws.getWriter(); + assert_equals(writer.desiredSize, null, 'desiredSize should be null'); +}, 'desiredSize on a writer for an errored stream'); + +test(() => { + const ws = new 
WritableStream({}); + + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + ws.getWriter(); +}, 'ws.getWriter() on a closing WritableStream'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + return writer.close().then(() => { + writer.releaseLock(); + + ws.getWriter(); + }); +}, 'ws.getWriter() on a closed WritableStream'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + writer.abort(); + writer.releaseLock(); + + ws.getWriter(); +}, 'ws.getWriter() on an aborted WritableStream'); + +promise_test(() => { + const ws = new WritableStream({ + start(c) { + c.error(); + } + }); + + const writer = ws.getWriter(); + return writer.closed.then( + v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v), + () => { + writer.releaseLock(); + + ws.getWriter(); + } + ); +}, 'ws.getWriter() on an errored WritableStream'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + writer.releaseLock(); + + return writer.closed.then( + v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v), + closedRejection => { + assert_equals(closedRejection.name, 'TypeError', 'closed promise should reject with a TypeError'); + return writer.ready.then( + v => assert_unreached('writer.ready fulfilled unexpectedly with: ' + v), + readyRejection => assert_equals(readyRejection, closedRejection, + 'ready promise should reject with the same error') + ); + } + ); +}, 'closed and ready on a released writer'); + +promise_test(t => { + let thisObject = null; + // Calls to Sink methods after the first are implicitly ignored. Only the first value that is passed to the resolver + // is used. 
+ class Sink { + start() { + // Called twice + t.step(() => { + assert_equals(this, thisObject, 'start should be called as a method'); + }); + } + + write() { + t.step(() => { + assert_equals(this, thisObject, 'write should be called as a method'); + }); + } + + close() { + t.step(() => { + assert_equals(this, thisObject, 'close should be called as a method'); + }); + } + + abort() { + t.step(() => { + assert_equals(this, thisObject, 'abort should be called as a method'); + }); + } + } + + const theSink = new Sink(); + thisObject = theSink; + const ws = new WritableStream(theSink); + + const writer = ws.getWriter(); + + writer.write('a'); + const closePromise = writer.close(); + + const ws2 = new WritableStream(theSink); + const writer2 = ws2.getWriter(); + const abortPromise = writer2.abort(); + + return Promise.all([ + closePromise, + abortPromise + ]); +}, 'WritableStream should call underlying sink methods as methods'); + +promise_test(t => { + function functionWithOverloads() {} + functionWithOverloads.apply = t.unreached_func('apply() should not be called'); + functionWithOverloads.call = t.unreached_func('call() should not be called'); + const underlyingSink = { + start: functionWithOverloads, + write: functionWithOverloads, + close: functionWithOverloads, + abort: functionWithOverloads + }; + // Test start(), write(), close(). + const ws1 = new WritableStream(underlyingSink); + const writer1 = ws1.getWriter(); + writer1.write('a'); + writer1.close(); + + // Test abort(). + const abortError = new Error(); + abortError.name = 'abort error'; + + const ws2 = new WritableStream(underlyingSink); + const writer2 = ws2.getWriter(); + writer2.abort(abortError); + + // Test abort() with a close underlying sink method present. (Historical; see + // https://github.com/whatwg/streams/issues/620#issuecomment-263483953 for what used to be + // tested here. But more coverage can't hurt.) 
+ const ws3 = new WritableStream({ + start: functionWithOverloads, + write: functionWithOverloads, + close: functionWithOverloads + }); + const writer3 = ws3.getWriter(); + writer3.abort(abortError); + + return writer1.closed + .then(() => promise_rejects_exactly(t, abortError, writer2.closed, 'writer2.closed should be rejected')) + .then(() => promise_rejects_exactly(t, abortError, writer3.closed, 'writer3.closed should be rejected')); +}, 'methods should not not have .apply() or .call() called'); + +promise_test(() => { + const strategy = { + size() { + if (this !== undefined) { + throw new Error('size called as a method'); + } + return 1; + } + }; + + const ws = new WritableStream({}, strategy); + const writer = ws.getWriter(); + return writer.write('a'); +}, 'WritableStream\'s strategy.size should not be called as a method'); + +promise_test(() => { + const ws = new WritableStream(); + const writer1 = ws.getWriter(); + assert_equals(undefined, writer1.releaseLock(), 'releaseLock() should return undefined'); + const writer2 = ws.getWriter(); + assert_equals(undefined, writer1.releaseLock(), 'no-op releaseLock() should return undefined'); + // Calling releaseLock() on writer1 should not interfere with writer2. If it did, then the ready promise would be + // rejected. + return writer2.ready; +}, 'redundant releaseLock() is no-op'); + +promise_test(() => { + const events = []; + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + // Force the ready promise back to a pending state. 
+ const writerPromise = writer.write('dummy'); + const readyPromise = writer.ready.catch(() => events.push('ready')); + const closedPromise = writer.closed.catch(() => events.push('closed')); + writer.releaseLock(); + return Promise.all([readyPromise, closedPromise]).then(() => { + assert_array_equals(events, ['ready', 'closed'], 'ready promise should fire before closed promise'); + // Stop the writer promise hanging around after the test has finished. + return Promise.all([ + writerPromise, + ws.abort() + ]); + }); + }); +}, 'ready promise should fire before closed on releaseLock'); + +test(() => { + class Subclass extends WritableStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), WritableStream.prototype, + 'Subclass.prototype\'s prototype should be WritableStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), WritableStream, + 'Subclass\'s prototype should be WritableStream'); + const sub = new Subclass(); + assert_true(sub instanceof WritableStream, + 'Subclass object should be an instance of WritableStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const lockedGetter = Object.getOwnPropertyDescriptor( + WritableStream.prototype, 'locked').get; + assert_equals(lockedGetter.call(sub), sub.locked, + 'Subclass object should pass brand check'); + assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing WritableStream should work'); + +test(() => { + const ws = new WritableStream(); + assert_false(ws.locked, 'stream should not be locked'); + ws.getWriter(); + assert_true(ws.locked, 'stream should be locked'); +}, 'the locked getter should return true if the stream has a writer'); diff --git a/test/fixtures/wpt/streams/writable-streams/properties.any.js b/test/fixtures/wpt/streams/writable-streams/properties.any.js new file mode 100644 index 00000000000000..0f7f876d8b6fc4 --- 
/dev/null +++ b/test/fixtures/wpt/streams/writable-streams/properties.any.js @@ -0,0 +1,53 @@ +// META: global=window,worker,jsshell +'use strict'; + +const sinkMethods = { + start: { + length: 1, + trigger: () => Promise.resolve() + }, + write: { + length: 2, + trigger: writer => writer.write() + }, + close: { + length: 0, + trigger: writer => writer.close() + }, + abort: { + length: 1, + trigger: writer => writer.abort() + } +}; + +for (const method in sinkMethods) { + const { length, trigger } = sinkMethods[method]; + + // Some semantic tests of how sink methods are called can be found in general.js, as well as in the test files + // specific to each method. + promise_test(() => { + let argCount; + const ws = new WritableStream({ + [method](...args) { + argCount = args.length; + } + }); + return Promise.resolve(trigger(ws.getWriter())).then(() => { + assert_equals(argCount, length, `${method} should be called with ${length} arguments`); + }); + }, `sink method ${method} should be called with the right number of arguments`); + + promise_test(() => { + let methodWasCalled = false; + function Sink() {} + Sink.prototype = { + [method]() { + methodWasCalled = true; + } + }; + const ws = new WritableStream(new Sink()); + return Promise.resolve(trigger(ws.getWriter())).then(() => { + assert_true(methodWasCalled, `${method} should be called`); + }); + }, `sink method ${method} should be called even when it's located on the prototype chain`); +} diff --git a/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js new file mode 100644 index 00000000000000..afde413b4252d1 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js @@ -0,0 +1,174 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +// These tests exercise the pathological case of calling 
WritableStream* methods from within the strategy.size() +// callback. This is not something any real code should ever do. Failures here indicate subtle deviations from the +// standard that may affect real, non-pathological code. + +const error1 = { name: 'error1' }; + +promise_test(() => { + let writer; + const strategy = { + size(chunk) { + if (chunk > 0) { + writer.write(chunk - 1); + } + return chunk; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return writer.write(2) + .then(() => { + assert_array_equals(ws.events, ['write', 0, 'write', 1, 'write', 2], 'writes should appear in order'); + }); +}, 'writes should be written in the standard order'); + +promise_test(() => { + let writer; + const events = []; + const strategy = { + size(chunk) { + events.push('size', chunk); + if (chunk > 0) { + writer.write(chunk - 1) + .then(() => events.push('writer.write done', chunk - 1)); + } + return chunk; + } + }; + const ws = new WritableStream({ + write(chunk) { + events.push('sink.write', chunk); + } + }, strategy); + writer = ws.getWriter(); + return writer.write(2) + .then(() => events.push('writer.write done', 2)) + .then(() => flushAsyncEvents()) + .then(() => { + assert_array_equals(events, ['size', 2, 'size', 1, 'size', 0, + 'sink.write', 0, 'sink.write', 1, 'writer.write done', 0, + 'sink.write', 2, 'writer.write done', 1, + 'writer.write done', 2], + 'events should happen in standard order'); + }); +}, 'writer.write() promises should resolve in the standard order'); + +promise_test(t => { + let controller; + const strategy = { + size() { + controller.error(error1); + return 1; + } + }; + const ws = recordingWritableStream({ + start(c) { + controller = c; + } + }, strategy); + const resolved = []; + const writer = ws.getWriter(); + const readyPromise1 = writer.ready.then(() => resolved.push('ready1')); + const writePromise = promise_rejects_exactly(t, error1, writer.write(), + 'write() should reject with the error') + 
.then(() => resolved.push('write')); + const readyPromise2 = promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1') + .then(() => resolved.push('ready2')); + const closedPromise = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1') + .then(() => resolved.push('closed')); + return Promise.all([readyPromise1, writePromise, readyPromise2, closedPromise]) + .then(() => { + assert_array_equals(resolved, ['ready1', 'write', 'ready2', 'closed'], + 'promises should resolve in standard order'); + assert_array_equals(ws.events, [], 'underlying sink write should not be called'); + }); +}, 'controller.error() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.close(); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject') + .then(() => { + assert_array_equals(ws.events, ['close'], 'sink.write() should not be called'); + }); +}, 'close() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.abort(error1); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer.write('a'), 'write() promise should reject') + .then(() => { + assert_array_equals(ws.events, ['abort', error1], 'sink.write() should not be called'); + }); +}, 'abort() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.releaseLock(); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + const writePromise = promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject'); + const readyPromise = promise_rejects_js(t, 
TypeError, writer.ready, 'ready promise should reject'); + const closedPromise = promise_rejects_js(t, TypeError, writer.closed, 'closed promise should reject'); + return Promise.all([writePromise, readyPromise, closedPromise]) + .then(() => { + assert_array_equals(ws.events, [], 'sink.write() should not be called'); + }); +}, 'releaseLock() should abort the write() when called within strategy.size()'); + +promise_test(t => { + let writer1; + let ws; + let writePromise2; + let closePromise; + let closedPromise2; + const strategy = { + size(chunk) { + if (chunk > 0) { + writer1.releaseLock(); + const writer2 = ws.getWriter(); + writePromise2 = writer2.write(0); + closePromise = writer2.close(); + closedPromise2 = writer2.closed; + } + return 1; + } + }; + ws = recordingWritableStream({}, strategy); + writer1 = ws.getWriter(); + const writePromise1 = promise_rejects_js(t, TypeError, writer1.write(1), 'write() promise should reject'); + const readyPromise = promise_rejects_js(t, TypeError, writer1.ready, 'ready promise should reject'); + const closedPromise1 = promise_rejects_js(t, TypeError, writer1.closed, 'closed promise should reject'); + return Promise.all([writePromise1, readyPromise, closedPromise1, writePromise2, closePromise, closedPromise2]) + .then(() => { + assert_array_equals(ws.events, ['write', 0, 'close'], 'sink.write() should only be called once'); + }); +}, 'original reader should error when new reader is created within strategy.size()'); diff --git a/test/fixtures/wpt/streams/writable-streams/start.any.js b/test/fixtures/wpt/streams/writable-streams/start.any.js new file mode 100644 index 00000000000000..02b5f2a387625a --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/start.any.js @@ -0,0 +1,163 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = { name: 'error1' }; + +promise_test(() => { + let resolveStartPromise; + 
const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + writer.write('a'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()'); + + // Wait and verify that write isn't called. + return flushAsyncEvents() + .then(() => { + assert_array_equals(ws.events, [], 'write should not be called until start promise resolves'); + resolveStartPromise(); + return writer.ready; + }) + .then(() => assert_array_equals(ws.events, ['write', 'a'], + 'write should not be called until start promise resolves')); +}, 'underlying sink\'s write should not be called until start finishes'); + +promise_test(() => { + let resolveStartPromise; + const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + writer.close(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + // Wait and verify that write isn't called. + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, [], 'close should not be called until start promise resolves'); + resolveStartPromise(); + return writer.closed; + }); +}, 'underlying sink\'s close should not be called until start finishes'); + +test(() => { + const passedError = new Error('horrible things'); + + let writeCalled = false; + let closeCalled = false; + assert_throws_exactly(passedError, () => { + // recordingWritableStream cannot be used here because the exception in the + // constructor prevents assigning the object to a variable. 
+ new WritableStream({ + start() { + throw passedError; + }, + write() { + writeCalled = true; + }, + close() { + closeCalled = true; + } + }); + }, 'constructor should throw passedError'); + assert_false(writeCalled, 'write should not be called'); + assert_false(closeCalled, 'close should not be called'); +}, 'underlying sink\'s write or close should not be called if start throws'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(); + } + }); + + // Wait and verify that write or close aren't called. + return flushAsyncEvents() + .then(() => assert_array_equals(ws.events, [], 'write and close should not be called')); +}, 'underlying sink\'s write or close should not be invoked if the promise returned by start is rejected'); + +promise_test(t => { + const ws = new WritableStream({ + start() { + return { + then(onFulfilled, onRejected) { onRejected(error1); } + }; + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'closed promise should be rejected'); +}, 'returning a thenable from start() should work'); + +promise_test(t => { + const ws = recordingWritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().write('a'), 'write() should reject with the error') + .then(() => { + assert_array_equals(ws.events, [], 'sink write() should not have been called'); + }); +}, 'controller.error() during start should cause writes to fail'); + +promise_test(t => { + let controller; + let resolveStart; + const ws = recordingWritableStream({ + start(c) { + controller = c; + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + const closePromise = writer.close(); + controller.error(error1); + resolveStart(); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write() should fail'), + promise_rejects_exactly(t, 
error1, closePromise, 'close() should fail') + ]).then(() => { + assert_array_equals(ws.events, [], 'sink write() and close() should not have been called'); + }); +}, 'controller.error() during async start should cause existing writes to fail'); + +promise_test(t => { + const events = []; + const promises = []; + function catchAndRecord(promise, name) { + promises.push(promise.then(t.unreached_func(`promise ${name} should not resolve`), + () => { + events.push(name); + })); + } + const ws = new WritableStream({ + start() { + return Promise.reject(); + } + }, { highWaterMark: 0 }); + const writer = ws.getWriter(); + catchAndRecord(writer.ready, 'ready'); + catchAndRecord(writer.closed, 'closed'); + catchAndRecord(writer.write(), 'write'); + return Promise.all(promises) + .then(() => { + assert_array_equals(events, ['ready', 'write', 'closed'], 'promises should reject in standard order'); + }); +}, 'when start() rejects, writer promises should reject in standard order'); diff --git a/test/fixtures/wpt/streams/writable-streams/write.any.js b/test/fixtures/wpt/streams/writable-streams/write.any.js new file mode 100644 index 00000000000000..e3defa834820d4 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/write.any.js @@ -0,0 +1,284 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +function writeArrayToStream(array, writableStreamWriter) { + array.forEach(chunk => writableStreamWriter.write(chunk)); + return writableStreamWriter.close(); +} + +promise_test(() => { + let storage; + const ws = new WritableStream({ + start() { + storage = []; + }, + + write(chunk) { + return delay(0).then(() => storage.push(chunk)); + }, + + close() { + return delay(0); + } + }); + + const writer = ws.getWriter(); + + const input = [1, 2, 3, 4, 5]; 
+ return writeArrayToStream(input, writer) + .then(() => assert_array_equals(storage, input, 'correct data should be relayed to underlying sink')); +}, 'WritableStream should complete asynchronous writes before close resolves'); + +promise_test(() => { + const ws = recordingWritableStream(); + + const writer = ws.getWriter(); + + const input = [1, 2, 3, 4, 5]; + return writeArrayToStream(input, writer) + .then(() => assert_array_equals(ws.events, ['write', 1, 'write', 2, 'write', 3, 'write', 4, 'write', 5, 'close'], + 'correct data should be relayed to underlying sink')); +}, 'WritableStream should complete synchronous writes before close resolves'); + +promise_test(() => { + const ws = new WritableStream({ + write() { + return 'Hello'; + } + }); + + const writer = ws.getWriter(); + + const writePromise = writer.write('a'); + return writePromise + .then(value => assert_equals(value, undefined, 'fulfillment value must be undefined')); +}, 'fulfillment value of ws.write() call should be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(() => { + let resolveSinkWritePromise; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveSinkWritePromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + return writer.ready.then(() => { + const writePromise = writer.write('a'); + let writePromiseResolved = false; + assert_not_equals(resolveSinkWritePromise, undefined, 'resolveSinkWritePromise should not be undefined'); + + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()'); + + return Promise.all([ + writePromise.then(value => { + writePromiseResolved = true; + assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writePromise'); + + assert_equals(value, undefined, 'writePromise should be fulfilled with undefined'); + }), + 
writer.ready.then(value => { + assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writer.ready'); + assert_true(writePromiseResolved, 'writePromise should be fulfilled before writer.ready'); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again'); + + assert_equals(value, undefined, 'writePromise should be fulfilled with undefined'); + }), + flushAsyncEvents().then(() => { + resolveSinkWritePromise(); + resolveSinkWritePromise = undefined; + }) + ]); + }); +}, 'WritableStream should transition to waiting until write is acknowledged'); + +promise_test(t => { + let sinkWritePromiseRejectors = []; + const ws = new WritableStream({ + write() { + const sinkWritePromise = new Promise((r, reject) => sinkWritePromiseRejectors.push(reject)); + return sinkWritePromise; + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + return writer.ready.then(() => { + const writePromise = writer.write('a'); + assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be 1 rejector'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + + const writePromise2 = writer.write('b'); + assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be still 1 rejector'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1'); + + const closedPromise = writer.close(); + + assert_equals(writer.desiredSize, -1, 'desiredSize should still be -1'); + + return Promise.all([ + promise_rejects_exactly(t, error1, closedPromise, + 'closedPromise should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 'sinkWritePromise should reject before closedPromise')), + promise_rejects_exactly(t, error1, writePromise, + 'writePromise should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 
'sinkWritePromise should reject before writePromise')), + promise_rejects_exactly(t, error1, writePromise2, + 'writePromise2 should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 'sinkWritePromise should reject before writePromise2')), + flushAsyncEvents().then(() => { + sinkWritePromiseRejectors[0](error1); + sinkWritePromiseRejectors = []; + }) + ]); + }); +}, 'when write returns a rejected promise, queued writes and close should be cleared'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('a'), + 'write() should reject with the error returned from the sink\'s write method') + .then(() => promise_rejects_js(t, TypeError, writer.close(), 'close() should be rejected')); +}, 'when sink\'s write throws an error, the stream should become errored and the promise should reject'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + throw error2; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error2, writer.write('a'), + 'write() should reject with the error returned from the sink\'s write method ') + .then(() => { + return Promise.all([ + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must reject with the error passed to the controller'), + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error passed to the controller') + ]); + }); +}, 'writer.write(), ready and closed reject with the error passed to controller.error() made before sink.write' + + ' rejection'); + +promise_test(() => { + const numberOfWrites = 1000; + + let resolveFirstWritePromise; + let writeCount = 0; + const ws = new WritableStream({ + write() { + ++writeCount; + if (!resolveFirstWritePromise) { + return new Promise(resolve 
=> { + resolveFirstWritePromise = resolve; + }); + } + return Promise.resolve(); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + for (let i = 1; i < numberOfWrites; ++i) { + writer.write('a'); + } + const writePromise = writer.write('a'); + + assert_equals(writeCount, 1, 'should have called sink\'s write once'); + + resolveFirstWritePromise(); + + return writePromise + .then(() => + assert_equals(writeCount, numberOfWrites, `should have called sink's write ${numberOfWrites} times`)); + }); +}, 'a large queue of writes should be processed completely'); + +promise_test(() => { + const stream = recordingWritableStream(); + const w = stream.getWriter(); + const WritableStreamDefaultWriter = w.constructor; + w.releaseLock(); + const writer = new WritableStreamDefaultWriter(stream); + return writer.ready.then(() => { + writer.write('a'); + assert_array_equals(stream.events, ['write', 'a'], 'write() should be passed to sink'); + }); +}, 'WritableStreamDefaultWriter should work when manually constructed'); + +promise_test(() => { + let thenCalled = false; + const ws = new WritableStream({ + write() { + return { + then(onFulfilled) { + thenCalled = true; + onFulfilled(); + } + }; + } + }); + return ws.getWriter().write('a').then(() => assert_true(thenCalled, 'thenCalled should be true')); +}, 'returning a thenable from write() should work'); + +promise_test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(stream), + 'should not be able to construct on locked stream'); + // If stream.[[writer]] no longer points to |writer| then the closed Promise + // won't work properly. 
+ return Promise.all([writer.close(), writer.closed]); +}, 'failing DefaultWriter constructor should not release an existing writer'); + +promise_test(t => { + const ws = new WritableStream({ + start() { + return Promise.reject(error1); + } + }, { highWaterMark: 0 }); + const writer = ws.getWriter(); + return Promise.all([ + promise_rejects_exactly(t, error1, writer.ready, 'ready should be rejected'), + promise_rejects_exactly(t, error1, writer.write(), 'write() should be rejected') + ]); +}, 'write() on a stream with HWM 0 should not cause the ready Promise to resolve'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + writer.releaseLock(); + return promise_rejects_js(t, TypeError, writer.write(), 'write should reject'); +}, 'writing to a released writer should reject the returned promise'); diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index b610d0ab47ec94..50316e8c583eb1 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -36,13 +36,17 @@ "path": "html/webappapis/timers" }, "interfaces": { - "commit": "79fa4cf76e0d39e3fc1b7ead85e067b0a064b892", + "commit": "fcb671ed8b068b25cee87429d803833777f35c2c", "path": "interfaces" }, "resources": { "commit": "972ca5b6693bffebebc5805e1b9da68a6876e1f6", "path": "resources" }, + "streams": { + "commit": "b869e60df1b8d3840e09b41c5e987c7e23f6856c", + "path": "streams" + }, "url": { "commit": "1fcb39223d3009fbb46c1b254755d6cc75e290f1", "path": "url" diff --git a/test/wpt/status/streams.json b/test/wpt/status/streams.json new file mode 100644 index 00000000000000..0967ef424bce67 --- /dev/null +++ b/test/wpt/status/streams.json @@ -0,0 +1 @@ +{} diff --git a/test/wpt/test-streams.js b/test/wpt/test-streams.js new file mode 100644 index 00000000000000..6a64f241c10e2d --- /dev/null +++ b/test/wpt/test-streams.js @@ -0,0 +1,47 @@ +'use strict'; + +require('../common'); +const { WPTRunner } = require('../common/wpt'); + 
+const runner = new WPTRunner('streams'); + +// Set Node.js flags required for the tests. +runner.setFlags(['--expose-internals']); + +// Set a script that will be executed in the worker before running the tests. +runner.setInitScript(` + const { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, + } = require('stream/web'); + + const { internalBinding } = require('internal/test/binding'); + const { DOMException } = internalBinding('messaging'); + global.DOMException = DOMException; + global.ReadableStream = ReadableStream; + global.ReadableStreamDefaultReader = ReadableStreamDefaultReader; + global.ReadableStreamBYOBReader = ReadableStreamBYOBReader; + global.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; + global.ReadableByteStreamController = ReadableByteStreamController; + global.ReadableStreamDefaultController = ReadableStreamDefaultController; + global.TransformStream = TransformStream; + global.TransformStreamDefaultController = TransformStreamDefaultController; + global.WritableStream = WritableStream; + global.WritableStreamDefaultWriter = WritableStreamDefaultWriter; + global.WritableStreamDefaultController = WritableStreamDefaultController; + global.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; + global.CountQueuingStrategy = CountQueuingStrategy; +`); + +runner.runJsTests(); From 8ae35d2a7727b12869d67c4d5e9911f61330a6a2 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Wed, 16 Jun 2021 12:44:22 -0700 Subject: [PATCH 2/2] stream: implement WHATWG streams Experimental implementation of the WHATWG streams standard. 
Signed-off-by: James M Snell --- doc/api/errors.md | 5 + doc/api/index.md | 1 + doc/api/webstreams.md | 1122 +++++++ lib/internal/abort_controller.js | 1 + lib/internal/errors.js | 7 +- lib/internal/per_context/primordials.js | 5 + lib/internal/webstreams/queuingstrategies.js | 168 + lib/internal/webstreams/readablestream.js | 2740 +++++++++++++++++ lib/internal/webstreams/transfer.js | 299 ++ lib/internal/webstreams/transformstream.js | 591 ++++ lib/internal/webstreams/util.js | 237 ++ lib/internal/webstreams/writablestream.js | 1329 ++++++++ lib/stream/web.js | 48 + src/node_buffer.cc | 61 + test/fixtures/wpt/README.md | 2 +- .../readable-byte-streams/general.any.js | 17 +- .../non-transferable-buffers.any.js | 4 +- .../streams/writable-streams/aborting.any.js | 108 + test/fixtures/wpt/versions.json | 2 +- .../test-whatwg-readablebytestream.js | 238 ++ test/parallel/test-whatwg-readablestream.js | 1522 +++++++++ test/parallel/test-whatwg-transformstream.js | 188 ++ .../test-whatwg-webstreams-coverage.js | 70 + .../test-whatwg-webstreams-transfer.js | 503 +++ test/parallel/test-whatwg-writablestream.js | 260 ++ test/wpt/status/streams.json | 12 +- test/wpt/test-streams.js | 120 +- tools/doc/type-parser.mjs | 27 + typings/primordials.d.ts | 13 +- 29 files changed, 9668 insertions(+), 32 deletions(-) create mode 100644 doc/api/webstreams.md create mode 100644 lib/internal/webstreams/queuingstrategies.js create mode 100644 lib/internal/webstreams/readablestream.js create mode 100644 lib/internal/webstreams/transfer.js create mode 100644 lib/internal/webstreams/transformstream.js create mode 100644 lib/internal/webstreams/util.js create mode 100644 lib/internal/webstreams/writablestream.js create mode 100644 lib/stream/web.js create mode 100644 test/parallel/test-whatwg-readablebytestream.js create mode 100644 test/parallel/test-whatwg-readablestream.js create mode 100644 test/parallel/test-whatwg-transformstream.js create mode 100644 
test/parallel/test-whatwg-webstreams-coverage.js create mode 100644 test/parallel/test-whatwg-webstreams-transfer.js create mode 100644 test/parallel/test-whatwg-writablestream.js diff --git a/doc/api/errors.md b/doc/api/errors.md index 6257619ba046f1..f36584b7468f98 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1428,6 +1428,11 @@ is set for the `Http2Stream`. `http2.connect()` was passed a URL that uses any protocol other than `http:` or `https:`. + +### `ERR_ILLEGAL_CONSTRUCTOR` + +An attempt was made to construct an object using a non-public constructor. + ### `ERR_INCOMPATIBLE_OPTION_PAIR` diff --git a/doc/api/index.md b/doc/api/index.md index 71c415afaa673a..448f6d599fc8f5 100644 --- a/doc/api/index.md +++ b/doc/api/index.md @@ -64,6 +64,7 @@ * [VM](vm.md) * [WASI](wasi.md) * [Web Crypto API](webcrypto.md) +* [Web Streams API](webstreams.md) * [Worker threads](worker_threads.md) * [Zlib](zlib.md) diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md new file mode 100644 index 00000000000000..90667c1c1bb28c --- /dev/null +++ b/doc/api/webstreams.md @@ -0,0 +1,1122 @@ +# Web Streams API + +> Stability: 1 - Experimental + +An implementation of the [WHATWG Streams Standard][]. + +```mjs +import { + ReadableStream, + WritableStream, + TransformStream, +} from 'node:stream/web'; +``` + +```cjs +const { + ReadableStream, + WritableStream, + TransformStream, +} = require('stream/web'); +``` + +## Overview + +The [WHATWG Streams Standard][] (or "web streams") defines an API for handling +streaming data. It is similar to the Node.js [Streams][] API but emerged later +and has become the "standard" API for streaming data across many JavaScript +environments. + +There are three primary types of objects + +* `ReadableStream` - Represents a source of streaming data. +* `WritableStream` - Represents a destination for streaming data. +* `TransformStream` - Represents an algorithm for transforming streaming data. 
+ +### Example `ReadableStream` + +This example creates a simple `ReadableStream` that pushes the current +`performance.now()` timestamp once every second forever. An async iterable +is used to read the data from the stream. + +```mjs +import { + ReadableStream +} from 'node:stream/web'; + +import { + setInterval as every +} from 'node:timers/promises'; + +import { + performance +} from 'node:perf_hooks'; + +const SECOND = 1000; + +const stream = new ReadableStream({ + async start(controller) { + for await (const _ of every(SECOND)) + controller.enqueue(performance.now()); + } +}); + +for await (const value of stream) + console.log(value); +``` + +```cjs +const { + ReadableStream +} = require('stream/web'); + +const { + setInterval: every +} = require('timers/promises'); + +const { + performance +} = require('perf_hooks'); + +const SECOND = 1000; + +const stream = new ReadableStream({ + async start(controller) { + for await (const _ of every(SECOND)) + controller.enqueue(performance.now()); + } +}); + +(async () => { + for await (const value of stream) + console.log(value); +})(); +``` + +## API + +### Class: `ReadableStream` + + +#### `new ReadableStream([underlyingSource [, strategy]])` + + + +* `underlyingSource` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `ReadableStream` is created. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `pull` {Function} A user-defined function that is called repeatedly when the + `ReadableStream` internal queue is not full. The operation may be sync or + async. If async, the function will not be called again until the previously + returned promise is fulfilled. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: A promise fulfilled with `undefined`. 
+ * `cancel` {Function} A user-defined function that is called when the + `ReadableStream` is canceled. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {string} Must be `'bytes'` or `undefined`. + * `autoAllocateChunkSize` {number} Used only when `type` is equal to + `'bytes'`. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + + +#### `readableStream.locked` + + +* Type: {boolean} Set to `true` if there is an active reader for this + {ReadableStream}. + +The `readableStream.locked` property is `false` by default, and is +switch to `true` while there is an active reader consuming the +stream's data. + +#### `readableStream.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined` once cancelation has + been completed. + +#### `readableStream.getReader([options])` + + +* `options` {Object} + * `mode` {string} `'byob'` or `undefined` +* Returns: {ReadableStreamDefaultReader|ReadableStreamBYOBReader} + +```mjs +import { ReadableStream } from 'node:stream/web'; + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +console.log(await reader.read()); +``` + +```cjs +const { ReadableStream } = require('stream/web'); + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +reader.read().then(console.log); +``` + +Causes the `readableStream.locked` to be `true`. + +#### `readableStream.pipeThrough(transform[, options])` + + +* `transform` {Object} + * `readable` {ReadableStream} The `ReadableStream` to which + `transform.writable` will push the potentially modified data + is receives from this `ReadableStream`. + * `writable` {WritableStream} The `WritableStream` to which this + `ReadableStream`'s data will be written. 
+* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. + * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` is not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will no cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: {ReadableStream} From `transform.readable`. + +Connects this {ReadableStream} to the pair of {ReadableStream} and +{WritableStream} provided in the `transform` argument such that the +data from this {ReadableStream} is written in to `transform.writable`, +possibly transformed, then pushed to `transform.readable`. Once the +pipeline is configured, `transform.readable` is returned. + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +```mjs +import { + ReadableStream, + TransformStream, +} from 'node:stream/web'; + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +for await (const chunk of transformedStream) + console.log(chunk); +``` + +```cjs +const { + ReadableStream, + TransformStream, +} = require('stream/web'); + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +(async () => { + for await (const chunk of transformedStream) + console.log(chunk); +})(); +``` + +#### `readableStream.pipeTo(destination, options)` + + +* `destination` 
{WritableStream} A {WritableStream} to which this + `ReadableStream`'s data will be written. +* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. + * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` is not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will no cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: A promise fulfilled with `undefined` + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +#### `readableStream.tee()` + + +* Returns: {ReadableStream[]} + +Returns a pair of new {ReadableStream} instances to which this +`ReadableStream`'s data will be forwarded. Each will receive the +same data. + +Causes the `readableStream.locked` to be `true`. + +#### `readableStream.values([options])` + + +* `options` {Object} + * `preventCancel` {boolean} When `true`, prevents the {ReadableStream} + from being closed when the async iterator abruptly terminates. + **Defaults**: `false` + +Creates and returns an async iterator usable for consuming this +`ReadableStream`'s data. + +Causes the `readableStream.locked` to be `true` while the async iterator +is active. + +```mjs +import { Buffer } from 'node:buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream.values({ preventCancel: true })) + console.log(Buffer.from(chunk).toString()); +``` + +#### Async Iteration + +The {ReadableStream} object supports the async iterator protocol using +`for await` syntax. 
+ +```mjs +import { Buffer } from 'buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream) + console.log(Buffer.from(chunk).toString()); +``` + +The async iterator will consume the {ReadableStream} until it terminates. + +By default, if the async iterator exits early (via either a `break`, +`return`, or a `throw`), the {ReadableStream} will be closed. To prevent +automatic closing of the {ReadableStream}, use the `readableStream.values()` +method to acquire the async iterator and set the `preventCancel` option to +`true`. + +The {ReadableStream} must not be locked (that is, it must not have an existing +active reader). During the async iteration, the {ReadableStream} will be locked. + +#### Transfering with `postMessage()` + +A {ReadableStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new ReadableStream(getReadableSourceSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getReader().read().then((chunk) => { + console.log(chunk); + }); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `ReadableStreamDefaultReader` + + +By default, calling `readableStream.getReader()` with no arguments +will return an instance of `ReadableStreamDefaultReader`. The default +reader treats the chunks of data passed through the stream as opaque +values, which allows the {ReadableStream} to work with generally any +JavaScript value. + +#### `new ReadableStreamDefaultReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new {ReadableStreamDefaultReader} that is locked to the +given {ReadableStream}. + +#### `readableStreamDefaultReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. 
+ +#### `readableStreamDefaultReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. + +#### `readableStreamDefaultReader.read()` + + +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. + +#### `readableStreamDefaultReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamBYOBReader` + + +The `ReadableStreamBYOBReader` is an alternative consumer for +byte-oriented {ReadableStream}'s (those that are created with +`underlyingSource.type` set equal to `'bytes`` when the +`ReadableStream` was created). + +The `BYOB` is short for "bring your own buffer". This is a +pattern that allows for more efficient reading of byte-oriented +data that avoids extraneous copying. 
+ +```mjs +import { + open +} from 'node:fs/promises'; + +import { + ReadableStream +} from 'node:stream/web'; + +import { Buffer } from 'node:buffer'; + +class Source { + type = 'bytes'; + autoAllocateChunkSize = 1024; + + async start(controller) { + this.file = await open(new URL(import.meta.url)); + this.controller = controller; + } + + async pull(controller) { + const view = controller.byobRequest?.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + controller.byobRequest.respond(bytesRead); + } +} + +const stream = new ReadableStream(new Source()); + +async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); +} + +const data = await read(stream); +console.log(Buffer.from(data).toString()); +``` + +#### `new ReadableStreamBYOBReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new `ReadableStreamBYOBReader` that is locked to the +given {ReadableStream}. + +#### `readableStreamBYOBReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamBYOBReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. 
+can have disastrous consequences for your application.
+ +### Class: `ReadableByteStreamController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableByteStreamController` is for byte-oriented `ReadableStream`s. + +#### `readableByteStreamController.byobRequest` + + +* Type: {ReadableStreamBYOBRequest} + +#### `readableByteStreamController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableByteStreamController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableByteStreamController.enqueue(chunk)` + + +* `chunk`: {Buffer|TypedArray|DataView} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableByteStreamController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableStreamBYOBRequest` + + +When using `ReadableByteStreamController` in byte-oriented +streams, and when using the `ReadableStreamBYOBReader`, +the `readableByteStreamController.byobRequest` property +provides access to a `ReadableStreamBYOBRequest` instance +that represents the current read request. The object +is used to gain access to the `ArrayBuffer`/`TypedArray` +that has been provided for the read request to fill, +and provides methods for signaling that the data has +been provided. + +#### `readableStreamBYOBRequest.respond(bytesWritten)` + + +* `bytesWritten` {number} + +Signals that a `bytesWritten` number of bytes have been written +to `readableStreamBYOBRequest.view`. + +#### `readableStreamBYOBRequest.respondWithNewView(view)` + + +* `view` {Buffer|TypedArray|DataView} + +Signals that the request has been fulfilled with bytes written +to a new `Buffer`, `TypedArray`, or `DataView`. 
+ +#### `readableStreamBYOBRequest.view` + + +* Type: {Buffer|TypedArray|DataView} + +### Class: `WritableStream` + + +The `WritableStream` is a destination to which stream data is sent. + +```mjs +import { + WritableStream +} from 'node:stream/web'; + +const stream = new WritableStream({ + write(chunk) { + console.log(chunk); + } +}); + +await stream.getWriter().write('Hello World'); +``` + +#### `new WritableStream([underlyingSink[, strategy]])` + + +* `underlyingSink` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `WritableStream` is created. + * `controller` {WritableStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `write` {Function} A user-defined function that is invoked when a chunk of + data has been written to the `WritableStream`. + * `chunk` {any} + * `controller` {WritableStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `close` {Function} A user-defined function that is called when the + `WritableStream` is closed. + * Returns: A promise fulfilled with `undefined`. + * `abort` {Function} A user-defined function that is called to abruptly close + the `WritableStream`. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {any} The `type` option is reserved for future use and *must* be + undefined. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + +#### `writableStream.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Abruptly terminates the `WritableStream`. All queued writes will be +canceled with their associated promises rejected. + +#### `writableStream.close()` + + +* Returns: A promise fulfilled with `undefined`. 
+Creates and returns a new writer instance that can be used to write
+data into the `WritableStream`.
+Releases this writer's lock on the underlying {WritableStream}.
+
+#### `writableStreamDefaultWriter.write([chunk])`
+
+
+* `chunk`: {any}
+* Returns: A promise fulfilled with `undefined`.
+
+Appends a new chunk of data to the {WritableStream}'s queue.
+
+### Class: `WritableStreamDefaultController`
+
+
+The `WritableStreamDefaultController` manages the {WritableStream}'s
+internal state.
+  * `readableType` {any} The `readableType` option is reserved for future use
+    and *must* be `undefined`.
+  * `writableType` {any} The `writableType` option is reserved for future use
+    and *must* be `undefined`.
+Signals to both the readable and writable side that an error has occurred
b/lib/internal/errors.js index 13b56311d370b8..ed3fa3787e5eec 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1033,6 +1033,7 @@ E('ERR_HTTP_SOCKET_ENCODING', 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); E('ERR_HTTP_TRAILER_INVALID', 'Trailers are invalid with this transfer encoding', Error); +E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); E('ERR_INCOMPATIBLE_OPTION_PAIR', 'Option "%s" cannot be used in combination with option "%s"', TypeError); E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + @@ -1256,8 +1257,8 @@ E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { } return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`; -}, TypeError); -E('ERR_INVALID_STATE', 'Invalid state: %s', Error); +}, TypeError, RangeError); +E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); E('ERR_INVALID_SYNC_FORK_INPUT', 'Asynchronous forks do not support ' + 'Buffer, TypedArray, DataView or string input: %s', @@ -1361,7 +1362,7 @@ E('ERR_NO_CRYPTO', 'Node.js is not compiled with OpenSSL crypto support', Error); E('ERR_NO_ICU', '%s is not supported on Node.js compiled without ICU', TypeError); -E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error); +E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); E('ERR_OUT_OF_RANGE', (str, range, input, replaceDefaultBoolean = false) => { assert(range, 'Missing "range" argument'); diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 42250ffb422d6e..4dfb4dea85ef2a 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -415,5 +415,10 @@ primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => .then(a, b) ); +primordials.AsyncIteratorPrototype = + primordials.ReflectGetPrototypeOf( + primordials.ReflectGetPrototypeOf( + async function* () {}).prototype); + 
ObjectSetPrototypeOf(primordials, null); ObjectFreeze(primordials); diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js new file mode 100644 index 00000000000000..d8750665bd5e86 --- /dev/null +++ b/lib/internal/webstreams/queuingstrategies.js @@ -0,0 +1,168 @@ +'use strict'; + +const { + ObjectDefineProperties, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_INVALID_THIS, + ERR_MISSING_OPTION, + }, +} = require('internal/errors'); + +const { + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + customInspect, + isBrandCheck, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + validateObject, +} = require('internal/validators'); + +const isByteLengthQueuingStrategy = + isBrandCheck('ByteLengthQueuingStrategy'); + +const isCountQueuingStrategy = + isBrandCheck('CountQueuingStrategy'); + +/** + * @callback QueuingStrategySize + * @param {any} chunk + * @returns {number} + * + * @typedef {{ + * highWaterMark : number, + * size? : QueuingStrategySize, + * }} QueuingStrategy + */ + +// eslint-disable-next-line func-name-matching,func-style +const byteSizeFunction = function size(chunk) { return chunk.byteLength; }; + +// eslint-disable-next-line func-name-matching,func-style +const countSizeFunction = function size() { return 1; }; + +/** + * @type {QueuingStrategy} + */ +class ByteLengthQueuingStrategy { + [kType] = 'ByteLengthQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. 
+ this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return byteSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(ByteLengthQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +/** + * @type {QueuingStrategy} + */ +class CountQueuingStrategy { + [kType] = 'CountQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. 
+ this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return countSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(CountQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +module.exports = { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js new file mode 100644 index 00000000000000..a8024c64af2353 --- /dev/null +++ b/lib/internal/webstreams/readablestream.js @@ -0,0 +1,2740 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayBuffer, + ArrayBufferPrototypeSlice, + ArrayPrototypePush, + ArrayPrototypeShift, + DataViewCtor, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMin, + NumberIsInteger, + ObjectCreate, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeCatch, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + PromiseAll, + ReflectConstruct, + Symbol, + SymbolAsyncIterator, + SymbolToStringTag, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + isArrayBufferView, + isDataView, +} = require('util/types'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { 
+ serialize, + deserialize, +} = require('v8'); + +const { + validateObject, +} = require('internal/validators'); + +const { + kAborted, +} = require('internal/abort_controller'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + queueMicrotask, +} = require('internal/process/task_queues'); + +const { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpPull, + nonOpStart, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + WritableStreamDefaultWriter, + + isWritableStream, + isWritableStreamLocked, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + writableStreamAbort, + writableStreamCloseQueuedOrInFlight, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterWrite, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +const kCancel = Symbol('kCancel'); +const kClose = Symbol('kClose'); +const kChunk = Symbol('kChunk'); +const kError = Symbol('kError'); +const kPull = Symbol('kPull'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy} QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * @typedef {import('./writablestream').WritableStream} WritableStream + * + * @typedef {ReadableStreamDefaultController | ReadableByteStreamController + * } ReadableStreamController + * + * @typedef 
{ReadableStreamDefaultReader | ReadableStreamBYOBReader + * } ReadableStreamReader + * + * @callback UnderlyingSourceStartCallback + * @param {ReadableStreamController} controller + * @returns { any | Promise } + * + * @callback UnderlyingSourcePullCallback + * @param {ReadableStreamController} controller + * @returns { Promise } + * + * @callback UnderlyingSourceCancelCallback + * @param {any} reason + * @returns { Promise } + * + * @typedef {{ + * readable: ReadableStream, + * writable: WritableStream, + * }} ReadableWritablePair + * + * @typedef {{ + * preventClose? : boolean, + * preventAbort? : boolean, + * preventCancel? : boolean, + * signal? : AbortSignal, + * }} StreamPipeOptions + * + * @typedef {{ + * start? : UnderlyingSourceStartCallback, + * pull? : UnderlyingSourcePullCallback, + * cancel? : UnderlyingSourceCancelCallback, + * type? : "bytes", + * autoAllocateChunkSize? : number + * }} UnderlyingSource + * + */ + +class ReadableStream { + [kType] = 'ReadableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSource} [source] + * @param {QueuingStrategy} [strategy] + */ + constructor(source = {}, strategy = {}) { + if (source === null) + throw new ERR_INVALID_ARG_VALUE('source', 'Object', source); + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + // The spec requires handling of the strategy first + // here. Specifically, if getting the size and + // highWaterMark from the strategy fail, that has + // to trigger a throw before getting the details + // from the source. So be sure to keep these in + // this order. 
+ const size = strategy?.size; + const highWaterMark = strategy?.highWaterMark; + const type = source.type; + + if (`${type}` === 'bytes') { + if (size !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.size', size); + setupReadableByteStreamControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 0)); + return; + } + + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE('source.type', type); + setupReadableStreamDefaultControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 1), + extractSizeAlgorithm(size)); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return isReadableStreamLocked(this); + } + + /** + * @param {any} [reason] + * @returns { Promise } + */ + cancel(reason = undefined) { + if (!isReadableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStream')); + if (isReadableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('ReadableStream is locked')); + } + return readableStreamCancel(this, reason); + } + + /** + * @param {{ + * mode? 
: "byob" + * }} [options] + * @returns {ReadableStreamReader} + */ + getReader(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options', { nullable: true, allowFunction: true }); + const mode = options?.mode; + + if (mode === undefined) + return new ReadableStreamDefaultReader(this); + + if (`${mode}` !== 'byob') + throw new ERR_INVALID_ARG_VALUE('options.mode', mode); + return new ReadableStreamBYOBReader(this); + } + + /** + * @param {ReadableWritablePair} transform + * @param {StreamPipeOptions} [options] + * @returns {ReadableStream} + */ + pipeThrough(transform, options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + const readable = transform?.readable; + if (!isReadableStream(readable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.readable', + 'ReadableStream', + readable); + } + const writable = transform?.writable; + if (!isWritableStream(writable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + writable); + } + + // The web platform tests require that these be handled one at a + // time and in a specific order. options can be null or undefined. 
+ const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(writable)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + const promise = readableStreamPipeTo( + this, + writable, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + setPromiseHandled(promise); + + return readable; + } + + /** + * @param {WritableStream} destination + * @param {StreamPipeOptions} [options] + * @returns {Promise} + */ + pipeTo(destination, options = {}) { + try { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (!isWritableStream(destination)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + destination); + } + + const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(destination)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + return readableStreamPipeTo( + this, + destination, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + } catch (error) { + return PromiseReject(error); + } + } + + /** + * @returns {ReadableStream[]} + */ + tee() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return readableStreamTee(this, 
false); + } + + /** + * @param {{ + * preventCancel? : boolean, + * }} [options] + * @returns {AsyncIterable} + */ + values(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options'); + const { + preventCancel = false, + } = options; + + const reader = new ReadableStreamDefaultReader(this); + let done = false; + let started = false; + let current; + + // The nextSteps function is not an async function in order + // to make it more efficient. Because nextSteps explicitly + // creates a Promise and returns it in the common case, + // making it an async function just causes two additional + // unnecessary Promise allocations to occur, which just add + // cost. + function nextSteps() { + if (done) + return PromiseResolve({ done: true, value: undefined }); + + if (reader[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream')); + } + const promise = createDeferredPromise(); + + readableStreamDefaultReaderRead(reader, { + [kChunk](chunk) { + current = undefined; + promise.resolve({ value: chunk, done: false }); + }, + [kClose]() { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.resolve({ done: true, value: undefined }); + }, + [kError](error) { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.reject(error); + } + }); + return promise.promise; + } + + async function returnSteps(value) { + if (done) + return { done: true, value }; + done = true; + + if (reader[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream'); + } + assert(!reader[kState].readRequests.length); + if (!preventCancel) { + const result = readableStreamReaderGenericCancel(reader, value); + readableStreamReaderGenericRelease(reader); + await result; + return { done: true, value }; + } + + 
readableStreamReaderGenericRelease(reader); + return { done: true, value }; + } + + // TODO(@jasnell): Explore whether an async generator + // can be used here instead of a custom iterator object. + return ObjectSetPrototypeOf({ + // Changing either of these functions (next or return) + // to async functions causes a failure in the streams + // Web Platform Tests that check for use of a modified + // Promise.prototype.then. Since the await keyword + // uses Promise.prototype.then, it is open to prototype + // polution, which causes the test to fail. The other + // await uses here do not trigger that failure because + // the test that fails does not trigger those code paths. + next() { + // If this is the first read, delay by one microtask + // to ensure that the controller has had an opportunity + // to properly start and perform the initial pull. + // TODO(@jasnell): The spec doesn't call this out so + // need to investigate if it's a bug in our impl or + // the spec. + if (!started) { + current = PromiseResolve(); + started = true; + } + current = current !== undefined ? + PromisePrototypeThen(current, nextSteps, nextSteps) : + nextSteps(); + return current; + }, + + return(error) { + return current ? 
+ PromisePrototypeThen( + current, + () => returnSteps(error), + () => returnSteps(error)) : + returnSteps(error); + }, + + [SymbolAsyncIterator]() { return this; } + }, AsyncIterator); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + + const { + writable, + promise, + } = lazyTransfer().newCrossRealmWritableSink( + this, + this[kState].transfer.port1); + + this[kState].transfer.writable = writable; + this[kState].transfer.promise = promise; + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/readablestream:TransferedReadableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupReadableStreamDefaultControllerFromSource( + this, + new transfer.CrossRealmTransformReadableSource(port), + 0, () => 1); + } +} + +ObjectDefineProperties(ReadableStream.prototype, { + [SymbolAsyncIterator]: { + configurable: true, + enumerable: false, + writable: true, + value: ReadableStream.prototype.values, + }, + locked: { enumerable: true }, + cancel: { enumerable: true }, + getReader: { enumerable: true }, + pipeThrough: { enumerable: true }, + pipeTo: { enumerable: true }, + tee: { enumerable: true }, +}); + +function TransferedReadableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 
'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + }, + [], ReadableStream)); +} +TransferedReadableStream.prototype[kDeserialize] = () => {}; + +class ReadableStreamBYOBRequest { + [kType] = 'ReadableStreamBYOBRequest'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ArrayBufferView} + */ + get view() { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + return this[kState].view; + } + + /** + * @param {number} bytesWritten + */ + respond(bytesWritten) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + view, + controller, + } = this[kState]; + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached'); + } + + readableByteStreamControllerRespond(controller, bytesWritten); + } + + /** + * @param {ArrayBufferView} view + */ + respondWithNewView(view) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + controller, + } = this[kState]; + + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + readableByteStreamControllerRespondWithNewView(controller, view); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + view: this.view, + controller: this[kState].controller, + }); + } +} + 
ObjectDefineProperties(ReadableStreamBYOBRequest.prototype, {
  view: { enumerable: true },
  respond: { enumerable: true },
  respondWithNewView: { enumerable: true },
});

// Internal factory. Builds a ReadableStreamBYOBRequest bound to the given
// controller/view without invoking the public constructor (which throws).
function createReadableStreamBYOBRequest(controller, view) {
  return ReflectConstruct(
    function() {
      this[kType] = 'ReadableStreamBYOBRequest';
      this[kState] = {
        controller,
        view,
      };
    },
    [],
    ReadableStreamBYOBRequest
  );
}

// Read request used by ReadableStreamDefaultReader.read(). Wraps a deferred
// promise that is resolved with a { value, done } pair when a chunk arrives
// or the stream closes, and rejected when the stream errors.
class DefaultReadRequest {
  constructor() {
    this[kState] = createDeferredPromise();
  }

  [kChunk](value) {
    this[kState].resolve?.({ value, done: false });
  }

  [kClose]() {
    this[kState].resolve?.({ value: undefined, done: true });
  }

  [kError](error) {
    this[kState].reject?.(error);
  }

  get promise() { return this[kState].promise; }
}

// Read-into request used by ReadableStreamBYOBReader.read(). Unlike
// DefaultReadRequest, the close steps accept a value so a partially
// filled view can be delivered alongside done: true.
class ReadIntoRequest {
  constructor() {
    this[kState] = createDeferredPromise();
  }

  [kChunk](value) {
    this[kState].resolve?.({ value, done: false });
  }

  [kClose](value) {
    this[kState].resolve?.({ value, done: true });
  }

  [kError](error) {
    this[kState].reject?.(error);
  }

  get promise() { return this[kState].promise; }
}

class ReadableStreamDefaultReader {
  [kType] = 'ReadableStreamDefaultReader';

  get [SymbolToStringTag]() { return this[kType]; }

  /**
   * Acquires a lock on the given stream for this reader.
   * @param {ReadableStream} stream
   */
  constructor(stream) {
    if (!isReadableStream(stream))
      throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream);
    this[kState] = {
      readRequests: [],
      stream: undefined,
      close: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
    };
    // Locks the stream to this reader and initializes the close promise
    // (stream and close are overwritten there based on the stream's state).
    setupReadableStreamDefaultReader(this, stream);
  }

  /**
   * @returns {Promise<{
   *   value : any,
   *   done : boolean
   * }>}
   */
  read() {
    if (!isReadableStreamDefaultReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader'));
    if (this[kState].stream === undefined) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'The reader is not attached to a stream'));
    }
    const readRequest = new DefaultReadRequest();
    readableStreamDefaultReaderRead(this, readRequest);
    return readRequest.promise;
  }

  // Releases this reader's lock on the stream. A no-op when the reader is
  // already detached; throws while read()s are still outstanding.
  releaseLock() {
    if (!isReadableStreamDefaultReader(this))
      throw new ERR_INVALID_THIS('ReadableStreamDefaultReader');
    if (this[kState].stream === undefined)
      return;
    if (this[kState].readRequests.length) {
      throw new ERR_INVALID_STATE.TypeError(
        'Cannot release with pending read requests');
    }
    readableStreamReaderGenericRelease(this);
  }

  /**
   * @readonly
   * @type {Promise}
   */
  get closed() {
    if (!isReadableStreamDefaultReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader'));
    return this[kState].close.promise;
  }

  /**
   * @param {any} reason
   * @returns {Promise}
   */
  cancel(reason = undefined) {
    if (!isReadableStreamDefaultReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader'));
    if (this[kState].stream === undefined) {
      return PromiseReject(new ERR_INVALID_STATE.TypeError(
        'The reader is not attached to a stream'));
    }
    return readableStreamReaderGenericCancel(this, reason);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], {
      stream: this[kState].stream,
      readRequests: this[kState].readRequests.length,
      close: this[kState].close.promise,
    });
  }
}

ObjectDefineProperties(ReadableStreamDefaultReader.prototype, {
  closed: { enumerable: true },
  read: { enumerable: true },
  releaseLock: { enumerable: true },
  cancel: { enumerable: true },
});

class ReadableStreamBYOBReader {
  [kType] = 'ReadableStreamBYOBReader';

  get [SymbolToStringTag]() { return this[kType]; }

  /**
   * Acquires a lock on the given byte stream for this reader.
   * @param {ReadableStream} stream
   */
  constructor(stream) {
    if (!isReadableStream(stream))
      throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream);
    this[kState] = {
      stream: undefined,
      // Fixed: was misspelled `requestIntoRequests`, which left a dead
      // array behind. releaseLock(), the stream close/error algorithms,
      // and setupReadableStreamBYOBReader all operate on
      // `readIntoRequests`, so [kInspect] always reported 0 pending
      // read-into requests regardless of actual reader state.
      readIntoRequests: [],
      close: {
        promise: undefined,
        resolve: undefined,
        reject: undefined,
      },
    };
    setupReadableStreamBYOBReader(this, stream);
  }

  /**
   * Reads bytes into the caller-supplied view ("bring your own buffer").
   * @param {ArrayBufferView} view
   * @returns {Promise<{
   *   view : ArrayBufferView,
   *   done : boolean,
   * }>}
   */
  read(view) {
    if (!isReadableStreamBYOBReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
    if (!isArrayBufferView(view)) {
      return PromiseReject(
        new ERR_INVALID_ARG_TYPE(
          'view',
          [
            'Buffer',
            'TypedArray',
            'DataView',
          ],
          view));
    }
    const viewByteLength = ArrayBufferViewGetByteLength(view);
    const viewBuffer = ArrayBufferViewGetBuffer(view);
    const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer);

    // A zero byteLength on either the view or its backing buffer also
    // covers the detached-buffer case, since detaching zeroes the length.
    if (viewByteLength === 0 || viewBufferByteLength === 0) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'View ArrayBuffer is zero-length or detached'));
    }
    // Supposed to assert here that the view's buffer is not
    // detached, but there's no API available to use to check that.
    if (this[kState].stream === undefined) {
      return PromiseReject(
        new ERR_INVALID_STATE.TypeError(
          'The reader is not attached to a stream'));
    }
    const readIntoRequest = new ReadIntoRequest();
    readableStreamBYOBReaderRead(this, view, readIntoRequest);
    return readIntoRequest.promise;
  }

  // Releases this reader's lock on the stream. A no-op when the reader is
  // already detached; throws while read()s are still outstanding.
  releaseLock() {
    if (!isReadableStreamBYOBReader(this))
      throw new ERR_INVALID_THIS('ReadableStreamBYOBReader');
    if (this[kState].stream === undefined)
      return;
    if (this[kState].readIntoRequests.length) {
      throw new ERR_INVALID_STATE.TypeError(
        'Cannot release with pending read requests');
    }
    readableStreamReaderGenericRelease(this);
  }

  /**
   * @readonly
   * @type {Promise}
   */
  get closed() {
    if (!isReadableStreamBYOBReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
    return this[kState].close.promise;
  }

  /**
   * @param {any} reason
   * @returns {Promise}
   */
  cancel(reason = undefined) {
    if (!isReadableStreamBYOBReader(this))
      return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader'));
    if (this[kState].stream === undefined) {
      return PromiseReject(new ERR_INVALID_STATE.TypeError(
        'The reader is not attached to a stream'));
    }
    return readableStreamReaderGenericCancel(this, reason);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], {
      stream: this[kState].stream,
      // Renamed along with the state-field fix so inspection reflects the
      // live pending-read queue rather than a dead, always-empty array.
      readIntoRequests: this[kState].readIntoRequests.length,
      close: this[kState].close.promise,
    });
  }
}

ObjectDefineProperties(ReadableStreamBYOBReader.prototype, {
  closed: { enumerable: true },
  read: { enumerable: true },
  releaseLock: { enumerable: true },
  cancel: { enumerable: true },
});

class ReadableStreamDefaultController {
  [kType] = 'ReadableStreamDefaultController';

  get [SymbolToStringTag]() { return this[kType]; }

  // Not user-constructable; instances are created internally via
  // createReadableStreamDefaultController() using ReflectConstruct.
  constructor() {
    throw new ERR_ILLEGAL_CONSTRUCTOR();
  }

  /**
   * @readonly
   * @type {number}
   */
  get desiredSize() {
    // NOTE(review): unlike ReadableByteStreamController, these methods
    // perform no explicit brand check; no isReadableStreamDefaultController
    // helper is defined alongside the other brand checks — confirm intent.
    return readableStreamDefaultControllerGetDesiredSize(this);
  }

  close() {
    if (!readableStreamDefaultControllerCanCloseOrEnqueue(this))
      throw new ERR_INVALID_STATE.TypeError('Controller is already closed');
    readableStreamDefaultControllerClose(this);
  }

  /**
   * @param {any} chunk
   */
  enqueue(chunk = undefined) {
    if (!readableStreamDefaultControllerCanCloseOrEnqueue(this))
      throw new ERR_INVALID_STATE.TypeError('Controller is already closed');
    readableStreamDefaultControllerEnqueue(this, chunk);
  }

  /**
   * @param {any} error
   */
  error(error = undefined) {
    readableStreamDefaultControllerError(this, error);
  }

  [kCancel](reason) {
    return readableStreamDefaultControllerCancelSteps(this, reason);
  }

  [kPull](readRequest) {
    readableStreamDefaultControllerPullSteps(this, readRequest);
  }

  [kInspect](depth, options) {
    return customInspect(depth, options, this[kType], { });
  }
}
+ObjectDefineProperties(ReadableStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamDefaultController'; + this[kState] = {}; + }, + [], + ReadableStreamDefaultController, + ); +} + +class ReadableByteStreamController { + [kType] = 'ReadableByteStreamController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ReadableStreamBYOBRequest} + */ + get byobRequest() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].byobRequest === null && + this[kState].pendingPullIntos.length) { + const { + buffer, + byteOffset, + bytesFilled, + byteLength, + } = this[kState].pendingPullIntos[0]; + const view = + new Uint8Array( + buffer, + byteOffset + bytesFilled, + byteLength - bytesFilled); + this[kState].byobRequest = createReadableStreamBYOBRequest(this, view); + } + return this[kState].byobRequest; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + return readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerClose(this); + } + + /** + * @param {ArrayBufferView} chunk + */ + enqueue(chunk) { + if (!isReadableByteStreamController(this)) + throw new 
ERR_INVALID_THIS('ReadableByteStreamController'); + if (!isArrayBufferView(chunk)) { + throw new ERR_INVALID_ARG_TYPE( + 'chunk', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + chunk); + } + const chunkByteLength = ArrayBufferViewGetByteLength(chunk); + const chunkByteOffset = ArrayBufferViewGetByteOffset(chunk); + const chunkBuffer = ArrayBufferViewGetBuffer(chunk); + const chunkBufferByteLength = ArrayBufferGetByteLength(chunkBuffer); + if (chunkByteLength === 0 || chunkBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'chunk ArrayBuffer is zero-length or detached'); + } + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerEnqueue( + this, + chunkBuffer, + chunkByteLength, + chunkByteOffset); + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + readableByteStreamControllerError(this, error); + } + + [kCancel](reason) { + return readableByteStreamControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableByteStreamControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + +ObjectDefineProperties(ReadableByteStreamController.prototype, { + byobRequest: { enumerable: true }, + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableByteStreamController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableByteStreamController'; + this[kState] = {}; + }, + [], + ReadableByteStreamController, + ); +} + +function createTeeReadableStream(start, pull, cancel) { + return ReflectConstruct( 
+ function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + setupReadableStreamDefaultControllerFromSource( + this, + ObjectCreate(null, { + start: { value: start }, + pull: { value: pull }, + cancel: { value: cancel } + }), + 1, + () => 1); + return makeTransferable(this); + }, [], ReadableStream, + ); +} + +const isReadableStream = + isBrandCheck('ReadableStream'); +const isReadableByteStreamController = + isBrandCheck('ReadableByteStreamController'); +const isReadableStreamBYOBRequest = + isBrandCheck('ReadableStreamBYOBRequest'); +const isReadableStreamDefaultReader = + isBrandCheck('ReadableStreamDefaultReader'); +const isReadableStreamBYOBReader = + isBrandCheck('ReadableStreamBYOBReader'); + +// ---- ReadableStream Implementation + +function readableStreamPipeTo( + source, + dest, + preventClose, + preventAbort, + preventCancel, + signal) { + + let reader; + let writer; + // Both of these can throw synchronously. We want to capture + // the error and return a rejected promise instead. + try { + reader = new ReadableStreamDefaultReader(source); + writer = new WritableStreamDefaultWriter(dest); + } catch (error) { + return PromiseReject(error); + } + + source[kState].disturbed = true; + + let shuttingDown = false; + + if (signal !== undefined && signal?.[kAborted] === undefined) { + return PromiseReject( + new ERR_INVALID_ARG_TYPE( + 'options.signal', + 'AbortSignal', + signal)); + } + + const promise = createDeferredPromise(); + + let currentWrite = PromiseResolve(); + + // The error here can be undefined. The rejected arg + // tells us that the promise must be rejected even + // when error is undefine. 
+ function finalize(rejected, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + if (signal !== undefined) + signal.removeEventListener('abort', abortAlgorithm); + if (rejected) + promise.reject(error); + else + promise.resolve(); + } + + async function waitForCurrentWrite() { + const write = currentWrite; + await write; + if (write !== currentWrite) + await waitForCurrentWrite(); + } + + function shutdownWithAnAction(action, rejected, originalError) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + complete, + (error) => finalize(true, error)); + return; + } + complete(); + + function complete() { + PromisePrototypeThen( + action(), + () => finalize(rejected, originalError), + (error) => finalize(true, error)); + } + } + + function shutdown(rejected, error) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + () => finalize(rejected, error), + (error) => finalize(true, error)); + return; + } + finalize(rejected, error); + } + + function abortAlgorithm() { + // Cannot use the AbortError class here. 
It must be a DOMException + const error = new DOMException('The operation was aborted', 'AbortError'); + const actions = []; + if (!preventAbort) { + ArrayPrototypePush( + actions, + () => { + if (dest[kState].state === 'writable') + return writableStreamAbort(dest, error); + return PromiseResolve(); + }); + } + if (!preventCancel) { + ArrayPrototypePush( + actions, + () => { + if (source[kState].state === 'readable') + return readableStreamCancel(source, error); + return PromiseResolve(); + }); + } + + shutdownWithAnAction( + async () => PromiseAll(actions.map((action) => action())), + true, + error); + } + + function watchErrored(stream, promise, action) { + if (stream[kState].state === 'errored') + action(stream[kState].storedError); + else + PromisePrototypeCatch(promise, action); + } + + function watchClosed(stream, promise, action) { + if (stream[kState].state === 'closed') + action(stream[kState].storedError); + else + PromisePrototypeThen(promise, action, () => {}); + } + + async function step() { + if (shuttingDown) + return true; + await writer[kState].ready.promise; + return new Promise((resolve, reject) => { + readableStreamDefaultReaderRead( + reader, + { + [kChunk](chunk) { + currentWrite = writableStreamDefaultWriterWrite(writer, chunk); + setPromiseHandled(currentWrite); + resolve(false); + }, + [kClose]: () => resolve(true), + [kError]: reject, + }); + }); + } + + async function run() { + // Run until step resolves as true + while (!await step()) {} + } + + if (signal !== undefined) { + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + signal.addEventListener('abort', abortAlgorithm, { once: true }); + } + + setPromiseHandled(run()); + + watchErrored(source, reader[kState].close.promise, (error) => { + if (!preventAbort) { + return shutdownWithAnAction( + () => writableStreamAbort(dest, error), + true, + error); + } + shutdown(true, error); + }); + + watchErrored(dest, writer[kState].close.promise, (error) => { + if 
(!preventCancel) { + return shutdownWithAnAction( + () => readableStreamCancel(source, error), + true, + error); + } + shutdown(true, error); + }); + + watchClosed(source, reader[kState].close.promise, () => { + if (!preventClose) { + return shutdownWithAnAction( + () => writableStreamDefaultWriterCloseWithErrorPropagation(writer)); + } + shutdown(); + }); + + if (writableStreamCloseQueuedOrInFlight(dest) || + dest[kState].state === 'closed') { + const error = new ERR_INVALID_STATE.TypeError( + 'Destination WritableStream is closed'); + if (!preventCancel) { + shutdownWithAnAction( + () => readableStreamCancel(source, error), true, error); + } else { + shutdown(true, error); + } + } + + return promise.promise; +} + +function readableStreamTee(stream, cloneForBranch2) { + const reader = new ReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + const cancelPromise = createDeferredPromise(); + + async function pullAlgorithm() { + if (reading) return; + reading = true; + const readRequest = { + [kChunk](value) { + queueMicrotask(() => { + reading = false; + const value1 = value; + let value2 = value; + if (!canceled2 && cloneForBranch2) { + // Structured Clone + value2 = deserialize(serialize(value2)); + } + if (!canceled1) { + readableStreamDefaultControllerEnqueue( + branch1[kState].controller, + value1); + } + if (!canceled2) { + readableStreamDefaultControllerEnqueue( + branch2[kState].controller, + value2); + } + }); + }, + [kClose]() { + reading = false; + if (!canceled1) + readableStreamDefaultControllerClose(branch1[kState].controller); + if (!canceled2) + readableStreamDefaultControllerClose(branch2[kState].controller); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }, + [kError]() { + reading = false; + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + } + + function cancel1Algorithm(reason) { + canceled1 = true; 
+ reason1 = reason; + if (canceled2) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + branch1 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel1Algorithm); + branch2 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel2Algorithm); + + PromisePrototypeCatch( + reader[kState].close.promise, + (error) => { + readableStreamDefaultControllerError(branch1[kState].controller, error); + readableStreamDefaultControllerError(branch2[kState].controller, error); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }); + + return [branch1, branch2]; +} + +function readableByteStreamControllerConvertPullIntoDescriptor(desc) { + const { + buffer, + bytesFilled, + byteLength, + byteOffset, + ctor, + elementSize, + } = desc; + if (bytesFilled > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + assert(!(bytesFilled % elementSize)); + const transferedBuffer = transferArrayBuffer(buffer); + return new ctor(transferedBuffer, byteOffset, bytesFilled / elementSize); +} + +function isReadableStreamLocked(stream) { + return stream[kState].reader !== undefined; +} + +function readableStreamCancel(stream, reason) { + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + return PromiseResolve(); + case 'errored': + return PromiseReject(stream[kState].storedError); + } + readableStreamClose(stream); + const { + reader, + } = stream[kState]; + if (reader !== undefined && readableStreamHasBYOBReader(stream)) { + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kClose](); + 
reader[kState].readIntoRequests = []; + } + + return PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kCancel], + stream[kState].controller, + reason), + () => {}); +} + +function readableStreamClose(stream) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'closed'; + + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.resolve(); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kClose](); + reader[kState].readRequests = []; + } +} + +function readableStreamError(stream, error) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'errored'; + stream[kState].storedError = error; + + const { + reader + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.reject(error); + setPromiseHandled(reader[kState].close.promise); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kError](error); + reader[kState].readRequests = []; + } else { + assert(readableStreamHasBYOBReader(stream)); + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kError](error); + reader[kState].readIntoRequests = []; + } +} + +function readableStreamHasDefaultReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + reader[kType] === 'ReadableStreamDefaultReader'; +} + +function readableStreamGetNumReadRequests(stream) { + assert(readableStreamHasDefaultReader(stream)); + return stream[kState].reader[kState].readRequests.length; +} + +function readableStreamHasBYOBReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + 
reader[kType] === 'ReadableStreamBYOBReader'; +} + +function readableStreamGetNumReadIntoRequests(stream) { + assert(readableStreamHasBYOBReader(stream)); + return stream[kState].reader[kState].readIntoRequests.length; +} + +function readableStreamFulfillReadRequest(stream, chunk, done) { + assert(readableStreamHasDefaultReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readRequests.length); + const readRequest = ArrayPrototypeShift(reader[kState].readRequests); + + // TODO(@jasnell): It's not clear under what exact conditions done + // will be true here. The spec requires this check but none of the + // WPT's or other tests trigger it. Will need to investigate how to + // get coverage for this. + if (done) + readRequest[kClose](); + else + readRequest[kChunk](chunk); +} + +function readableStreamFulfillReadIntoRequest(stream, chunk, done) { + assert(readableStreamHasBYOBReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readIntoRequests.length); + const readIntoRequest = ArrayPrototypeShift(reader[kState].readIntoRequests); + if (done) + readIntoRequest[kClose](chunk); + else + readIntoRequest[kChunk](chunk); +} + +function readableStreamAddReadRequest(stream, readRequest) { + assert(readableStreamHasDefaultReader(stream)); + assert(stream[kState].state === 'readable'); + ArrayPrototypePush(stream[kState].reader[kState].readRequests, readRequest); +} + +function readableStreamAddReadIntoRequest(stream, readIntoRequest) { + assert(readableStreamHasBYOBReader(stream)); + assert(stream[kState].state !== 'errored'); + ArrayPrototypePush( + stream[kState].reader[kState].readIntoRequests, + readIntoRequest); +} + +function readableStreamReaderGenericCancel(reader, reason) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + return readableStreamCancel(stream, reason); +} + +function readableStreamReaderGenericInitialize(reader, stream) { + reader[kState].stream = stream; + 
stream[kState].reader = reader; + switch (stream[kState].state) { + case 'readable': + reader[kState].close = createDeferredPromise(); + break; + case 'closed': + reader[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + case 'errored': + reader[kState].close = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(reader[kState].close.promise); + break; + } +} + +function readableStreamReaderGenericRelease(reader) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + assert(stream[kState].reader === reader); + + if (stream[kState].state === 'readable') { + reader[kState].close.reject?.( + new ERR_INVALID_STATE.TypeError('Reader released')); + } else { + reader[kState].close = { + promise: PromiseReject( + new ERR_INVALID_STATE.TypeError('Reader released')), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(reader[kState].close.promise); + stream[kState].reader = undefined; + reader[kState].stream = undefined; +} + +function readableStreamBYOBReaderRead(reader, view, readIntoRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + if (stream[kState].state === 'errored') { + readIntoRequest[kError](stream[kState].storedError); + return; + } + readableByteStreamControllerPullInto( + stream[kState].controller, + view, + readIntoRequest); +} + +function readableStreamDefaultReaderRead(reader, readRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + readRequest[kClose](); + break; + case 'errored': + readRequest[kError](stream[kState].storedError); + break; + case 'readable': + stream[kState].controller[kPull](readRequest); + } +} + +function setupReadableStreamBYOBReader(reader, stream) { + if 
(isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + const { + controller, + } = stream[kState]; + if (!isReadableByteStreamController(controller)) + throw new ERR_INVALID_ARG_VALUE('reader', reader, 'must be a byte stream'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readIntoRequests = []; +} + +function setupReadableStreamDefaultReader(reader, stream) { + if (isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readRequests = []; +} + +function readableStreamDefaultControllerClose(controller) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + controller[kState].closeRequested = true; + if (!controller[kState].queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(controller[kState].stream); + } +} + +function readableStreamDefaultControllerEnqueue(controller, chunk) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + + const { + stream, + } = controller[kState]; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + try { + const chunkSize = + FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function readableStreamDefaultControllerHasBackpressure(controller) { + return !readableStreamDefaultControllerShouldCallPull(controller); +} + +function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const { + stream, + } = controller[kState]; + return !controller[kState].closeRequested && + 
stream[kState].state === 'readable'; +} + +function readableStreamDefaultControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: + return highWaterMark - queueTotalSize; + } +} + +function readableStreamDefaultControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller) || + !controller[kState].started) + return false; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + return true; + } + + const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableStreamDefaultControllerCallPullIfNeeded(controller) { + if (!readableStreamDefaultControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function readableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function readableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'readable') { + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + 
readableStreamError(stream, error); + } +} + +function readableStreamDefaultControllerCancelSteps(controller, reason) { + resetQueue(controller); + const result = controller[kState].cancelAlgorithm(reason); + readableStreamDefaultControllerClearAlgorithms(controller); + return result; +} + +function readableStreamDefaultControllerPullSteps(controller, readRequest) { + const { + stream, + queue, + } = controller[kState]; + if (queue.length) { + const chunk = dequeueValue(controller); + if (controller[kState].closeRequested && !queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + readRequest[kChunk](chunk); + return; + } + readableStreamAddReadRequest(stream, readRequest); + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(stream[kState].controller === undefined); + controller[kState] = { + cancelAlgorithm, + closeRequested: false, + highWaterMark, + pullAgain: false, + pullAlgorithm, + pulling: false, + queue: [], + queueTotalSize: 0, + started: false, + sizeAlgorithm, + stream, + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function setupReadableStreamDefaultControllerFromSource( + stream, + source, + highWaterMark, + sizeAlgorithm) { + const controller = createReadableStreamDefaultController(); + const start = source?.start; + const pull = source?.pull; + const cancel = 
source?.cancel; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function readableByteStreamControllerClose(controller) { + const { + closeRequested, + pendingPullIntos, + queueTotalSize, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + if (queueTotalSize) { + controller[kState].closeRequested = true; + return; + } + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + if (firstPendingPullInto.bytesFilled > 0) { + const error = new ERR_INVALID_STATE.TypeError('Partial read'); + readableByteStreamControllerError(controller, error); + throw error; + } + } + + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); +} + +function readableByteStreamControllerCommitPullIntoDescriptor(stream, desc) { + assert(stream[kState].state !== 'errored'); + let done = false; + if (stream[kState].state === 'closed') { + desc.bytesFilled = 0; + done = true; + } + + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + + if (desc.type === 'default') { + readableStreamFulfillReadRequest(stream, filledView, done); + } else { + assert(desc.type === 'byob'); + readableStreamFulfillReadIntoRequest(stream, filledView, done); + } +} + +function readableByteStreamControllerInvalidateBYOBRequest(controller) { + if (controller[kState].byobRequest === null) + return; + controller[kState].byobRequest[kState].controller = undefined; + controller[kState].byobRequest[kState].view = null; + controller[kState].byobRequest = null; +} + +function 
readableByteStreamControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; +} + +function readableByteStreamControllerClearPendingPullIntos(controller) { + readableByteStreamControllerInvalidateBYOBRequest(controller); + controller[kState].pendingPullIntos = []; +} + +function readableByteStreamControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: return highWaterMark - queueTotalSize; + } +} + +function readableByteStreamControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable' || + controller[kState].closeRequested || + !controller[kState].started) { + return false; + } + if (readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0) { + return true; + } + + if (readableStreamHasBYOBReader(stream) && + readableStreamGetNumReadIntoRequests(stream) > 0) { + return true; + } + + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableByteStreamControllerHandleQueueDrain(controller) { + const { + closeRequested, + queueTotalSize, + stream, + } = controller[kState]; + assert(stream[kState].state === 'readable'); + if (!queueTotalSize && closeRequested) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + return; + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerPullInto( + controller, + view, + readIntoRequest) { + const { + closeRequested, + stream, + pendingPullIntos, + } = controller[kState]; + let elementSize = 1; + let ctor = DataViewCtor; + if (isArrayBufferView(view) && !isDataView(view)) { + elementSize = 
view.constructor.BYTES_PER_ELEMENT; + ctor = view.constructor; + } + const buffer = ArrayBufferViewGetBuffer(view); + const byteOffset = ArrayBufferViewGetByteOffset(view); + const byteLength = ArrayBufferViewGetByteLength(view); + const bufferByteLength = ArrayBufferGetByteLength(buffer); + + let transferedBuffer; + try { + transferedBuffer = transferArrayBuffer(buffer); + } catch (error) { + readIntoRequest[kError](error); + return; + } + const desc = { + buffer: transferedBuffer, + bufferByteLength, + byteOffset, + byteLength, + bytesFilled: 0, + elementSize, + ctor, + type: 'byob', + }; + if (pendingPullIntos.length) { + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + return; + } + if (stream[kState].state === 'closed') { + const emptyView = new ctor(desc.buffer, byteOffset, 0); + readIntoRequest[kClose](emptyView); + return; + } + if (controller[kState].queueTotalSize) { + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + readableByteStreamControllerHandleQueueDrain(controller); + readIntoRequest[kChunk](filledView); + return; + } + if (closeRequested) { + const error = new ERR_INVALID_STATE.TypeError('ReadableStream closed'); + readableByteStreamControllerError(controller, error); + readIntoRequest[kError](error); + return; + } + } + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespondInternal(controller, bytesWritten) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + const desc = pendingPullIntos[0]; + readableByteStreamControllerInvalidateBYOBRequest(controller); + if (stream[kState].state === 'closed') { + if (bytesWritten) + throw new ERR_INVALID_STATE.TypeError( + 'Controller is closed but 
view is not zero-length'); + readableByteStreamControllerRespondInClosedState(controller, desc); + } else { + assert(stream[kState].state === 'readable'); + if (!bytesWritten) + throw new ERR_INVALID_STATE.TypeError('View cannot be zero-length'); + readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespond(controller, bytesWritten) { + const { + pendingPullIntos, + stream, + } = controller[kState]; + assert(pendingPullIntos.length); + const desc = pendingPullIntos[0]; + + if (stream[kState].state === 'closed') { + if (bytesWritten !== 0) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + } else { + assert(stream[kState].state === 'readable'); + + if (!bytesWritten) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + + if ((desc.bytesFilled + bytesWritten) > desc.byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('bytesWritten', bytesWritten); + } + + desc.buffer = transferArrayBuffer(desc.buffer); + + readableByteStreamControllerRespondInternal(controller, bytesWritten); +} + +function readableByteStreamControllerRespondInClosedState(controller, desc) { + assert(!desc.bytesFilled); + const { + stream, + } = controller[kState]; + if (readableStreamHasBYOBReader(stream)) { + while (readableStreamGetNumReadIntoRequests(stream) > 0) { + readableByteStreamControllerCommitPullIntoDescriptor( + stream, + readableByteStreamControllerShiftPendingPullInto(controller)); + } + } +} + +function readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + size, + desc) { + const { + pendingPullIntos, + byobRequest, + } = controller[kState]; + assert(!pendingPullIntos.length || pendingPullIntos[0] === desc); + assert(byobRequest === null); + desc.bytesFilled += size; +} + +function readableByteStreamControllerEnqueue( + controller, + buffer, + byteLength, + byteOffset) { + const { + 
closeRequested, + pendingPullIntos, + queue, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + const transferedBuffer = transferArrayBuffer(buffer); + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + + const pendingBufferByteLength = + ArrayBufferGetByteLength(firstPendingPullInto.buffer); + if (pendingBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'Destination ArrayBuffer is zero-length or detached'); + } + + firstPendingPullInto.buffer = + transferArrayBuffer(firstPendingPullInto.buffer); + } + + readableByteStreamControllerInvalidateBYOBRequest(controller); + + if (readableStreamHasDefaultReader(stream)) { + if (!readableStreamGetNumReadRequests(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } else { + assert(!queue.length); + const transferedView = + new Uint8Array(transferedBuffer, byteOffset, byteLength); + readableStreamFulfillReadRequest(stream, transferedView, false); + } + } else if (readableStreamHasBYOBReader(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller); + } else { + assert(!isReadableStreamLocked(stream)); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength) { + ArrayPrototypePush( + controller[kState].queue, + { + buffer, + byteOffset, + byteLength, + }); + controller[kState].queueTotalSize += byteLength; +} + +function readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc) { + const { + buffer, + byteLength, + byteOffset, + 
bytesFilled, + elementSize, + } = desc; + const currentAlignedBytes = bytesFilled - (bytesFilled % elementSize); + const maxBytesToCopy = MathMin( + controller[kState].queueTotalSize, + byteLength - bytesFilled); + const maxBytesFilled = bytesFilled + maxBytesToCopy; + const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize); + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + if (maxAlignedBytes > currentAlignedBytes) { + totalBytesToCopyRemaining = maxAlignedBytes - bytesFilled; + ready = true; + } + const { + queue, + } = controller[kState]; + + while (totalBytesToCopyRemaining) { + const headOfQueue = queue[0]; + const bytesToCopy = MathMin( + totalBytesToCopyRemaining, + headOfQueue.byteLength); + const destStart = byteOffset + desc.bytesFilled; + const arrayBufferByteLength = ArrayBufferGetByteLength(buffer); + if (arrayBufferByteLength - destStart < bytesToCopy) { + throw new ERR_INVALID_STATE.RangeError( + 'view ArrayBuffer size is invalid'); + } + assert(arrayBufferByteLength - destStart >= bytesToCopy); + copyArrayBuffer( + buffer, + destStart, + headOfQueue.buffer, + headOfQueue.byteOffset, + bytesToCopy); + if (headOfQueue.byteLength === bytesToCopy) { + ArrayPrototypeShift(queue); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller[kState].queueTotalSize -= bytesToCopy; + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesToCopy, + desc); + totalBytesToCopyRemaining -= bytesToCopy; + } + + if (!ready) { + assert(!controller[kState].queueTotalSize); + assert(desc.bytesFilled > 0); + assert(desc.bytesFilled < elementSize); + } + return ready; +} + +function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller) { + const { + closeRequested, + pendingPullIntos, + stream, + } = controller[kState]; + assert(!closeRequested); + while (pendingPullIntos.length) { + if (!controller[kState].queueTotalSize) + return; + 
const desc = pendingPullIntos[0]; + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + readableByteStreamControllerShiftPendingPullInto(controller); + readableByteStreamControllerCommitPullIntoDescriptor(stream, desc); + } + } +} + +function readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc) { + const { + buffer, + bytesFilled, + byteLength, + } = desc; + + if (bytesFilled + bytesWritten > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesWritten, + desc); + + if (desc.bytesFilled < desc.elementSize) + return; + + readableByteStreamControllerShiftPendingPullInto(controller); + + const remainderSize = desc.bytesFilled % desc.elementSize; + + if (remainderSize) { + const end = desc.byteOffset + desc.bytesFilled; + const start = end - remainderSize; + const remainder = + ArrayBufferPrototypeSlice( + buffer, + start, + end); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + remainder, + 0, + ArrayBufferGetByteLength(remainder)); + } + desc.bytesFilled -= remainderSize; + readableByteStreamControllerCommitPullIntoDescriptor( + controller[kState].stream, + desc); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); +} + +function readableByteStreamControllerRespondWithNewView(controller, view) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + assert(pendingPullIntos.length); + + const desc = pendingPullIntos[0]; + assert(stream[kState].state !== 'errored'); + + if (!isArrayBufferView(view)) { + throw new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewByteOffset = ArrayBufferViewGetByteOffset(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = 
ArrayBufferGetByteLength(viewBuffer); + + const { + byteOffset, + byteLength, + bytesFilled, + bufferByteLength, + } = desc; + + if (byteOffset + bytesFilled !== viewByteOffset) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bytesFilled + viewByteOffset > byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bufferByteLength !== viewBufferByteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + desc.buffer = transferArrayBuffer(viewBuffer); + + readableByteStreamControllerRespondInternal(controller, viewByteLength); +} + +function readableByteStreamControllerShiftPendingPullInto(controller) { + assert(controller[kState].byobRequest === null); + return ArrayPrototypeShift(controller[kState].pendingPullIntos); +} + +function readableByteStreamControllerCallPullIfNeeded(controller) { + if (!readableByteStreamControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function readableByteStreamControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable') + return; + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, error); +} + +function readableByteStreamControllerCancelSteps(controller, reason) { + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + const result = 
controller[kState].cancelAlgorithm(reason); + readableByteStreamControllerClearAlgorithms(controller); + return result; +} + +function readableByteStreamControllerPullSteps(controller, readRequest) { + const { + pendingPullIntos, + queue, + queueTotalSize, + stream, + } = controller[kState]; + assert(readableStreamHasDefaultReader(stream)); + if (queueTotalSize) { + assert(!readableStreamGetNumReadRequests(stream)); + const { + buffer, + byteOffset, + byteLength, + } = ArrayPrototypeShift(queue); + controller[kState].queueTotalSize -= byteLength; + readableByteStreamControllerHandleQueueDrain(controller); + const view = new Uint8Array(buffer, byteOffset, byteLength); + readRequest[kChunk](view); + return; + } + const { + autoAllocateChunkSize, + } = controller[kState]; + if (autoAllocateChunkSize !== undefined) { + try { + const buffer = new ArrayBuffer(autoAllocateChunkSize); + ArrayPrototypePush( + pendingPullIntos, + { + buffer, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + elementSize: 1, + ctor: Uint8Array, + type: 'default', + }); + } catch (error) { + readRequest[kError](error); + return; + } + } + + readableStreamAddReadRequest(stream, readRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize) { + assert(stream[kState].controller === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(NumberIsInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize > 0); + } + controller[kState] = { + byobRequest: null, + closeRequested: false, + pullAgain: false, + pulling: false, + started: false, + stream, + queue: [], + queueTotalSize: 0, + highWaterMark, + pullAlgorithm, + cancelAlgorithm, + autoAllocateChunkSize, + pendingPullIntos: [], + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + 
PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function setupReadableByteStreamControllerFromSource( + stream, + source, + highWaterMark) { + const controller = createReadableByteStreamController(); + const start = source?.start; + const pull = source?.pull; + const cancel = source?.cancel; + const autoAllocateChunkSize = source?.autoAllocateChunkSize; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + if (autoAllocateChunkSize === 0) { + throw new ERR_INVALID_ARG_VALUE( + 'source.autoAllocateChunkSize', + autoAllocateChunkSize); + } + setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize); +} + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransferedReadableStream, + + // Exported Brand Checks + isReadableStream, + isReadableByteStreamController, + isReadableStreamBYOBRequest, + isReadableStreamDefaultReader, + isReadableStreamBYOBReader, + isWritableStreamDefaultWriter, + isWritableStreamDefaultController, + + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + isReadableStreamLocked, + readableStreamCancel, + readableStreamClose, + readableStreamError, + readableStreamHasDefaultReader, + readableStreamGetNumReadRequests, + readableStreamHasBYOBReader, + 
readableStreamGetNumReadIntoRequests, + readableStreamFulfillReadRequest, + readableStreamFulfillReadIntoRequest, + readableStreamAddReadRequest, + readableStreamAddReadIntoRequest, + readableStreamReaderGenericCancel, + readableStreamReaderGenericInitialize, + readableStreamReaderGenericRelease, + readableStreamBYOBReaderRead, + readableStreamDefaultReaderRead, + setupReadableStreamBYOBReader, + setupReadableStreamDefaultReader, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerHasBackpressure, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerShouldCallPull, + readableStreamDefaultControllerCallPullIfNeeded, + readableStreamDefaultControllerClearAlgorithms, + readableStreamDefaultControllerError, + readableStreamDefaultControllerCancelSteps, + readableStreamDefaultControllerPullSteps, + setupReadableStreamDefaultController, + setupReadableStreamDefaultControllerFromSource, + readableByteStreamControllerClose, + readableByteStreamControllerCommitPullIntoDescriptor, + readableByteStreamControllerInvalidateBYOBRequest, + readableByteStreamControllerClearAlgorithms, + readableByteStreamControllerClearPendingPullIntos, + readableByteStreamControllerGetDesiredSize, + readableByteStreamControllerShouldCallPull, + readableByteStreamControllerHandleQueueDrain, + readableByteStreamControllerPullInto, + readableByteStreamControllerRespondInternal, + readableByteStreamControllerRespond, + readableByteStreamControllerRespondInClosedState, + readableByteStreamControllerFillHeadPullIntoDescriptor, + readableByteStreamControllerEnqueue, + readableByteStreamControllerEnqueueChunkToQueue, + readableByteStreamControllerFillPullIntoDescriptorFromQueue, + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue, + readableByteStreamControllerRespondInReadableState, + readableByteStreamControllerRespondWithNewView, + 
readableByteStreamControllerShiftPendingPullInto, + readableByteStreamControllerCallPullIfNeeded, + readableByteStreamControllerError, + readableByteStreamControllerCancelSteps, + readableByteStreamControllerPullSteps, + setupReadableByteStreamController, + setupReadableByteStreamControllerFromSource, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js new file mode 100644 index 00000000000000..72cdc36a153564 --- /dev/null +++ b/lib/internal/webstreams/transfer.js @@ -0,0 +1,299 @@ +'use strict'; + +const { + ObjectDefineProperties, + PromiseResolve, + ReflectConstruct, +} = primordials; + +const { + kState, + setPromiseHandled, +} = require('internal/webstreams/util'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + ReadableStream, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerError, + readableStreamPipeTo, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const { + createDeferredPromise, +} = require('internal/util'); + +const assert = require('internal/assert'); + +const { + makeTransferable, + kClone, + kDeserialize, +} = require('internal/worker/js_transferable'); + +// This class is a bit of a hack. The Node.js implementation of +// DOMException is not transferable/cloneable. This provides us +// with a variant that is. Unfortunately, it means playing around +// a bit with the message, name, and code properties and the +// prototype. We can revisit this if DOMException is ever made +// properly cloneable. 
+class CloneableDOMException extends DOMException { + constructor(message, name) { + super(message, name); + this[kDeserialize]({ + message: this.message, + name: this.name, + code: this.code, + }); + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + [kClone]() { + return { + data: { + message: this.message, + name: this.name, + code: this.code, + }, + deserializeInfo: + 'internal/webstreams/transfer:InternalCloneableDOMException' + }; + } + + [kDeserialize]({ message, name, code }) { + ObjectDefineProperties(this, { + message: { + configurable: true, + enumerable: true, + get() { return message; }, + }, + name: { + configurable: true, + enumerable: true, + get() { return name; }, + }, + code: { + configurable: true, + enumerable: true, + get() { return code; }, + }, + }); + } +} + +function InternalCloneableDOMException() { + return makeTransferable( + ReflectConstruct( + CloneableDOMException, + [], + DOMException)); +} +InternalCloneableDOMException[kDeserialize] = () => {}; + +class CrossRealmTransformReadableSource { + constructor(port) { + this[kState] = { + port, + controller: undefined, + }; + + port.onmessage = ({ data }) => { + const { + controller, + } = this[kState]; + const { + type, + value, + } = data; + switch (type) { + case 'chunk': + readableStreamDefaultControllerEnqueue( + controller, + value); + break; + case 'close': + readableStreamDefaultControllerClose(controller); + port.close(); + break; + case 'error': + readableStreamDefaultControllerError(controller, value); + port.close(); + break; + } + }; + + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + readableStreamDefaultControllerError( + this[kState].controller, + error); + port.close(); + }; + } + + start(controller) { + this[kState].controller = controller; + } + + async pull() { + 
this[kState].port.postMessage({ type: 'pull' }); + } + + async cancel(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +class CrossRealmTransformWritableSink { + constructor(port) { + this[kState] = { + port, + controller: undefined, + backpressurePromise: createDeferredPromise(), + }; + + port.onmessage = ({ data }) => { + assert(typeof data === 'object'); + const { + type, + value + } = { ...data }; + assert(typeof type === 'string'); + switch (type) { + case 'pull': + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + case 'error': + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + value); + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + } + }; + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + error); + port.close(); + }; + + } + + start(controller) { + this[kState].controller = controller; + } + + async write(chunk) { + if (this[kState].backpressurePromise === undefined) { + this[kState].backpressurePromise = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + await this[kState].backpressurePromise.promise; + this[kState].backpressurePromise = createDeferredPromise(); + try { + this[kState].port.postMessage({ type: 'chunk', value: 
chunk }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + this[kState].port.close(); + throw error; + } + } + + close() { + this[kState].port.postMessage({ type: 'close' }); + this[kState].port.close(); + } + + abort(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +function newCrossRealmReadableStream(writable, port) { + const readable = + new ReadableStream( + new CrossRealmTransformReadableSource(port)); + + const promise = + readableStreamPipeTo(readable, writable, false, false, false); + + setPromiseHandled(promise); + + return { + readable, + promise, + }; +} + +function newCrossRealmWritableSink(readable, port) { + const writable = + new WritableStream( + new CrossRealmTransformWritableSink(port)); + + const promise = readableStreamPipeTo(readable, writable, false, false, false); + setPromiseHandled(promise); + return { + writable, + promise, + }; +} + +module.exports = { + newCrossRealmReadableStream, + newCrossRealmWritableSink, + CrossRealmTransformWritableSink, + CrossRealmTransformReadableSource, + CloneableDOMException, + InternalCloneableDOMException, +}; diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js new file mode 100644 index 00000000000000..745675266f7f1b --- /dev/null +++ b/lib/internal/webstreams/transformstream.js @@ -0,0 +1,591 @@ +'use strict'; + +const { + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeCatch, + 
PromisePrototypeThen, + PromiseResolve, + ReflectConstruct, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + customInspect, + ensureIsPromise, + extractHighWaterMark, + extractSizeAlgorithm, + isBrandCheck, + nonOpFlush, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + ReadableStream, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerError, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerHasBackpressure, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +/** + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback TransformerStartCallback + * @param {TransformStreamDefaultController} controller; + * + * @callback TransformerFlushCallback + * @param {TransformStreamDefaultController} controller; + * @returns {Promise} + * + * @callback TransformerTransformCallback + * @param {any} chunk + * @param {TransformStreamDefaultController} controller + * @returns {Promise} + * + * @typedef {{ + * start? : TransformerStartCallback, + * transform? : TransformerTransformCallback, + * flush? : TransformerFlushCallback, + * readableType? : any, + * writableType? 
: any, + * }} Transformer + */ + +class TransformStream { + [kType] = 'TransformStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {Transformer} [transformer] + * @param {QueuingStrategy} [writableStrategy] + * @param {QueuingStrategy} [readableStrategy] + */ + constructor( + transformer = null, + writableStrategy = {}, + readableStrategy = {}) { + const readableType = transformer?.readableType; + const writableType = transformer?.writableType; + const start = transformer?.start; + + if (readableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.readableType', + readableType); + } + if (writableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.writableType', + writableType); + } + + const readableHighWaterMark = readableStrategy?.highWaterMark; + const readableSize = readableStrategy?.size; + + const writableHighWaterMark = writableStrategy?.highWaterMark; + const writableSize = writableStrategy?.size; + + const actualReadableHighWaterMark = + extractHighWaterMark(readableHighWaterMark, 0); + const actualReadableSize = extractSizeAlgorithm(readableSize); + + const actualWritableHighWaterMark = + extractHighWaterMark(writableHighWaterMark, 1); + const actualWritableSize = extractSizeAlgorithm(writableSize); + + const startPromise = createDeferredPromise(); + + initializeTransformStream( + this, + startPromise, + actualWritableHighWaterMark, + actualWritableSize, + actualReadableHighWaterMark, + actualReadableSize); + + setupTransformStreamDefaultControllerFromTransformer(this, transformer); + + if (start !== undefined) { + startPromise.resolve( + FunctionPrototypeCall( + start, + transformer, + this[kState].controller)); + } else { + startPromise.resolve(); + } + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {ReadableStream} + */ + get readable() { + if (!isTransformStream(this)) + throw new 
ERR_INVALID_THIS('TransformStream'); + return this[kState].readable; + } + + /** + * @readonly + * @type {WritableStream} + */ + get writable() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + return this[kState].writable; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + readable: this.readable, + writable: this.writable, + backpressure: this[kState].backpressure, + }); + } + + [kTransfer]() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + const { + readable, + writable, + } = this[kState]; + if (readable.locked) { + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + if (writable.locked) { + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + return { + data: { + readable, + writable, + }, + deserializeInfo: + 'internal/webstreams/transformstream:TransferedTransformStream' + }; + } + + [kTransferList]() { + return [ this[kState].readable, this[kState].writable ]; + } + + [kDeserialize]({ readable, writable }) { + this[kState].readable = readable; + this[kState].writable = writable; + } +} + +ObjectDefineProperties(TransformStream.prototype, { + readable: { enumerable: true }, + writable: { enumerable: true }, +}); + +function TransferedTransformStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'TransformStream'; + this[kState] = { + readable: undefined, + writable: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + controller: undefined, + }; + }, + [], TransformStream)); +} +TransferedTransformStream.prototype[kDeserialize] = () => {}; + +class TransformStreamDefaultController { + [kType] = 'TransformStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + 
/** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + const { + stream, + } = this[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + return readableStreamDefaultControllerGetDesiredSize(readableController); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} reason + */ + error(reason = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerError(this, reason); + } + + terminate() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerTerminate(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(TransformStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + terminate: { enumerable: true }, +}); + +function createTransformStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'TransformStreamDefaultController'; + }, + [], + TransformStreamDefaultController); +} + +const isTransformStream = + isBrandCheck('TransformStream'); +const isTransformStreamDefaultController = + isBrandCheck('TransformStreamDefaultController'); + +async function defaultTransformAlgorithm(chunk, controller) { + transformStreamDefaultControllerEnqueue(controller, chunk); +} + +function initializeTransformStream( + stream, + startPromise, + 
writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm) { + + const writable = new WritableStream({ + start() { return startPromise.promise; }, + write(chunk) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + }, + abort(reason) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + }, + close() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + }, + }, { + highWaterMark: writableHighWaterMark, + size: writableSizeAlgorithm, + }); + + const readable = new ReadableStream({ + start() { return startPromise.promise; }, + pull() { + return transformStreamDefaultSourcePullAlgorithm(stream); + }, + cancel(reason) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return PromiseResolve(); + }, + }, { + highWaterMark: readableHighWaterMark, + size: readableSizeAlgorithm, + }); + + stream[kState] = { + readable, + writable, + controller: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + + transformStreamSetBackpressure(stream, true); +} + +function transformStreamError(stream, error) { + const { + readable, + } = stream[kState]; + const { + controller, + } = readable[kState]; + readableStreamDefaultControllerError(controller, error); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +function transformStreamErrorWritableAndUnblockWrite(stream, error) { + const { + controller, + writable, + } = stream[kState]; + transformStreamDefaultControllerClearAlgorithms(controller); + writableStreamDefaultControllerErrorIfNeeded( + writable[kState].controller, + error); + if (stream[kState].backpressure) + transformStreamSetBackpressure(stream, false); +} + +function transformStreamSetBackpressure(stream, backpressure) { + assert(stream[kState].backpressure !== backpressure); + if (stream[kState].backpressureChange.promise !== undefined) + 
stream[kState].backpressureChange.resolve?.(); + stream[kState].backpressureChange = createDeferredPromise(); + stream[kState].backpressure = backpressure; +} + +function setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm) { + assert(isTransformStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + stream, + transformAlgorithm, + flushAlgorithm, + }; + stream[kState].controller = controller; +} + +function setupTransformStreamDefaultControllerFromTransformer( + stream, + transformer) { + const controller = createTransformStreamDefaultController(); + const transform = transformer?.transform || defaultTransformAlgorithm; + const flush = transformer?.flush || nonOpFlush; + const transformAlgorithm = + FunctionPrototypeBind(transform, transformer); + const flushAlgorithm = + FunctionPrototypeBind(flush, transformer); + + setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm); +} + +function transformStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].transformAlgorithm = undefined; + controller[kState].flushAlgorithm = undefined; +} + +function transformStreamDefaultControllerEnqueue(controller, chunk) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) + throw new ERR_INVALID_STATE.TypeError('Unable to enqueue'); + try { + readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (error) { + transformStreamErrorWritableAndUnblockWrite(stream, error); + throw readable[kState].storedError; + } + const backpressure = + readableStreamDefaultControllerHasBackpressure(readableController); + if (backpressure !== stream[kState].backpressure) { + assert(backpressure); + transformStreamSetBackpressure(stream, true); + } 
+} + +function transformStreamDefaultControllerError(controller, error) { + transformStreamError(controller[kState].stream, error); +} + +function transformStreamDefaultControllerPerformTransform(controller, chunk) { + const transformPromise = + ensureIsPromise( + controller[kState].transformAlgorithm, + controller, + chunk, + controller); + return PromisePrototypeCatch( + transformPromise, + (error) => { + transformStreamError(controller[kState].stream, error); + throw error; + }); +} + +function transformStreamDefaultControllerTerminate(controller) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + assert(readable !== undefined); + const { + controller: readableController, + } = readable[kState]; + readableStreamDefaultControllerClose(readableController); + transformStreamErrorWritableAndUnblockWrite( + stream, + new ERR_INVALID_STATE.TypeError('TransformStream has been terminated')); +} + +function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) { + const { + writable, + controller, + } = stream[kState]; + assert(writable[kState].state === 'writable'); + if (stream[kState].backpressure) { + const backpressureChange = stream[kState].backpressureChange.promise; + return PromisePrototypeThen( + backpressureChange, + () => { + const { + writable, + } = stream[kState]; + if (writable[kState].state === 'erroring') + throw writable[kState].storedError; + assert(writable[kState].state === 'writable'); + return transformStreamDefaultControllerPerformTransform( + controller, + chunk); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); +} + +async function transformStreamDefaultSinkAbortAlgorithm(stream, reason) { + transformStreamError(stream, reason); +} + +function transformStreamDefaultSinkCloseAlgorithm(stream) { + const { + readable, + controller, + } = stream[kState]; + + const flushPromise = + ensureIsPromise( + controller[kState].flushAlgorithm, + controller, + controller); + 
transformStreamDefaultControllerClearAlgorithms(controller); + return PromisePrototypeThen( + flushPromise, + () => { + if (readable[kState].state === 'errored') + throw readable[kState].storedError; + readableStreamDefaultControllerClose(readable[kState].controller); + }, + (error) => { + transformStreamError(stream, error); + throw readable[kState].storedError; + }); +} + +function transformStreamDefaultSourcePullAlgorithm(stream) { + assert(stream[kState].backpressure); + assert(stream[kState].backpressureChange.promise !== undefined); + transformStreamSetBackpressure(stream, false); + return stream[kState].backpressureChange.promise; +} + +module.exports = { + TransformStream, + TransformStreamDefaultController, + TransferedTransformStream, + + // Exported Brand Checks + isTransformStream, + isTransformStreamDefaultController, +}; diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js new file mode 100644 index 00000000000000..e0876caf81b944 --- /dev/null +++ b/lib/internal/webstreams/util.js @@ -0,0 +1,237 @@ +'use strict'; + +const { + ArrayBufferPrototype, + ArrayPrototypePush, + ArrayPrototypeShift, + AsyncIteratorPrototype, + FunctionPrototypeCall, + MathMax, + NumberIsNaN, + ObjectCreate, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectGet, + Symbol, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, + }, +} = require('internal/errors'); + +const { + copyArrayBuffer, + detachArrayBuffer +} = internalBinding('buffer'); + +const { + isPromise, +} = require('util/types'); + +const { + inspect, +} = require('util'); + +const { + getPromiseDetails, + kPending, +} = internalBinding('util'); + +const assert = require('internal/assert'); + +const kState = Symbol('kState'); +const kType = Symbol('kType'); + +const AsyncIterator = ObjectCreate(AsyncIteratorPrototype, { + next: { + configurable: true, + enumerable: true, + writable: true, + }, + return: { + 
configurable: true, + enumerable: true, + writable: true, + }, +}); + +function extractHighWaterMark(value, defaultHWM) { + if (value === undefined) return defaultHWM; + value = +value; + if (typeof value !== 'number' || + NumberIsNaN(value) || + value < 0) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.highWaterMark', value); + return value; +} + +function extractSizeAlgorithm(size) { + if (size === undefined) return () => 1; + if (typeof size !== 'function') + throw new ERR_INVALID_ARG_TYPE('strategy.size', 'Function', size); + return size; +} + +function customInspect(depth, options, name, data) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1 + }; + + return `${name} ${inspect(data, opts)}`; +} + +// These are defensive to work around the possibility that +// the buffer, byteLength, and byteOffset properties on +// ArrayBuffer and ArrayBufferView's may have been tampered with. + +function ArrayBufferViewGetBuffer(view) { + return ReflectGet(view.constructor.prototype, 'buffer', view); +} + +function ArrayBufferViewGetByteLength(view) { + return ReflectGet(view.constructor.prototype, 'byteLength', view); +} + +function ArrayBufferViewGetByteOffset(view) { + return ReflectGet(view.constructor.prototype, 'byteOffset', view); +} + +function ArrayBufferGetByteLength(view) { + return ReflectGet(ArrayBufferPrototype, 'byteLength', view); +} + +function isBrandCheck(brand) { + return (value) => { + return value != null && + value[kState] !== undefined && + value[kType] === brand; + }; +} + +function transferArrayBuffer(buffer) { + const res = detachArrayBuffer(buffer); + if (res === undefined) { + throw new ERR_OPERATION_FAILED.TypeError( + 'The ArrayBuffer could not be transfered'); + } + return res; +} + +function dequeueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + 
assert(controller[kState].queue.length); + const { + value, + size, + } = ArrayPrototypeShift(controller[kState].queue); + controller[kState].queueTotalSize = + MathMax(0, controller[kState].queueTotalSize - size); + return value; +} + +function resetQueue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + controller[kState].queue = []; + controller[kState].queueTotalSize = 0; +} + +function peekQueueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + assert(controller[kState].queue.length); + return controller[kState].queue[0].value; +} + +function enqueueValueWithSize(controller, value, size) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + size = +size; + if (typeof size !== 'number' || + size < 0 || + NumberIsNaN(size) || + size === Infinity) { + throw new ERR_INVALID_ARG_VALUE.RangeError('size', size); + } + ArrayPrototypePush(controller[kState].queue, { value, size }); + controller[kState].queueTotalSize += size; +} + +function ensureIsPromise(fn, thisArg, ...args) { + try { + const value = FunctionPrototypeCall(fn, thisArg, ...args); + return isPromise(value) ? value : PromiseResolve(value); + } catch (error) { + return PromiseReject(error); + } +} + +function isPromisePending(promise) { + if (promise === undefined) return false; + const details = getPromiseDetails(promise); + return details?.[0] === kPending; +} + +function setPromiseHandled(promise) { + // Alternatively, we could use the native API + // MarkAsHandled, but this avoids the extra boundary cross + // and is hopefully faster at the cost of an extra Promise + // allocation. 
+ PromisePrototypeThen(promise, () => {}, () => {}); +} + +async function nonOpFlush() {} + +function nonOpStart() {} + +async function nonOpPull() {} + +async function nonOpCancel() {} + +async function nonOpWrite() {} + +let transfer; +function lazyTransfer() { + if (transfer === undefined) + transfer = require('internal/webstreams/transfer'); + return transfer; +} + +module.exports = { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpFlush, + nonOpPull, + nonOpStart, + nonOpWrite, + kType, + kState, +}; diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js new file mode 100644 index 00000000000000..793ae9e9ad8fb4 --- /dev/null +++ b/lib/internal/webstreams/writablestream.js @@ -0,0 +1,1329 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayPrototypePush, + ArrayPrototypeShift, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectConstruct, + Symbol, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + 
customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + nonOpCancel, + nonOpStart, + nonOpWrite, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + AbortController, +} = require('internal/abort_controller'); + +const assert = require('internal/assert'); + +const kAbort = Symbol('kAbort'); +const kCloseSentinel = Symbol('kCloseSentinel'); +const kError = Symbol('kError'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback UnderlyingSinkStartCallback + * @param {WritableStreamDefaultController} controller + * + * @callback UnderlyingSinkWriteCallback + * @param {any} chunk + * @param {WritableStreamDefaultController} controller + * @returns {Promise} + * + * @callback UnderlyingSinkCloseCallback + * @returns {Promise} + * + * @callback UnderlyingSinkAbortCallback + * @param {any} reason + * @returns {Promise} + * + * @typedef {{ + * start? : UnderlyingSinkStartCallback, + * write? : UnderlyingSinkWriteCallback, + * close? : UnderlyingSinkCloseCallback, + * abort? : UnderlyingSinkAbortCallback, + * type? 
: any, + * }} UnderlyingSink + */ + +class WritableStream { + [kType] = 'WritableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSink} [sink] + * @param {QueuingStrategy} [strategy] + */ + constructor(sink = null, strategy = {}) { + const type = sink?.type; + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('type', type); + + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + readable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + + const size = extractSizeAlgorithm(strategy?.size); + const highWaterMark = extractHighWaterMark(strategy?.highWaterMark, 1); + + setupWritableStreamDefaultControllerFromSink( + this, + sink, + highWaterMark, + size); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return isWritableStreamLocked(this); + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + return writableStreamAbort(this, reason); + } + + /** 
+ * @returns {Promise} + */ + close() { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + if (writableStreamCloseQueuedOrInFlight(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure closing WritableStream')); + } + return writableStreamClose(this); + } + + /** + * @returns {WritableStreamDefaultWriter} + */ + getWriter() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return new WritableStreamDefaultWriter(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + + const { + readable, + promise, + } = lazyTransfer().newCrossRealmReadableStream( + this, + this[kState].transfer.port1); + + this[kState].transfer.readable = readable; + this[kState].transfer.promise = promise; + + setPromiseHandled(this[kState].transfer.promise); + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/writablestream:TransferedWritableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupWritableStreamDefaultControllerFromSink( + this, + new transfer.CrossRealmTransformWritableSink(port), + 1, + () => 1); + } +} + +ObjectDefineProperties(WritableStream.prototype, { + 
locked: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + getWriter: { enumerable: true }, +}); + +function TransferedWritableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'WritableStream'; + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + promise: undefined, + port1: undefined, + port2: undefined, + readable: undefined, + }, + }; + }, + [], WritableStream)); +} +TransferedWritableStream.prototype[kDeserialize] = () => {}; + +class WritableStreamDefaultWriter { + [kType] = 'WritableStreamDefaultWriter'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {WritableStream} stream + */ + constructor(stream) { + if (!isWritableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'WritableStream', stream); + this[kState] = { + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + ready: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + setupWritableStreamDefaultWriter(this, stream); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].close.promise; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if 
(!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + if (this[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream'); + } + return writableStreamDefaultWriterGetDesiredSize(this); + } + + /** + * @readonly + * @type {Promise} + */ + get ready() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].ready.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterAbort(this, reason); + } + + /** + * @returns {Promise} + */ + close() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + const { + stream, + } = this[kState]; + if (stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + if (writableStreamCloseQueuedOrInFlight(stream)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure to close WritableStream')); + } + return writableStreamDefaultWriterClose(this); + } + + releaseLock() { + if (!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + const { + stream, + } = this[kState]; + if (stream === undefined) + return; + assert(stream[kState].writer !== undefined); + writableStreamDefaultWriterRelease(this); + } + + /** + * @param {any} chunk + * @returns {Promise} + */ + write(chunk = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new 
ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterWrite(this, chunk); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + close: this[kState].close.promise, + ready: this[kState].ready.promise, + desiredSize: this.desiredSize, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultWriter.prototype, { + closed: { enumerable: true }, + ready: { enumerable: true }, + desiredSize: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + releaseLock: { enumerable: true }, + write: { enumerable: true }, +}); + +class WritableStreamDefaultController { + [kType] = 'WritableStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + [kAbort](reason) { + const result = this[kState].abortAlgorithm(reason); + writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [kError]() { + resetQueue(this); + } + + /** + * @type {any} + */ + get abortReason() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortReason; + } + + /** + * @type {AbortSignal} + */ + get signal() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortController.signal; + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + if (this[kState].stream[kState].state !== 'writable') + return; + writableStreamDefaultControllerError(this, error); + } + + [kInspect](depth, options) { + return customInspect(depth, options, 
this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultController.prototype, { + abortReason: { enumerable: true }, + signal: { enumerable: true }, + error: { enumerable: true }, +}); + +function createWritableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'WritableStreamDefaultController'; + }, + [], WritableStreamDefaultController); +} + +const isWritableStream = + isBrandCheck('WritableStream'); +const isWritableStreamDefaultWriter = + isBrandCheck('WritableStreamDefaultWriter'); +const isWritableStreamDefaultController = + isBrandCheck('WritableStreamDefaultController'); + +function isWritableStreamLocked(stream) { + return stream[kState].writer !== undefined; +} + +function setupWritableStreamDefaultWriter(writer, stream) { + if (isWritableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('WritableStream is locked'); + writer[kState].stream = stream; + stream[kState].writer = writer; + switch (stream[kState].state) { + case 'writable': + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + setClosedPromiseToNewPromise(); + break; + case 'erroring': + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setClosedPromiseToNewPromise(); + break; + case 'closed': + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + default: + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { 
+ promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setPromiseHandled(writer[kState].close.promise); + } + + function setClosedPromiseToNewPromise() { + writer[kState].close = createDeferredPromise(); + } +} + +function writableStreamAbort(stream, reason) { + const { + state, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') + return PromiseResolve(); + + controller[kState].abortReason = reason; + controller[kState].abortController.abort(); + + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) + return stream[kState].pendingAbortRequest.abort.promise; + + assert(state === 'writable' || state === 'erroring'); + + let wasAlreadyErroring = false; + if (state === 'erroring') { + wasAlreadyErroring = true; + reason = undefined; + } + + const abort = createDeferredPromise(); + + stream[kState].pendingAbortRequest = { + abort, + reason, + wasAlreadyErroring, + }; + + if (!wasAlreadyErroring) + writableStreamStartErroring(stream, reason); + + return abort.promise; +} + +function writableStreamClose(stream) { + const { + state, + writer, + backpressure, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + assert(state === 'writable' || state === 'erroring'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + stream[kState].closeRequest = createDeferredPromise(); + const { promise } = stream[kState].closeRequest; + if (writer !== undefined && backpressure && state === 'writable') + writer[kState].ready.resolve?.(); + writableStreamDefaultControllerClose(controller); + return promise; +} + +function writableStreamUpdateBackpressure(stream, backpressure) { + assert(stream[kState].state === 'writable'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + const { + writer, + } = 
stream[kState]; + if (writer !== undefined && stream[kState].backpressure !== backpressure) { + if (backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready.resolve?.(); + } + } + stream[kState].backpressure = backpressure; +} + +function writableStreamStartErroring(stream, reason) { + assert(stream[kState].storedError === undefined); + assert(stream[kState].state === 'writable'); + const { + controller, + writer, + } = stream[kState]; + assert(controller !== undefined); + stream[kState].state = 'erroring'; + stream[kState].storedError = reason; + if (writer !== undefined) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if (!writableStreamHasOperationMarkedInFlight(stream) && + controller[kState].started) { + writableStreamFinishErroring(stream); + } +} + +function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { + assert(stream[kState].state === 'errored'); + if (stream[kState].closeRequest.promise !== undefined) { + assert(stream[kState].inFlightCloseRequest.promise === undefined); + stream[kState].closeRequest.reject?.(stream[kState].storedError); + stream[kState].closeRequest = { + promise: undefined, + reject: undefined, + resolve: undefined, + }; + } + const { + writer, + } = stream[kState]; + if (writer !== undefined) { + writer[kState].close.reject?.(stream[kState].storedError); + setPromiseHandled(writer[kState].close.promise); + } +} + +function writableStreamMarkFirstWriteRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].writeRequests.length); + const writeRequest = ArrayPrototypeShift(stream[kState].writeRequests); + stream[kState].inFlightWriteRequest = writeRequest; +} + +function writableStreamMarkCloseRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].closeRequest.promise !== undefined); + stream[kState].inFlightCloseRequest = 
stream[kState].closeRequest; + stream[kState].closeRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamHasOperationMarkedInFlight(stream) { + const { + inFlightWriteRequest, + inFlightCloseRequest, + } = stream[kState]; + if (inFlightWriteRequest.promise === undefined && + inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamFinishInFlightWriteWithError(stream, error) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.reject?.(error); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightWrite(stream) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.resolve?.(); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamFinishInFlightCloseWithError(stream, error) { + assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.reject?.(error); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.reject?.(error); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightClose(stream) { + 
assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.resolve?.(); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + if (stream[kState].state === 'erroring') { + stream[kState].storedError = undefined; + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.resolve?.(); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + } + stream[kState].state = 'closed'; + if (stream[kState].writer !== undefined) + stream[kState].writer[kState].close.resolve?.(); + assert(stream[kState].pendingAbortRequest.abort.promise === undefined); + assert(stream[kState].storedError === undefined); +} + +function writableStreamFinishErroring(stream) { + assert(stream[kState].state === 'erroring'); + assert(!writableStreamHasOperationMarkedInFlight(stream)); + stream[kState].state = 'errored'; + stream[kState].controller[kError](); + const storedError = stream[kState].storedError; + for (let n = 0; n < stream[kState].writeRequests.length; n++) + stream[kState].writeRequests[n].reject?.(storedError); + stream[kState].writeRequests = []; + + if (stream[kState].pendingAbortRequest.abort.promise === undefined) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + + const abortRequest = stream[kState].pendingAbortRequest; + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + if (abortRequest.wasAlreadyErroring) { + abortRequest.abort.reject?.(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kAbort], + stream[kState].controller, + 
abortRequest.reason), + () => { + abortRequest.abort.resolve?.(); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + (error) => { + abortRequest.abort.reject?.(error); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }); +} + +function writableStreamDealWithRejection(stream, error) { + const { + state, + } = stream[kState]; + if (state === 'writable') { + writableStreamStartErroring(stream, error); + return; + } + + assert(state === 'erroring'); + writableStreamFinishErroring(stream); +} + +function writableStreamCloseQueuedOrInFlight(stream) { + if (stream[kState].closeRequest.promise === undefined && + stream[kState].inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamAddWriteRequest(stream) { + assert(isWritableStreamLocked(stream)); + assert(stream[kState].state === 'writable'); + const { + promise, + resolve, + reject, + } = createDeferredPromise(); + ArrayPrototypePush( + stream[kState].writeRequests, + { + promise, + resolve, + reject, + }); + return promise; +} + +function writableStreamDefaultWriterWrite(writer, chunk) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + controller, + } = stream[kState]; + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk); + if (stream !== writer[kState].stream) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Mismatched WritableStreams')); + } + const { + state, + } = stream[kState]; + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + + if (state === 'erroring') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable'); + + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, 
chunk, chunkSize); + return promise; +} + +function writableStreamDefaultWriterRelease(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + assert(stream[kState].writer === writer); + const releasedError = + new ERR_INVALID_STATE.TypeError('Writer has been released'); + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); + writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); + stream[kState].writer = undefined; + writer[kState].stream = undefined; +} + +function writableStreamDefaultWriterGetDesiredSize(writer) { + const { + stream, + } = writer[kState]; + switch (stream[kState].state) { + case 'errored': + // Fall through + case 'erroring': + return null; + case 'closed': + return 0; + } + return writableStreamDefaultControllerGetDesiredSize( + stream[kState].controller); +} + +function writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].ready.promise)) { + writer[kState].ready.reject?.(error); + } else { + writer[kState].ready = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].ready.promise); +} + +function writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].close.promise)) { + writer[kState].close.reject?.(error); + } else { + writer[kState].close = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].close.promise); +} + +function writableStreamDefaultWriterCloseWithErrorPropagation(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + state, + } = stream[kState]; + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') + return PromiseResolve(); + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable' || state 
=== 'erroring'); + + return writableStreamDefaultWriterClose(writer); +} + +function writableStreamDefaultWriterClose(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamClose(stream); +} + +function writableStreamDefaultWriterAbort(writer, reason) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamAbort(stream, reason); +} + +function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) { + try { + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return; + } + const { + stream, + } = controller[kState]; + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerProcessWrite(controller, chunk) { + const { + stream, + writeAlgorithm, + } = controller[kState]; + writableStreamMarkFirstWriteRequestInFlight(stream); + + PromisePrototypeThen( + ensureIsPromise(writeAlgorithm, controller, chunk, controller), + () => { + writableStreamFinishInFlightWrite(stream); + const { + state, + } = stream[kState]; + assert(state === 'writable' || state === 'erroring'); + dequeueValue(controller); + if (!writableStreamCloseQueuedOrInFlight(stream) && + state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + if (stream[kState].state === 'writable') + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamFinishInFlightWriteWithError(stream, error); + }); + +} + +function writableStreamDefaultControllerProcessClose(controller) { + const 
{ + closeAlgorithm, + queue, + stream, + } = controller[kState]; + writableStreamMarkCloseRequestInFlight(stream); + dequeueValue(controller); + assert(!queue.length); + const sinkClosePromise = ensureIsPromise(closeAlgorithm, controller); + writableStreamDefaultControllerClearAlgorithms(controller); + PromisePrototypeThen( + sinkClosePromise, + () => writableStreamFinishInFlightClose(stream), + (error) => writableStreamFinishInFlightCloseWithError(stream, error)); +} + +function writableStreamDefaultControllerGetDesiredSize(controller) { + const { + highWaterMark, + queueTotalSize, + } = controller[kState]; + return highWaterMark - queueTotalSize; +} + +function writableStreamDefaultControllerGetChunkSize(controller, chunk) { + try { + return FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return 1; + } +} + +function writableStreamDefaultControllerErrorIfNeeded(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'writable') + writableStreamDefaultControllerError(controller, error); +} + +function writableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + assert(stream[kState].state === 'writable'); + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); +} + +function writableStreamDefaultControllerClose(controller) { + enqueueValueWithSize(controller, kCloseSentinel, 0); + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].writeAlgorithm = undefined; + controller[kState].closeAlgorithm = undefined; + controller[kState].abortAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function writableStreamDefaultControllerGetBackpressure(controller) { + return 
writableStreamDefaultControllerGetDesiredSize(controller) <= 0; +} + +function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { + const { + queue, + started, + stream, + } = controller[kState]; + if (!started || stream[kState].inFlightWriteRequest.promise !== undefined) + return; + + if (stream[kState].state === 'erroring') { + writableStreamFinishErroring(stream); + return; + } + + if (!queue.length) + return; + + const value = peekQueueValue(controller); + if (value === kCloseSentinel) + writableStreamDefaultControllerProcessClose(controller); + else + writableStreamDefaultControllerProcessWrite(controller, value); +} + +function setupWritableStreamDefaultControllerFromSink( + stream, + sink, + highWaterMark, + sizeAlgorithm) { + const controller = createWritableStreamDefaultController(); + const start = sink?.start; + const write = sink?.write; + const close = sink?.close; + const abort = sink?.abort; + const startAlgorithm = start ? + FunctionPrototypeBind(start, sink, controller) : + nonOpStart; + const writeAlgorithm = write ? + FunctionPrototypeBind(write, sink) : + nonOpWrite; + const closeAlgorithm = close ? + FunctionPrototypeBind(close, sink) : nonOpCancel; + const abortAlgorithm = abort ? 
+ FunctionPrototypeBind(abort, sink) : nonOpCancel; + setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(isWritableStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + abortAlgorithm, + abortReason: undefined, + closeAlgorithm, + highWaterMark, + queue: [], + queueTotalSize: 0, + abortController: new AbortController(), + sizeAlgorithm, + started: false, + stream, + writeAlgorithm, + }; + stream[kState].controller = controller; + + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDealWithRejection(stream, error); + }); +} + +module.exports = { + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + TransferedWritableStream, + + // Exported Brand Checks + isWritableStream, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + isWritableStreamLocked, + setupWritableStreamDefaultWriter, + writableStreamAbort, + writableStreamClose, + writableStreamUpdateBackpressure, + writableStreamStartErroring, + writableStreamRejectCloseAndClosedPromiseIfNeeded, + writableStreamMarkFirstWriteRequestInFlight, + writableStreamMarkCloseRequestInFlight, + 
writableStreamHasOperationMarkedInFlight, + writableStreamFinishInFlightWriteWithError, + writableStreamFinishInFlightWrite, + writableStreamFinishInFlightCloseWithError, + writableStreamFinishInFlightClose, + writableStreamFinishErroring, + writableStreamDealWithRejection, + writableStreamCloseQueuedOrInFlight, + writableStreamAddWriteRequest, + writableStreamDefaultWriterWrite, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterGetDesiredSize, + writableStreamDefaultWriterEnsureReadyPromiseRejected, + writableStreamDefaultWriterEnsureClosedPromiseRejected, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterClose, + writableStreamDefaultWriterAbort, + writableStreamDefaultControllerWrite, + writableStreamDefaultControllerProcessWrite, + writableStreamDefaultControllerProcessClose, + writableStreamDefaultControllerGetDesiredSize, + writableStreamDefaultControllerGetChunkSize, + writableStreamDefaultControllerErrorIfNeeded, + writableStreamDefaultControllerError, + writableStreamDefaultControllerClose, + writableStreamDefaultControllerClearAlgorithms, + writableStreamDefaultControllerGetBackpressure, + writableStreamDefaultControllerAdvanceQueueIfNeeded, + setupWritableStreamDefaultControllerFromSink, + setupWritableStreamDefaultController, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/stream/web.js b/lib/stream/web.js new file mode 100644 index 00000000000000..929abd19044458 --- /dev/null +++ b/lib/stream/web.js @@ -0,0 +1,48 @@ +'use strict'; + +const { + emitExperimentalWarning, +} = require('internal/util'); + +emitExperimentalWarning('stream/web'); + +const { + TransformStream, + TransformStreamDefaultController, +} = require('internal/webstreams/transformstream'); + +const { + WritableStream, + WritableStreamDefaultController, + WritableStreamDefaultWriter, +} = require('internal/webstreams/writablestream'); + +const { + ReadableStream, + ReadableStreamDefaultReader, + 
ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, +} = require('internal/webstreams/readablestream'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('internal/webstreams/queuingstrategies'); + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/src/node_buffer.cc b/src/node_buffer.cc index e816ba131644ad..b5651b5e325fc9 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -67,6 +67,7 @@ using v8::MaybeLocal; using v8::Nothing; using v8::Number; using v8::Object; +using v8::SharedArrayBuffer; using v8::String; using v8::Uint32; using v8::Uint32Array; @@ -1158,6 +1159,60 @@ void GetZeroFillToggle(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1)); } +void DetachArrayBuffer(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + if (args[0]->IsArrayBuffer()) { + Local buf = args[0].As(); + if (buf->IsDetachable()) { + std::shared_ptr store = buf->GetBackingStore(); + buf->Detach(); + args.GetReturnValue().Set(ArrayBuffer::New(env->isolate(), store)); + } + } +} + +void CopyArrayBuffer(const FunctionCallbackInfo& args) { + // args[0] == Destination ArrayBuffer + // args[1] == Destination ArrayBuffer Offset + // args[2] == Source ArrayBuffer + // args[3] == Source ArrayBuffer Offset + // args[4] == bytesToCopy + + CHECK(args[0]->IsArrayBuffer() || args[0]->IsSharedArrayBuffer()); + CHECK(args[1]->IsUint32()); + CHECK(args[2]->IsArrayBuffer() || args[2]->IsSharedArrayBuffer()); + CHECK(args[3]->IsUint32()); + CHECK(args[4]->IsUint32()); + + 
std::shared_ptr destination; + std::shared_ptr source; + + if (args[0]->IsArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } + + if (args[2]->IsArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } + + uint32_t destination_offset = args[1].As()->Value(); + uint32_t source_offset = args[3].As()->Value(); + size_t bytes_to_copy = args[4].As()->Value(); + + CHECK_GE(destination->ByteLength() - destination_offset, bytes_to_copy); + CHECK_GE(source->ByteLength() - source_offset, bytes_to_copy); + + uint8_t* dest = + static_cast(destination->Data()) + destination_offset; + uint8_t* src = + static_cast(source->Data()) + source_offset; + memcpy(dest, src, bytes_to_copy); +} + void Initialize(Local target, Local unused, Local context, @@ -1176,6 +1231,9 @@ void Initialize(Local target, env->SetMethodNoSideEffect(target, "indexOfNumber", IndexOfNumber); env->SetMethodNoSideEffect(target, "indexOfString", IndexOfString); + env->SetMethod(target, "detachArrayBuffer", DetachArrayBuffer); + env->SetMethod(target, "copyArrayBuffer", CopyArrayBuffer); + env->SetMethod(target, "swap16", Swap16); env->SetMethod(target, "swap32", Swap32); env->SetMethod(target, "swap64", Swap64); @@ -1251,6 +1309,9 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringWrite); registry->Register(GetZeroFillToggle); + registry->Register(DetachArrayBuffer); + registry->Register(CopyArrayBuffer); + Blob::RegisterExternalReferences(registry); FixedSizeBlobCopyJob::RegisterExternalReferences(registry); } diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 86f2eaada97b3b..60d3aeb98c17da 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -21,7 +21,7 @@ Last update: - html/webappapis/timers: 
https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers - interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources -- streams: https://github.com/web-platform-tests/wpt/tree/b869e60df1/streams +- streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams - url: https://github.com/web-platform-tests/wpt/tree/1fcb39223d/url [Web Platform Tests]: https://github.com/web-platform-tests/wpt diff --git a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js index db8ac3a39983fd..9aa508225865c8 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js @@ -957,7 +957,8 @@ promise_test(() => { assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0201); + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0102); return reader.read(new Uint8Array(1)); }).then(result => { @@ -1138,7 +1139,7 @@ promise_test(() => { assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()'); assert_not_equals(byobRequest, null, 'byobRequest should not be null'); - assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() shouild be 2'); + assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() should be 2'); assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1'); reader.cancel(); @@ -1326,7 +1327,9 @@ promise_test(() => { const view = result.value; assert_equals(view.byteOffset, 0); assert_equals(view.byteLength, 2); - assert_equals(view[0], 0xaaff); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + 
assert_equals(dataView.getUint16(0), 0xffaa); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); @@ -1381,7 +1384,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0001, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0100, 'contents are set'); const p = reader.read(new Uint16Array(1)); @@ -1395,7 +1400,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0302, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0203, 'contents are set'); assert_not_equals(byobRequest, null, 'byobRequest must not be null'); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js index 2dcab69f42db0d..7c0bffb78710fe 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js @@ -25,7 +25,7 @@ test(t => { const memory = new WebAssembly.Memory({ initial: 1 }); const view = new Uint8Array(memory.buffer, 0, 1); - assert_throws_js(t, TypeError, controller.enqueue(view)); + assert_throws_js(TypeError, () => controller.enqueue(view)); }, 'ReadableStream with byte source: enqueue() with a non-transferable buffer'); promise_test(async t => { @@ 
-54,5 +54,5 @@ promise_test(async t => { ); await pullCalledPromise; - assert_throws_js(t, TypeError, byobRequest.respondWithNewView(newView)); + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView)); }, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer'); diff --git a/test/fixtures/wpt/streams/writable-streams/aborting.any.js b/test/fixtures/wpt/streams/writable-streams/aborting.any.js index 5c053bab915700..ab154a705ed0e9 100644 --- a/test/fixtures/wpt/streams/writable-streams/aborting.any.js +++ b/test/fixtures/wpt/streams/writable-streams/aborting.any.js @@ -1376,3 +1376,111 @@ promise_test(t => { return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject') .then(() => writer.ready); }, 'abort on a locked stream should reject'); + +test(t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const e = Error('hello'); + + assert_true(ctrl.signal instanceof AbortSignal); + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + ws.abort(e); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, e); +}, 'WritableStreamDefaultController.signal'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + write() { resolve(); return new Promise(() => {}); } + }); + const writer = ws.getWriter(); + + writer.write(99); + await called; + + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + writer.abort(); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); +}, 'the abort signal is signalled synchronously - write'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + close() { resolve(); return new Promise(() => {}); } + }); + const writer = 
ws.getWriter(); + + writer.close(99); + await called; + + assert_false(ctrl.signal.aborted); + writer.abort(); + assert_true(ctrl.signal.aborted); +}, 'the abort signal is signalled synchronously - close'); + +promise_test(async t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const writer = ws.getWriter(); + + const e = TypeError(); + ctrl.error(e); + await promise_rejects_exactly(t, e, writer.closed); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on error'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async write() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.write('hello'), 'write result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on write failure'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async close() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.close(), 'close result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on close failure'); + +promise_test(async t => { + let ctrl; + const e1 = SyntaxError(); + const e2 = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + }); + + const writer = ws.getWriter(); + ctrl.signal.addEventListener('abort', () => writer.abort(e2)); + writer.abort(e1); + assert_true(ctrl.signal.aborted); + + await promise_rejects_exactly(t, e2, writer.closed, 'closed'); +}, 'recursive abort() call'); diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 50316e8c583eb1..6567782a1d47c9 100644 --- a/test/fixtures/wpt/versions.json +++ 
b/test/fixtures/wpt/versions.json @@ -44,7 +44,7 @@ "path": "resources" }, "streams": { - "commit": "b869e60df1b8d3840e09b41c5e987c7e23f6856c", + "commit": "8f60d9443949c323522a2009518d54d5d6ab5541", "path": "streams" }, "url": { diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js new file mode 100644 index 00000000000000..eb4355505053ef --- /dev/null +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -0,0 +1,238 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + ReadableByteStreamController, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + open, +} = require('fs/promises'); + +const { + readFileSync, +} = require('fs'); + +const { + Buffer, +} = require('buffer'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream({ + type: 'bytes', + }); + + assert(r[kState].controller instanceof ReadableByteStreamController); + + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + const defaultReader = r.getReader(); + assert(r.locked); + assert(defaultReader instanceof ReadableStreamDefaultReader); + defaultReader.releaseLock(); + const byobReader = r.getReader({ mode: 'byob' }); + 
assert(byobReader instanceof ReadableStreamBYOBReader); +} + +class Source { + constructor() { + this.controllerClosed = false; + } + + async start(controller) { + this.file = await open(__filename); + this.controller = controller; + } + + async pull(controller) { + const byobRequest = controller.byobRequest; + assert.match(inspect(byobRequest), /ReadableStreamBYOBRequest/); + + const view = byobRequest.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + + assert.throws(() => byobRequest.respondWithNewView({}), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + byobRequest.respond(bytesRead); + + assert.throws(() => byobRequest.respond(bytesRead), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => byobRequest.respondWithNewView(view), { + code: 'ERR_INVALID_STATE', + }); + } + + get type() { return 'bytes'; } + + get autoAllocateChunkSize() { return 1024; } +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = 
readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + break; + } + + read(stream).then(common.mustCall()); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + throw error; + } + + assert.rejects(read(stream), error); +} + +{ + assert.throws(() => { + Reflect.get(ReadableStreamBYOBRequest.prototype, 'view', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => ReadableStreamBYOBRequest.prototype.respond.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + ReadableStreamBYOBRequest.prototype.respondWithNewView.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + const readable = new ReadableStream({ type: 'bytes' }); + const reader = readable.getReader({ mode: 'byob' }); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + new ReadableStream({ + type: 'bytes', + start(c) { controller = c; } + }); + assert.throws(() => controller.enqueue(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + controller.close(); + assert.throws(() => controller.enqueue(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => controller.close(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + +} diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js new file mode 100644 index 00000000000000..1c18efeec41963 --- 
/dev/null +++ b/test/parallel/test-whatwg-readablestream.js @@ -0,0 +1,1522 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + isPromise, +} = require('util/types'); +const { + setImmediate: delay +} = require('timers/promises'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamDefaultController, + ReadableByteStreamController, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + WritableStream, +} = require('stream/web'); + +const { + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + readableStreamDefaultControllerEnqueue, + readableByteStreamControllerEnqueue, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableByteStreamControllerClose, + readableByteStreamControllerRespond, +} = require('internal/webstreams/readablestream'); + +const { + kState +} = require('internal/webstreams/util'); + +const { + createReadStream, + readFileSync, +} = require('fs'); +const { + Buffer, +} = require('buffer'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream(); + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + r.getReader(); + assert(r.locked); +} + +{ + const source = { + start: 
common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall((controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source, { highWaterMark: 10 }); +} + +{ + // These are silly but they should all work per spec + new ReadableStream(1); + new ReadableStream('hello'); + new ReadableStream(false); + new ReadableStream([]); + new ReadableStream(1, 1); + new ReadableStream(1, 'hello'); + new ReadableStream(1, false); + new ReadableStream(1, []); +} + +['a', {}, false].forEach((size) => { + assert.throws(() => { + new ReadableStream({}, { size }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + 
assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +[-1, NaN].forEach((highWaterMark) => { + assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +{ + new ReadableStream({}, new ByteLengthQueuingStrategy({ highWaterMark: 1 })); + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 1 })); +} + +{ + const strategy = new ByteLengthQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 10); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 10); +} + +{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 1); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 1); +} + +{ + const r = new ReadableStream({ + async start() { + throw new Error('boom'); + } + }); + + setImmediate(() => { + assert.strictEqual(r[kState].state, 'errored'); + assert.match(r[kState].storedError?.message, /boom/); + }); +} + +{ + const data = Buffer.from('hello'); + const r = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + let res = await reader.read(); + if (res.done) return; + const buf = Buffer.from(res.value); + assert.strictEqual(buf.toString(), data.toString()); + res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +{ + const r = new ReadableStream({ + start(controller) { + 
controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + const res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +assert.throws(() => { + new ReadableStream({ + get start() { throw new Error('boom1'); } + }, { + get size() { throw new Error('boom2'); } + }); +}, /boom2/); + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + + assert(stream.locked); + assert.strictEqual(reader[kState].stream, stream); + assert.strictEqual(stream[kState].reader, reader); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); + + assert(reader instanceof ReadableStreamDefaultReader); + + assert(isPromise(reader.closed)); + assert.strictEqual(typeof reader.cancel, 'function'); + assert.strictEqual(typeof reader.read, 'function'); + assert.strictEqual(typeof reader.releaseLock, 'function'); + + const read1 = reader.read(); + const read2 = reader.read(); + + // The stream is empty so the read will never settle. + read1.then( + common.mustNotCall(), + common.mustNotCall() + ); + + // The stream is empty so the read will never settle. 
+ read2.then( + common.mustNotCall(), + common.mustNotCall() + ); + + assert.notStrictEqual(read1, read2); + + assert.strictEqual(reader[kState].readRequests.length, 2); + + delay().then(common.mustCall()); + + assert.throws(() => reader.releaseLock(), { + code: 'ERR_INVALID_STATE', + }); + assert(stream.locked); +} + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + const closedBefore = reader.closed; + assert(stream.locked); + reader.releaseLock(); + assert(!stream.locked); + const closedAfter = reader.closed; + + assert.strictEqual(closedBefore, closedAfter); + + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + + assert.rejects(closedBefore, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(Buffer.from('hello')); + } + }); + + const reader = stream.getReader(); + + assert.rejects(stream.cancel(), { + code: 'ERR_INVALID_STATE', + }); + + reader.cancel(); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + assert(!stream.locked); + + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader().releaseLock(); + stream.getReader().releaseLock(); + stream.getReader(); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader(); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + 
const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); + assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); 
+ assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream(); + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + const reader = stream.getReader(); + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + assert.notStrictEqual(cancel1, cancel2); + assert.rejects(cancel1, error); + assert.rejects(cancel2, error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + async start(controller) { + throw error; + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + let doClose; + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + doClose = controller.close.bind(controller); + } + }); + const reader = stream.getReader(); + doClose(); + reader.read().then(common.mustCall(({ value, done }) => { + 
assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); + })); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + } + }); + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustNotCall()); + delay().then(common.mustCall()); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + async function read(stream) { + const reader = stream.getReader(); + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2), + ]).then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull() { throw error; } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(stream.locked); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + const closed1 = reader1.closed; + 
const closed2 = reader2.closed; + + assert.notStrictEqual(closed1, closed2); + + assert.rejects(closed1, error); + assert.rejects(closed2, error); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + s2.cancel(); + + async function read(stream, canceled = false) { + const reader = stream.getReader(); + if (!canceled) { + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + } + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2, true), + ]).then(common.mustCall()); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s1.cancel(error1); + s2.cancel(error2); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s2.cancel(error2); + s1.cancel(error1); +} + +{ + const error = new Error('boom1'); + + const stream = new ReadableStream({ + cancel() { + throw error; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error = new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + c.error(error); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error 
= new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + assert.rejects(reader1.closed, error); + assert.rejects(reader2.closed, error); + + assert.rejects(reader1.read(), error); + assert.rejects(reader2.read(), error); + + setImmediate(() => c.error(error)); +} + +{ + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + if (pullCount) + controller.enqueue(pullCount); + pullCount++; + }, + }); + + const reader = stream.getReader(); + + queueMicrotask(common.mustCall(() => { + assert.strictEqual(pullCount, 1); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 1); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 2); + assert(!done); + })); + + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, + pull: common.mustCall(), + }); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull: common.mustCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + }, + pull: common.mustNotCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + 
assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + + })); + })); +} + +{ + let res; + let promise; + let calls = 0; + const stream = new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + promise = new Promise((resolve) => res = resolve); + return promise; + } + }); + + const reader = stream.getReader(); + + (async () => { + await reader.read(); + assert.strictEqual(calls, 1); + await delay(); + assert.strictEqual(calls, 1); + res(); + await delay(); + assert.strictEqual(calls, 2); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + pull: common.mustCall(4), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + }, + pull: common.mustNotCall(), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + let calls = 0; + let res; + const ready = new Promise((resolve) => res = resolve); + + new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + if (calls === 4) + res(); + } + }, { + size() { return 1; }, + highWaterMark: 4 + }); + + ready.then(common.mustCall(() => { + assert.strictEqual(calls, 4); + })); +} + +{ + const stream = new ReadableStream({ + pull: 
common.mustCall((controller) => controller.close()) + }); + + const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => controller.error(error)) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + const error = new Error('boom'); + const error2 = new Error('boom2'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => { + controller.error(error); + throw error2; + }) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue('a'); + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + class Source { + startCalled = false; + pullCalled = false; + cancelCalled = false; + + start(controller) { + assert.strictEqual(this, source); + this.startCalled = true; + controller.enqueue('a'); + } + + pull() { + assert.strictEqual(this, source); + this.pullCalled = true; + } + + cancel() { + assert.strictEqual(this, source); + this.cancelCalled = true; + } + } + + const source = new Source(); + + const stream = new ReadableStream(source); + const reader = stream.getReader(); + + (async () => { + await reader.read(); + reader.releaseLock(); + stream.cancel(); + assert(source.startCalled); + assert(source.pullCalled); + assert(source.cancelCalled); + })().then(common.mustCall()); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) 
{ + assert.strictEqual(controller.desiredSize, 10); + controller.close(); + assert.strictEqual(controller.desiredSize, 0); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 10); + controller.error(); + assert.strictEqual(controller.desiredSize, null); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + class Foo extends ReadableStream {} + const foo = new Foo(); + foo.getReader(); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, 0); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -2); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -3); + startCalled = true; + } + }); + assert(startCalled); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const reader = stream.getReader(); + + (async () => { + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, -1); + await reader.read(); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + })().then(common.mustCall()); +} + +{ + let c; + new ReadableStream({ + start(controller) { + c = controller; + } + }); + assert(c instanceof ReadableStreamDefaultController); + assert.strictEqual(typeof c.desiredSize, 'number'); + assert.strictEqual(typeof c.enqueue, 'function'); + assert.strictEqual(typeof c.close, 'function'); + assert.strictEqual(typeof c.error, 'function'); +} + +class Source { + 
constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const reader = stream.getReader(); + const chunks = []; + let read = await reader.read(); + while (!read.done) { + chunks.push(Buffer.from(read.value)); + read = await reader.read(); + } + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + + assert.strictEqual(stream[kState].state, 'closed'); + assert(!stream.locked); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + [1, false, ''].forEach((options) => { + assert.throws(() => stream.values(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for 
await (const _ of stream.values({ preventCancel: false })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: false })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + assert.throws(() => Reflect.get(ReadableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStream.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.getReader.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.tee.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype[kTransfer].call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamDefaultReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamDefaultReader.prototype.releaseLock.call({}); + }, { + 
code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamBYOBReader.prototype.releaseLock.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamBYOBReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'byobRequest', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.close.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.enqueue.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.error.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => new ReadableStreamBYOBRequest(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableByteStreamController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + assert.strictEqual( + inspect(readable), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: null }), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: 0 }), + 'ReadableStream [Object]'); + + assert.strictEqual( + inspect(controller), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + 
inspect(controller, { depth: null }), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + inspect(controller, { depth: 0 }), + 'ReadableStreamDefaultController {}'); + + const reader = readable.getReader(); + + assert.match( + inspect(reader), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: null }), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: 0 }), + /ReadableStreamDefaultReader/); + + assert.rejects(readableStreamPipeTo(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects(readableStreamPipeTo(new ReadableStream(), 1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects( + readableStreamPipeTo( + new ReadableStream(), + new WritableStream(), + false, + false, + false, + {}), + { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +{ + const readable = new ReadableStream(); + const reader = readable.getReader(); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + // Test tee() cloneForBranch2 argument + const readable = new ReadableStream({ + start(controller) { + controller.enqueue('hello'); + } + }); + const [r1, r2] = readableStreamTee(readable, true); + r1.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); + r2.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); +} + +{ + assert.throws(() => { + readableByteStreamControllerConvertPullIntoDescriptor({ + bytesFilled: 10, + byteLength: 5 + }); + }, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + controller[kState].pendingPullIntos = [{}]; + assert.throws(() => readableByteStreamControllerRespond(controller, 0), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + readable.cancel().then(common.mustCall()); + + 
assert.throws(() => readableByteStreamControllerRespond(controller, 1), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert(!readableStreamDefaultControllerCanCloseOrEnqueue(controller)); + readableStreamDefaultControllerEnqueue(controller); + readableByteStreamControllerClose(controller); + readableByteStreamControllerEnqueue(controller); +} diff --git a/test/parallel/test-whatwg-transformstream.js b/test/parallel/test-whatwg-transformstream.js new file mode 100644 index 00000000000000..0cbc76cc4ce8c0 --- /dev/null +++ b/test/parallel/test-whatwg-transformstream.js @@ -0,0 +1,188 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + TransformStream, + TransformStreamDefaultController, +} = require('stream/web'); + +const { + createReadStream, + readFileSync, +} = require('fs'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +assert.throws(() => new TransformStream({ readableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); +assert.throws(() => new TransformStream({ writableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); + + +{ + const stream = new TransformStream(); + + async function test(stream) { + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'hello'); + } + + test(stream).then(common.mustCall()); +} + +class Transform { + start(controller) { + this.started = true; + } + + async transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + + async flush() { + this.flushed = true; + } +} + +{ + const transform = new Transform(); + const stream = new TransformStream(transform); + assert(transform.started); + + async function test(stream) { + const writer = stream.writable.getWriter(); 
+ const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'HELLO'); + + await writer.close(); + } + + test(stream).then(common.mustCall(() => { + assert(transform.flushed); + })); +} + +class Source { + constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk.toString()); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const instream = new ReadableStream(new Source()); + const tstream = new TransformStream(new Transform()); + const r = instream.pipeThrough(tstream); + + async function read(stream) { + let res = ''; + for await (const chunk of stream) + res += chunk; + return res; + } + + read(r).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.strictEqual(check.toString().toUpperCase(), data); + })); +} + +{ + assert.throws(() => Reflect.get(TransformStream.prototype, 'readable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => Reflect.get(TransformStream.prototype, 'writable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => TransformStream.prototype[kTransfer]({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(TransformStreamDefaultController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.enqueue({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.terminate({}); + }, { + code: 
'ERR_INVALID_THIS', + }); + + assert.throws(() => new TransformStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const transform = new TransformStream({ + start(c) { + controller = c; + } + }); + + assert.match(inspect(transform), /TransformStream/); + assert.match(inspect(transform, { depth: null }), /TransformStream/); + assert.match(inspect(transform, { depth: 0 }), /TransformStream \[/); + + assert.match(inspect(controller), /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /TransformStreamDefaultController \[/); +} diff --git a/test/parallel/test-whatwg-webstreams-coverage.js b/test/parallel/test-whatwg-webstreams-coverage.js new file mode 100644 index 00000000000000..f0036723b05977 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-coverage.js @@ -0,0 +1,70 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +require('../common'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('stream/web'); + +const { + inspect, +} = require('util'); + +const { + isPromisePending, +} = require('internal/webstreams/util'); + +const assert = require('assert'); + +assert(!isPromisePending({})); +assert(!isPromisePending(Promise.resolve())); +assert(isPromisePending(new Promise(() => {}))); + +// Brand checking works +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +// Custom Inspect Works + 
+{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + + assert.strictEqual( + inspect(strategy, { depth: null }), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy, { depth: 0 }), + 'CountQueuingStrategy [Object]'); + + assert.strictEqual( + inspect(new ByteLengthQueuingStrategy({ highWaterMark: 1 })), + 'ByteLengthQueuingStrategy { highWaterMark: 1 }'); +} diff --git a/test/parallel/test-whatwg-webstreams-transfer.js b/test/parallel/test-whatwg-webstreams-transfer.js new file mode 100644 index 00000000000000..2b7333d9c6fbf7 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-transfer.js @@ -0,0 +1,503 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); + +const { + ReadableStream, + WritableStream, + TransformStream, +} = require('stream/web'); + +const { + Worker +} = require('worker_threads'); + +const { + isReadableStream, +} = require('internal/webstreams/readablestream'); + +const { + isWritableStream, +} = require('internal/webstreams/writablestream'); + +const { + isTransformStream, +} = require('internal/webstreams/transformstream'); + +const { + makeTransferable, + kClone, + kTransfer, + kDeserialize, +} = require('internal/worker/js_transferable'); + +const assert = require('assert'); + +const theData = 'hello'; + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // This test takes the ReadableStream and transfers it to the + // port1 first, then again to port2, which reads the data. + // Internally, this sets up a pipelined data flow that is + // important to understand in case this test fails.. + // + // Specifically: + // + // 1. We start with ReadableStream R1, + // 2. 
Calling port2.postMessage causes a new internal WritableStream W1
+ and a new ReadableStream R2 to be created, both of which are coupled
+ to each other via a pair of MessagePorts P1 and P2.
+ // 3. ReadableStream R2 is passed to the port1.onmessage callback as the
+ data property of the MessageEvent, and R1 is configured to pipeTo W1.
+ // 4. Within port1.onmessage, we transfer ReadableStream R2 to port1, which
+ creates a new internal WritableStream W2 and a new ReadableStream R3,
+ both of which are coupled to each other via a pair of MessagePorts
+ P3 and P4.
+ // 5. ReadableStream R3 is passed to the port2.onmessage callback as the
+ data property of the MessageEvent, and R2 is configured to pipeTo W2.
+ // 6. Once the reader is attached to R3 in the port2.onmessage callback,
+ a message is sent along the path: R3 -> P4 -> P3 -> R2 -> P2 -> P1 -> R1
+ to begin pulling the data. The data is then pushed along the pipeline
+ R1 -> W1 -> P1 -> P2 -> R2 -> W2 -> P3 -> P4 -> R3
+ // 7. The MessagePorts P1, P2, P3, and P4 serve as a control channel for
+ passing data and control instructions, potentially across realms,
+ to the other ReadableStream and WritableStream instances.
+ //
+ // If this test experiences timeouts (hangs without finishing), it's most
+ // likely because the control instructions are somehow broken and the
+ // MessagePorts are not being closed properly or it could be caused by
+ failing to close R1's controller, which signals the end of the data
+ flow.
+
+ const readable = new ReadableStream({
+ start: common.mustCall((controller) => {
+ controller.enqueue(theData);
+ controller.close();
+ }),
+ });
+
+ port2.onmessage = common.mustCall(({ data }) => {
+ assert(isReadableStream(data));
+
+ const reader = data.getReader();
+ reader.read().then(common.mustCall((chunk) => {
+ assert.deepStrictEqual(chunk, { done: false, value: theData });
+ }));
+
+ port2.close();
+ });
+
+ port1.onmessage = common.mustCall(({ data }) => {
+ assert(isReadableStream(data));
+ assert(!data.locked);
+ port1.postMessage(data, [data]);
+ assert(data.locked);
+ });
+
+ assert.throws(() => port2.postMessage(readable), {
+ code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST',
+ });
+
+ port2.postMessage(readable, [readable]);
+ assert(readable.locked);
+}
+
+{
+ const { port1, port2 } = new MessageChannel();
+ port1.onmessageerror = common.mustNotCall();
+ port2.onmessageerror = common.mustNotCall();
+
+ // Like the ReadableStream test above, this sets up a pipeline
+ // through which the data flows...
+ //
+ // We start with WritableStream W1, which is transferred to port1.
+ // Doing so creates an internal ReadableStream R1 and WritableStream W2,
+ // which are coupled together with MessagePorts P1 and P2.
+ // The port1.onmessage callback receives WritableStream W2 and
+ // immediately transfers that to port2. Doing so creates an internal
+ // ReadableStream R2 and WritableStream W3, which are coupled together
+ // with MessagePorts P3 and P4. WritableStream W3 is handed off to
+ // port2.onmessage.
+ //
+ // When the writer on port2.onmessage writes the chunk of data, it
+ // gets passed along the pipeline:
+ // W3 -> P4 -> P3 -> R2 -> W2 -> P2 -> P1 -> R1 -> W1
+
+ const writable = new WritableStream({
+ write: common.mustCall((chunk) => {
+ assert.strictEqual(chunk, theData);
+ }),
+ });
+
+ port2.onmessage = common.mustCall(({ data }) => {
+ assert(isWritableStream(data));
+ assert(!data.locked);
+ const writer = data.getWriter();
+ writer.write(theData).then(common.mustCall());
+ writer.close();
+ port2.close();
+ });
+
+ port1.onmessage = common.mustCall(({ data }) => {
+ assert(isWritableStream(data));
+ assert(!data.locked);
+ port1.postMessage(data, [data]);
+ assert(data.locked);
+ });
+
+ assert.throws(() => port2.postMessage(writable), {
+ code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST',
+ });
+
+ port2.postMessage(writable, [writable]);
+ assert(writable.locked);
+}
+
+{
+ const { port1, port2 } = new MessageChannel();
+ port1.onmessageerror = common.mustNotCall();
+ port2.onmessageerror = common.mustNotCall();
+
+ // The data flow here is actually quite complicated, and is a combination
+ // of the WritableStream and ReadableStream examples above.
+ //
+ // We start with TransformStream T1, which creates ReadableStream R1,
+ // and WritableStream W1.
+ //
+ // When T1 is transferred to port1.onmessage, R1 and W1 are individually
+ // transferred.
+ //
+ // When R1 is transferred, it creates internal WritableStream W2, and
+ // new ReadableStream R2, coupled together via MessagePorts P1 and P2.
+ //
+ // When W1 is transferred, it creates internal ReadableStream R3 and
+ // new WritableStream W3, coupled together via MessagePorts P3 and P4.
+ //
+ // A new TransformStream T2 is created that owns ReadableStream R2 and
+ // WritableStream W3. The port1.onmessage callback immediately transfers
+ // that to port2.onmessage.
+ //
+ // When T2 is transferred, R2 and W3 are individually transferred.
+ // + // When R2 is transfered, it creates internal WritableStream W4, and + // ReadableStream R4, coupled together via MessagePorts P5 and P6. + // + // When W3 is transfered, it creates internal ReadableStream R5, and + // WritableStream W5, coupled together via MessagePorts P7 and P8. + // + // A new TransformStream T3 is created that owns ReadableStream R4 and + // WritableStream W5. + // + // port1.onmessage then writes a chunk of data. That chunk of data + // flows through the pipeline to T1: + // + // W5 -> P8 -> P7 -> R5 -> W3 -> P4 -> P3 -> R3 -> W1 -> T1 + // + // T1 performs the transformation, then pushes the chunk back out + // along the pipeline: + // + // T1 -> R1 -> W2 -> P1 -> P2 -> R2 -> W4 -> P5 -> P6 -> R4 + + const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + const writer = data.writable.getWriter(); + const reader = data.readable.getReader(); + Promise.all([ + writer.write(theData), + writer.close(), + reader.read().then(common.mustCall((result) => { + assert(!result.done); + assert.strictEqual(result.value, theData.toUpperCase()); + })), + reader.read().then(common.mustCall((result) => { + assert(result.done); + })), + ]).then(common.mustCall()); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + assert(!data.readable.locked); + assert(!data.writable.locked); + port1.postMessage(data, [data]); + assert(data.readable.locked); + assert(data.writable.locked); + }); + + assert.throws(() => port2.postMessage(transform), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(transform, [transform]); + assert(transform.readable.locked); + assert(transform.writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + let controller; + + const readable = new ReadableStream({ + 
start(c) { controller = c; }, + + cancel: common.mustCall((error) => { + assert.strictEqual(error.code, 25); // DataCloneError + }), + }); + + port1.onmessage = ({ data }) => { + const reader = data.getReader(); + assert.rejects(reader.read(), { + code: 25, // DataCloneError + }); + port1.close(); + }; + + port2.postMessage(readable, [readable]); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + controller.enqueue(notActuallyTransferable); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + assert.strictEqual(error.name, 'DataCloneError'); + }); + }) + }; + + const writable = new WritableStream(source); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, { + code: 25, + name: 'DataCloneError', + }); + + writer.write(notActuallyTransferable).then(common.mustCall()); + + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const error = new Error('boom'); + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((reason) => { + process.nextTick(() => { + assert.deepStrictEqual(reason, error); + + // Reason is a clone of the original error. 
+ assert.notStrictEqual(reason, error); + }); + }), + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => assert.strictEqual(error.code, 25)); + }) + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + const m = new WebAssembly.Memory({ initial: 1 }); + + assert.rejects(writer.abort(m), { + code: 25 + }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + // Verify that the communication works across worker threads... + + const worker = new Worker(` + const { + isReadableStream, + } = require('internal/webstreams/readablestream'); + + const { + parentPort, + } = require('worker_threads'); + + const assert = require('assert'); + + const tracker = new assert.CallTracker(); + process.on('exit', () => { + tracker.verify(); + }); + + parentPort.onmessage = tracker.calls(({ data }) => { + assert(isReadableStream(data)); + const reader = data.getReader(); + reader.read().then(tracker.calls((result) => { + assert(!result.done); + assert(result.value instanceof Uint8Array); + })); + parentPort.close(); + }); + parentPort.onmessageerror = () => assert.fail('should not be called'); + `, { eval: true }); + + worker.on('error', common.mustNotCall()); + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array(10)); + controller.close(); + } + }); + + worker.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall(), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new 
MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + data.cancel().then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall((error) => { + process.nextTick(() => assert(error.code, 25)); + }), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + + const reader = data.getReader(); + + const cancel = reader.cancel(m); + + reader.closed.then(common.mustCall()); + + assert.rejects(cancel, { + code: 25 + }); + + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + }); + }), + }; + + const writable = new WritableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + const writer = data.getWriter(); + const write = writer.write(m); + assert.rejects(write, { code: 25 }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const readable = new ReadableStream(); + readable.getReader(); + assert.throws(() => readable[kTransfer](), { + code: 25 + }); + + const writable = new WritableStream(); + writable.getWriter(); + assert.throws(() => writable[kTransfer](), { + code: 25 + }); +} diff --git a/test/parallel/test-whatwg-writablestream.js b/test/parallel/test-whatwg-writablestream.js new file mode 100644 index 00000000000000..91e3c098462949 --- /dev/null +++ b/test/parallel/test-whatwg-writablestream.js @@ -0,0 +1,260 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + WritableStream, + WritableStreamDefaultController, + 
WritableStreamDefaultWriter, + CountQueuingStrategy, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + isPromise, +} = require('util/types'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +class Sink { + constructor() { + this.chunks = []; + } + + start() { + this.started = true; + } + + write(chunk) { + this.chunks.push(chunk); + } + + close() { + this.closed = true; + } + + abort() { + this.aborted = true; + } +} + +{ + const stream = new WritableStream(); + + assert(stream[kState].controller instanceof WritableStreamDefaultController); + assert(!stream.locked); + + assert.strictEqual(typeof stream.abort, 'function'); + assert.strictEqual(typeof stream.close, 'function'); + assert.strictEqual(typeof stream.getWriter, 'function'); +} + +[1, false, ''].forEach((type) => { + assert.throws(() => new WritableStream({ type }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + assert.throws(() => new WritableStream({}, { highWaterMark }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', false, {}].forEach((size) => { + assert.throws(() => new WritableStream({}, { size }), { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +{ + new WritableStream({}, 1); + new WritableStream({}, 'a'); + new WritableStream({}, null); +} + +{ + const sink = new Sink(); + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + assert(!stream.locked); + const writer = stream.getWriter(); + assert(stream.locked); + assert(writer instanceof WritableStreamDefaultWriter); + + assert(isPromise(writer.closed)); + assert(isPromise(writer.ready)); + assert(typeof writer.desiredSize, 'number'); + assert(typeof writer.abort, 'function'); + assert(typeof writer.close, 'function'); + assert(typeof writer.releaseLock, 'function'); + assert(typeof writer.write, 'function'); + + 
writer.releaseLock(); + assert(!stream.locked); + + const writer2 = stream.getWriter(); + + assert(sink.started); + + writer2.closed.then(common.mustCall()); + writer2.ready.then(common.mustCall()); + + writer2.close().then(common.mustCall(() => { + assert.strict(sink.closed); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + const error = new Error('boom'); + + const writer = stream.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'errored'); + assert(sink.aborted); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, { highWaterMark: 1 } + ); + + async function write(stream) { + const writer = stream.getWriter(); + const p = writer.write('hello'); + assert.strictEqual(writer.desiredSize, 0); + await p; + assert.strictEqual(writer.desiredSize, 1); + } + + write(stream).then(common.mustCall(() => { + assert.deepStrictEqual(['hello'], sink.chunks); + })); +} + +{ + assert.throws(() => Reflect.get(WritableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype.getWriter.call(), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype[kTransfer].call(), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'closed'), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'ready'), { + code: 'ERR_INVALID_THIS', + }); + assert.throws( + () => Reflect.get(WritableStreamDefaultWriter.prototype, 'desiredSize'), { + code: 'ERR_INVALID_THIS', + }); + 
assert.rejects(WritableStreamDefaultWriter.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.write({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStreamDefaultWriter.prototype.releaseLock({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'abortReason', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'signal', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + WritableStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + let controller; + const writable = new WritableStream({ + start(c) { controller = c; } + }); + assert.strictEqual( + inspect(writable), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: null }), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: 0 }), + 'WritableStream [Object]'); + + const writer = writable.getWriter(); + assert.match( + inspect(writer), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: null }), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: 0 }), + /WritableStreamDefaultWriter \[/); + + assert.match( + inspect(controller), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /WritableStreamDefaultController \[/); + + writer.abort(new Error('boom')); + + assert.strictEqual(writer.desiredSize, null); + setImmediate(() => assert.strictEqual(writer.desiredSize, null)); +} diff --git a/test/wpt/status/streams.json 
b/test/wpt/status/streams.json index 0967ef424bce67..c1b80d69dd8cd3 100644 --- a/test/wpt/status/streams.json +++ b/test/wpt/status/streams.json @@ -1 +1,11 @@ -{} +{ + "queuing-strategies-size-function-per-global.window.js": { + "skip": "Browser-specific test" + }, + "transferable/deserialize-error.window.js": { + "skip": "Browser-specific test" + }, + "readable-byte-streams/bad-buffers-and-views.any.js": { + "fail": "TODO: implement detached ArrayBuffer support" + } +} diff --git a/test/wpt/test-streams.js b/test/wpt/test-streams.js index 6a64f241c10e2d..987676d8c49125 100644 --- a/test/wpt/test-streams.js +++ b/test/wpt/test-streams.js @@ -10,7 +10,7 @@ runner.setFlags(['--expose-internals']); // Set a script that will be executed in the worker before running the tests. runner.setInitScript(` - const { + let { ReadableStream, ReadableStreamDefaultReader, ReadableStreamBYOBReader, @@ -29,19 +29,111 @@ runner.setInitScript(` const { internalBinding } = require('internal/test/binding'); const { DOMException } = internalBinding('messaging'); global.DOMException = DOMException; - global.ReadableStream = ReadableStream; - global.ReadableStreamDefaultReader = ReadableStreamDefaultReader; - global.ReadableStreamBYOBReader = ReadableStreamBYOBReader; - global.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; - global.ReadableByteStreamController = ReadableByteStreamController; - global.ReadableStreamDefaultController = ReadableStreamDefaultController; - global.TransformStream = TransformStream; - global.TransformStreamDefaultController = TransformStreamDefaultController; - global.WritableStream = WritableStream; - global.WritableStreamDefaultWriter = WritableStreamDefaultWriter; - global.WritableStreamDefaultController = WritableStreamDefaultController; - global.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; - global.CountQueuingStrategy = CountQueuingStrategy; + + Object.defineProperties(global, { + ReadableStream: { + value: ReadableStream, + configurable: 
true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultReader: { + value: ReadableStreamDefaultReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBReader: { + value: ReadableStreamBYOBReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBRequest: { + value: ReadableStreamBYOBRequest, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableByteStreamController: { + value: ReadableByteStreamController, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultController: { + value: ReadableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStream: { + value: TransformStream, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStreamDefaultController: { + value: TransformStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStream: { + value: WritableStream, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultWriter: { + value: WritableStreamDefaultWriter, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultController: { + value: WritableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + ByteLengthQueuingStrategy: { + value: ByteLengthQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + CountQueuingStrategy: { + value: CountQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + }); + + // Simulate global postMessage for enqueue-with-detached-buffer.window.js + function postMessage(value, origin, transferList) { + const mc = new MessageChannel(); + mc.port1.postMessage(value, transferList); + mc.port2.close(); + } + + // TODO(@jasnell): This is a bit of a hack to get the idl harness test + // working. 
Later we should investigate a better approach. + // See: https://github.com/nodejs/node/pull/39062#discussion_r659383373 + Object.defineProperties(global, { + DedicatedWorkerGlobalScope: { + get() { + // Pretend that we're a DedicatedWorker, but *only* for the + // IDL harness. For everything else, keep the JavaScript shell + // environment. + if (new Error().stack.includes('idlharness.js')) + return global.constructor; + else + return function() {}; + } + } + }); `); runner.runJsTests(); diff --git a/tools/doc/type-parser.mjs b/tools/doc/type-parser.mjs index c2586a43254ecb..e3b8ad0ffac4bf 100644 --- a/tools/doc/type-parser.mjs +++ b/tools/doc/type-parser.mjs @@ -226,6 +226,33 @@ const customTypesMap = { 'X509Certificate': 'crypto.html#crypto_class_x509certificate', 'zlib options': 'zlib.html#zlib_class_options', + + 'ReadableStream': + 'webstreams.md#webstreamsapi_class_readablestream', + 'ReadableStreamDefaultReader': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultreader', + 'ReadableStreamBYOBReader': + 'webstreams.md#webstreamsapi_class_readablestreambyobreader', + 'ReadableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultcontroller', + 'ReadableByteStreamController': + 'webstreams.md#webstreamsapi_class_readablebytestreamcontroller', + 'ReadableStreamBYOBRequest': + 'webstreams.md#webstreamsapi_class_readablestreambyobrequest', + 'WritableStream': + 'webstreams.md#webstreamsapi_class_writablestream', + 'WritableStreamDefaultWriter': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultwriter', + 'WritableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultcontroller', + 'TransformStream': + 'webstreams.md#webstreamsapi_class_transformstream', + 'TransformStreamDefaultController': + 'webstreams.md#webstreamsapi_class_transformstreamdefaultcontroller', + 'ByteLengthQueuingStrategy': + 'webstreams.md#webstreamsapi_class_bytelengthqueuingstrategy', + 'CountQueuingStrategy': + 
'webstreams.md#webstreamsapi_class_countqueuingstrategy', }; const arrayPart = /(?:\[])+$/; diff --git a/typings/primordials.d.ts b/typings/primordials.d.ts index 0436e92b1d9b53..beed1d7b83c4c9 100644 --- a/typings/primordials.d.ts +++ b/typings/primordials.d.ts @@ -1,3 +1,5 @@ +import { AsyncIterator } from "internal/webstreams/util"; + type UncurryThis unknown> = (self: ThisParameterType, ...args: Parameters) => ReturnType; type UncurryThisStaticApply unknown> = @@ -9,15 +11,15 @@ type StaticApply unknown> = * Primordials are a way to safely use globals without fear of global mutation * Generally, this means removing `this` parameter usage and instead using * a regular parameter: - * + * * @example - * + * * ```js * 'thing'.startsWith('hello'); * ``` - * + * * becomes - * + * * ```js * primordials.StringPrototypeStartsWith('thing', 'hello') * ``` @@ -142,6 +144,7 @@ declare namespace primordials { export const ArrayBufferPrototype: typeof ArrayBuffer.prototype export const ArrayBufferIsView: typeof ArrayBuffer.isView export const ArrayBufferPrototypeSlice: UncurryThis + export const AsyncIteratorPrototype: UncurryThis export const BigInt: typeof globalThis.BigInt; export const BigIntLength: typeof BigInt.length export const BigIntName: typeof BigInt.name @@ -522,5 +525,5 @@ declare namespace primordials { export const PromiseAny: typeof Promise.any export const PromisePrototypeThen: UncurryThis export const PromisePrototypeCatch: UncurryThis - export const PromisePrototypeFinally: UncurryThis + export const PromisePrototypeFinally: UncurryThis }