How to use the flushDone method in wpt

Best JavaScript code snippet using wpt

general.any.js

Source:general.any.js Github

copy

Full Screen

1// META: global=worker,jsshell2// META: script=../resources/test-utils.js3// META: script=../resources/rs-utils.js4'use strict';5test(() => {6 new TransformStream({ transform() { } });7}, 'TransformStream can be constructed with a transform function');8test(() => {9 new TransformStream();10 new TransformStream({});11}, 'TransformStream can be constructed with no transform function');12test(() => {13 const ts = new TransformStream({ transform() { } });14 const proto = Object.getPrototypeOf(ts);15 const writableStream = Object.getOwnPropertyDescriptor(proto, 'writable');16 assert_true(writableStream !== undefined, 'it has a writable property');17 assert_false(writableStream.enumerable, 'writable should be non-enumerable');18 assert_equals(typeof writableStream.get, 'function', 'writable should have a getter');19 assert_equals(writableStream.set, undefined, 'writable should not have a setter');20 assert_true(writableStream.configurable, 'writable should be configurable');21 assert_true(ts.writable instanceof WritableStream, 'writable is an instance of WritableStream');22 assert_not_equals(WritableStream.prototype.getWriter.call(ts.writable), undefined,23 'writable should pass WritableStream brand check');24 const readableStream = Object.getOwnPropertyDescriptor(proto, 'readable');25 assert_true(readableStream !== undefined, 'it has a readable property');26 assert_false(readableStream.enumerable, 'readable should be non-enumerable');27 assert_equals(typeof readableStream.get, 'function', 'readable should have a getter');28 assert_equals(readableStream.set, undefined, 'readable should not have a setter');29 assert_true(readableStream.configurable, 'readable should be configurable');30 assert_true(ts.readable instanceof ReadableStream, 'readable is an instance of ReadableStream');31 assert_not_equals(ReadableStream.prototype.getReader.call(ts.readable), undefined,32 'readable should pass ReadableStream brand check');33}, 'TransformStream instances must have writable and 
readable properties of the correct types');34test(() => {35 const ts = new TransformStream({ transform() { } });36 const writer = ts.writable.getWriter();37 assert_equals(writer.desiredSize, 1, 'writer.desiredSize should be 1');38}, 'TransformStream writable starts in the writable state');39promise_test(() => {40 const ts = new TransformStream();41 const writer = ts.writable.getWriter();42 writer.write('a');43 assert_equals(writer.desiredSize, 0, 'writer.desiredSize should be 0 after write()');44 return ts.readable.getReader().read().then(result => {45 assert_equals(result.value, 'a',46 'result from reading the readable is the same as was written to writable');47 assert_false(result.done, 'stream should not be done');48 return delay(0).then(() => assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again'));49 });50}, 'Identity TransformStream: can read from readable what is put into writable');51promise_test(() => {52 let c;53 const ts = new TransformStream({54 start(controller) {55 c = controller;56 },57 transform(chunk) {58 c.enqueue(chunk.toUpperCase());59 }60 });61 const writer = ts.writable.getWriter();62 writer.write('a');63 return ts.readable.getReader().read().then(result => {64 assert_equals(result.value, 'A',65 'result from reading the readable is the transformation of what was written to writable');66 assert_false(result.done, 'stream should not be done');67 });68}, 'Uppercaser sync TransformStream: can read from readable transformed version of what is put into writable');69promise_test(() => {70 let c;71 const ts = new TransformStream({72 start(controller) {73 c = controller;74 },75 transform(chunk) {76 c.enqueue(chunk.toUpperCase());77 c.enqueue(chunk.toUpperCase());78 }79 });80 const writer = ts.writable.getWriter();81 writer.write('a');82 const reader = ts.readable.getReader();83 return reader.read().then(result1 => {84 assert_equals(result1.value, 'A',85 'the first chunk read is the transformation of the single chunk written');86 
assert_false(result1.done, 'stream should not be done');87 return reader.read().then(result2 => {88 assert_equals(result2.value, 'A',89 'the second chunk read is also the transformation of the single chunk written');90 assert_false(result2.done, 'stream should not be done');91 });92 });93}, 'Uppercaser-doubler sync TransformStream: can read both chunks put into the readable');94promise_test(() => {95 let c;96 const ts = new TransformStream({97 start(controller) {98 c = controller;99 },100 transform(chunk) {101 return delay(0).then(() => c.enqueue(chunk.toUpperCase()));102 }103 });104 const writer = ts.writable.getWriter();105 writer.write('a');106 return ts.readable.getReader().read().then(result => {107 assert_equals(result.value, 'A',108 'result from reading the readable is the transformation of what was written to writable');109 assert_false(result.done, 'stream should not be done');110 });111}, 'Uppercaser async TransformStream: can read from readable transformed version of what is put into writable');112promise_test(() => {113 let doSecondEnqueue;114 let returnFromTransform;115 const ts = new TransformStream({116 transform(chunk, controller) {117 delay(0).then(() => controller.enqueue(chunk.toUpperCase()));118 doSecondEnqueue = () => controller.enqueue(chunk.toUpperCase());119 return new Promise(resolve => {120 returnFromTransform = resolve;121 });122 }123 });124 const reader = ts.readable.getReader();125 const writer = ts.writable.getWriter();126 writer.write('a');127 return reader.read().then(result1 => {128 assert_equals(result1.value, 'A',129 'the first chunk read is the transformation of the single chunk written');130 assert_false(result1.done, 'stream should not be done');131 doSecondEnqueue();132 return reader.read().then(result2 => {133 assert_equals(result2.value, 'A',134 'the second chunk read is also the transformation of the single chunk written');135 assert_false(result2.done, 'stream should not be done');136 returnFromTransform();137 });138 
});139}, 'Uppercaser-doubler async TransformStream: can read both chunks put into the readable');140promise_test(() => {141 const ts = new TransformStream({ transform() { } });142 const writer = ts.writable.getWriter();143 writer.close();144 return Promise.all([writer.closed, ts.readable.getReader().closed]);145}, 'TransformStream: by default, closing the writable closes the readable (when there are no queued writes)');146promise_test(() => {147 let transformResolve;148 const transformPromise = new Promise(resolve => {149 transformResolve = resolve;150 });151 const ts = new TransformStream({152 transform() {153 return transformPromise;154 }155 }, undefined, { highWaterMark: 1 });156 const writer = ts.writable.getWriter();157 writer.write('a');158 writer.close();159 let rsClosed = false;160 ts.readable.getReader().closed.then(() => {161 rsClosed = true;162 });163 return delay(0).then(() => {164 assert_equals(rsClosed, false, 'readable is not closed after a tick');165 transformResolve();166 return writer.closed.then(() => {167 // TODO: Is this expectation correct?168 assert_equals(rsClosed, true, 'readable is closed at that point');169 });170 });171}, 'TransformStream: by default, closing the writable waits for transforms to finish before closing both');172promise_test(() => {173 let c;174 const ts = new TransformStream({175 start(controller) {176 c = controller;177 },178 transform() {179 c.enqueue('x');180 c.enqueue('y');181 return delay(0);182 }183 });184 const writer = ts.writable.getWriter();185 writer.write('a');186 writer.close();187 const readableChunks = readableStreamToArray(ts.readable);188 return writer.closed.then(() => {189 return readableChunks.then(chunks => {190 assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');191 });192 });193}, 'TransformStream: by default, closing the writable closes the readable after sync enqueues and async done');194promise_test(() => {195 let c;196 const ts = new TransformStream({197 
start(controller) {198 c = controller;199 },200 transform() {201 return delay(0)202 .then(() => c.enqueue('x'))203 .then(() => c.enqueue('y'))204 .then(() => delay(0));205 }206 });207 const writer = ts.writable.getWriter();208 writer.write('a');209 writer.close();210 const readableChunks = readableStreamToArray(ts.readable);211 return writer.closed.then(() => {212 return readableChunks.then(chunks => {213 assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');214 });215 });216}, 'TransformStream: by default, closing the writable closes the readable after async enqueues and async done');217promise_test(() => {218 let c;219 const ts = new TransformStream({220 suffix: '-suffix',221 start(controller) {222 c = controller;223 c.enqueue('start' + this.suffix);224 },225 transform(chunk) {226 c.enqueue(chunk + this.suffix);227 },228 flush() {229 c.enqueue('flushed' + this.suffix);230 }231 });232 const writer = ts.writable.getWriter();233 writer.write('a');234 writer.close();235 const readableChunks = readableStreamToArray(ts.readable);236 return writer.closed.then(() => {237 return readableChunks.then(chunks => {238 assert_array_equals(chunks, ['start-suffix', 'a-suffix', 'flushed-suffix'], 'all enqueued chunks have suffixes');239 });240 });241}, 'Transform stream should call transformer methods as methods');242promise_test(() => {243 function functionWithOverloads() {}244 functionWithOverloads.apply = () => assert_unreached('apply() should not be called');245 functionWithOverloads.call = () => assert_unreached('call() should not be called');246 const ts = new TransformStream({247 start: functionWithOverloads,248 transform: functionWithOverloads,249 flush: functionWithOverloads250 });251 const writer = ts.writable.getWriter();252 writer.write('a');253 writer.close();254 return readableStreamToArray(ts.readable);255}, 'methods should not not have .apply() or .call() called');256promise_test(t => {257 let startCalled = false;258 let 
startDone = false;259 let transformDone = false;260 let flushDone = false;261 const ts = new TransformStream({262 start() {263 startCalled = true;264 return flushAsyncEvents().then(() => {265 startDone = true;266 });267 },268 transform() {269 return t.step(() => {270 assert_true(startDone, 'transform() should not be called until the promise returned from start() has resolved');271 return flushAsyncEvents().then(() => {272 transformDone = true;273 });274 });275 },276 flush() {277 return t.step(() => {278 assert_true(transformDone,279 'flush() should not be called until the promise returned from transform() has resolved');280 return flushAsyncEvents().then(() => {281 flushDone = true;282 });283 });284 }285 }, undefined, { highWaterMark: 1 });286 assert_true(startCalled, 'start() should be called synchronously');287 const writer = ts.writable.getWriter();288 const writePromise = writer.write('a');289 return writer.close().then(() => {290 assert_true(flushDone, 'promise returned from flush() should have resolved');291 return writePromise;292 });293}, 'TransformStream start, transform, and flush should be strictly ordered');294promise_test(() => {295 let transformCalled = false;296 const ts = new TransformStream({297 transform() {298 transformCalled = true;299 }300 }, undefined, { highWaterMark: Infinity });301 // transform() is only called synchronously when there is no backpressure and all microtasks have run.302 return delay(0).then(() => {303 const writePromise = ts.writable.getWriter().write();304 assert_true(transformCalled, 'transform() should have been called');305 return writePromise;306 });307}, 'it should be possible to call transform() synchronously');308promise_test(() => {309 const ts = new TransformStream({}, undefined, { highWaterMark: 0 });310 const writer = ts.writable.getWriter();311 writer.close();312 return Promise.all([writer.closed, ts.readable.getReader().closed]);313}, 'closing the writable should close the readable when there are no queued 
chunks, even with backpressure');314test(() => {315 new TransformStream({316 start(controller) {317 controller.terminate();318 assert_throws(new TypeError(), () => controller.enqueue(), 'enqueue should throw');319 }320 });321}, 'enqueue() should throw after controller.terminate()');322promise_test(() => {323 let controller;324 const ts = new TransformStream({325 start(c) {326 controller = c;327 }328 });329 const cancelPromise = ts.readable.cancel();330 assert_throws(new TypeError(), () => controller.enqueue(), 'enqueue should throw');331 return cancelPromise;332}, 'enqueue() should throw after readable.cancel()');333test(() => {334 new TransformStream({335 start(controller) {336 controller.terminate();337 controller.terminate();338 }339 });340}, 'controller.terminate() should do nothing the second time it is called');341promise_test(t => {342 let controller;343 const ts = new TransformStream({344 start(c) {345 controller = c;346 }347 });348 const cancelReason = { name: 'cancelReason' };349 const cancelPromise = ts.readable.cancel(cancelReason);350 controller.terminate();351 return Promise.all([352 cancelPromise,353 promise_rejects(t, cancelReason, ts.writable.getWriter().closed, 'closed should reject with cancelReason')354 ]);355}, 'terminate() should do nothing after readable.cancel()');356promise_test(() => {357 let calls = 0;358 new TransformStream({359 start() {360 ++calls;361 }362 });363 return flushAsyncEvents().then(() => {364 assert_equals(calls, 1, 'start() should have been called exactly once');365 });366}, 'start() should not be called twice');367test(() => {368 assert_throws(new RangeError(), () => new TransformStream({ readableType: 'bytes' }), 'constructor should throw');369}, 'specifying a defined readableType should throw');370test(() => {371 assert_throws(new RangeError(), () => new TransformStream({ writableType: 'bytes' }), 'constructor should throw');372}, 'specifying a defined writableType should throw');373test(() => {374 class Subclass 
extends TransformStream {375 extraFunction() {376 return true;377 }378 }379 assert_equals(380 Object.getPrototypeOf(Subclass.prototype), TransformStream.prototype,381 'Subclass.prototype\'s prototype should be TransformStream.prototype');382 assert_equals(Object.getPrototypeOf(Subclass), TransformStream,383 'Subclass\'s prototype should be TransformStream');384 const sub = new Subclass();385 assert_true(sub instanceof TransformStream,386 'Subclass object should be an instance of TransformStream');387 assert_true(sub instanceof Subclass,388 'Subclass object should be an instance of Subclass');389 const readableGetter = Object.getOwnPropertyDescriptor(390 TransformStream.prototype, 'readable').get;391 assert_equals(readableGetter.call(sub), sub.readable,392 'Subclass object should pass brand check');393 assert_true(sub.extraFunction(),394 'extraFunction() should be present on Subclass object');...

Full Screen

Full Screen

videoDecoder-codec-specific.https.any.js

Source:videoDecoder-codec-specific.https.any.js Github

copy

Full Screen

1// META: global=window,dedicatedworker2// META: variant=?av13// META: variant=?vp84// META: variant=?vp95// META: variant=?h264_avc6// META: variant=?h264_annexb7const AV1_DATA = {8 src: 'av1.mp4',9 config: {10 codec: 'av01.0.04M.08',11 codedWidth: 320,12 codedHeight: 240,13 visibleRect: {x: 0, y: 0, width: 320, height: 240},14 displayWidth: 320,15 displayHeight: 240,16 },17 chunks: [18 {offset: 48, size: 1938}, {offset: 1986, size: 848},19 {offset: 2834, size: 3}, {offset: 2837, size: 47}, {offset: 2884, size: 3},20 {offset: 2887, size: 116}, {offset: 3003, size: 3},21 {offset: 3006, size: 51}, {offset: 3057, size: 25},22 {offset: 3082, size: 105}23 ]24};25const VP8_DATA = {26 src: 'vp8.webm',27 config: {28 codec: 'vp8',29 codedWidth: 320,30 codedHeight: 240,31 visibleRect: {x: 0, y: 0, width: 320, height: 240},32 displayWidth: 320,33 displayHeight: 240,34 },35 chunks: [36 {offset: 522, size: 4826}, {offset: 5355, size: 394},37 {offset: 5756, size: 621}, {offset: 6384, size: 424},38 {offset: 6815, size: 532}, {offset: 7354, size: 655},39 {offset: 8016, size: 670}, {offset: 8693, size: 2413},40 {offset: 11113, size: 402}, {offset: 11522, size: 686}41 ]42};43const VP9_DATA = {44 src: 'vp9.mp4',45 // TODO(sandersd): Verify that the file is actually level 1.46 config: {47 codec: 'vp09.00.10.08',48 codedWidth: 320,49 codedHeight: 240,50 displayAspectWidth: 320,51 displayAspectHeight: 240,52 },53 chunks: [54 {offset: 44, size: 3315}, {offset: 3359, size: 203},55 {offset: 3562, size: 245}, {offset: 3807, size: 172},56 {offset: 3979, size: 312}, {offset: 4291, size: 170},57 {offset: 4461, size: 195}, {offset: 4656, size: 181},58 {offset: 4837, size: 356}, {offset: 5193, size: 159}59 ]60};61const H264_AVC_DATA = {62 src: 'h264.mp4',63 config: {64 codec: 'avc1.64000b',65 description: {offset: 9490, size: 45},66 codedWidth: 320,67 codedHeight: 240,68 displayAspectWidth: 320,69 displayAspectHeight: 240,70 },71 chunks: [72 {offset: 48, size: 4140}, {offset: 4188, size: 
604},73 {offset: 4792, size: 475}, {offset: 5267, size: 561},74 {offset: 5828, size: 587}, {offset: 6415, size: 519},75 {offset: 6934, size: 532}, {offset: 7466, size: 523},76 {offset: 7989, size: 454}, {offset: 8443, size: 528}77 ]78};79const H264_ANNEXB_DATA = {80 src: 'h264.annexb',81 config: {82 codec: 'avc1.64000b',83 codedWidth: 320,84 codedHeight: 240,85 displayAspectWidth: 320,86 displayAspectHeight: 240,87 },88 chunks: [89 {offset: 0, size: 4175}, {offset: 4175, size: 602},90 {offset: 4777, size: 473}, {offset: 5250, size: 559},91 {offset: 5809, size: 585}, {offset: 6394, size: 517},92 {offset: 6911, size: 530}, {offset: 7441, size: 521},93 {offset: 7962, size: 452}, {offset: 8414, size: 526}94 ]95};96// Allows mutating `callbacks` after constructing the VideoDecoder, wraps calls97// in t.step().98function createVideoDecoder(t, callbacks) {99 return new VideoDecoder({100 output(frame) {101 if (callbacks && callbacks.output) {102 t.step(() => callbacks.output(frame));103 } else {104 t.unreached_func('unexpected output()');105 }106 },107 error(e) {108 if (callbacks && callbacks.error) {109 t.step(() => callbacks.error(e));110 } else {111 t.unreached_func('unexpected error()');112 }113 }114 });115}116function createCorruptChunk(index) {117 let bad_data = CHUNK_DATA[index];118 for (var i = 0; i < bad_data.byteLength; i += 4)119 bad_data[i] = 0xFF;120 return new EncodedVideoChunk(121 {type: 'delta', timestamp: index, data: bad_data});122}123// Create a view of an ArrayBuffer.124function view(buffer, {offset, size}) {125 return new Uint8Array(buffer, offset, size);126}127let CONFIG = null;128let CHUNK_DATA = null;129let CHUNKS = null;130promise_setup(async () => {131 const data = {132 '?av1': AV1_DATA,133 '?vp8': VP8_DATA,134 '?vp9': VP9_DATA,135 '?h264_avc': H264_AVC_DATA,136 '?h264_annexb': H264_ANNEXB_DATA137 }[location.search];138 // Don't run any tests if the codec is not supported.139 let supported = false;140 try {141 // TODO(sandersd): To properly 
support H.264 in AVC format, this should142 // include the `description`. For now this test assumes that H.264 Annex B143 // support is the same as H.264 AVC support.144 const support =145 await VideoDecoder.isConfigSupported({codec: data.config.codec});146 supported = support.supported;147 } catch (e) {148 }149 assert_implements_optional(supported, data.config.codec + ' unsupported');150 // Fetch the media data and prepare buffers.151 const response = await fetch(data.src);152 const buf = await response.arrayBuffer();153 CONFIG = {...data.config};154 if (data.config.description) {155 CONFIG.description = view(buf, data.config.description);156 }157 CHUNK_DATA = data.chunks.map((chunk, i) => view(buf, chunk));158 CHUNKS = CHUNK_DATA.map(159 (data, i) => new EncodedVideoChunk(160 {type: i == 0 ? 'key' : 'delta', timestamp: i, duration: 1, data}));161});162promise_test(async t => {163 const support = await VideoDecoder.isConfigSupported(CONFIG);164 assert_true(support.supported, 'supported');165}, 'Test isConfigSupported()');166promise_test(async t => {167 // TODO(sandersd): Create a 1080p `description` for H.264 in AVC format.168 // This version is testing only the H.264 Annex B path.169 const config = {170 codec: CONFIG.codec,171 codedWidth: 1920,172 codedHeight: 1088,173 displayAspectWidth: 1920,174 displayAspectHeight: 1080,175 };176 const support = await VideoDecoder.isConfigSupported(config);177 assert_true(support.supported, 'supported');178}, 'Test isConfigSupported() with 1080p crop');179promise_test(async t => {180 // Define a valid config that includes a hypothetical `futureConfigFeature`,181 // which is not yet recognized by the User Agent.182 const config = {183 ...CONFIG,184 colorSpace: {primaries: 'bt709'},185 futureConfigFeature: 'foo',186 };187 // The UA will evaluate validConfig as being "valid", ignoring the188 // `futureConfigFeature` it doesn't recognize.189 const support = await VideoDecoder.isConfigSupported(config);190 
assert_true(support.supported, 'supported');191 assert_equals(support.config.codec, config.codec, 'codec');192 assert_equals(support.config.codedWidth, config.codedWidth, 'codedWidth');193 assert_equals(support.config.codedHeight, config.codedHeight, 'codedHeight');194 assert_equals(support.config.displayAspectWidth, config.displayAspectWidth, 'displayAspectWidth');195 assert_equals(support.config.displayAspectHeight, config.displayAspectHeight, 'displayAspectHeight');196 assert_equals(support.config.colorSpace.primaries, config.colorSpace.primaries, 'color primaries');197 assert_equals(support.config.colorSpace.transfer, undefined, 'color transfer');198 assert_equals(support.config.colorSpace.matrix, undefined, 'color matrix');199 assert_equals(support.config.colorSpace.fullRange, undefined, 'color range');200 assert_false(support.config.hasOwnProperty('futureConfigFeature'), 'futureConfigFeature');201 if (config.description) {202 // The description must be copied.203 assert_false(204 support.config.description === config.description,205 'description is unique');206 assert_array_equals(207 new Uint8Array(support.config.description, 0),208 new Uint8Array(config.description, 0), 'description');209 } else {210 assert_false(support.config.hasOwnProperty('description'), 'description');211 }212}, 'Test that isConfigSupported() returns a parsed configuration');213promise_test(async t => {214 async function test(t, config, description) {215 await promise_rejects_js(216 t, TypeError, VideoDecoder.isConfigSupported(config), description);217 const decoder = createVideoDecoder(t);218 assert_throws_js(TypeError, () => decoder.configure(config), description);219 assert_equals(decoder.state, 'unconfigured', 'state');220 }221 await test(t, {...CONFIG, codedWidth: 0}, 'invalid codedWidth');222 await test(t, {...CONFIG, displayAspectWidth: 0}, 'invalid displayAspectWidth');223}, 'Test invalid configs');224promise_test(async t => {225 const decoder = createVideoDecoder(t);226 
decoder.configure(CONFIG);227 assert_equals(decoder.state, 'configured', 'state');228}, 'Test configure()');229promise_test(async t => {230 const callbacks = {};231 const decoder = createVideoDecoder(t, callbacks);232 decoder.configure(CONFIG);233 decoder.decode(CHUNKS[0]);234 let outputs = 0;235 callbacks.output = frame => {236 outputs++;237 assert_equals(frame.timestamp, CHUNKS[0].timestamp, 'timestamp');238 frame.close();239 };240 await decoder.flush();241 assert_equals(outputs, 1, 'outputs');242}, 'Decode a key frame');243promise_test(async t => {244 const callbacks = {};245 const decoder = createVideoDecoder(t, callbacks);246 decoder.configure(CONFIG);247 // Ensure type value is verified.248 assert_equals(CHUNKS[1].type, 'delta');249 assert_throws_dom('DataError', () => decoder.decode(CHUNKS[1], 'decode'));250}, 'Decode a non key frame first fails');251promise_test(async t => {252 const callbacks = {};253 const decoder = createVideoDecoder(t, callbacks);254 decoder.configure(CONFIG);255 for (let i = 0; i < 16; i++) {256 decoder.decode(new EncodedVideoChunk(257 {type: 'key', timestamp: 0, data: CHUNK_DATA[0]}));258 }259 assert_greater_than(decoder.decodeQueueSize, 0);260 // Wait for the first output, then reset the decoder.261 let outputs = 0;262 await new Promise(resolve => {263 callbacks.output = frame => {264 outputs++;265 assert_equals(outputs, 1, 'outputs');266 assert_equals(frame.timestamp, 0, 'timestamp');267 frame.close();268 decoder.reset();269 assert_equals(decoder.decodeQueueSize, 0, 'decodeQueueSize');270 resolve();271 };272 });273 decoder.configure(CONFIG);274 for (let i = 0; i < 4; i++) {275 decoder.decode(new EncodedVideoChunk(276 {type: 'key', timestamp: 1, data: CHUNK_DATA[0]}));277 }278 // Expect future outputs to come from after the reset.279 callbacks.output = frame => {280 outputs++;281 assert_equals(frame.timestamp, 1, 'timestamp');282 frame.close();283 };284 await decoder.flush();285 assert_equals(outputs, 5);286 
assert_equals(decoder.decodeQueueSize, 0);287}, 'Verify reset() suppresses outputs');288promise_test(async t => {289 const decoder = createVideoDecoder(t);290 assert_equals(decoder.state, 'unconfigured');291 decoder.reset();292 assert_equals(decoder.state, 'unconfigured');293 assert_throws_dom(294 'InvalidStateError', () => decoder.decode(CHUNKS[0]), 'decode');295 await promise_rejects_dom(t, 'InvalidStateError', decoder.flush(), 'flush');296}, 'Test unconfigured VideoDecoder operations');297promise_test(async t => {298 const decoder = createVideoDecoder(t);299 decoder.close();300 assert_equals(decoder.state, 'closed');301 assert_throws_dom(302 'InvalidStateError', () => decoder.configure(CONFIG), 'configure');303 assert_throws_dom('InvalidStateError', () => decoder.reset(), 'reset');304 assert_throws_dom('InvalidStateError', () => decoder.close(), 'close');305 assert_throws_dom(306 'InvalidStateError', () => decoder.decode(CHUNKS[0]), 'decode');307 await promise_rejects_dom(t, 'InvalidStateError', decoder.flush(), 'flush');308}, 'Test closed VideoDecoder operations');309promise_test(async t => {310 const callbacks = {};311 let errors = 0;312 callbacks.error = e => errors++;313 const decoder = createVideoDecoder(t, callbacks);314 decoder.configure(CONFIG);315 decoder.decode(CHUNKS[0]); // Decode keyframe first.316 decoder.decode(new EncodedVideoChunk(317 {type: 'key', timestamp: 1, data: new ArrayBuffer(0)}));318 await promise_rejects_dom(t, 'AbortError', decoder.flush());319 assert_equals(errors, 1, 'errors');320 assert_equals(decoder.state, 'closed', 'state');321}, 'Decode empty frame');322promise_test(async t => {323 const callbacks = {};324 let errors = 0;325 callbacks.error = e => errors++;326 let outputs = 0;327 callbacks.output = frame => {328 outputs++;329 frame.close();330 };331 const decoder = createVideoDecoder(t, callbacks);332 decoder.configure(CONFIG);333 decoder.decode(CHUNKS[0]); // Decode keyframe first.334 decoder.decode(createCorruptChunk(2));335 
await promise_rejects_dom(t, 'AbortError', decoder.flush());336 assert_less_than_equal(outputs, 1);337 assert_equals(errors, 1, 'errors');338 assert_equals(decoder.state, 'closed', 'state');339}, 'Decode corrupt frame');340promise_test(async t => {341 const decoder = createVideoDecoder(t);342 decoder.configure(CONFIG);343 decoder.decode(CHUNKS[0]); // Decode keyframe first.344 decoder.decode(createCorruptChunk(1));345 let flushDone = decoder.flush();346 decoder.close();347 // Flush should have been synchronously rejected, with no output() or error()348 // callbacks.349 await promise_rejects_dom(t, 'AbortError', flushDone);350}, 'Close while decoding corrupt frame');351promise_test(async t => {352 const callbacks = {};353 const decoder = createVideoDecoder(t, callbacks);354 decoder.configure(CONFIG);355 decoder.decode(CHUNKS[0]);356 let outputs = 0;357 callbacks.output = frame => {358 outputs++;359 frame.close();360 };361 await decoder.flush();362 assert_equals(outputs, 1, 'outputs');363 decoder.decode(CHUNKS[0]);364 await decoder.flush();365 assert_equals(outputs, 2, 'outputs');366}, 'Test decoding after flush');367promise_test(async t => {368 const callbacks = {};369 const decoder = createVideoDecoder(t, callbacks);370 decoder.configure(CONFIG);371 decoder.decode(new EncodedVideoChunk(372 {type: 'key', timestamp: -42, data: CHUNK_DATA[0]}));373 let outputs = 0;374 callbacks.output = frame => {375 outputs++;376 assert_equals(frame.timestamp, -42, 'timestamp');377 frame.close();378 };379 await decoder.flush();380 assert_equals(outputs, 1, 'outputs');381}, 'Test decoding a with negative timestamp');382promise_test(async t => {383 const callbacks = {};384 const decoder = createVideoDecoder(t, callbacks);385 decoder.configure(CONFIG);386 decoder.decode(CHUNKS[0]);387 decoder.decode(CHUNKS[1]);388 const flushDone = decoder.flush();389 // Wait for the first output, then reset.390 let outputs = 0;391 await new Promise(resolve => {392 callbacks.output = frame => {393 
outputs++;394 assert_equals(outputs, 1, 'outputs');395 decoder.reset();396 frame.close();397 resolve();398 };399 });400 // Flush should have been synchronously rejected.401 await promise_rejects_dom(t, 'AbortError', flushDone);402 assert_equals(outputs, 1, 'outputs');403}, 'Test reset during flush');404promise_test(async t => {405 const callbacks = {};406 const decoder = createVideoDecoder(t, callbacks);407 decoder.configure({...CONFIG, optimizeForLatency: true});408 decoder.decode(CHUNKS[0]);409 // The frame should be output without flushing.410 await new Promise(resolve => {411 callbacks.output = frame => {412 frame.close();413 resolve();414 };415 });...

Full Screen

Full Screen

usePriorityConfig.ts

Source:usePriorityConfig.ts Github

copy

Full Screen

...14 if (!config || !isObject(config)) throw new TypeError('config must be a object');15 const old = priorityColumnMap.get(prop) ?? {};16 deepMerge(old, config);17 priorityColumnMap.set(prop, old);18 flushDone();19 };20 const clearConfig = () => {21 priorityColumnMap.clear();22 };23 const removeConfig = (prop: string) => {24 priorityColumnMap.delete(prop);25 };26 return { setConfig, removeConfig, clearConfig, onSetConfig };...

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

// Run a WebPageTest test, then fetch its full results once it completes.
// Note: the original snippet declared `var wpt` twice (the second declaration
// shadowed the constructor returned by require), and the scraper stripped the
// `wpt.runTest(...)` line, leaving an orphaned callback body. Both are
// restored here. The test URL is a placeholder — substitute your own.
var WebPageTest = require('webpagetest');
var wpt = new WebPageTest('www.webpagetest.org');

wpt.runTest('https://example.com', function (err, data) {
  if (err) {
    return console.error(err);
  }
  wpt.getTestResults(data.data.testId, function (err, data) {
    if (err) {
      return console.error(err);
    }
    console.log(data);
  });
});

Full Screen

Using AI Code Generation

copy

Full Screen

// Run a WebPageTest test, then call flushDone() on the finished test.
// Fixes in this snippet: the original required the nonexistent 'wpt' module
// while constructing `WebPageTest` (an undefined name), and declared
// `var wpt` twice so the constructor reference was lost. `options` must at
// minimum identify the URL under test — TODO confirm the exact option names
// against the webpagetest wrapper's docs.
var WebPageTest = require('webpagetest');
var wpt = new WebPageTest('www.webpagetest.org');
var options = {
  url: 'https://example.com'
};

wpt.runTest(options, function (err, data) {
  if (err) {
    console.log(err);
    return;
  }
  console.log(data);
  wpt.flushDone(data.data.testId, function (err, data) {
    if (err) {
      console.log(err);
      return;
    }
    console.log(data);
  });
});

Full Screen

Using AI Code Generation

copy

Full Screen

// Run a WebPageTest test with an API key, then fetch its results.
// Fixes: `var wpt` was declared twice (shadowing the constructor from
// require), and the `wpt.runTest(...)` line was stripped by the scraper.
// The original listing repeated this identical example four times, with the
// fourth copy truncated mid-statement; it is shown once here.
var WebPageTest = require('webpagetest');
var wpt = new WebPageTest('www.webpagetest.org', 'A.2f2e7c4d0eeb5e5a5c5d5b5f5b5a5b5');

wpt.runTest('https://example.com', function (err, data) {
  if (err) return console.log(err);
  wpt.getTestResults(data.data.testId, function (err, data) {
    if (err) return console.log(err);
    console.log(data);
  });
});

Full Screen

Using AI Code Generation

copy

Full Screen

// Poll WebPageTest for the status of a previously submitted test,
// identified by its test ID, and log either the error or the status payload.
const wpt = require('webpagetest');
const client = wpt('www.webpagetest.org');
const testId = '160820_4S_4f5d5d7a8b1a5b7d5b5e5b7f5d5c5d5d';

client.getTestStatus(testId, (err, data) => {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});

Full Screen

Using AI Code Generation

copy

Full Screen

/**
 * Fetch a file over HTTP to verify that it loads.
 *
 * Logs a usage hint and returns early when no path is given; otherwise
 * issues an async GET and logs success or failure when the request settles.
 *
 * Fixes vs. the original: the unconditional setTimeout that printed
 * 'File loaded' after 1 s regardless of outcome is removed (it reported
 * success even on failure), an onerror handler is added, and the status
 * check uses strict equality.
 *
 * @param {string} path - URL of the file to test.
 */
function flushDone(path) {
  if (!path) {
    console.log('Please enter a path for the file to test');
    return;
  }
  var xhr = new XMLHttpRequest();
  xhr.open('GET', path, true);
  xhr.onload = function () {
    // Only a 200 response counts as a successful load.
    if (this.status === 200) {
      console.log('File loaded successfully');
    } else {
      console.log('File failed to load (status ' + this.status + ')');
    }
  };
  xhr.onerror = function () {
    console.log('File failed to load (network error)');
  };
  xhr.send();
  console.log('File loading');
}

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run wpt automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 automation test minutes FREE!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful