How to use the ontrackPromise method in wpt

Best JavaScript code snippets using wpt

RTCPeerConnection-helper.js

Source: RTCPeerConnection-helper.js (GitHub)


'use strict';
/*
 * Helper Methods for testing the following methods in RTCPeerConnection:
 *   createOffer
 *   createAnswer
 *   setLocalDescription
 *   setRemoteDescription
 *
 * This file offers the following features:
 *   SDP similarity comparison
 *   Generating offer/answer using anonymous peer connection
 *   Test signalingstatechange event
 *   Test promise that never resolves
 */
const audioLineRegex = /\r\nm=audio.+\r\n/g;
const videoLineRegex = /\r\nm=video.+\r\n/g;
const applicationLineRegex = /\r\nm=application.+\r\n/g;
function countLine(sdp, regex) {
  const matches = sdp.match(regex);
  if(matches === null) {
    return 0;
  } else {
    return matches.length;
  }
}
function countAudioLine(sdp) {
  return countLine(sdp, audioLineRegex);
}
function countVideoLine(sdp) {
  return countLine(sdp, videoLineRegex);
}
function countApplicationLine(sdp) {
  return countLine(sdp, applicationLineRegex);
}
function similarMediaDescriptions(sdp1, sdp2) {
  if(sdp1 === sdp2) {
    return true;
  } else if(
    countAudioLine(sdp1) !== countAudioLine(sdp2) ||
    countVideoLine(sdp1) !== countVideoLine(sdp2) ||
    countApplicationLine(sdp1) !== countApplicationLine(sdp2))
  {
    return false;
  } else {
    return true;
  }
}
// Assert that given object is either an
// RTCSessionDescription or RTCSessionDescriptionInit
function assert_is_session_description(sessionDesc) {
  if(sessionDesc instanceof RTCSessionDescription) {
    return;
  }
  assert_not_equals(sessionDesc, undefined,
    'Expect session description to be defined');
  assert_true(typeof(sessionDesc) === 'object',
    'Expect sessionDescription to be either a RTCSessionDescription or an object');
  assert_true(typeof(sessionDesc.type) === 'string',
    'Expect sessionDescription.type to be a string');
  assert_true(typeof(sessionDesc.sdp) === 'string',
    'Expect sessionDescription.sdp to be a string');
}
// We can't do string comparison to the SDP content,
// because RTCPeerConnection may return SDP that is
// slightly modified or reordered from what is given
// to it due to ICE candidate events or serialization.
// Instead, we create SDP with different number of media
// lines, and if the SDP strings are not the same, we
// simply count the media description lines and if they
// are the same, we assume it is the same.
function isSimilarSessionDescription(sessionDesc1, sessionDesc2) {
  assert_is_session_description(sessionDesc1);
  assert_is_session_description(sessionDesc2);
  if(sessionDesc1.type !== sessionDesc2.type) {
    return false;
  } else {
    return similarMediaDescriptions(sessionDesc1.sdp, sessionDesc2.sdp);
  }
}
function assert_session_desc_similar(sessionDesc1, sessionDesc2) {
  assert_true(isSimilarSessionDescription(sessionDesc1, sessionDesc2),
    'Expect both session descriptions to have the same count of media lines');
}
function assert_session_desc_not_similar(sessionDesc1, sessionDesc2) {
  assert_false(isSimilarSessionDescription(sessionDesc1, sessionDesc2),
    'Expect both session descriptions to have different count of media lines');
}
// Helper function to generate offer using a freshly created RTCPeerConnection
// object with any audio, video, data media lines present
function generateOffer(options={}) {
  const {
    audio = false,
    video = false,
    data = false,
    pc,
  } = options;
  if (data) {
    pc.createDataChannel('test');
  }
  const setup = {};
  if (audio) {
    setup.offerToReceiveAudio = true;
  }
  if (video) {
    setup.offerToReceiveVideo = true;
  }
  return pc.createOffer(setup).then(offer => {
    // Guard here to ensure that the generated offer really
    // contains the number of media lines we want
    const { sdp } = offer;
    if(audio) {
      assert_equals(countAudioLine(sdp), 1,
        'Expect m=audio line to be present in generated SDP');
    } else {
      assert_equals(countAudioLine(sdp), 0,
        'Expect m=audio line to not be present in generated SDP');
    }
    if(video) {
      assert_equals(countVideoLine(sdp), 1,
        'Expect m=video line to be present in generated SDP');
    } else {
      assert_equals(countVideoLine(sdp), 0,
        'Expect m=video line to not be present in generated SDP');
    }
    if(data) {
      assert_equals(countApplicationLine(sdp), 1,
        'Expect m=application line to be present in generated SDP');
    } else {
      assert_equals(countApplicationLine(sdp), 0,
        'Expect m=application line to not be present in generated SDP');
    }
    return offer;
  });
}
// Helper function to generate answer based on given offer using a freshly
// created RTCPeerConnection object
function generateAnswer(offer) {
  const pc = new RTCPeerConnection();
  return pc.setRemoteDescription(offer)
    .then(() => pc.createAnswer())
    .then((answer) => {
      pc.close();
      return answer;
    });
}
// Run a test function that returns a promise that should
// never be resolved. For lack of better options,
// we wait for a time out and pass the test if the
// promise doesn't resolve within that time.
function test_never_resolve(testFunc, testName) {
  async_test(t => {
    testFunc(t)
    .then(
      t.step_func(result => {
        assert_unreached(`Pending promise should never be resolved. Instead it is fulfilled with: ${result}`);
      }),
      t.step_func(err => {
        assert_unreached(`Pending promise should never be resolved. Instead it is rejected with: ${err}`);
      }));
    t.step_timeout(t.step_func_done(), 100);
  }, testName);
}
// Helper function to exchange ice candidates between
// two local peer connections
function exchangeIceCandidates(pc1, pc2) {
  // private function
  function doExchange(localPc, remotePc) {
    localPc.addEventListener('icecandidate', event => {
      const { candidate } = event;
      // candidate may be null to indicate end of candidate gathering.
      // There is ongoing discussion on w3c/webrtc-pc#1213
      // that there should be an empty candidate string event
      // for end of candidate for each m= section.
      if(candidate && remotePc.signalingState !== 'closed') {
        remotePc.addIceCandidate(candidate);
      }
    });
  }
  doExchange(pc1, pc2);
  doExchange(pc2, pc1);
}
// Helper function for doing one round of offer/answer exchange
// between two local peer connections
function doSignalingHandshake(localPc, remotePc) {
  return localPc.createOffer()
    .then(offer => Promise.all([
      localPc.setLocalDescription(offer),
      remotePc.setRemoteDescription(offer)]))
    .then(() => remotePc.createAnswer())
    .then(answer => Promise.all([
      remotePc.setLocalDescription(answer),
      localPc.setRemoteDescription(answer)]))
}
// Helper function to create a pair of connected data channels.
// On success the promise resolves to an array with two data channels.
// It does the heavy lifting of performing signaling handshake,
// ICE candidate exchange, and waiting for data channel at two
// end points to open.
function createDataChannelPair(
  pc1=new RTCPeerConnection(),
  pc2=new RTCPeerConnection())
{
  const channel1 = pc1.createDataChannel('');
  exchangeIceCandidates(pc1, pc2);
  return new Promise((resolve, reject) => {
    let channel2;
    let opened1 = false;
    let opened2 = false;
    function onBothOpened() {
      resolve([channel1, channel2]);
    }
    function onOpen1() {
      opened1 = true;
      if(opened2) onBothOpened();
    }
    function onOpen2() {
      opened2 = true;
      if(opened1) onBothOpened();
    }
    function onDataChannel(event) {
      channel2 = event.channel;
      channel2.addEventListener('error', reject);
      const { readyState } = channel2;
      if(readyState === 'open') {
        onOpen2();
      } else if(readyState === 'connecting') {
        channel2.addEventListener('open', onOpen2);
      } else {
        reject(new Error(`Unexpected ready state ${readyState}`));
      }
    }
    channel1.addEventListener('open', onOpen1);
    channel1.addEventListener('error', reject);
    pc2.addEventListener('datachannel', onDataChannel);
    doSignalingHandshake(pc1, pc2);
  });
}
// Wait for a single message event and return
// a promise that resolves when the event fires
function awaitMessage(channel) {
  return new Promise((resolve, reject) => {
    channel.addEventListener('message',
      event => resolve(event.data),
      { once: true });
    channel.addEventListener('error', reject, { once: true });
  });
}
// Helper to convert a blob to array buffer so that
// we can read the content
function blobToArrayBuffer(blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.addEventListener('load', () => {
      resolve(reader.result);
    });
    reader.addEventListener('error', reject);
    reader.readAsArrayBuffer(blob);
  });
}
// Assert that two ArrayBuffer objects have the same byte values
function assert_equals_array_buffer(buffer1, buffer2) {
  assert_true(buffer1 instanceof ArrayBuffer,
    'Expect buffer to be instance of ArrayBuffer');
  assert_true(buffer2 instanceof ArrayBuffer,
    'Expect buffer to be instance of ArrayBuffer');
  assert_equals(buffer1.byteLength, buffer2.byteLength,
    'Expect both array buffers to be of the same byte length');
  const byteLength = buffer1.byteLength;
  const byteArray1 = new Uint8Array(buffer1);
  const byteArray2 = new Uint8Array(buffer2);
  for(let i=0; i<byteLength; i++) {
    assert_equals(byteArray1[i], byteArray2[i],
      `Expect byte at buffer position ${i} to be equal`);
  }
}
// These media tracks will be continually updated with deterministic "noise" in
// order to ensure UAs do not cease transmission in response to apparent
// silence.
//
// > Many codecs and systems are capable of detecting "silence" and changing
// > their behavior in this case by doing things such as not transmitting any
// > media.
//
// Source: https://w3c.github.io/webrtc-pc/#offer-answer-options
const trackFactories = {
  // Share a single context between tests to avoid exceeding resource limits
  // without requiring explicit destruction.
  audioContext: null,
  /**
   * Given a set of requested media types, determine if the user agent is
   * capable of procedurally generating a suitable media stream.
   *
   * @param {object} requested
   * @param {boolean} [requested.audio] - flag indicating whether the desired
   * stream should include an audio track
   * @param {boolean} [requested.video] - flag indicating whether the desired
   * stream should include a video track
   *
   * @returns {boolean}
   */
  canCreate(requested) {
    const supported = {
      audio: !!window.MediaStreamAudioDestinationNode,
      video: !!HTMLCanvasElement.prototype.captureStream
    };
    return (!requested.audio || supported.audio) &&
      (!requested.video || supported.video);
  },
  audio() {
    const ctx = trackFactories.audioContext = trackFactories.audioContext ||
      new AudioContext();
    const oscillator = ctx.createOscillator();
    const dst = oscillator.connect(ctx.createMediaStreamDestination());
    oscillator.start();
    return dst.stream.getAudioTracks()[0];
  },
  video({width = 640, height = 480} = {}) {
    const canvas = Object.assign(
      document.createElement("canvas"), {width, height}
    );
    const ctx = canvas.getContext('2d');
    const stream = canvas.captureStream();
    let count = 0;
    setInterval(() => {
      ctx.fillStyle = `rgb(${count%255}, ${count*count%255}, ${count%255})`;
      count += 1;
      ctx.fillRect(0, 0, width, height);
    }, 100);
    if (document.body) {
      document.body.appendChild(canvas);
    } else {
      document.addEventListener('DOMContentLoaded', () => {
        document.body.appendChild(canvas);
      });
    }
    return stream.getVideoTracks()[0];
  }
};
// Generate a MediaStream bearing the specified tracks.
//
// @param {object} [caps]
// @param {boolean} [caps.audio] - flag indicating whether the generated stream
//   should include an audio track
// @param {boolean} [caps.video] - flag indicating whether the generated stream
//   should include a video track
async function getNoiseStream(caps = {}) {
  if (!trackFactories.canCreate(caps)) {
    return navigator.mediaDevices.getUserMedia(caps);
  }
  const tracks = [];
  if (caps.audio) {
    tracks.push(trackFactories.audio());
  }
  if (caps.video) {
    tracks.push(trackFactories.video());
  }
  return new MediaStream(tracks);
}
// Obtain a MediaStreamTrack of kind using procedurally-generated streams (and
// falling back to `getUserMedia` when the user agent cannot generate the
// requested streams).
// Return Promise of pair of track and associated mediaStream.
// Assumes that there is at least one available device
// to generate the track.
function getTrackFromUserMedia(kind) {
  return getNoiseStream({ [kind]: true })
    .then(mediaStream => {
      const [track] = mediaStream.getTracks();
      return [track, mediaStream];
    });
}
// Obtain |count| MediaStreamTracks of type |kind| and MediaStreams. The tracks
// do not belong to any stream and the streams are empty. Returns a Promise
// resolved with a pair of arrays [tracks, streams].
// Assumes there is at least one available device to generate the tracks and
// streams and that the getUserMedia() calls resolve.
function getUserMediaTracksAndStreams(count, type = 'audio') {
  let otherTracksPromise;
  if (count > 1)
    otherTracksPromise = getUserMediaTracksAndStreams(count - 1, type);
  else
    otherTracksPromise = Promise.resolve([[], []]);
  return otherTracksPromise.then(([tracks, streams]) => {
    return getTrackFromUserMedia(type)
      .then(([track, stream]) => {
        // Remove the default stream-track relationship.
        stream.removeTrack(track);
        tracks.push(track);
        streams.push(stream);
        return [tracks, streams];
      });
  });
}
// Performs an offer exchange caller -> callee.
async function exchangeOffer(caller, callee) {
  const offer = await caller.createOffer();
  await caller.setLocalDescription(offer);
  return callee.setRemoteDescription(offer);
}
// Performs an answer exchange caller -> callee.
async function exchangeAnswer(caller, callee) {
  const answer = await callee.createAnswer();
  await callee.setLocalDescription(answer);
  return caller.setRemoteDescription(answer);
}
async function exchangeOfferAnswer(caller, callee) {
  await exchangeOffer(caller, callee);
  return exchangeAnswer(caller, callee);
}
// The returned promise is resolved with caller's ontrack event.
async function exchangeAnswerAndListenToOntrack(t, caller, callee) {
  const ontrackPromise = addEventListenerPromise(t, caller, 'track');
  await exchangeAnswer(caller, callee);
  return ontrackPromise;
}
// The returned promise is resolved with callee's ontrack event.
async function exchangeOfferAndListenToOntrack(t, caller, callee) {
  const ontrackPromise = addEventListenerPromise(t, callee, 'track');
  await exchangeOffer(caller, callee);
  return ontrackPromise;
}
// The resolver has a |promise| that can be resolved or rejected using |resolve|
// or |reject|.
class Resolver {
  constructor() {
    let promiseResolve;
    let promiseReject;
    this.promise = new Promise(function(resolve, reject) {
      promiseResolve = resolve;
      promiseReject = reject;
    });
    this.resolve = promiseResolve;
    this.reject = promiseReject;
  }
}
function addEventListenerPromise(t, target, type, listener) {
  return new Promise((resolve, reject) => {
    target.addEventListener(type, t.step_func(e => {
      if (listener != undefined)
        e = listener(e);
      resolve(e);
    }));
  });
}
function createPeerConnectionWithCleanup(t) {
  const pc = new RTCPeerConnection();
  t.add_cleanup(() => pc.close());
  return pc;
}
async function createTrackAndStreamWithCleanup(t, kind = 'audio') {
  let constraints = {};
  constraints[kind] = true;
  const stream = await navigator.mediaDevices.getUserMedia(constraints);
  const [track] = stream.getTracks();
  t.add_cleanup(() => track.stop());
  return [track, stream];
}
function findTransceiverForSender(pc, sender) {
  const transceivers = pc.getTransceivers();
  for (let i = 0; i < transceivers.length; ++i) {
    if (transceivers[i].sender == sender)
      return transceivers[i];
  }
  return null;
...
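The ontrackPromise pattern above is consumed from testharness.js promise_test bodies: exchangeOfferAndListenToOntrack registers the 'track' listener before the offer is applied, so the promise cannot miss the event. The following is a minimal sketch of how a test page might wire these helpers together; it assumes testharness.js, testharnessreport.js, and this helper file are already loaded, and the test body itself is illustrative rather than copied from the suite.

promise_test(async t => {
  // Two peer connections that are closed automatically when the test ends.
  const caller = createPeerConnectionWithCleanup(t);
  const callee = createPeerConnectionWithCleanup(t);

  // Procedurally generated audio track (falls back to getUserMedia if needed).
  const [track, stream] = await getTrackFromUserMedia('audio');
  t.add_cleanup(() => track.stop());
  caller.addTrack(track, stream);

  exchangeIceCandidates(caller, callee);

  // Resolves with callee's 'track' event once the offer has been applied.
  const trackEvent = await exchangeOfferAndListenToOntrack(t, caller, callee);
  assert_equals(trackEvent.track.kind, 'audio',
    'Expect the remote track to be an audio track');

  // Complete the handshake so the connection can proceed to 'connected'.
  await exchangeAnswer(caller, callee);
}, 'callee receives a track event for the caller audio track');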


RTCRtpSynchronizationSource-helper.js

Source: RTCRtpSynchronizationSource-helper.js (GitHub)


'use strict';
// This file depends on `webrtc/RTCPeerConnection-helper.js`
// which should be loaded from the main HTML file.
var kAbsCaptureTime =
  'http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time';
function addHeaderExtensionToSdp(sdp, uri) {
  const extmap = new RegExp('a=extmap:(\\d+)');
  let sdpLines = sdp.split('\r\n');
  // This assumes at most one audio m= section and one video m= section.
  // If more are present, only the first section of each kind is munged.
  for (const section of ['audio', 'video']) {
    let found_section = false;
    let maxId = undefined;
    let maxIdLine = undefined;
    let extmapAllowMixed = false;
    // find the largest header extension id for section.
    for (let i = 0; i < sdpLines.length; ++i) {
      if (!found_section) {
        if (sdpLines[i].startsWith('m=' + section)) {
          found_section = true;
        }
        continue;
      } else {
        if (sdpLines[i].startsWith('m=')) {
          // end of section
          break;
        }
      }
      if (sdpLines[i] === 'a=extmap-allow-mixed') {
        extmapAllowMixed = true;
      }
      let result = sdpLines[i].match(extmap);
      if (result && result.length === 2) {
        if (maxId == undefined || result[1] > maxId) {
          maxId = parseInt(result[1]);
          maxIdLine = i;
        }
      }
    }
    if (maxId == 14 && !extmapAllowMixed) {
      // Reaching the limit of one byte header extension. Adding two byte
      // header extension support.
      sdpLines.splice(maxIdLine + 1, 0, 'a=extmap-allow-mixed');
    }
    if (maxIdLine !== undefined) {
      sdpLines.splice(maxIdLine + 1, 0,
        'a=extmap:' + (maxId + 1).toString() + ' ' + uri);
    }
  }
  return sdpLines.join('\r\n');
}
// TODO(crbug.com/1051821): Use RTP header extension API instead of munging
// when the RTP header extension API is implemented.
async function addAbsCaptureTimeAndExchangeOffer(caller, callee) {
  let offer = await caller.createOffer();
  // Absolute capture time header extension may not be offered by default,
  // in such case, munge the SDP.
  offer.sdp = addHeaderExtensionToSdp(offer.sdp, kAbsCaptureTime);
  await caller.setLocalDescription(offer);
  return callee.setRemoteDescription(offer);
}
// TODO(crbug.com/1051821): Use RTP header extension API instead of munging
// when the RTP header extension API is implemented.
async function checkAbsCaptureTimeAndExchangeAnswer(caller, callee,
                                                    absCaptureTimeAnswered) {
  let answer = await callee.createAnswer();
  const extmap = new RegExp('a=extmap:\\d+ ' + kAbsCaptureTime + '\r\n', 'g');
  if (answer.sdp.match(extmap) == null) {
    // We expect that absolute capture time RTP header extension is answered.
    // But if not, there is no need to proceed with the test.
    assert_false(absCaptureTimeAnswered, 'Absolute capture time RTP ' +
      'header extension is not answered');
  } else {
    if (!absCaptureTimeAnswered) {
      // We expect that absolute capture time RTP header extension is not
      // answered, but it is, then we munge the answer to remove it.
      answer.sdp = answer.sdp.replace(extmap, '');
    }
  }
  await callee.setLocalDescription(answer);
  return caller.setRemoteDescription(answer);
}
async function exchangeOfferAndListenToOntrack(t, caller, callee,
                                               absCaptureTimeOffered) {
  const ontrackPromise = addEventListenerPromise(t, callee, 'track');
  // Absolute capture time header extension is expected not offered by default,
  // and thus munging is needed to enable it.
  // Note: parentheses are needed so the selected promise (not just the
  // boolean) is awaited.
  await (absCaptureTimeOffered
      ? addAbsCaptureTimeAndExchangeOffer(caller, callee)
      : exchangeOffer(caller, callee));
  return ontrackPromise;
}
async function initiateSingleTrackCall(t, cap, absCaptureTimeOffered,
                                       absCaptureTimeAnswered) {
  const caller = new RTCPeerConnection();
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());
  const stream = await getNoiseStream(cap);
  stream.getTracks().forEach(track => {
    caller.addTrack(track, stream);
    t.add_cleanup(() => track.stop());
  });
  // TODO(crbug.com/988432): `getSynchronizationSources()` on the audio side
  // needs a hardware sink for the returned dictionary entries to get updated.
  const remoteVideo = document.getElementById('remote');
  callee.ontrack = e => {
    remoteVideo.srcObject = e.streams[0];
  };
  exchangeIceCandidates(caller, callee);
  await exchangeOfferAndListenToOntrack(t, caller, callee,
    absCaptureTimeOffered);
  // Exchange answer and check whether the absolute capture time RTP header
  // extension is answered.
  await checkAbsCaptureTimeAndExchangeAnswer(caller, callee,
    absCaptureTimeAnswered);
  return [caller, callee];
...
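These helpers are driven from a test page that contains a video element with id "remote" (see the document.getElementById('remote') call above). Below is a rough sketch, under that assumption, of how a synchronization-source test might call initiateSingleTrackCall; the polling loop and assertion are illustrative rather than taken from the suite.

promise_test(async t => {
  // Negotiate a single video track; offer and answer abs-capture-time.
  const [caller, callee] = await initiateSingleTrackCall(
      t, {video: true}, /* absCaptureTimeOffered */ true,
      /* absCaptureTimeAnswered */ true);

  const [receiver] = callee.getReceivers();
  // Poll until the receiver reports at least one synchronization source.
  let sources = receiver.getSynchronizationSources();
  while (sources.length === 0) {
    await new Promise(resolve => t.step_timeout(resolve, 100));
    sources = receiver.getSynchronizationSources();
  }
  assert_true(Number.isFinite(sources[0].timestamp),
    'Expect the synchronization source to carry a timestamp');
}, 'getSynchronizationSources() reports the received video track');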


Using AI Code Generation


const wpt = require('webpagetest');
// WebPageTest server first, API key second.
const webpagetest = new wpt('www.webpagetest.org', 'API_KEY');
// Placeholder URL for the page under test (left undefined in the original snippet).
const url = 'https://www.example.com';
const options = {
  videoParams: {
  },
};
webpagetest.runTest(url, options, (err, data) => {
  if (err) {
    console.log('Error: ', err);
  } else {
    console.log('Test ID: ', data.data.testId);
    console.log('Test status: ', data.data.statusText);
    console.log('Test results: ', data.data.summary);
  }
});
webpagetest.on('error', (err) => {
  console.log('Error: ', err);
});
webpagetest.on('done', (data) => {
  console.log('Test ID: ', data.data.testId);
  console.log('Test status: ', data.data.statusText);
  console.log('Test results: ', data.data.summary);
});
webpagetest.on('log', (log) => {
  console.log('Log: ', log);
});
webpagetest.on('result', (result) => {
  console.log('Test ID: ', result.data.testId);
  console.log('Test status: ', result.data.statusText);
  console.log('Test results: ', result.data.summary);
});
webpagetest.on('video', (video) => {
  console.log('Video: ', video);
});
webpagetest.on('test', (test) => {
  console.log('Test ID: ', test.data.testId);
  console.log('Test status: ', test.data.statusText);
  console.log('Test results: ', test.data.summary);
});
webpagetest.on('testStart', (testStart) => {
  console.log('Test ID: ', testStart.data.testId);
});
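Once runTest returns a test ID, status and results are normally fetched with the library's getTestStatus and getTestResults calls. The sketch below is a hedged example of that follow-up step: it reuses the webpagetest instance from above, the test ID is a placeholder, and the exact shape of the results object can vary between WebPageTest versions.

// Hypothetical test ID as returned by the runTest callback above.
const testId = '123456_AB_CDE';

webpagetest.getTestStatus(testId, (err, status) => {
  if (err) return console.log('Status error: ', err);
  console.log('Status: ', status.statusText);
});

webpagetest.getTestResults(testId, (err, results) => {
  if (err) return console.log('Results error: ', err);
  // Field layout assumed from typical WebPageTest JSON responses.
  console.log('Median first view load time: ',
    results.data.median.firstView.loadTime);
});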


Using AI Code Generation


// Promise-based usage.
var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
// Placeholder URL for the page under test.
var url = 'https://www.example.com';
wpt.ontrackPromise(url)
.then(function(data){
  console.log(data);
})
.catch(function(err){
  console.log(err);
});

// Callback-based usage.
var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
wpt.ontrack(url, function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});

4.3.4. getLocationsPromise()

var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
wpt.getLocationsPromise()
.then(function(data){
  console.log(data);
})
.catch(function(err){
  console.log(err);
});

var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
wpt.getLocations(function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});

4.3.5. getTestersPromise()

var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
wpt.getTestersPromise()
.then(function(data){
  console.log(data);
})
.catch(function(err){
  console.log(err);
});

var WebPageTest = require('wpt');
var wpt = new WebPageTest('www.webpagetest.org');
wpt.getTesters(function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});
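If the *Promise variants shown above are not exposed by the version of the library you have installed, the same shape can be recovered by promisifying the documented callback methods with Node's util.promisify. The sketch below works under that assumption; the 'webpagetest' package name and the placeholder API key are assumptions here, not part of the snippet above.

const util = require('util');
const WebPageTest = require('webpagetest');

const wpt = new WebPageTest('www.webpagetest.org', 'YOUR_API_KEY');

// Wrap the callback-style methods so they can be awaited.
const getLocations = util.promisify(wpt.getLocations.bind(wpt));
const getTesters = util.promisify(wpt.getTesters.bind(wpt));

(async () => {
  try {
    const locations = await getLocations();
    console.log(locations);
    const testers = await getTesters();
    console.log(testers);
  } catch (err) {
    console.log(err);
  }
})();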


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run wpt automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now and get 100 minutes of automation testing for free.
