How to use frames method in Playwright Python

Best Python code snippet using playwright-python

Run Playwright Python automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

views.py

Source: views.py Github

copy
1from django.shortcuts import render
2from django.http import HttpResponse
3from .models import KanoBasicAttack
4from .models import KanoStringAttack
5from .models import KanoSpecialAttack
6from .models import KanoRipperAttack
7from .models import KanoDirtbagAttack
8from .models import NightwolfBasicAttack
9from .models import NightwolfStringAttack
10from .models import NightwolfSpecialAttack
11from .models import NightwolfMatokaAttack
12from .models import NightwolfAncestralAttack
13from .models import JadeBasicAttack
14from .models import JadeStringAttack
15from .models import JadeSpecialAttack
16from .models import JadeEmeraldAttack
17from .models import JadeJadedAttack
18from .models import SubZeroBasicAttack
19from .models import SubZeroStringAttack
20from .models import SubZeroSpecialAttack
21from .models import SubZeroDeadofWinterAttack
22from .models import SubZeroThinIceAttack
23
24# Create your views here.
25
def home(request):
    """Render the landing page with a default display name."""
    context = {'name': 'Max'}
    return render(request, 'home.html', context)
28
def about(request):
    """Render the static about page."""
    return render(request, 'about.html')
32
def character(request):
    """Render the character-selection page."""
    return render(request, 'character.html')
36
def frames(request):
    """Render the frame-data overview page."""
    return render(request, 'frames.html')
40
def KanoHome(request):
    """Render Kano's page with the frame data for every attack category."""
    context = {
        'KanoBasicFrames': KanoBasicAttack.objects.all(),
        'KanoStringFrames': KanoStringAttack.objects.all(),
        'KanoSpecialFrames': KanoSpecialAttack.objects.all(),
        'KanoRipperFrames': KanoRipperAttack.objects.all(),
        'KanoDirtbagFrames': KanoDirtbagAttack.objects.all(),
    }
    return render(request, 'KanoHome.html', context)
51
def NightwolfHome(request):
    """Render Nightwolf's page with the frame data for every attack category."""
    context = {
        'NightwolfBasicFrames': NightwolfBasicAttack.objects.all(),
        'NightwolfStringFrames': NightwolfStringAttack.objects.all(),
        'NightwolfSpecialFrames': NightwolfSpecialAttack.objects.all(),
        'NightwolfMatokaFrames': NightwolfMatokaAttack.objects.all(),
        'NightwolfAncestralFrames': NightwolfAncestralAttack.objects.all(),
    }
    return render(request, 'NightwolfHome.html', context)
61
def JadeHome(request):
    """Render Jade's page with the frame data for every attack category."""
    context = {
        'JadeBasicFrames': JadeBasicAttack.objects.all(),
        'JadeStringFrames': JadeStringAttack.objects.all(),
        'JadeSpecialFrames': JadeSpecialAttack.objects.all(),
        'JadeEmeraldFrames': JadeEmeraldAttack.objects.all(),
        'JadeJadedFrames': JadeJadedAttack.objects.all(),
    }
    return render(request, 'JadeHome.html', context)
71
def SubZeroHome(request):
    """Render Sub-Zero's page with the frame data for every attack category."""
    context = {
        'SubZeroBasicFrames': SubZeroBasicAttack.objects.all(),
        'SubZeroStringFrames': SubZeroStringAttack.objects.all(),
        'SubZeroSpecialFrames': SubZeroSpecialAttack.objects.all(),
        'SubZeroDeadofWinterFrames': SubZeroDeadofWinterAttack.objects.all(),
        'SubZeroThinIceFrames': SubZeroThinIceAttack.objects.all(),
    }
    return render(request, 'SubZeroHome.html', context)
81
Full Screen

vpxtest.py

Source: vpxtest.py Github

copy
1import time
2
3import vpx
4import numpy as np
5from SimpleCV import Camera
6
7from SimpleSeer.util import Clock
8from SimpleSeer import models as M
9
10RATE=24.0
11NUM_FRAMES=24
12
13def main():
14    camera = Camera(0, dict(width=640, height=480))
15    clock = Clock(RATE)
16
17    # capture frames as scv images
18    frames = list(capture_frames(camera, clock, NUM_FRAMES))
19
20    print '=== REALTIME ==='
21    decoded_frames = run_test(frames, vpx.VPX_DL_REALTIME)
22    playback(clock, decoded_frames)
23    print '=== GOOD QUALITY ==='
24    decoded_frames = run_test(frames, vpx.VPX_DL_GOOD_QUALITY)
25    playback(clock, decoded_frames)
26    print '=== BEST QUALITY ==='
27    decoded_frames = run_test(frames, vpx.VPX_DL_BEST_QUALITY)
28    playback(clock, decoded_frames)
29
def capture_frames(camera, clock, num_frames):
    """Yield num_frames images from the camera, paced by the clock."""
    captured = 0
    while captured < num_frames:
        clock.tick()
        yield camera.getImage()
        captured += 1
34
def playback(clock, frames):
    """Display each frame on screen, paced by the clock."""
    for frame in frames:
        clock.tick()
        frame.show()
39
def run_test(frames, deadline):
    # Encode-then-decode round trip for one vpx.VPX_DL_* deadline setting:
    # reports throughput for each phase and the mean squared error of the
    # decoded frames against the originals, then returns the decoded frames.

    # Encode frames
    start = time.time()
    w,h = frames[0].width, frames[0].height
    clip = M.Clip.encode(w,h,frames, deadline=deadline)
    elapsed = time.time() - start
    print '%d frames encoded in %fs, %.2f fps (avg) (%d kB)' % (
        NUM_FRAMES, elapsed, NUM_FRAMES / elapsed,
        sum(len(p) for p in clip.packets) / 2**10)

    # Decode frames
    # NOTE(review): timing assumes the decode work happens inside this
    # attribute access -- confirm in SimpleSeer's M.Clip implementation.
    start = time.time()
    decoded_frames = clip.images
    elapsed = time.time() - start
    print '%d frames decoded in %fs, %.2f fps (avg)' % (
        NUM_FRAMES, elapsed, NUM_FRAMES / elapsed)

    print 'MSE: %.2f' % mse_clip(frames, decoded_frames)
    return decoded_frames
59
60
def mse_frame(org, new):
    """Return the mean squared error between two frames' raw pixel bytes.

    org/new are image objects whose toString() yields the raw byte buffer.
    The bytes are widened to int64 before subtracting: uint8 arithmetic in
    numpy wraps modulo 256, so the original (orga-newa)**2 produced wrong
    (wrapped) errors whenever a decoded byte exceeded the original.
    np.frombuffer replaces the deprecated np.fromstring.
    """
    orga = np.frombuffer(org.toString(), dtype=np.uint8).astype(np.int64)
    newa = np.frombuffer(new.toString(), dtype=np.uint8).astype(np.int64)
    se = np.sum((orga - newa) ** 2)
    return float(se) / len(orga)
66
def mse_clip(org_frames, new_frames):
    """Return the per-frame MSE averaged over every original/decoded pair."""
    total = 0.0
    for original, decoded in zip(org_frames, new_frames):
        total += mse_frame(original, decoded)
    return total / len(new_frames)
69
# Allow the benchmark to be run directly as a script.
if __name__ == '__main__':
    main()
72
Full Screen

TestVSCode_stackTrace.py

Source: TestVSCode_stackTrace.py Github

copy
1"""
2Test lldb-vscode setBreakpoints request
3"""
4
5from __future__ import print_function
6
7import unittest2
8import vscode
9from lldbsuite.test.decorators import *
10from lldbsuite.test.lldbtest import *
11from lldbsuite.test import lldbutil
12import lldbvscode_testcase
13import os
14
15
class TestVSCode_stackTrace(lldbvscode_testcase.VSCodeTestCaseBase):
    """Test the lldb-vscode 'stackTrace' request against a recursive program,
    covering frame contents plus startFrame/levels pagination and bounds."""

    mydir = TestBase.compute_mydir(__file__)
    # Key paths used with get_dict_value to pull fields out of a stackFrame
    # dict returned by the 'stackTrace' request.
    name_key_path = ['name']
    source_key_path = ['source', 'path']
    line_key_path = ['line']

    def verify_stackFrames(self, start_idx, stackFrames):
        """Verify each frame in stackFrames, numbering them from start_idx."""
        frame_idx = start_idx
        for stackFrame in stackFrames:
            # Don't care about frames above main
            if frame_idx > 20:
                return
            self.verify_stackFrame(frame_idx, stackFrame)
            frame_idx += 1

    def verify_stackFrame(self, frame_idx, stackFrame):
        """Check a single frame's function name, source path and line.

        Expected layout: frame 0 is the deepest 'recurse' call (stopped at
        the 'recurse end' line), frames 1-19 are intermediate 'recurse'
        calls, and frame 20 is 'main' at the recursion's invocation line.
        """
        frame_name = self.get_dict_value(stackFrame, self.name_key_path)
        frame_source = self.get_dict_value(stackFrame, self.source_key_path)
        frame_line = self.get_dict_value(stackFrame, self.line_key_path)
        if frame_idx == 0:
            expected_line = self.recurse_end
            expected_name = 'recurse'
        elif frame_idx < 20:
            expected_line = self.recurse_call
            expected_name = 'recurse'
        else:
            expected_line = self.recurse_invocation
            expected_name = 'main'
        self.assertTrue(frame_name == expected_name,
                        'frame #%i name "%s" == "%s"' % (
                            frame_idx, frame_name, expected_name))
        self.assertTrue(frame_source == self.source_path,
                        'frame #%i source "%s" == "%s"' % (
                            frame_idx, frame_source, self.source_path))
        self.assertTrue(frame_line == expected_line,
                        'frame #%i line %i == %i' % (frame_idx, frame_line,
                                                     expected_line))

    @skipIfWindows
    @skipIfDarwin # Skip this test for now until we can figure out why things aren't working on build bots
    @no_debug_info_test
    def test_stackTrace(self):
        '''
            Tests the 'stackTrace' packet and all its variants.
        '''
        program = self.getBuildArtifact("a.out")
        self.build_and_launch(program)
        source = 'main.c'
        self.source_path = os.path.join(os.getcwd(), source)
        # Comment markers in main.c locate the interesting source lines.
        self.recurse_end = line_number(source, 'recurse end')
        self.recurse_call = line_number(source, 'recurse call')
        self.recurse_invocation = line_number(source, 'recurse invocation')

        lines = [self.recurse_end]

        # Set breakpoint at the point of deepest recursion
        breakpoint_ids = self.set_source_breakpoints(source, lines)
        self.assertTrue(len(breakpoint_ids) == len(lines),
                        "expect correct number of breakpoints")

        self.continue_to_breakpoints(breakpoint_ids)
        startFrame = 0
        # Verify we get all stack frames with no arguments
        stackFrames = self.get_stackFrames()
        frameCount = len(stackFrames)
        self.assertTrue(frameCount >= 20,
                        'verify we get at least 20 frames for all frames')
        self.verify_stackFrames(startFrame, stackFrames)

        # Verify all stack frames by specifying startFrame = 0 and levels not
        # specified
        stackFrames = self.get_stackFrames(startFrame=startFrame)
        self.assertTrue(frameCount == len(stackFrames),
                        ('verify same number of frames with startFrame=%i') % (
                            startFrame))
        self.verify_stackFrames(startFrame, stackFrames)

        # Verify all stack frames by specifying startFrame = 0 and levels = 0
        levels = 0
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(frameCount == len(stackFrames),
                        ('verify same number of frames with startFrame=%i and'
                         ' levels=%i') % (startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Get only the first stack frame by specifying startFrame = 0 and
        # levels = 1
        levels = 1
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(levels == len(stackFrames),
                        ('verify one frame with startFrame=%i and'
                         ' levels=%i') % (startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Get only the first 3 stack frames by specifying startFrame = 0 and
        # levels = 3
        levels = 3
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(levels == len(stackFrames),
                        ('verify %i frames with startFrame=%i and'
                         ' levels=%i') % (levels, startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Get 16 stack frames starting at frame 5 by specifying
        # startFrame = 5 and levels = 16
        startFrame = 5
        levels = 16
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(levels == len(stackFrames),
                        ('verify %i frames with startFrame=%i and'
                         ' levels=%i') % (levels, startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Verify we cap things correctly when we ask for too many frames
        startFrame = 5
        levels = 1000
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(len(stackFrames) == frameCount - startFrame,
                        ('verify less than 1000 frames with startFrame=%i and'
                         ' levels=%i') % (startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Verify levels = 0 works with a non-zero start frame
        startFrame = 5
        levels = 0
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(len(stackFrames) == frameCount - startFrame,
                        ('verify less than 1000 frames with startFrame=%i and'
                         ' levels=%i') % (startFrame, levels))
        self.verify_stackFrames(startFrame, stackFrames)

        # Verify we get no frames when startFrame is out of bounds
        startFrame = 1000
        levels = 1
        stackFrames = self.get_stackFrames(startFrame=startFrame,
                                           levels=levels)
        self.assertTrue(0 == len(stackFrames),
                        'verify zero frames with startFrame out of bounds')
161
Full Screen

extension_video_sensing.js

Source: extension_video_sensing.js Github

copy
1const {createReadStream} = require('fs');
2const {join} = require('path');
3
4const {PNG} = require('pngjs');
5const {test} = require('tap');
6
7const {wrapClamp} = require('../../src/util/math-util');
8
9const VideoSensing = require('../../src/extensions/scratch3_video_sensing/index.js');
10const VideoMotion = require('../../src/extensions/scratch3_video_sensing/library.js');
11
/**
 * Prefix to the mock frame images used to test the video sensing extension.
 * @type {string}
 */
const pngPrefix = 'extension_video_sensing_';

/**
 * Map of frame keys to the image filenames appended to the pngPrefix.
 * NOTE(review): the suffixes presumably encode the direction/distance the
 * subject moved from "center" (e.g. left-5) -- confirm against the assets.
 * @type {object}
 */
const framesMap = {
    center: 'center',
    left: 'left-5',
    left2: 'left-10',
    down: 'down-10'
};
28
/**
 * Asynchronously read a png file and copy its pixel data into a typed array
 * VideoMotion will accept.
 * @param {string} name - partial filename to read
 * @returns {Promise.<Uint32Array>} pixel data of the image
 */
const readPNG = name => new Promise((resolve, reject) => {
    const png = new PNG();
    png.on('parsed', () => {
        // Copy the RGBA bytes into a fresh buffer, then reinterpret it as
        // one 32-bit word per pixel, the array format VideoMotion takes.
        const rgbaBytes = new Uint8ClampedArray(png.data);
        resolve(new Uint32Array(rgbaBytes.buffer));
    });
    png.on('error', reject);
    createReadStream(join(__dirname, `${pngPrefix}${name}.png`)).pipe(png);
});
48
/**
 * Read all the frames for testing asynchronously and produce an object with
 * keys following the keys in framesMap.
 * @returns {object} mapping of keys in framesMap to image data read from disk
 */
const readFrames = (() => {
    // Cache the promise so the images are read from disk only once, the
    // first time a test calls readFrames.
    let cached = null;

    return () => {
        if (cached === null) {
            const keys = Object.keys(framesMap);
            cached = Promise.all(keys.map(key => readPNG(framesMap[key])))
                .then(images => {
                    const frames = {};
                    keys.forEach((key, i) => {
                        frames[key] = images[i];
                    });
                    return frames;
                });
        }
        return cached;
    };
})();
72
/**
 * Match if actual is within optMargin to expect. If actual is under -180,
 * match if actual + 360 is near expect. If actual is over 180, match if actual
 * - 360 is near expect.
 * @param {number} actual - actual angle in degrees
 * @param {number} expect - expected angle in degrees
 * @param {number} optMargin - allowed margin between actual and expect in degrees
 * @returns {boolean} true if actual is close to expect
 */
const isNearAngle = (actual, expect, optMargin = 10) => {
    // Wrap the difference into [0, 360) and accept either side of zero.
    const wrapped = wrapClamp(actual - expect, 0, 359);
    return wrapped < optMargin || wrapped > 360 - optMargin;
};
86
// A fake scratch-render drawable that will be used by VideoMotion to restrain
// the area considered for motion detection in VideoMotion.getLocalMotion
const fakeDrawable = {
    getFastBounds () {
        return {
            left: -120,
            top: 60,
            right: 0,
            bottom: -60
        };
    },

    // Always report touching -- presumably so getLocalMotion considers every
    // pixel inside the bounds; confirm against VideoMotion's implementation.
    isTouching () {
        return true;
    }
};

// A fake MotionState used to test the stored values in
// VideoMotion.getLocalMotion, VideoSensing.videoOn and
// VideoSensing.whenMotionGreaterThan. The initial values are out-of-range
// sentinels, presumably meaning "not yet updated".
const fakeMotionState = {
    motionFrameNumber: -1,
    motionAmount: -1,
    motionDirection: -Infinity
};

// A fake target referring to the fake drawable and MotionState.
const fakeTarget = {
    drawableID: 0,

    getCustomState () {
        return fakeMotionState;
    },
    setCustomState () {}
};

// A fake runtime exposing just the pieces VideoSensing reads.
const fakeRuntime = {
    targets: [fakeTarget],

    // Without defined devices, VideoSensing will not try to start sampling from
    // a video source.
    ioDevices: null,

    renderer: {
        _allDrawables: [
            fakeDrawable
        ]
    }
};

// A minimal block utility: just enough for the blocks to find their target.
const fakeBlockUtility = {
    target: fakeTarget
};
140
test('detect motionAmount between frames', t => {
    t.plan(6);

    return readFrames()
        .then(frames => {
            const detect = new VideoMotion();

            // Each of these pairs should have enough motion for the detector.
            const framePairs = [
                [frames.center, frames.left],
                [frames.center, frames.left2],
                [frames.left, frames.left2],
                [frames.left, frames.center],
                [frames.center, frames.down],
                [frames.down, frames.center]
            ];

            // Feed both frames of each pair to the detector and assert the
            // measured motion clears the threshold.
            framePairs.forEach(([frame1, frame2], index) => {
                detect.addFrame(frame1);
                detect.addFrame(frame2);

                detect.analyzeFrame();
                t.ok(
                    detect.motionAmount > 10,
                    `frame pair ${index + 1} has motion ${detect.motionAmount} over threshold (10)`
                );
            });

            t.end();
        });
});
175
test('detect local motionAmount between frames', t => {
    t.plan(6);

    return readFrames()
        .then(frames => {
            const detect = new VideoMotion();

            // Each of these pairs should have enough motion for the detector.
            const framePairs = [
                [frames.center, frames.left],
                [frames.center, frames.left2],
                [frames.left, frames.left2],
                [frames.left, frames.center],
                [frames.center, frames.down],
                [frames.down, frames.center]
            ];

            // Feed both frames of each pair, then check the motion stored on
            // the fake MotionState restricted to the fake drawable's bounds.
            framePairs.forEach(([frame1, frame2], index) => {
                detect.addFrame(frame1);
                detect.addFrame(frame2);

                detect.analyzeFrame();
                detect.getLocalMotion(fakeDrawable, fakeMotionState);
                t.ok(
                    fakeMotionState.motionAmount > 10,
                    `frame pair ${index + 1} has motion ${fakeMotionState.motionAmount} over threshold (10)`
                );
            });

            t.end();
        });
});
211
test('detect motionDirection between frames', t => {
    t.plan(6);

    return readFrames()
        .then(frames => {
            const detect = new VideoMotion();

            // Each of these pairs is moving in the given direction. Does the
            // detector guess a value near it?
            const directionMargin = 10;
            const framePairs = [
                {frames: [frames.center, frames.left], direction: -90},
                {frames: [frames.center, frames.left2], direction: -90},
                {frames: [frames.left, frames.left2], direction: -90},
                {frames: [frames.left, frames.center], direction: 90},
                {frames: [frames.center, frames.down], direction: 180},
                {frames: [frames.down, frames.center], direction: 0}
            ];

            // Feed both frames of each pair and check that motionDirection is
            // near the expected angle.
            framePairs.forEach(({frames: [frame1, frame2], direction}, index) => {
                detect.addFrame(frame1);
                detect.addFrame(frame2);

                detect.analyzeFrame();
                t.ok(
                    isNearAngle(detect.motionDirection, direction, directionMargin),
                    `frame pair ${index + 1} is ${detect.motionDirection.toFixed(0)} ` +
                    `degrees and close to ${direction} degrees`
                );
            });

            t.end();
        });
});
268
test('detect local motionDirection between frames', t => {
    t.plan(6);

    return readFrames()
        .then(frames => {
            const detect = new VideoMotion();

            // Each of these pairs is moving in the given direction. Does the
            // detector guess a value near it?
            const directionMargin = 10;
            const framePairs = [
                {frames: [frames.center, frames.left], direction: -90},
                {frames: [frames.center, frames.left2], direction: -90},
                {frames: [frames.left, frames.left2], direction: -90},
                {frames: [frames.left, frames.center], direction: 90},
                {frames: [frames.center, frames.down], direction: 180},
                {frames: [frames.down, frames.center], direction: 0}
            ];

            // Feed both frames of each pair, then check the local direction
            // stored on the fake MotionState against the expected angle.
            framePairs.forEach(({frames: [frame1, frame2], direction}, index) => {
                detect.addFrame(frame1);
                detect.addFrame(frame2);

                detect.analyzeFrame();
                detect.getLocalMotion(fakeDrawable, fakeMotionState);
                const motionDirection = fakeMotionState.motionDirection;
                t.ok(
                    isNearAngle(motionDirection, direction, directionMargin),
                    `frame pair ${index + 1} is ${motionDirection.toFixed(0)} degrees and close to ${direction} degrees`
                );
            });

            t.end();
        });
});
326
test('videoOn returns value dependent on arguments', t => {
    t.plan(4);

    return readFrames()
        .then(frames => {
            const sensing = new VideoSensing(fakeRuntime);

            // Prime the detector with two frames whose subject moved left,
            // then check videoOn for each ATTRIBUTE/SUBJECT combination.
            sensing.detect.addFrame(frames.center);
            sensing.detect.addFrame(frames.left);

            const {MOTION, DIRECTION} = VideoSensing.SensingAttribute;
            const {STAGE, SPRITE} = VideoSensing.SensingSubject;

            const motionAmount = sensing.videoOn(
                {ATTRIBUTE: MOTION, SUBJECT: STAGE}, fakeBlockUtility);
            t.ok(
                motionAmount > 10,
                `stage motionAmount ${motionAmount} is over the threshold (10)`
            );

            const localMotionAmount = sensing.videoOn(
                {ATTRIBUTE: MOTION, SUBJECT: SPRITE}, fakeBlockUtility);
            t.ok(
                localMotionAmount > 10,
                `sprite motionAmount ${localMotionAmount} is over the threshold (10)`
            );

            const motionDirection = sensing.videoOn(
                {ATTRIBUTE: DIRECTION, SUBJECT: STAGE}, fakeBlockUtility);
            t.ok(
                isNearAngle(motionDirection, -90),
                `stage motionDirection ${motionDirection.toFixed(0)} degrees is close to ${90} degrees`
            );

            const localMotionDirection = sensing.videoOn(
                {ATTRIBUTE: DIRECTION, SUBJECT: SPRITE}, fakeBlockUtility);
            t.ok(
                isNearAngle(localMotionDirection, -90),
                `sprite motionDirection ${localMotionDirection.toFixed(0)} degrees is close to ${90} degrees`
            );

            t.end();
        });
});
378
test('whenMotionGreaterThan returns true if local motion meets target', t => {
    t.plan(2);

    return readFrames()
        .then(frames => {
            const sensing = new VideoSensing(fakeRuntime);

            // Prime the detector with two frames whose subject moved left,
            // then probe the hat block with two reference thresholds.
            sensing.detect.addFrame(frames.center);
            sensing.detect.addFrame(frames.left);

            const hatTriggersAt = reference => sensing.whenMotionGreaterThan(
                {REFERENCE: reference}, fakeBlockUtility);

            t.ok(
                hatTriggersAt(20),
                `enough motion in drawable bounds to reach reference of 20`
            );

            t.notOk(
                hatTriggersAt(80),
                `not enough motion in drawable bounds to reach reference of 80`
            );

            t.end();
        });
});
410
Full Screen

timecode-converter.js

Source: timecode-converter.js Github

copy
1/**
2 * ============================
3 * Library: Timecode Converter
4 * ============================
5 * 
6 * Format of the 'timecode' object:
7 * {
8 *     h: Integer, // Number of hours
9 *     m: Integer, // Number of minutes
10 *     s: Integer, // Number of seconds
11 *     f: Integer, // Number of frames 
12 *     dropFrame: Boolean // Determines if the timecode is expressed in 'drop-frame' or not
13 * }
14 *
15 */
16var tcc = (function() {
17
18	return {
19
20		// ---------------------------
21		// Utility Functions
22		// ----------------------------
23
24		// Provides the list of available framerates
25		getFramerates: function() {
26			return [
27				{value: 23.976, dropFrame: true,  label:'23.976 fps'},
28				{value: 24,     dropFrame: false, label:'24 fps'},
29				{value: 29.97,  dropFrame: true,  label:'29.97 fps'},
30				{value: 30,     dropFrame: false, label:'30 fps', },
31				{value: 59.94,  dropFrame: true,  label:'59.94 fps'},
32				{value: 60,     dropFrame: false, label:'60 fps'}
33			];
34		},
35		
36		// Formats the timecode for display
37		formatTimecode: function (timecode) {
38			// Formatting the timecode
39			var formattedTimecode = "";
40			if (timecode) { formattedTimecode = ("00"+timecode.h).slice(-2) +":"+ ("00"+timecode.m).slice(-2) +":"+ ("00"+timecode.s).slice(-2) +":"+ ("00"+timecode.f).slice(-2) } 
41			else { formattedTimecode = "00:00:00:00"; };
42			// Replacing the ";" by ";" for drop-frame framerates
43			if (timecode && timecode.dropFrame) { formattedTimecode = formattedTimecode.substr(0, 8) + ';' + formattedTimecode.substr(9, 10); };
44			// Retunring the formatted timecode
45			return formattedTimecode;
46		},
47
48		// Extracts the timecode from a string
49		// If there are errors in the format of the string, the errors are reported in the 'errors' field
50		extractTimecode: function (timecodeString) {
51			var timecode = {h:0, m:0, s:0, f:0, errors:[]};
52			// Checking if the timecode string has 11 characters, like this one "01:25:44:00"
53			if (timecodeString.length != 11) { timecode.errors.push("The timecode string passed in parameters doesn't have the correct length."); };
54			// Checking if the ":" characters are located properly
55			if (":" != timecodeString.substring(2, 3)) { timecode.errors.push("There should be a ':' separating the hours and minutes."); };
56			if (":" != timecodeString.substring(5, 6)) { timecode.errors.push("There should be a ':' separating the minutes and seconds."); };
57			if (":" == timecodeString.substring(8, 9) || ";" == timecodeString.substring(8, 9)) { 
58				if (":" == timecodeString.substring(8, 9)) { timecode.dropFrame = false; };
59				if (";" == timecodeString.substring(8, 9)) { timecode.dropFrame = true; };
60			} else {
61				timecode.errors.push("There should be a ':' or a ';' separating the seconds and the number of frames."); 
62			};
63			// Checking if the characters are numbers
64			var h = timecodeString.substring(0, 2);
65			var m = timecodeString.substring(3, 5);
66			var s = timecodeString.substring(6, 8);
67			var f = timecodeString.substring(9, 11);
68			if (isNaN(h)) { timecode.errors.push("The 'hours' is not a number."); }   else { timecode.h = h; };
69			if (isNaN(m)) { timecode.errors.push("The 'minutes' is not a number."); } else { timecode.m = m; };
70			if (isNaN(s)) { timecode.errors.push("The 'seconds' is not a number."); } else { timecode.s = s; };
71			if (isNaN(f)) { timecode.errors.push("The 'frames' is not a number."); }  else { timecode.f = f; };
72			// We return the timecode object
73			return timecode;
74		},
75
76		// Converts an Array into a timecode string
77		// Example: [1,2,3,4,5,6] => "00:12:34:56"
78		array2TCString: function (tcArray) {
79			var tcString = ['0','0',':','0','0',':','0','0',':','0','0'];
80			var positions = [10,9,7,6,4,3,1,0];
81			if (tcArray) {
82				if (tcArray.length > 8) { tcArray = tcArray.slice(tcArray.length-8,tcArray.length); }; // If the array is too long, we slice it.
83				
84				var i = 0;
85				var array = _.clone(tcArray);
86				while (array.length != 0) {
87					if ([1,2,3,4,5,6,7,8,9,0].includes(array[array.length-1])) {
88						tcString[positions[i]] = array[array.length-1]; 
89						i++;
90					};
91					array.pop();
92				};
93			};
94			return tcString.join("");
95		},
96
97
98
99		// ---------------------------
100		// Conversions: Non-Drop Frames
101		// ----------------------------
102
103		frames_to_timecode: function(nbFrames, framerate){
104			var timecode = {h:0, m:0, s:0, f:0, dropFrame: framerate.dropFrame};
105			var nbSeconds = nbFrames/framerate.value;
106			timecode.h = _.floor(nbSeconds / 3600); // Hours
107			timecode.m = _.floor((nbSeconds - (timecode.h * 3600)) / 60); // Minutes
108			timecode.s = _.floor(nbSeconds - (timecode.h * 3600) - (timecode.m * 60)); // Seconds
109			timecode.f = _.floor(nbFrames - (framerate.value * ((timecode.h * 3600) + (timecode.m * 60) + timecode.s))); // Frames
110			return timecode;
111		},
112
113		timecode_to_frames: function(nbHours, nbMinutes, nbSeconds, nbFrames, framerate){
114			return ((Number(nbHours) * 3600) + (Number(nbMinutes) * 60) + Number(nbSeconds)) * framerate.value + Number(nbFrames);
115		},
116
117
118		// ---------------------------
119		// Conversions: Drop Frames
120		// ---------------------------
121
		// Converts an absolute frame count into an SMPTE drop-frame timecode object.
		// Drop-frame timecode skips frame NUMBERS (not actual frames) at every
		// minute mark except each 10th minute, keeping the displayed clock in step
		// with real elapsed time at NTSC rates (29.97, 59.94, ...).
		// NOTE(review): cross-checked against bruteForce_frames_to_DF_timecode
		// below, which ticks a clock frame-by-frame and serves as a reference.
		frames_to_DF_timecode: function(nbFrames, framerate){

			var dropFrames = _.ceil(framerate.value * 0.06); // Number of frames to drop on the minute marks is the nearest integer to 6% of the framerate
			var framesPerHour = _.ceil(framerate.value * 60 * 60); // Number of frames in an hour
			var framesPer24Hours = framesPerHour * 24; // Number of frames in a day - timecode rolls over after 24 hours
			var framesPer10Minutes = _.ceil(framerate.value * 60 * 10); // Number of frames per ten minutes
			var framesPerMinute = (_.ceil(framerate.value) * 60) - dropFrames; // Number of frames per minute is the round of the framerate * 60 minus the number of dropped frames

			// Negative time. Add 24 hours.
			if (nbFrames < 0) { nbFrames = framesPer24Hours + nbFrames; }

			// If nbFrames is greater than 24 hrs, next operation will rollover clock
			nbFrames = nbFrames % framesPer24Hours;

			// d = number of complete 10-minute chunks; m = remainder within the chunk.
			var d = _.floor(nbFrames / framesPer10Minutes);
			var m = nbFrames % framesPer10Minutes

			// Re-insert the skipped frame numbers: 9 dropped minutes per complete
			// 10-minute chunk, plus one per whole minute begun in the remainder.
			if (m > dropFrames) { 
				nbFrames = nbFrames + (dropFrames * 9 * d) + dropFrames * ( _.floor((m-dropFrames) / framesPerMinute) );
			} else {
				nbFrames = nbFrames + dropFrames * 9 * d;
			}

			// With the skipped numbers re-added, plain division by the rounded
			// framerate yields the displayed h/m/s/f digits.
			var timecode = {h:0, m:0, s:0, f:0, dropFrame: framerate.dropFrame};
			var frRound  = _.ceil(framerate.value);
			timecode.h   = _.floor(_.floor(_.floor(nbFrames / frRound) / 60) / 60);
			timecode.m   = _.floor(_.floor(nbFrames / frRound) / 60) % 60;
			timecode.s   = _.floor(nbFrames / frRound) % 60;
			timecode.f   = nbFrames % frRound;
			return timecode;
		},
153
154		DF_timecode_to_frames: function(nbHours, nbMinutes, nbSeconds, nbFrames, framerate){
155			// Converting the parameters into Numbers
156			nbHours = Number(nbHours); nbMinutes = Number(nbMinutes); nbSeconds = Number(nbSeconds); nbFrames = Number(nbFrames);
157			// Converting the timecode into a number of frames
158			var dropFrames = _.ceil(framerate.value * 0.06); // Number of drop frames is 6% of framerate rounded to nearest integer
159			var timeBase = _.ceil(framerate.value); // We don't need the exact framerate anymore, we just need it rounded to nearest integer
160			var ND_FramesPerHour = timeBase * 60 * 60; // Number of frames per hour (non-drop)
161			var ND_FramesPerMinute = timeBase * 60; // Number of frames per minute (non-drop)
162			var totalMinutes = (Number(nbHours) * 60) + Number(nbMinutes); // Total number of minutes
163			var frameNumber = ((ND_FramesPerHour * nbHours) + (ND_FramesPerMinute * nbMinutes) + (timeBase * nbSeconds) + nbFrames) - (dropFrames * (totalMinutes - _.floor(totalMinutes / 10)));
164			return frameNumber;
165		},
166
		// Reference implementation: converts a frame count into a drop-frame
		// timecode by ticking an h/m/s/f clock once per frame. Much slower than
		// frames_to_DF_timecode, but easier to trust; showTestResults uses it to
		// cross-check the arithmetic version.
		bruteForce_frames_to_DF_timecode: function(nbFrames, framerate) {
			var timecode = {h:0, m:0, s:0, f:0, dropFrame: framerate.dropFrame};
			var dropFrames = _.ceil(framerate.value * 0.06); // frame numbers skipped at each dropped minute (~6% of the framerate)
			var timeBase = _.ceil(framerate.value); // rounded framerate, e.g. 30 for 29.97
			// Looping through all the frames
			for (var i = 0; i < nbFrames; i++) {
				if (timecode.f < timeBase-1) {timecode.f += 1;} // Tick-up the 'frames'
				else {
					timecode.f = 0; // Reset the 'frames'
					if (timecode.s < 59) {timecode.s += 1;} // Tick-up the 'seconds'
					else {
						timecode.s = 0 // Reset the seconds
						if (timecode.m < 59) {
							timecode.m += 1; // Tick-up the 'minutes'
							if (timecode.m%10!=0) { timecode.f = dropFrames; } else { timecode.f = 0; } // Drop the 'frames' every minutes... except every 10 minutes.
						}
						else {
							timecode.m = 0 // Reset the minutes
							timecode.f = 0; // We set the 'frames' to 0 when the minutes is a multiple of 10.
							// NOTE(review): hours are capped at 24 here rather than wrapping
							// to 0, while frames_to_DF_timecode wraps at 24h — confirm which
							// rollover behaviour is intended.
							if (timecode.h < 24) {timecode.h += 1;}; // Tick-up the 'hours'
						};
					};
				};
			};
			return timecode;
		},
193
194
195
196		// ---------------------------
197		// Testing
198		// ---------------------------
199
		// Prints a set of sanity checks to the console. Each line shows the
		// expected value (hard-coded, or computed by the brute-force reference
		// implementation) next to the value actually produced, for manual
		// comparison.
		// NOTE(review): relies on tcc.getFramerates() defined outside this
		// excerpt; the messages imply the ordering 0:23.98, 1:24, 2:29.97,
		// 3:30, 4:59.94, 5:60 — confirm against that definition.
		showTestResults: function() {
			// Testing "frames_to_timecode"
			console.log('123456 frames at 24fps should return 01:25:44:00 => ' + tcc.formatTimecode(tcc.frames_to_timecode(123456,tcc.getFramerates()[1])));
			console.log('123456 frames at 30fps should return 01:08:35:06 => ' + tcc.formatTimecode(tcc.frames_to_timecode(123456,tcc.getFramerates()[3])));
			console.log('123456 frames at 60fps should return 00:34:17:36 => ' + tcc.formatTimecode(tcc.frames_to_timecode(123456,tcc.getFramerates()[5])));
			console.log('987654 frames at 24fps should return 11:25:52:06 => ' + tcc.formatTimecode(tcc.frames_to_timecode(987654,tcc.getFramerates()[1])));
			console.log('987654 frames at 30fps should return 09:08:41:24 => ' + tcc.formatTimecode(tcc.frames_to_timecode(987654,tcc.getFramerates()[3])));
			console.log('987654 frames at 60fps should return 04:34:20:54 => ' + tcc.formatTimecode(tcc.frames_to_timecode(987654,tcc.getFramerates()[5])));

			// Testing "timecode_to_frames"
			console.log('The timecode 01:25:44:00 at 24fps should correspond to 123456 frames => ' + tcc.timecode_to_frames('01','25','44','00',tcc.getFramerates()[1]));
			console.log('The timecode 01:08:35:06 at 30fps should correspond to 123456 frames => ' + tcc.timecode_to_frames('01','08','35','06',tcc.getFramerates()[3]));
			console.log('The timecode 00:34:17:36 at 60fps should correspond to 123456 frames => ' + tcc.timecode_to_frames('00','34','17','36',tcc.getFramerates()[5]));
			console.log('The timecode 11:25:52:06 at 24fps should correspond to 987654 frames => ' + tcc.timecode_to_frames('11','25','52','06',tcc.getFramerates()[1]));
			console.log('The timecode 09:08:41:24 at 30fps should correspond to 987654 frames => ' + tcc.timecode_to_frames('09','08','41','24',tcc.getFramerates()[3]));
			console.log('The timecode 04:34:20:54 at 60fps should correspond to 987654 frames => ' + tcc.timecode_to_frames('04','34','20','54',tcc.getFramerates()[5]));

			// Testing "frames_to_DF_timecode" and "bruteForce_frames_to_DF_timecode"
			// (the brute-force result is the expected value for the fast version)
			console.log('123456 frames at 23.98fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(123456,tcc.getFramerates()[0])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(123456,tcc.getFramerates()[0])));
			console.log('123456 frames at 29.97fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(123456,tcc.getFramerates()[2])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(123456,tcc.getFramerates()[2])));
			console.log('123456 frames at 59.94fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(123456,tcc.getFramerates()[4])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(123456,tcc.getFramerates()[4])));
			console.log('987654 frames at 23.98fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(987654,tcc.getFramerates()[0])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(987654,tcc.getFramerates()[0])));
			console.log('987654 frames at 29.97fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(987654,tcc.getFramerates()[2])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(987654,tcc.getFramerates()[2])));
			console.log('987654 frames at 59.94fps should return '+ tcc.formatTimecode(tcc.bruteForce_frames_to_DF_timecode(987654,tcc.getFramerates()[4])) +' => ' + tcc.formatTimecode(tcc.frames_to_DF_timecode(987654,tcc.getFramerates()[4])));

			// Testing "timecode_to_frames"
			console.log('The timecode 01:25:50;10 at 23.98fps should correspond to 123456 frames => ' + tcc.DF_timecode_to_frames('01','25','50','10',tcc.getFramerates()[0]));
			console.log('The timecode 01:08:39;10 at 29.97fps should correspond to 123456 frames => ' + tcc.DF_timecode_to_frames('01','08','39','10',tcc.getFramerates()[2]));
			console.log('The timecode 00:34:19;40 at 59.94fps should correspond to 123456 frames => ' + tcc.DF_timecode_to_frames('00','34','19','40',tcc.getFramerates()[4]));
			console.log('The timecode 11:26:43;18 at 23.98fps should correspond to 987654 frames => ' + tcc.DF_timecode_to_frames('11','26','43','18',tcc.getFramerates()[0]));
			console.log('The timecode 09:09:14;24 at 29.97fps should correspond to 987654 frames => ' + tcc.DF_timecode_to_frames('09','09','14','24',tcc.getFramerates()[2]));
			console.log('The timecode 04:34:37;22 at 59.94fps should correspond to 987654 frames => ' + tcc.DF_timecode_to_frames('04','34','37','22',tcc.getFramerates()[4]));
		}
233
234	}; // end of 'return'
235})(); // end of IIFE
236
237
238
239
240
241
Full Screen

eval-stack-trace.js

Source: eval-stack-trace.js Github

copy
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
// Install a custom V8 stack-trace formatter. With this hook in place,
// accessing err.stack yields the raw array of CallSite frame objects
// instead of the default formatted string.
Error.prepareStackTrace = function(error, frames) {
  return frames;
}
33
// Convenience accessor: with the Error.prepareStackTrace hook installed
// earlier in this file, 'stack' holds the array of CallSite frames.
Error.prototype.getFrames = function() {
  return this.stack;
}
38
// True when 'pattern' occurs anywhere in this string.
String.prototype.contains = function(pattern) {
  return this.indexOf(pattern) !== -1;
}
42
// Check that frames[i][func_name]() equals this[i] for every index whose
// expected value is not null (null marks frames we don't care about).
Array.prototype.verifyEquals = function(frames, func_name) {
  this.forEach(function(expected, i) {
    if (expected !== null) {
      assertEquals(expected, (frames[i][func_name])());
    }
  });
}
54
// Check that the value returned by frames[i][func_name]() contains this[i]
// as a substring, for every non-null expected pattern.
Array.prototype.verifyContains = function(frames, func_name) {
  this.forEach(function(pattern, i) {
    if (pattern !== null) {
      assertTrue((frames[i][func_name])().contains(pattern));
    }
  });
}
66
// Check that frames[i][func_name]() is (or is not) undefined, as stated by
// the boolean this[i]; null entries are skipped.
Array.prototype.verifyUndefined = function(frames, func_name) {
  this.forEach(function(expected, i) {
    if (expected !== null) {
      assertEquals(expected, (frames[i][func_name])() === undefined);
    }
  });
}
78
79
// Simple eval.
// Test 1: an error thrown one level of eval deep. The stack should contain
// 4 frames: f (inside the eval'd code), the eval'd code's top level, g, and
// this script's top level. Line-number expectations (2 and 4) refer to
// positions inside the code1 string, not to this file.
var code1 = "function f() {        \n" +
            "  throw new Error(3); \n" +  // Line 2
            "}                     \n" +
            "f();                  \n";   // Line 4

function g() {
  eval(code1);
}

try {
  g();
} catch (e) {
  // We expect something like
  //   f (eval at g (eval-stack.js:87:8), <anonymous>:2:9)
  //   eval (eval at g (eval-stack.js:87:8), <anonymous>:4:1)
  //   g (eval-stack.js:87:3)
  //   eval-stack.js:94:3
  var frames = e.getFrames();
  assertEquals(4, frames.length);
  ["f", "eval", "g"]
      .verifyEquals(frames, "getFunctionName");
  [2, 4]
      .verifyEquals(frames, "getLineNumber");
  ["<anonymous>:2:", "<anonymous>:4:"]
      .verifyContains(frames, "toString");
  [true, true, false, false]
      .verifyUndefined(frames, "getFileName");
  ["eval at g", "eval at g"]
      .verifyContains(frames, "getEvalOrigin");
}
111
112
// Nested eval.
// Test 2: code2 is eval'd at top level and itself evals code1, so the eval
// origins nest. 5 frames expected: f, code1's top level, h, code2's top
// level, and this script's top level.
var code2 = "function h() {        \n" +
            "  // Empty            \n" +
            "  eval(code1);        \n" +  // Line 3
            "}                     \n" +
            "h();                  \n";   // Line 5

try {
  eval(code2);
} catch (e) {
  // We expect something like
  //   f (eval at h (eval at <anonymous> (eval-stack.js:116:8)),
  //       <anonymous>:2:9)
  //   eval (eval at h (eval at <anonymous> (eval-stack.js:116:8)),
  //       <anonymous>:4:1)
  //   h (eval at <anonymous> (eval-stack.js:116:8), <anonymous>:3:3)
  //   eval (eval at <anonymous> (eval-stack.js:116:8), <anonymous>:5:1)
  //   eval-stack.js:116:3
  var frames = e.getFrames();
  assertEquals(5, frames.length);
  ["f", "eval", "h", "eval"]
      .verifyEquals(frames, "getFunctionName");
  [2, 4, 3, 5]
      .verifyEquals(frames, "getLineNumber");
  ["<anonymous>:2:", "<anonymous>:4:", "<anonymous>:3:", "<anonymous>:5:"]
      .verifyContains(frames, "toString");
  [true, true, true, true, false]
      .verifyUndefined(frames, "getFileName");
  ["eval at h (eval at <anonymous> (",
   "eval at h (eval at <anonymous> (",
   "eval at <anonymous> (",
   "eval at <anonymous> ("]
      .verifyContains(frames, "getEvalOrigin");
}
147
148
// Nested eval calling through non-eval defined function.
// Test 3: code3 is eval'd and calls g (defined in this file, not in eval),
// which in turn evals code1. The g frame therefore has a real file name
// (null entries below skip the checks for that frame). 6 frames expected.
var code3 = "function h() {        \n" +
            "  // Empty            \n" +
            "  g();                \n" +  // Line 3
            "}                     \n" +
            "h();                  \n";   // Line 5

try {
  eval(code3);
} catch (e) {
  // We expect something like
  //   f (eval at g (test.js:83:8), <anonymous>:2:9)
  //   eval (eval at g (test.js:83:8), <anonymous>:4:1)
  //   g (test.js:83:3)
  //   h (eval at <anonymous> (test.js:149:8), <anonymous>:3:3)
  //   eval (eval at <anonymous> (test.js:149:8), <anonymous>:5:1)
  //   test.js:149:3
  var frames = e.getFrames();
  assertEquals(6, frames.length);
  ["f", "eval", "g", "h", "eval"]
      .verifyEquals(frames, "getFunctionName");
  [2, 4, null, 3, 5]
      .verifyEquals(frames, "getLineNumber");
  ["<anonymous>:2:", "<anonymous>:4:", null, "<anonymous>:3:", "<anonymous>:5:"]
      .verifyContains(frames, "toString");
  [true, true, false, true, true, false]
      .verifyUndefined(frames, "getFileName");
  ["eval at g (",
   "eval at g (",
   null,
   "eval at <anonymous> (",
   "eval at <anonymous> ("]
      .verifyContains(frames, "getEvalOrigin");
}
183
184
// Calling function defined in eval.
// Test 4: f is defined by an eval at top level and then called directly from
// this script, so only 2 frames are expected: f (eval-originated, no file
// name) and this script's top level. This redefines the global f from test 1.
eval("function f() {               \n" +
     "  throw new Error(3);        \n" +
     "}                            \n");

try {
  f();
} catch (e) {
  // We expect something like
  //   f (eval at <anonymous> (test.js:182:40), <anonymous>:2:9)
  //   test.js:186:3
  var frames = e.getFrames();
  assertEquals(2, frames.length);
  ["f"].verifyEquals(frames, "getFunctionName");
  [2].verifyEquals(frames, "getLineNumber");
  ["<anonymous>:2:"].verifyContains(frames, "toString");
  [true, false].verifyUndefined(frames, "getFileName");
  ["eval at <anonymous> ("].verifyContains(frames, "getEvalOrigin");
}
204
Full Screen

Accelerate Your Automation Test Cycles With LambdaTest

Leverage LambdaTest’s cloud-based platform to execute your automation tests in parallel and trim down your test execution time significantly. Your first 100 automation testing minutes are on us.

Try LambdaTest

Run Python Tests on LambdaTest Cloud Grid

Execute automation tests with Playwright Python on a cloud-based Grid of 3000+ real browsers and operating systems for both web and mobile applications.

Test now for Free
LambdaTestX

We use cookies to give you the best experience. Cookies help to provide a more personalized experience and relevant advertising for you, and web analytics for us. Learn More in our Cookies policy, Privacy & Terms of service

Allow Cookie
Sarah

I hope you find the best code examples for your project.

If you want to accelerate automated browser testing, try LambdaTest. Your first 100 automation testing minutes are FREE.

Sarah Elson (Product & Growth Lead)