How to use record method in ATX

Best Python code snippet using ATX

csv_dataset_test.py

Source:csv_dataset_test.py Github

copy

Full Screen

1# Copyright 2018 The TensorFlow Authors. All Rights Reserved.2#3# Licensed under the Apache License, Version 2.0 (the "License");4# you may not use this file except in compliance with the License.5# You may obtain a copy of the License at6#7# http://www.apache.org/licenses/LICENSE-2.08#9# Unless required by applicable law or agreed to in writing, software10# distributed under the License is distributed on an "AS IS" BASIS,11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.12# See the License for the specific language governing permissions and13# limitations under the License.14# ==============================================================================15"""Tests for `tf.data.experimental.CsvDataset`."""16from __future__ import absolute_import17from __future__ import division18from __future__ import print_function19import gzip20import os21import zlib22from absl.testing import parameterized23from tensorflow.python.data.experimental.ops import error_ops24from tensorflow.python.data.experimental.ops import readers25from tensorflow.python.data.kernel_tests import test_base26from tensorflow.python.data.ops import readers as core_readers27from tensorflow.python.eager import context28from tensorflow.python.framework import combinations29from tensorflow.python.framework import constant_op30from tensorflow.python.framework import dtypes31from tensorflow.python.framework import errors32from tensorflow.python.ops import parsing_ops33from tensorflow.python.platform import test34class CsvDatasetTest(test_base.DatasetTestBase, parameterized.TestCase):35 def _setup_files(self, inputs, linebreak='\n', compression_type=None):36 filenames = []37 for i, ip in enumerate(inputs):38 fn = os.path.join(self.get_temp_dir(), 'temp_%d.csv' % i)39 contents = linebreak.join(ip).encode('utf-8')40 if compression_type is None:41 with open(fn, 'wb') as f:42 f.write(contents)43 elif compression_type == 'GZIP':44 with gzip.GzipFile(fn, 'wb') as f:45 f.write(contents)46 
elif compression_type == 'ZLIB':47 contents = zlib.compress(contents)48 with open(fn, 'wb') as f:49 f.write(contents)50 else:51 raise ValueError('Unsupported compression_type', compression_type)52 filenames.append(fn)53 return filenames54 def _make_test_datasets(self, inputs, **kwargs):55 # Test by comparing its output to what we could get with map->decode_csv56 filenames = self._setup_files(inputs)57 dataset_expected = core_readers.TextLineDataset(filenames)58 dataset_expected = dataset_expected.map(59 lambda l: parsing_ops.decode_csv(l, **kwargs))60 dataset_actual = readers.CsvDataset(filenames, **kwargs)61 return (dataset_actual, dataset_expected)62 def _test_by_comparison(self, inputs, **kwargs):63 """Checks that CsvDataset is equiv to TextLineDataset->map(decode_csv)."""64 dataset_actual, dataset_expected = self._make_test_datasets(65 inputs, **kwargs)66 self.assertDatasetsEqual(dataset_actual, dataset_expected)67 def _verify_output_or_err(self,68 dataset,69 expected_output=None,70 expected_err_re=None):71 if expected_err_re is None:72 # Verify that output is expected, without errors73 nxt = self.getNext(dataset)74 expected_output = [[75 v.encode('utf-8') if isinstance(v, str) else v for v in op76 ] for op in expected_output]77 for value in expected_output:78 op = self.evaluate(nxt())79 self.assertAllEqual(op, value)80 with self.assertRaises(errors.OutOfRangeError):81 self.evaluate(nxt())82 else:83 nxt = self.getNext(dataset)84 while True:85 try:86 self.evaluate(nxt())87 except errors.OutOfRangeError:88 break89 def _test_dataset(90 self,91 inputs,92 expected_output=None,93 expected_err_re=None,94 linebreak='\n',95 compression_type=None, # Used for both setup and parsing96 **kwargs):97 """Checks that elements produced by CsvDataset match expected output."""98 # Convert str type because py3 tf strings are bytestrings99 filenames = self._setup_files(inputs, linebreak, compression_type)100 kwargs['compression_type'] = compression_type101 if expected_err_re is not 
None:102 # Verify that OpError is produced as expected103 with self.assertRaisesOpError(expected_err_re):104 dataset = readers.CsvDataset(filenames, **kwargs)105 self._verify_output_or_err(dataset, expected_output, expected_err_re)106 else:107 dataset = readers.CsvDataset(filenames, **kwargs)108 self._verify_output_or_err(dataset, expected_output, expected_err_re)109 @combinations.generate(test_base.default_test_combinations())110 def testCsvDataset_requiredFields(self):111 record_defaults = [[]] * 4112 inputs = [['1,2,3,4']]113 self._test_by_comparison(inputs, record_defaults=record_defaults)114 @combinations.generate(test_base.default_test_combinations())115 def testCsvDataset_int(self):116 record_defaults = [[0]] * 4117 inputs = [['1,2,3,4', '5,6,7,8']]118 self._test_by_comparison(inputs, record_defaults=record_defaults)119 @combinations.generate(test_base.default_test_combinations())120 def testCsvDataset_float(self):121 record_defaults = [[0.0]] * 4122 inputs = [['1.0,2.1,3.2,4.3', '5.4,6.5,7.6,8.7']]123 self._test_by_comparison(inputs, record_defaults=record_defaults)124 @combinations.generate(test_base.default_test_combinations())125 def testCsvDataset_string(self):126 record_defaults = [['']] * 4127 inputs = [['1.0,2.1,hello,4.3', '5.4,6.5,goodbye,8.7']]128 self._test_by_comparison(inputs, record_defaults=record_defaults)129 @combinations.generate(test_base.default_test_combinations())130 def testCsvDataset_withEmptyFields(self):131 record_defaults = [[0]] * 4132 inputs = [[',,,', '1,1,1,', ',2,2,2']]133 self._test_dataset(134 inputs, [[0, 0, 0, 0], [1, 1, 1, 0], [0, 2, 2, 2]],135 record_defaults=record_defaults)136 @combinations.generate(test_base.default_test_combinations())137 def testCsvDataset_errWithUnquotedQuotes(self):138 record_defaults = [['']] * 3139 inputs = [['1,2"3,4']]140 self._test_dataset(141 inputs,142 expected_err_re='Unquoted fields cannot have quotes inside',143 record_defaults=record_defaults)144 
@combinations.generate(test_base.default_test_combinations())145 def testCsvDataset_errWithUnescapedQuotes(self):146 record_defaults = [['']] * 3147 inputs = [['"a"b","c","d"']]148 self._test_dataset(149 inputs,150 expected_err_re=151 'Quote inside a string has to be escaped by another quote',152 record_defaults=record_defaults)153 @combinations.generate(test_base.default_test_combinations())154 def testCsvDataset_ignoreErrWithUnescapedQuotes(self):155 record_defaults = [['']] * 3156 inputs = [['1,"2"3",4', '1,"2"3",4",5,5', 'a,b,"c"d"', 'e,f,g']]157 filenames = self._setup_files(inputs)158 dataset = readers.CsvDataset(filenames, record_defaults=record_defaults)159 dataset = dataset.apply(error_ops.ignore_errors())160 self._verify_output_or_err(dataset, [['e', 'f', 'g']])161 @combinations.generate(test_base.default_test_combinations())162 def testCsvDataset_ignoreErrWithUnquotedQuotes(self):163 record_defaults = [['']] * 3164 inputs = [['1,2"3,4', 'a,b,c"d', '9,8"7,6,5', 'e,f,g']]165 filenames = self._setup_files(inputs)166 dataset = readers.CsvDataset(filenames, record_defaults=record_defaults)167 dataset = dataset.apply(error_ops.ignore_errors())168 self._verify_output_or_err(dataset, [['e', 'f', 'g']])169 @combinations.generate(test_base.default_test_combinations())170 def testCsvDataset_withNoQuoteDelimAndUnquotedQuotes(self):171 record_defaults = [['']] * 3172 inputs = [['1,2"3,4']]173 self._test_by_comparison(174 inputs, record_defaults=record_defaults, use_quote_delim=False)175 @combinations.generate(test_base.default_test_combinations())176 def testCsvDataset_mixedTypes(self):177 record_defaults = [178 constant_op.constant([], dtype=dtypes.int32),179 constant_op.constant([], dtype=dtypes.float32),180 constant_op.constant([], dtype=dtypes.string),181 constant_op.constant([], dtype=dtypes.float64)182 ]183 inputs = [['1,2.1,3.2,4.3', '5,6.5,7.6,8.7']]184 self._test_by_comparison(inputs, record_defaults=record_defaults)185 
@combinations.generate(test_base.default_test_combinations())186 def testCsvDataset_withUseQuoteDelimFalse(self):187 record_defaults = [['']] * 4188 inputs = [['1,2,"3,4"', '"5,6",7,8']]189 self._test_by_comparison(190 inputs, record_defaults=record_defaults, use_quote_delim=False)191 @combinations.generate(test_base.default_test_combinations())192 def testCsvDataset_withFieldDelim(self):193 record_defaults = [[0]] * 4194 inputs = [['1:2:3:4', '5:6:7:8']]195 self._test_by_comparison(196 inputs, record_defaults=record_defaults, field_delim=':')197 @combinations.generate(test_base.default_test_combinations())198 def testCsvDataset_withNaValue(self):199 record_defaults = [[0]] * 4200 inputs = [['1,NA,3,4', 'NA,6,7,8']]201 self._test_by_comparison(202 inputs, record_defaults=record_defaults, na_value='NA')203 @combinations.generate(test_base.default_test_combinations())204 def testCsvDataset_withSelectCols(self):205 record_defaults = [['']] * 2206 inputs = [['1,2,3,4', '"5","6","7","8"']]207 self._test_by_comparison(208 inputs, record_defaults=record_defaults, select_cols=[1, 2])209 @combinations.generate(test_base.default_test_combinations())210 def testCsvDataset_withSelectColsTooHigh(self):211 record_defaults = [[0]] * 2212 inputs = [['1,2,3,4', '5,6,7,8']]213 self._test_dataset(214 inputs,215 expected_err_re='Expect 2 fields but have 1 in record',216 record_defaults=record_defaults,217 select_cols=[3, 4])218 @combinations.generate(test_base.default_test_combinations())219 def testCsvDataset_withOneCol(self):220 record_defaults = [['NA']]221 inputs = [['0', '', '2']]222 self._test_dataset(223 inputs, [['0'], ['NA'], ['2']], record_defaults=record_defaults)224 @combinations.generate(test_base.default_test_combinations())225 def testCsvDataset_withMultipleFiles(self):226 record_defaults = [[0]] * 4227 inputs = [['1,2,3,4', '5,6,7,8'], ['5,6,7,8']]228 self._test_by_comparison(inputs, record_defaults=record_defaults)229 
@combinations.generate(test_base.default_test_combinations())230 def testCsvDataset_withLeadingAndTrailingSpaces(self):231 record_defaults = [[0.0]] * 4232 inputs = [['0, 1, 2, 3']]233 expected = [[0.0, 1.0, 2.0, 3.0]]234 self._test_dataset(inputs, expected, record_defaults=record_defaults)235 @combinations.generate(test_base.default_test_combinations())236 def testCsvDataset_errorWithMissingDefault(self):237 record_defaults = [[]] * 2238 inputs = [['0,']]239 self._test_dataset(240 inputs,241 expected_err_re='Field 1 is required but missing in record!',242 record_defaults=record_defaults)243 @combinations.generate(test_base.default_test_combinations())244 def testCsvDataset_errorWithFewerDefaultsThanFields(self):245 record_defaults = [[0.0]] * 2246 inputs = [['0,1,2,3']]247 self._test_dataset(248 inputs,249 expected_err_re='Expect 2 fields but have more in record',250 record_defaults=record_defaults)251 @combinations.generate(test_base.default_test_combinations())252 def testCsvDataset_errorWithMoreDefaultsThanFields(self):253 record_defaults = [[0.0]] * 5254 inputs = [['0,1,2,3']]255 self._test_dataset(256 inputs,257 expected_err_re='Expect 5 fields but have 4 in record',258 record_defaults=record_defaults)259 @combinations.generate(test_base.default_test_combinations())260 def testCsvDataset_withHeader(self):261 record_defaults = [[0]] * 2262 inputs = [['col1,col2', '1,2']]263 expected = [[1, 2]]264 self._test_dataset(265 inputs,266 expected,267 record_defaults=record_defaults,268 header=True,269 )270 @combinations.generate(test_base.default_test_combinations())271 def testCsvDataset_withHeaderAndNoRecords(self):272 record_defaults = [[0]] * 2273 inputs = [['col1,col2']]274 expected = []275 self._test_dataset(276 inputs,277 expected,278 record_defaults=record_defaults,279 header=True,280 )281 @combinations.generate(test_base.default_test_combinations())282 def testCsvDataset_errorWithHeaderEmptyFile(self):283 record_defaults = [[0]] * 2284 inputs = [[]]285 
expected_err_re = "Can't read header of file"286 self._test_dataset(287 inputs,288 expected_err_re=expected_err_re,289 record_defaults=record_defaults,290 header=True,291 )292 @combinations.generate(test_base.default_test_combinations())293 def testCsvDataset_withEmptyFile(self):294 record_defaults = [['']] * 2295 inputs = [['']] # Empty file296 self._test_dataset(297 inputs, expected_output=[], record_defaults=record_defaults)298 @combinations.generate(test_base.default_test_combinations())299 def testCsvDataset_errorWithEmptyRecord(self):300 record_defaults = [['']] * 2301 inputs = [['', '1,2']] # First record is empty302 self._test_dataset(303 inputs,304 expected_err_re='Expect 2 fields but have 1 in record',305 record_defaults=record_defaults)306 @combinations.generate(test_base.default_test_combinations())307 def testCsvDataset_withChainedOps(self):308 # Testing that one dataset can create multiple iterators fine.309 # `repeat` creates multiple iterators from the same C++ Dataset.310 record_defaults = [[0]] * 4311 inputs = [['1,,3,4', '5,6,,8']]312 ds_actual, ds_expected = self._make_test_datasets(313 inputs, record_defaults=record_defaults)314 self.assertDatasetsEqual(315 ds_actual.repeat(5).prefetch(1),316 ds_expected.repeat(5).prefetch(1))317 @combinations.generate(test_base.default_test_combinations())318 def testCsvDataset_withTypeDefaults(self):319 # Testing using dtypes as record_defaults for required fields320 record_defaults = [dtypes.float32, [0.0]]321 inputs = [['1.0,2.0', '3.0,4.0']]322 self._test_dataset(323 inputs,324 [[1.0, 2.0], [3.0, 4.0]],325 record_defaults=record_defaults,326 )327 @combinations.generate(test_base.default_test_combinations())328 def testMakeCsvDataset_fieldOrder(self):329 data = [[330 '1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19',331 '1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19'332 ]]333 file_path = self._setup_files(data)334 ds = readers.make_csv_dataset(335 file_path, batch_size=1, shuffle=False, num_epochs=1)336 
nxt = self.getNext(ds)337 result = list(self.evaluate(nxt()).values())338 self.assertEqual(result, sorted(result))339## The following tests exercise parsing logic for quoted fields340 @combinations.generate(test_base.default_test_combinations())341 def testCsvDataset_withQuoted(self):342 record_defaults = [['']] * 4343 inputs = [['"a","b","c :)","d"', '"e","f","g :(","h"']]344 self._test_by_comparison(inputs, record_defaults=record_defaults)345 def testCsvDataset_withOneColAndQuotes(self):346 record_defaults = [['']]347 inputs = [['"0"', '"1"', '"2"']]348 self._test_dataset(349 inputs, [['0'], ['1'], ['2']], record_defaults=record_defaults)350 @combinations.generate(test_base.default_test_combinations())351 def testCsvDataset_withNewLine(self):352 # In this case, we expect it to behave differently from353 # TextLineDataset->map(decode_csv) since that flow has bugs354 record_defaults = [['']] * 4355 inputs = [['a,b,"""c""\n0","d\ne"', 'f,g,h,i']]356 expected = [['a', 'b', '"c"\n0', 'd\ne'], ['f', 'g', 'h', 'i']]357 self._test_dataset(inputs, expected, record_defaults=record_defaults)358 @combinations.generate(test_base.default_test_combinations())359 def testCsvDataset_withNewLineInUnselectedCol(self):360 record_defaults = [['']]361 inputs = [['1,"2\n3",4', '5,6,7']]362 self._test_dataset(363 inputs,364 expected_output=[['1'], ['5']],365 record_defaults=record_defaults,366 select_cols=[0])367 @combinations.generate(test_base.default_test_combinations())368 def testCsvDataset_withMultipleNewLines(self):369 # In this case, we expect it to behave differently from370 # TextLineDataset->map(decode_csv) since that flow has bugs371 record_defaults = [['']] * 4372 inputs = [['a,"b\n\nx","""c""\n \n0","d\ne"', 'f,g,h,i']]373 expected = [['a', 'b\n\nx', '"c"\n \n0', 'd\ne'], ['f', 'g', 'h', 'i']]374 self._test_dataset(inputs, expected, record_defaults=record_defaults)375 @combinations.generate(test_base.default_test_combinations())376 def 
testCsvDataset_errorWithTerminateMidRecord(self):377 record_defaults = [['']] * 4378 inputs = [['a,b,c,"a']]379 self._test_dataset(380 inputs,381 expected_err_re=382 'Reached end of file without closing quoted field in record',383 record_defaults=record_defaults)384 @combinations.generate(test_base.default_test_combinations())385 def testCsvDataset_withEscapedQuotes(self):386 record_defaults = [['']] * 4387 inputs = [['1.0,2.1,"she said: ""hello""",4.3', '5.4,6.5,goodbye,8.7']]388 self._test_by_comparison(inputs, record_defaults=record_defaults)389## Testing that parsing works with all buffer sizes, quoted/unquoted fields,390## and different types of line breaks391 @combinations.generate(test_base.default_test_combinations())392 def testCsvDataset_withInvalidBufferSize(self):393 record_defaults = [['']] * 4394 inputs = [['a,b,c,d']]395 self._test_dataset(396 inputs,397 expected_err_re='buffer_size should be positive',398 record_defaults=record_defaults,399 buffer_size=0)400 def _test_dataset_on_buffer_sizes(self,401 inputs,402 expected,403 linebreak,404 record_defaults,405 compression_type=None,406 num_sizes_to_test=20):407 # Testing reading with a range of buffer sizes that should all work.408 for i in list(range(1, 1 + num_sizes_to_test)) + [None]:409 self._test_dataset(410 inputs,411 expected,412 linebreak=linebreak,413 compression_type=compression_type,414 record_defaults=record_defaults,415 buffer_size=i)416 @combinations.generate(test_base.default_test_combinations())417 def testCsvDataset_withLF(self):418 record_defaults = [['NA']] * 3419 inputs = [['abc,def,ghi', '0,1,2', ',,']]420 expected = [['abc', 'def', 'ghi'], ['0', '1', '2'], ['NA', 'NA', 'NA']]421 self._test_dataset_on_buffer_sizes(422 inputs, expected, linebreak='\n', record_defaults=record_defaults)423 @combinations.generate(test_base.default_test_combinations())424 def testCsvDataset_withCR(self):425 # Test that when the line separator is '\r', parsing works with all buffer426 # sizes427 
record_defaults = [['NA']] * 3428 inputs = [['abc,def,ghi', '0,1,2', ',,']]429 expected = [['abc', 'def', 'ghi'], ['0', '1', '2'], ['NA', 'NA', 'NA']]430 self._test_dataset_on_buffer_sizes(431 inputs, expected, linebreak='\r', record_defaults=record_defaults)432 @combinations.generate(test_base.default_test_combinations())433 def testCsvDataset_withCRLF(self):434 # Test that when the line separator is '\r\n', parsing works with all buffer435 # sizes436 record_defaults = [['NA']] * 3437 inputs = [['abc,def,ghi', '0,1,2', ',,']]438 expected = [['abc', 'def', 'ghi'], ['0', '1', '2'], ['NA', 'NA', 'NA']]439 self._test_dataset_on_buffer_sizes(440 inputs, expected, linebreak='\r\n', record_defaults=record_defaults)441 @combinations.generate(test_base.default_test_combinations())442 def testCsvDataset_withBufferSizeAndQuoted(self):443 record_defaults = [['NA']] * 3444 inputs = [['"\n\n\n","\r\r\r","abc"', '"0","1","2"', '"","",""']]445 expected = [['\n\n\n', '\r\r\r', 'abc'], ['0', '1', '2'],446 ['NA', 'NA', 'NA']]447 self._test_dataset_on_buffer_sizes(448 inputs, expected, linebreak='\n', record_defaults=record_defaults)449 @combinations.generate(test_base.default_test_combinations())450 def testCsvDataset_withCRAndQuoted(self):451 # Test that when the line separator is '\r', parsing works with all buffer452 # sizes453 record_defaults = [['NA']] * 3454 inputs = [['"\n\n\n","\r\r\r","abc"', '"0","1","2"', '"","",""']]455 expected = [['\n\n\n', '\r\r\r', 'abc'], ['0', '1', '2'],456 ['NA', 'NA', 'NA']]457 self._test_dataset_on_buffer_sizes(458 inputs, expected, linebreak='\r', record_defaults=record_defaults)459 @combinations.generate(test_base.default_test_combinations())460 def testCsvDataset_withCRLFAndQuoted(self):461 # Test that when the line separator is '\r\n', parsing works with all buffer462 # sizes463 record_defaults = [['NA']] * 3464 inputs = [['"\n\n\n","\r\r\r","abc"', '"0","1","2"', '"","",""']]465 expected = [['\n\n\n', '\r\r\r', 'abc'], ['0', '1', '2'],466 
['NA', 'NA', 'NA']]467 self._test_dataset_on_buffer_sizes(468 inputs, expected, linebreak='\r\n', record_defaults=record_defaults)469 @combinations.generate(test_base.default_test_combinations())470 def testCsvDataset_withGzipCompressionType(self):471 record_defaults = [['NA']] * 3472 inputs = [['"\n\n\n","\r\r\r","abc"', '"0","1","2"', '"","",""']]473 expected = [['\n\n\n', '\r\r\r', 'abc'], ['0', '1', '2'],474 ['NA', 'NA', 'NA']]475 self._test_dataset_on_buffer_sizes(476 inputs,477 expected,478 linebreak='\r\n',479 compression_type='GZIP',480 record_defaults=record_defaults)481 @combinations.generate(test_base.default_test_combinations())482 def testCsvDataset_withZlibCompressionType(self):483 record_defaults = [['NA']] * 3484 inputs = [['"\n\n\n","\r\r\r","abc"', '"0","1","2"', '"","",""']]485 expected = [['\n\n\n', '\r\r\r', 'abc'], ['0', '1', '2'],486 ['NA', 'NA', 'NA']]487 self._test_dataset_on_buffer_sizes(488 inputs,489 expected,490 linebreak='\r\n',491 compression_type='ZLIB',492 record_defaults=record_defaults)493 @combinations.generate(test_base.default_test_combinations())494 def testCsvDataset_withScalarDefaults(self):495 record_defaults = [constant_op.constant(0, dtype=dtypes.int64)] * 4496 inputs = [[',,,', '1,1,1,', ',2,2,2']]497 self._test_dataset(498 inputs, [[0, 0, 0, 0], [1, 1, 1, 0], [0, 2, 2, 2]],499 record_defaults=record_defaults)500 @combinations.generate(test_base.default_test_combinations())501 def testCsvDataset_with2DDefaults(self):502 record_defaults = [constant_op.constant([[0]], dtype=dtypes.int64)] * 4503 inputs = [[',,,', '1,1,1,', ',2,2,2']]504 if context.executing_eagerly():505 err_spec = errors.InvalidArgumentError, (506 'Each record default should be at '507 'most rank 1')508 else:509 err_spec = ValueError, 'Shape must be at most rank 1 but is rank 2'510 with self.assertRaisesWithPredicateMatch(*err_spec):511 self._test_dataset(512 inputs, [[0, 0, 0, 0], [1, 1, 1, 0], [0, 2, 2, 2]],513 record_defaults=record_defaults)514if 
__name__ == '__main__':...

Full Screen

Full Screen

TimelinePresentationModel.js

Source:TimelinePresentationModel.js Github

copy

Full Screen

...126 var startTime = record.startTime();127 var endTime = record.endTime();128 if (!lastRecord)129 return null;130 if (lastRecord.record().type() !== record.type())131 return null;132 if (!WebInspector.TimelineUIUtils.isCoalescable(record.type()))133 return null;134 if (lastRecord.record().endTime() + coalescingThresholdMillis < startTime)135 return null;136 if (endTime + coalescingThresholdMillis < lastRecord.record().startTime())137 return null;138 if (lastRecord.presentationParent().coalesced())139 return lastRecord.presentationParent();140 return this._replaceWithCoalescedRecord(lastRecord);141 },142 /**143 * @param {!WebInspector.TimelinePresentationModel.Record} presentationRecord144 * @return {!WebInspector.TimelinePresentationModel.Record}145 */146 _replaceWithCoalescedRecord: function(presentationRecord)147 {148 var record = presentationRecord.record();149 var parent = presentationRecord._presentationParent;150 var coalescedRecord = new WebInspector.TimelinePresentationModel.CoalescedRecord(record);151 coalescedRecord._collapsed = true;152 coalescedRecord._presentationChildren.push(presentationRecord);153 presentationRecord._presentationParent = coalescedRecord;154 if (presentationRecord.hasWarnings() || presentationRecord.childHasWarnings())155 coalescedRecord._childHasWarnings = true;156 coalescedRecord._presentationParent = parent;157 parent._presentationChildren[parent._presentationChildren.indexOf(presentationRecord)] = coalescedRecord;158 return coalescedRecord;159 },160 /**161 * @param {!WebInspector.TimelinePresentationModel.Record} presentationRecord162 */163 _updateCoalescingParent: function(presentationRecord)164 {165 var parentRecord = presentationRecord._presentationParent;166 if (parentRecord.endTime() < presentationRecord.endTime())167 parentRecord._endTime = presentationRecord.endTime();168 },169 /**170 * @param {?RegExp} textFilter171 */172 setTextFilter: function(textFilter)173 {174 var records = 
this._recordToPresentationRecord.valuesArray();175 for (var i = 0; i < records.length; ++i)176 records[i]._expandedOrCollapsedWhileFiltered = false;177 this._textFilter = textFilter;178 },179 refreshRecords: function()180 {181 this.reset();182 var modelRecords = this._model.records();183 for (var i = 0; i < modelRecords.length; ++i)184 this.addRecord(modelRecords[i]);185 },186 invalidateFilteredRecords: function()187 {188 delete this._filteredRecords;189 },190 /**191 * @return {!Array.<!WebInspector.TimelinePresentationModel.Record>}192 */193 filteredRecords: function()194 {195 if (!this._rootRecord.presentationChildren().length)196 this.refreshRecords();197 if (this._filteredRecords)198 return this._filteredRecords;199 var recordsInWindow = [];200 var stack = [{children: this._rootRecord._presentationChildren, index: 0, parentIsCollapsed: false, parentRecord: {}}];201 var revealedDepth = 0;202 function revealRecordsInStack() {203 for (var depth = revealedDepth + 1; depth < stack.length; ++depth) {204 if (stack[depth - 1].parentIsCollapsed) {205 stack[depth].parentRecord._presentationParent._expandable = true;206 return;207 }208 stack[depth - 1].parentRecord._collapsed = false;209 recordsInWindow.push(stack[depth].parentRecord);210 stack[depth].windowLengthBeforeChildrenTraversal = recordsInWindow.length;211 stack[depth].parentIsRevealed = true;212 revealedDepth = depth;213 }214 }215 while (stack.length) {216 var entry = stack[stack.length - 1];217 var records = entry.children;218 if (records && entry.index < records.length) {219 var record = records[entry.index];220 ++entry.index;221 if (record.startTime() < this._windowEndTime && record.endTime() > this._windowStartTime) {222 if (this._model.isVisible(record.record())) {223 record._presentationParent._expandable = true;224 if (this._textFilter)225 revealRecordsInStack();226 if (!entry.parentIsCollapsed) {227 recordsInWindow.push(record);228 revealedDepth = stack.length;229 entry.parentRecord._collapsed = 
false;230 }231 }232 }233 record._expandable = false;234 stack.push({children: record._presentationChildren,235 index: 0,236 parentIsCollapsed: entry.parentIsCollapsed || (record._collapsed && (!this._textFilter || record._expandedOrCollapsedWhileFiltered)),237 parentRecord: record,238 windowLengthBeforeChildrenTraversal: recordsInWindow.length});239 } else {240 stack.pop();241 revealedDepth = Math.min(revealedDepth, stack.length - 1);242 entry.parentRecord._visibleChildrenCount = recordsInWindow.length - entry.windowLengthBeforeChildrenTraversal;243 }244 }245 this._filteredRecords = recordsInWindow;246 return recordsInWindow;247 },248 __proto__: WebInspector.Object.prototype249}250/**251 * @constructor252 * @param {?WebInspector.TimelinePresentationModel.Record} parentRecord253 */254WebInspector.TimelinePresentationModel.Record = function(parentRecord)255{256 /**257 * @type {!Array.<!WebInspector.TimelinePresentationModel.Record>}258 */259 this._presentationChildren = [];260 if (parentRecord) {261 this._presentationParent = parentRecord;262 parentRecord._presentationChildren.push(this);263 }264}265WebInspector.TimelinePresentationModel.Record.prototype = {266 /**267 * @return {number}268 */269 startTime: function()270 {271 throw new Error("Not implemented.");272 },273 /**274 * @return {number}275 */276 endTime: function()277 {278 throw new Error("Not implemented.");279 },280 /**281 * @return {number}282 */283 selfTime: function()284 {285 throw new Error("Not implemented.");286 },287 /**288 * @return {!WebInspector.TimelineModel.Record}289 */290 record: function()291 {292 throw new Error("Not implemented.");293 },294 /**295 * @return {!Array.<!WebInspector.TimelinePresentationModel.Record>}296 */297 presentationChildren: function()298 {299 return this._presentationChildren;300 },301 /**302 * @return {boolean}303 */304 coalesced: function()305 {306 return false;307 },308 /**309 * @return {boolean}310 */311 collapsed: function()312 {313 return this._collapsed;314 
},315 /**316 * @param {boolean} collapsed317 */318 setCollapsed: function(collapsed)319 {320 this._collapsed = collapsed;321 this._expandedOrCollapsedWhileFiltered = true;322 },323 /**324 * @return {?WebInspector.TimelinePresentationModel.Record}325 */326 presentationParent: function()327 {328 return this._presentationParent || null;329 },330 /**331 * @return {number}332 */333 visibleChildrenCount: function()334 {335 return this._visibleChildrenCount || 0;336 },337 /**338 * @return {boolean}339 */340 expandable: function()341 {342 return !!this._expandable;343 },344 /**345 * @return {boolean}346 */347 hasWarnings: function()348 {349 return false;350 },351 /**352 * @return {boolean}353 */354 childHasWarnings: function()355 {356 return this._childHasWarnings;357 },358 /**359 * @return {?WebInspector.TimelineRecordListRow}360 */361 listRow: function()362 {363 return this._listRow;364 },365 /**366 * @param {!WebInspector.TimelineRecordListRow} listRow367 */368 setListRow: function(listRow)369 {370 this._listRow = listRow;371 },372 /**373 * @return {?WebInspector.TimelineRecordGraphRow}374 */375 graphRow: function()376 {377 return this._graphRow;378 },379 /**380 * @param {!WebInspector.TimelineRecordGraphRow} graphRow381 */382 setGraphRow: function(graphRow)383 {384 this._graphRow = graphRow;385 }386}387/**388 * @constructor389 * @extends {WebInspector.TimelinePresentationModel.Record}390 * @param {!WebInspector.TimelineModel.Record} record391 * @param {?WebInspector.TimelinePresentationModel.Record} parentRecord392 */393WebInspector.TimelinePresentationModel.ActualRecord = function(record, parentRecord)394{395 WebInspector.TimelinePresentationModel.Record.call(this, parentRecord);396 this._record = record;397 if (this.hasWarnings()) {398 for (var parent = this._presentationParent; parent && !parent._childHasWarnings; parent = parent._presentationParent)399 parent._childHasWarnings = true;400 }401}402WebInspector.TimelinePresentationModel.ActualRecord.prototype = {403 
/**404 * @return {number}405 */406 startTime: function()407 {408 return this._record.startTime();409 },410 /**411 * @return {number}412 */413 endTime: function()414 {415 return this._record.endTime();416 },417 /**418 * @return {number}419 */420 selfTime: function()421 {422 return this._record.selfTime();423 },424 /**425 * @return {!WebInspector.TimelineModel.Record}426 */427 record: function()428 {429 return this._record;430 },431 /**432 * @return {boolean}433 */434 hasWarnings: function()435 {436 return !!this._record.warnings();437 },438 __proto__: WebInspector.TimelinePresentationModel.Record.prototype439}440/**441 * @constructor442 * @extends {WebInspector.TimelinePresentationModel.Record}443 * @param {!WebInspector.TimelineModel.Record} record444 */445WebInspector.TimelinePresentationModel.CoalescedRecord = function(record)446{447 WebInspector.TimelinePresentationModel.Record.call(this, null);448 this._startTime = record.startTime();449 this._endTime = record.endTime();450}451WebInspector.TimelinePresentationModel.CoalescedRecord.prototype = {452 /**453 * @return {number}454 */455 startTime: function()456 {457 return this._startTime;458 },459 /**460 * @return {number}461 */462 endTime: function()463 {464 return this._endTime;465 },466 /**467 * @return {number}468 */469 selfTime: function()470 {471 return 0;472 },473 /**474 * @return {!WebInspector.TimelineModel.Record}475 */476 record: function()477 {478 return this._presentationChildren[0].record();479 },480 /**481 * @return {boolean}482 */483 coalesced: function()484 {485 return true;486 },487 /**488 * @return {boolean}489 */490 hasWarnings: function()491 {492 return false;...

Full Screen

Full Screen

acts_records_test.py

Source:acts_records_test.py Github

copy

Full Screen

# NOTE(review): reconstructed from a line-number-mangled scrape of
# acts_records_test.py. The enclosing unittest.TestCase class header and the
# `def setUp(self):` line fall before this chunk; setUp's signature below is
# reconstructed to hold the visible fixture assignments — confirm against the
# original file.

def setUp(self):
    # Shared fixture values used by every test below.
    self.tn = "test_name"
    self.details = "Some details about the test execution."
    self.float_extra = 12345.56789
    self.json_extra = {"ha": "whatever"}

def verify_record(self, record, result, details, extras):
    """Assert that `record` carries the expected outcome and serializes cleanly.

    Args:
        record: records.TestResultRecord under inspection.
        result: expected records.TestResultEnums.TEST_RESULT_* value.
        details: expected details string (or None).
        extras: expected extras payload (or None).
    """
    # Verify each field.
    self.assertEqual(record.test_name, self.tn)
    self.assertEqual(record.result, result)
    self.assertEqual(record.details, details)
    self.assertEqual(record.extras, extras)
    self.assertTrue(record.begin_time, "begin time should not be empty.")
    self.assertTrue(record.end_time, "end time should not be empty.")
    # UID is not used at the moment, should always be None.
    self.assertIsNone(record.uid)
    # Verify to_dict by rebuilding the expected dict key by key.
    d = {}
    d[records.TestResultEnums.RECORD_NAME] = self.tn
    d[records.TestResultEnums.RECORD_RESULT] = result
    d[records.TestResultEnums.RECORD_DETAILS] = details
    d[records.TestResultEnums.RECORD_EXTRAS] = extras
    d[records.TestResultEnums.RECORD_BEGIN_TIME] = record.begin_time
    d[records.TestResultEnums.RECORD_END_TIME] = record.end_time
    d[records.TestResultEnums.RECORD_LOG_BEGIN_TIME] = record.log_begin_time
    d[records.TestResultEnums.RECORD_LOG_END_TIME] = record.log_end_time
    d[records.TestResultEnums.RECORD_UID] = None
    d[records.TestResultEnums.RECORD_CLASS] = None
    d[records.TestResultEnums.RECORD_EXTRA_ERRORS] = {}
    d[records.TestResultEnums.RECORD_STACKTRACE] = record.stacktrace
    actual_d = record.to_dict()
    self.assertDictEqual(actual_d, d)
    # Verify that these code paths do not cause crashes and yield non-empty
    # results.
    self.assertTrue(str(record), "str of the record should not be empty.")
    self.assertTrue(repr(record), "the record's repr shouldn't be empty.")
    self.assertTrue(record.json_str(), ("json str of the record should "
                                        "not be empty."))

""" Begin of Tests """

def test_result_record_pass_none(self):
    # Pass with no signal: details/extras stay None.
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    record.test_pass()
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_PASS,
        details=None,
        extras=None)

def test_result_record_pass_with_float_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestPass(self.details, self.float_extra)
    record.test_pass(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_PASS,
        details=self.details,
        extras=self.float_extra)

def test_result_record_pass_with_json_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestPass(self.details, self.json_extra)
    record.test_pass(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_PASS,
        details=self.details,
        extras=self.json_extra)

def test_result_record_fail_none(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    record.test_fail()
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_FAIL,
        details=None,
        extras=None)

def test_result_record_fail_with_float_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestFailure(self.details, self.float_extra)
    record.test_fail(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_FAIL,
        details=self.details,
        extras=self.float_extra)

def test_result_record_fail_with_json_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestFailure(self.details, self.json_extra)
    record.test_fail(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_FAIL,
        details=self.details,
        extras=self.json_extra)

def test_result_record_skip_none(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    record.test_skip()
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_SKIP,
        details=None,
        extras=None)

def test_result_record_skip_with_float_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestSkip(self.details, self.float_extra)
    record.test_skip(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_SKIP,
        details=self.details,
        extras=self.float_extra)

def test_result_record_skip_with_json_extra(self):
    record = records.TestResultRecord(self.tn)
    record.test_begin()
    s = signals.TestSkip(self.details, self.json_extra)
    record.test_skip(s)
    self.verify_record(
        record=record,
        result=records.TestResultEnums.TEST_RESULT_SKIP,
        details=self.details,
        extras=self.json_extra)

def test_result_add_operator_success(self):
    # `+=` on TestResult should merge records and controller info.
    record1 = records.TestResultRecord(self.tn)
    record1.test_begin()
    s = signals.TestPass(self.details, self.float_extra)
    record1.test_pass(s)
    tr1 = records.TestResult()
    tr1.add_record(record1)
    device1 = ControllerInfoRecord('TestClass', 'MockDevice', 'device1')
    tr1.add_controller_info_record(device1)
    record2 = records.TestResultRecord(self.tn)
    record2.test_begin()
    s = signals.TestPass(self.details, self.json_extra)
    record2.test_pass(s)
    tr2 = records.TestResult()
    tr2.add_record(record2)
    device2 = ControllerInfoRecord('TestClass', 'MockDevice', 'device2')
    tr2.add_controller_info_record(device2)
    tr2 += tr1
    self.assertTrue(tr2.passed, [tr1, tr2])
    self.assertTrue(tr2.controller_info, [device1, device2])

def test_result_add_operator_type_mismatch(self):
    record1 = records.TestResultRecord(self.tn)
    record1.test_begin()
    s = signals.TestPass(self.details, self.float_extra)
    record1.test_pass(s)
    tr1 = records.TestResult()
    tr1.add_record(record1)
    expected_msg = "Operand .* of type .* is not a TestResult."
    # FIX: assertRaisesRegexp is a deprecated alias removed in Python 3.12;
    # use assertRaisesRegex (identical behavior).
    with self.assertRaisesRegex(TypeError, expected_msg):
        tr1 += "haha"

def test_is_all_pass(self):
    # Skipped records do not break is_all_pass; the duplicated pass record
    # is deliberately added twice but only counted once in `passed`.
    s = signals.TestPass(self.details, self.float_extra)
    record1 = records.TestResultRecord(self.tn)
    record1.test_begin()
    record1.test_pass(s)
    s = signals.TestSkip(self.details, self.float_extra)
    record2 = records.TestResultRecord(self.tn)
    record2.test_begin()
    record2.test_skip(s)
    tr = records.TestResult()
    tr.add_record(record1)
    tr.add_record(record2)
    tr.add_record(record1)
    self.assertEqual(len(tr.passed), 2)
    self.assertTrue(tr.is_all_pass)

def test_is_all_pass_negative(self):
    # Any fail or error record must make is_all_pass False.
    s = signals.TestFailure(self.details, self.float_extra)
    record1 = records.TestResultRecord(self.tn)
    record1.test_begin()
    record1.test_fail(s)
    record2 = records.TestResultRecord(self.tn)
    record2.test_begin()
    record2.test_error(s)
    tr = records.TestResult()
    tr.add_record(record1)
    tr.add_record(record2)
    self.assertFalse(tr.is_all_pass)

# NOTE(review): the original file ends with an `if __name__ == "__main__":`
# guard whose body is truncated in this chunk (presumably unittest.main()).

Full Screen

Full Screen

RecordSet.js

Source:RecordSet.js Github

copy

Full Screen

// NOTE(review): reconstructed from a line-number-mangled scrape of the YUI 2
// DataTable RecordSet test file. The final `})();` closing the IIFE is
// truncated in this chunk and restored here.
(function() {
    var gCount = -1;

    var Dom = YAHOO.util.Dom,
        Assert = YAHOO.util.Assert,
        ObjectAssert = YAHOO.util.ObjectAssert,
        ArrayAssert = YAHOO.util.ArrayAssert,
        DateAssert = YAHOO.util.DateAssert,
        UserAction = YAHOO.util.UserAction,
        TestCase = YAHOO.tool.TestCase,
        TestLogger = YAHOO.tool.TestLogger,
        TestRunner = YAHOO.tool.TestRunner,
        TestSuite = YAHOO.tool.TestSuite,

        DataSource = YAHOO.util.DataSource,
        DataTable = YAHOO.widget.DataTable,
        ColumnSet = YAHOO.widget.ColumnSet,
        RecordSet = YAHOO.widget.RecordSet;

    /**
     * Base DataTable test template. Sets up values for a DataTable instance.
     */
    var dtBaseTemplate = {
        name: "DataTable Base Tests",

        dsData: [
            {a:"0a",b:"0b",c:"0c"},
            {a:"1a",b:"1b",c:"1c"},
            {a:"2a",b:"2b",c:"2c"},
            {a:"3a",b:"3b",c:"3c"}
        ],

        dsConfig: {
            responseType: YAHOO.util.DataSource.TYPE_JSARRAY,
            responseSchema: {fields: ["a","b","c"]}
        },

        columns: [{key:"a"},{key:"b"},{key:"c"}]
    };

    /**
     * Base DataTable test case. Wraps YAHOO.tool.TestCase with per-test
     * container/DataSource setup and teardown.
     */
    function DataTableTestCase(template) {
        DataTableTestCase.superclass.constructor.call(this, template);
    }
    YAHOO.lang.extend(DataTableTestCase, TestCase);

    DataTableTestCase.prototype.setUp = function() {
        // Create container anew
        this.container = document.createElement("div");
        ///this.container.id = "testDTContainer"; // Is this necessary?
        document.body.appendChild(this.container);

        // Create DataSource anew
        this.datasource = new YAHOO.util.DataSource(this.dsData, this.dsConfig);
    };

    DataTableTestCase.prototype.tearDown = function() {
        // Destroy DataTable
        this.datatable.destroy();
        this.datatable = null;

        // Destroy container
        if(this.container !== null) {
            YAHOO.util.Event.purgeElement(this.container, true);
            document.body.removeChild(this.container);
            this.container = null;
        }

        // TODO: need a destroy method
        this.datasource = null;
    };

    DataTableTestCase.prototype.createInstance = function(oDT, oConfig) {
        oDT = oDT || DataTable;
        this.datatable = new oDT(this.container, this.columns, this.datasource, oConfig);
        gCount++;
        return this.datatable;
    };

    /**
     * Tests RecordSet APIs.
     */
    var rsRecordSetTemplate = YAHOO.lang.merge(dtBaseTemplate, {
        name: "DataTable RecordSet Tests",

        testGetRecordSet: function() {
            var dt = this.createInstance();
            var rs = dt.getRecordSet();

            Assert.isInstanceOf(RecordSet, rs, "Expected a RecordSet");
        },

        testGetRecord: function() {
            var dt = this.createInstance();
            //dt.subscribe("initEvent", function() {
            var rs = dt.getRecordSet();
            var oRecord = rs._records[3];
            var sRecordId = oRecord.getId();

            // FIX: `var el` was declared twice; declare once and reassign.
            var el = dt.getTbodyEl().rows[3];
            var oTestRecord = dt.getRecord(el);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by el reference");

            el = dt.getTbodyEl().rows[3].cells[2];
            oTestRecord = dt.getRecord(el);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by el reference child");

            el = Dom.get(oTestRecord.getId());
            oTestRecord = null;
            oTestRecord = dt.getRecord(el);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by DOM ID");

            oTestRecord = dt.getRecord(3);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by position index");

            oTestRecord = dt.getRecord(sRecordId);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by Record ID");

            oTestRecord = dt.getRecord(oRecord);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by Record instance");

            oTestRecord = rs.getRecord(3);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by position index (RecordSet method)");

            oTestRecord = rs.getRecord(sRecordId);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by Record ID (RecordSet method)");

            oTestRecord = rs.getRecord(oRecord);
            Assert.areSame(oRecord, oTestRecord, "Expected to get Record by Record instance (RecordSet method)");
            //});
        },

        testGetInvalidRecord: function() {
            var dt = this.createInstance();
            //dt.subscribe("initEvent", function() {
            var rs = dt.getRecordSet();
            var oRecord = rs._records[3];
            dt.deleteRow(3);

            // FIX: oTestRecord was assigned without `var`, leaking an
            // implicit global (and a ReferenceError in strict mode).
            var oTestRecord = dt.getRecord(oRecord);
            Assert.areSame(null, oTestRecord, "Expected not to get Record by invalid Record instance");

            oTestRecord = rs.getRecord(oRecord);
            Assert.areSame(null, oTestRecord, "Expected not to get Record by invalid Record instance (RecordSet method)");
            //});
        },

        testUpdateKey: function() {
            var dt = this.createInstance();
            //dt.subscribe("initEvent", function() {
            var rs = dt.getRecordSet();
            var oTestRecord = rs._records[0];
            rs.updateKey(0, "b", "xxx");
            Assert.areSame("xxx", oTestRecord.getData("b"), "Failed to update key b of Record 0 by position index");

            rs.updateKey(oTestRecord, "b", "zzz");
            Assert.areSame("zzz", oTestRecord.getData("b"), "Failed to update key b of Record 0 by instance");

            rs.updateKey(oTestRecord.getId(), "b", "yyy");
            Assert.areSame("yyy", oTestRecord.getData("b"), "Failed to update key b of Record 0 by ID");
            //});
        },

        testdeleteRecord: function() {
            var dt = this.createInstance(),
                rs = dt.getRecordSet();
            rs.subscribe("recordDeleteEvent", function(o) {
                Assert.areSame(3, rs.getLength(), "Failed to delete record at index 2.");
                Assert.areEqual("1a", o.data.a, "Failed to return deleted data.");
            });
            rs.deleteRecord(1);
        },

        testdeleteRecords: function() {
            var dt = this.createInstance(),
                rs = dt.getRecordSet();
            rs.subscribe("recordsDeleteEvent", function(o) {
                Assert.areSame(2, rs.getLength(), "Failed to delete record at index 2.");
                Assert.areEqual("1a", o.deletedData[0].a, "Failed to return deleted data (0).");
                Assert.areEqual("2a", o.deletedData[1].a, "Failed to return deleted data (1).");
            });
            rs.deleteRecords(1, 2);
        }
    });
    var rsRecordSetTest = new DataTableTestCase(rsRecordSetTemplate);

    /**
     * Runs tests.
     */
    YAHOO.util.Event.addListener(window, "load", function() {
        var recordsetsuite = new TestSuite("RecordSet Test Suite");
        recordsetsuite.add(rsRecordSetTest);

        TestRunner.add(recordsetsuite);
    });
})();

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run ATX automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful