How to use the get_cpu_arch method in autotest

Best Python code snippets using autotest_python

test_utils_cpu.py

Source:test_utils_cpu.py Github

copy

Full Screen

# NOTE(review): scraped excerpt of avocado's unit tests for
# avocado.utils.cpu.get_cpu_arch() (TestCase method bodies only; the
# enclosing class header is outside this view).  The scraper fused the
# original file's line numbers (323-493) into the text and collapsed all
# newlines, so the b"""...""" /proc/cpuinfo fixtures below are no longer
# valid Python.  The lost line breaks (and tab-vs-space layout inside the
# fixtures) cannot be reconstructed with confidence, so the text is kept
# byte-identical rather than rewritten.
#
# Structure, as far as this excerpt shows: each test patches
# 'avocado.utils.cpu.open' (guarded by @unittest.skipUnless(recent_mock(), ...))
# to feed a canned cpuinfo blob via self._get_file_mock(), then asserts the
# string cpu.get_cpu_arch() returns:
#   - "i386"    (head of this test is truncated before this excerpt)
#   - "x86_64"  (Intel Core i7 fixture)
#   - "power8"  (two fixtures: POWER8E big- and little-endian variants)
#   - "power9"  (POWER9 fixture)
#   - "s390"    (IBM/S390 fixture)
#   - "arm"     (ARMv7 fixture), "aarch64" (ARMv8 fixture)
#   - "riscv"   (rv64imafdc fixture)
# followed by test_get_cpuidle_state_off/_on, which also patch glob.glob and
# cpu_online_list; test_get_cpuidle_state_on is truncated at the end of this
# excerpt ("...").
...323power management:324"""325 with mock.patch('avocado.utils.cpu.open',326 return_value=self._get_file_mock(cpu_output)):327 self.assertEqual(cpu.get_cpu_arch(), "i386")328 @unittest.skipUnless(recent_mock(),329 "mock library version cannot (easily) patch open()")330 def test_cpu_arch_x86_64(self):331 cpu_output = b"""processor : 0332vendor_id : GenuineIntel333cpu family : 6334model : 60335model name : Intel(R) Core(TM) i7-4810MQ CPU @ 2.80GHz336stepping : 3337microcode : 0x24338cpu MHz : 1766.058339cache size : 6144 KB340physical id : 0341siblings : 8342core id : 0343cpu cores : 4344apicid : 0345initial apicid : 0346fpu : yes347fpu_exception : yes348cpuid level : 13349wp : yes350flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc cpuid aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm cpuid_fault epb invpcid_single pti tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt ibpb ibrs stibp dtherm ida arat pln pts351bugs : cpu_meltdown spectre_v1 spectre_v2352bogomips : 5586.93353clflush size : 64354cache_alignment : 64355address sizes : 39 bits physical, 48 bits virtual356power management:357"""358 with mock.patch('avocado.utils.cpu.open',359 return_value=self._get_file_mock(cpu_output)):360 self.assertEqual(cpu.get_cpu_arch(), "x86_64")361 @unittest.skipUnless(recent_mock(),362 "mock library version cannot (easily) patch open()")363 def test_cpu_arch_ppc64_power8(self):364 cpu_output = b"""processor : 88365cpu : POWER8E (raw), altivec supported366clock : 3325.000000MHz367revision : 2.1 (pvr 004b 0201)368timebase : 512000000369platform : PowerNV370model : 8247-21L371machine : PowerNV 8247-21L372firmware : 
OPAL v3373"""374 with mock.patch('avocado.utils.cpu.open',375 return_value=self._get_file_mock(cpu_output)):376 self.assertEqual(cpu.get_cpu_arch(), "power8")377 @unittest.skipUnless(recent_mock(),378 "mock library version cannot (easily) patch open()")379 def test_cpu_arch_ppc64_le_power8(self):380 cpu_output = b"""processor : 88381cpu : POWER8E (raw), altivec supported382clock : 3325.000000MHz383revision : 2.1 (pvr 004b 0201)384timebase : 512000000385platform : PowerNV386model : 8247-21L387machine : PowerNV 8247-21L388firmware : OPAL v3389"""390 with mock.patch('avocado.utils.cpu.open',391 return_value=self._get_file_mock(cpu_output)):392 self.assertEqual(cpu.get_cpu_arch(), "power8")393 @unittest.skipUnless(recent_mock(),394 "mock library version cannot (easily) patch open()")395 def test_cpu_arch_ppc64_le_power9(self):396 cpu_output = b"""processor : 20397cpu : POWER9 (raw), altivec supported398clock : 2050.000000MHz399revision : 1.0 (pvr 004e 0100)400timebase : 512000000401platform : PowerNV402model : 8375-42A403machine : PowerNV 8375-42A404firmware : OPAL405"""406 with mock.patch('avocado.utils.cpu.open',407 return_value=self._get_file_mock(cpu_output)):408 self.assertEqual(cpu.get_cpu_arch(), "power9")409 @unittest.skipUnless(recent_mock(),410 "mock library version cannot (easily) patch open()")411 def test_cpu_arch_s390(self):412 cpu_output = b"""vendor_id : IBM/S390413# processors : 2414bogomips per cpu: 2913.00415max thread id : 0416features : esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te sie417facilities : 0 1 2 3 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 30 31 32 33 34 35 36 37 40 41 42 43 44 45 46 47 48 49 50 51 52 57 64 65 66 67 68 69 70 71 72 73 75 76 77 78 81 82 131 132418cache0 : level=1 type=Data scope=Private size=96K line_size=256 associativity=6419cache1 : level=1 type=Instruction scope=Private size=64K line_size=256 associativity=4420cache2 : level=2 type=Data scope=Private size=1024K line_size=256 
associativity=8421cache3 : level=2 type=Instruction scope=Private size=1024K line_size=256 associativity=8422cache4 : level=3 type=Unified scope=Shared size=49152K line_size=256 associativity=12423cache5 : level=4 type=Unified scope=Shared size=393216K line_size=256 associativity=24424processor 0: version = 00, identification = 3FC047, machine = 2827425processor 1: version = 00, identification = 3FC047, machine = 2827426cpu number : 0427cpu MHz dynamic : 5504428cpu MHz static : 5504429cpu number : 1430cpu MHz dynamic : 5504431cpu MHz static : 5504432"""433 with mock.patch('avocado.utils.cpu.open',434 return_value=self._get_file_mock(cpu_output)):435 self.assertEqual(cpu.get_cpu_arch(), "s390")436 @unittest.skipUnless(recent_mock(),437 "mock library version cannot (easily) patch open()")438 def test_cpu_arch_arm_v7(self):439 cpu_output = b"""Processor : ARMv7 Processor rev 2 (v7l)440BogoMIPS : 994.65441Features : swp half thumb fastmult vfp edsp thumbee neon vfpv3442CPU implementer : 0x41443CPU architecture: 7444CPU variant : 0x2445CPU part : 0xc08446CPU revision : 2447Hardware : herring448Revision : 0034449Serial : 3534268a5e0700ec450"""451 with mock.patch('avocado.utils.cpu.open',452 return_value=self._get_file_mock(cpu_output)):453 self.assertEqual(cpu.get_cpu_arch(), "arm")454 @unittest.skipUnless(recent_mock(),455 "mock library version cannot (easily) patch open()")456 def test_cpu_arch_arm_v8(self):457 cpu_output = b"""processor : 0458BogoMIPS : 200.00459Features : fp asimd evtstrm aes pmull sha1 sha2 crc32 cpuid460CPU implementer : 0x43461CPU architecture: 8462CPU variant : 0x1463CPU part : 0x0a1464CPU revision : 1465"""466 with mock.patch('avocado.utils.cpu.open',467 return_value=self._get_file_mock(cpu_output)):468 self.assertEqual(cpu.get_cpu_arch(), "aarch64")469 @unittest.skipUnless(recent_mock(),470 "mock library version cannot (easily) patch open()")471 def test_cpu_arch_risc_v(self):472 cpu_output = b"""hart : 1473isa : rv64imafdc474mmu : sv39475uarch 
: sifive,rocket0476"""477 with mock.patch('avocado.utils.cpu.open',478 return_value=self._get_file_mock(cpu_output)):479 self.assertEqual(cpu.get_cpu_arch(), "riscv")480 @unittest.skipUnless(recent_mock(),481 "mock library version cannot (easily) patch open()")482 def test_get_cpuidle_state_off(self):483 retval = {0: {0: 0}}484 with mock.patch('avocado.utils.cpu.cpu_online_list', return_value=[0]):485 with mock.patch('glob.glob', return_value=['/sys/devices/system/cpu/cpu0/cpuidle/state1']):486 with mock.patch('avocado.utils.cpu.open', return_value=io.BytesIO(b'0')):487 self.assertEqual(cpu.get_cpuidle_state(), retval)488 @unittest.skipUnless(recent_mock(),489 "mock library version cannot (easily) patch open()")490 def test_get_cpuidle_state_on(self):491 retval = {0: {0: 1}}492 with mock.patch('avocado.utils.cpu.cpu_online_list', return_value=[0]):493 with mock.patch('glob.glob', return_value=['/sys/devices/system/cpu/cpu0/cpuidle/state1']):...

Full Screen

Full Screen

__init__.py

Source:__init__.py Github

copy

Full Screen

# NOTE(review): scraped excerpt of an openSUSE package-installer module
# (__init__.py); the scraper fused original line numbers (23-263) into the
# text and collapsed newlines, and the excerpt is truncated at both ends
# (install_binary is cut off), so the code is kept byte-identical rather
# than reformatted.  Visible functions: get_cpu_arch (caches `uname -m`,
# normalizes i?86 to 'i586'), get_os_release / get_distribution /
# get_version (parse /etc/os-release), add_packman_repo, url_normalize,
# get_repos, add_repo, install_packages (zypper/dnf dispatch), and the OBS
# search/ranking helpers.
#
# Concrete defects spotted (fixes need the full file, so flagged only):
#   BUG: in search_published_binary, `get_cpu_arch() not in ('x86_64' 'i586')`
#        uses implicit string-literal concatenation — that is the 1-tuple
#        ('x86_64i586',), not ('x86_64', 'i586'), so x86_64/i586 hosts are
#        wrongly filtered out of the openSUSE_Factory repository branch.
#        Intended: `not in ('x86_64', 'i586')`.
#   SUSPECTED BUG: the debuginfo/devel/doc filter uses re.match() with a
#        pattern beginning "-(debuginfo|...)...$"; re.match anchors at the
#        start of the name, so ordinary names like "foo-debuginfo" never
#        match and the filter is a no-op.  re.search() looks intended —
#        TODO confirm against the upstream project.
#   RISK: get_distribution() leaves `project` unassigned (UnboundLocalError)
#        when NAME is none of Tumbleweed/MicroOS/Leap/SLE*, and concatenates
#        None when 'SLE' is matched but VERSION is absent.
#   NIT: `!= 'openSUSE'and is_personal_project(...)` is missing a space
#        (valid Python, but hard to read); `if priority:` skips an explicit
#        priority of 0; the per-key `assert` in search_published_binary is
#        stripped under `python -O`.
...23###################24### System Info ###25###################26cpu_arch = ''27def get_cpu_arch():28 global cpu_arch29 if not cpu_arch:30 cpu_arch = subprocess.check_output(['uname', '-m']).strip().decode('ASCII')31 if re.match(r"^i.86$", cpu_arch):32 cpu_arch = 'i586'33 return cpu_arch34def get_os_release():35 os_release = {}36 with open('/etc/os-release') as f:37 for line in f.readlines():38 line = line.strip()39 if line.startswith('#') or '=' not in line:40 continue41 key, value = line.split('=', 1)42 key = key.strip()43 value = value.strip()44 if '"' in value:45 value = value.split('"', 1)[1].split('"', 1)[0]46 os_release[key] = value47 return os_release48def get_distribution(prefix=False, use_releasever_variable=False):49 os_release = get_os_release()50 name = os_release['NAME']51 version = os_release.get('VERSION') # VERSION is not set for TW52 if version:53 # strip prerelease suffix (eg. " Alpha")54 version = version.split(' ', 1)[0]55 if name == 'openSUSE Tumbleweed' or name == 'openSUSE MicroOS':56 project = 'openSUSE:Factory'57 elif name == 'openSUSE Leap':58 if use_releasever_variable:59 project = 'openSUSE:Leap:$releasever'60 else:61 project = 'openSUSE:Leap:' + version62 elif name.startswith('SLE'):63 project = 'SLE' + version64 if prefix:65 project = 'openSUSE.org:' + project66 return project67def get_version():68 os_release = get_os_release()69 version = os_release.get('VERSION') # VERSION is not set for TW70 return version71###############72### PACKMAN ###73###############74def add_packman_repo(dup=False):75 project = get_distribution(use_releasever_variable=config.get_key_from_config("use_releasever_var"))76 project = project.replace(':', '_')77 project = project.replace('Factory', 'Tumbleweed')78 add_repo(79 filename = 'packman',80 name = 'Packman',81 url = 'https://ftp.gwdg.de/pub/linux/misc/packman/suse/%s/' % project,82 auto_refresh = True,83 priority = 9084 )85 if dup:86 if get_backend() == BackendConstants.zypp:87 subprocess.call(['sudo', 
'zypper', 'dist-upgrade', '--from', 'packman', '--allow-downgrade', '--allow-vendor-change'])88 elif get_backend() == BackendConstants.dnf:89 subprocess.call(['sudo', 'dnf', 'dup', '--setopt=allow_vendor_change=True', '--repo', 'packman'])90def install_packman_packages(packages, **kwargs):91 install_packages(packages, from_repo='packman', **kwargs)92################93### ZYPP/DNF ###94################95def url_normalize(url):96 return re.sub(r"^https?", "", url).rstrip('/').replace('$releasever', get_version() or '$releasever')97def get_repos():98 for repo_file in os.listdir(REPO_DIR):99 if not repo_file.endswith('.repo'):100 continue101 try:102 cp = configparser.ConfigParser()103 cp.read(os.path.join(REPO_DIR, repo_file))104 mainsec = cp.sections()[0]105 if not bool(int(cp.get(mainsec, "enabled"))):106 continue107 yield (re.sub(r"\.repo$", "", repo_file), cp.get(mainsec, "baseurl"))108 except Exception as e:109 print("Error parsing '%s': %r" % (repo_file, e))110def get_enabled_repo_by_url(url):111 for repo, repo_url in get_repos():112 if url_normalize(repo_url) == url_normalize(url):113 return repo114def add_repo(filename, name, url, enabled=True, gpgcheck=True, gpgkey=None, repo_type='rpm-md', auto_import_key=False, auto_refresh=False, priority=None):115 tf = tempfile.NamedTemporaryFile('w')116 tf.file.write("[%s]\n" % filename)117 tf.file.write("name=%s\n" % name)118 tf.file.write("baseurl=%s\n" % url)119 tf.file.write("enabled=%i\n" % enabled)120 tf.file.write("type=%s\n" % repo_type)121 tf.file.write("gpgcheck=%i\n" % gpgcheck)122 if gpgkey:123 subprocess.call(['sudo', 'rpm', '--import', gpgkey.replace('$releasever', get_version() or '$releasever')])124 tf.file.write("gpgkey=%s\n" % gpgkey)125 if auto_refresh:126 tf.file.write("autorefresh=1\n")127 if priority:128 tf.file.write("priority=%i\n" % priority)129 tf.file.flush()130 subprocess.call(['sudo', 'cp', tf.name, os.path.join(REPO_DIR, '%s.repo' % filename)])131 subprocess.call(['sudo', 'chmod', '644', 
os.path.join(REPO_DIR, '%s.repo' % filename)])132 tf.file.close()133 refresh_cmd = []134 if get_backend() == BackendConstants.zypp:135 refresh_cmd = ['sudo', 'zypper']136 if auto_import_key:137 refresh_cmd.append('--gpg-auto-import-keys')138 refresh_cmd.append('ref')139 elif get_backend() == BackendConstants.dnf:140 refresh_cmd = ['sudo', 'dnf', 'ref']141 subprocess.call(refresh_cmd)142def install_packages(packages, from_repo=None, allow_vendor_change=False, allow_arch_change=False, allow_downgrade=False, allow_name_change=False):143 if get_backend() == BackendConstants.zypp:144 args = ['sudo', 'zypper', 'in']145 if from_repo:146 args.extend(['--from', from_repo])147 elif get_backend() == BackendConstants.dnf:148 args = ['sudo', 'dnf', 'in']149 if from_repo:150 args.extend(['--repo', from_repo])151 if get_backend() == BackendConstants.zypp:152 if allow_downgrade:153 args.append('--allow-downgrade')154 if allow_arch_change:155 args.append('--allow-arch-change')156 if allow_name_change:157 args.append('--allow-name-change')158 if allow_vendor_change:159 args.append('--allow-vendor-change')160 elif get_backend() == BackendConstants.dnf:161 # allow_downgrade and allow_name_change are default in DNF162 if allow_vendor_change:163 args.append('--setopt=allow_vendor_change=True')164 args.extend(packages)165 subprocess.call(args)166###########167### OBS ###168###########169def search_published_binary(obs_instance, query):170 distribution = get_distribution(prefix=(obs_instance != 'openSUSE'))171 endpoint = '/search/published/binary/id'172 url = OBS_APIROOT[obs_instance] + endpoint173 if isinstance(query, list):174 xquery = "'" + "', '".join(query) + "'"175 else:176 xquery = "'%s'" % query177 xpath = "contains-ic(@name, %s) and path/project='%s'" % (xquery, distribution)178 url = requests.Request('GET', url, params={'match': xpath, 'limit': 0}).prepare().url179 try:180 r = requests.get(PROXY_URL, params={'obs_api_link': url, 'obs_instance': obs_instance})181 
r.raise_for_status()182 dom = lxml.etree.fromstring(r.text)183 binaries = []184 for binary in dom.xpath('/collection/binary'):185 binary_data = {k: v for k, v in binary.items()}186 binary_data['obs_instance'] = obs_instance187 for k in ['name', 'project', 'repository', 'version', 'release', 'arch', 'filename', 'filepath', 'baseproject', 'type']:188 assert k in binary_data, 'Key "%s" missing' % k189 # Filter out ghost binary190 # (package has been deleted, but binary still exists)191 if not binary_data.get('package'):192 continue193 # Filter out branch projects194 if ':branches:' in binary_data['project']:195 continue196 # Filter out Packman personal projects197 if binary_data['obs_instance'] != 'openSUSE'and is_personal_project(binary_data['project']):198 continue199 # Filter out debuginfo, debugsource, devel, buildsymbols, lang and docs packages200 regex = r"-(debuginfo|debugsource|buildsymbols|devel|lang|l10n|trans|doc|docs)(-.+)?$"201 if re.match(regex, binary_data['name']):202 continue203 # Filter out source packages204 if binary_data['arch'] == 'src':205 continue206 # Filter architecture207 if binary_data['arch'] not in (get_cpu_arch(), 'noarch'):208 continue209 # Filter repo architecture210 if binary_data['repository'] == 'openSUSE_Factory' and (get_cpu_arch() not in ('x86_64' 'i586')):211 continue212 elif binary_data['repository'] == 'openSUSE_Factory_ARM' and not get_cpu_arch().startswith('arm') and not get_cpu_arch() == 'aarch64':213 continue214 elif binary_data['repository'] == 'openSUSE_Factory_PowerPC' and not get_cpu_arch().startswith('ppc'):215 continue216 elif binary_data['repository'] == 'openSUSE_Factory_zSystems' and not get_cpu_arch().startswith('s390'):217 continue218 elif binary_data['repository'] == 'openSUSE_Factory_RISCV' and not get_cpu_arch().startswith('risc'):219 continue220 binaries.append(binary_data)221 return binaries222 except requests.exceptions.HTTPError as e:223 if e.response.status_code == 413:224 print("Please use different 
search keywords. Some short keywords cause OBS timeout.")225 else:226 print("HTTPError: %s" % e)227 sys.exit(1)228def get_binary_names(binaries):229 names = []230 for binary in binaries:231 name = binary['name']232 if name not in names:233 names.append(name)234 return names235def sort_binaries(binaries):236 return sorted(binaries, key=lambda b: get_binary_weight(b), reverse=True)237def get_binary_weight(binary):238 weight = 0239 if is_official_project(binary['project']):240 weight += 20000241 elif is_personal_project(binary['project']):242 weight += 0243 else:244 weight += 10000245 if binary['name'] == binary['package']:246 weight += 1000247 dash_count = binary['name'].count('-')248 weight += 100 * (0.5**dash_count)249 if not (get_cpu_arch() == 'x86_64' and binary['arch'] == 'i586'):250 weight += 10251 weight -= len(binary['name'])252 return weight253def is_official_project(project):254 return project.startswith('openSUSE:')255def is_personal_project(project):256 return project.startswith('home:') or project.startswith('isv:')257def get_binaries_by_name(name, binaries):258 return [binary for binary in binaries if binary['name'] == name]259def install_binary(binary):260 name = binary['name']261 obs_instance = binary['obs_instance']262 arch = binary['arch']263 project = binary['project']...

Full Screen

Full Screen

installers.py

Source:installers.py Github

copy

Full Screen

# NOTE(review): scraped excerpt of an ML-compiler installer module
# (installers.py, nebullvm-style helpers); original line numbers (5-161)
# are fused into the text and newlines collapsed, and the excerpt is
# truncated at both ends (install_onnx_simplifier is cut off), so the
# code is kept byte-identical rather than reformatted.  Visible helpers:
# get_cpu_arch / _get_os, plus pip/bash installers for TVM, BladeDISC,
# Torch-TensorRT, tf2onnx, TensorFlow, TensorRT, OpenVINO, onnxruntime,
# DeepSparse, Intel Neural Compressor, and ONNX simplifier.
#
# Review observations (flagged only — the truncated edges prevent a safe
# in-place rewrite):
#   RISK: get_cpu_arch() returns "arm" for ANY non-x86 architecture
#        (ppc64le, s390x, riscv all map to "arm"); downstream callers such
#        as install_onnxruntime()/install_onnx_simplifier() branch on that
#        value — TODO confirm only x86/arm hosts are supported.
#   RISK: none of the subprocess.run() calls pass check=True, so a failed
#        pip/bash step is silently ignored and later installers keep going.
#   RISK: install_deepsparse() shells out to `apt-get install` without
#        sudo and assumes a Debian-style distro with a pythonX.Y-venv
#        package; this fails on non-apt systems and non-root users.
#   NOTE: install_bladedisc() uses subprocess.Popen without wait(), so the
#        install proceeds in the background — presumably intentional
#        (fire-and-forget), but worth confirming.
...5import sys6import cpuinfo7import torch8from nebullvm.utils.general import check_module_version9def get_cpu_arch():10 arch = cpuinfo.get_cpu_info()["arch"].lower()11 if "x86" in arch:12 return "x86"13 else:14 return "arm"15def _get_os():16 return platform.system()17def install_tvm(working_dir: str = None):18 """Helper function for installing ApacheTVM.19 This function needs some prerequisites for running, as a valid `git`20 installation and having MacOS or a Linux-distribution as OS.21 Args:22 working_dir (str, optional): The directory where the tvm repo will be23 cloned and installed.24 """25 path = Path(__file__).parent26 # install pre-requisites27 installation_file_prerequisites = str(28 path / "install_tvm_prerequisites.sh"29 )30 subprocess.run(31 ["bash", installation_file_prerequisites],32 cwd=working_dir or Path.home(),33 )34 installation_file = str(path / "install_tvm.sh")35 hardware_config = get_cpu_arch()36 if torch.cuda.is_available():37 hardware_config = f"{hardware_config}_cuda"38 env_dict = {39 "CONFIG_PATH": str(40 path / f"tvm_installers/{hardware_config}/config.cmake"41 ),42 **dict(os.environ.copy()),43 }44 subprocess.run(45 ["bash", installation_file],46 cwd=working_dir or Path.home(),47 env=env_dict,48 )49def install_bladedisc():50 """Helper function for installing BladeDisc."""51 has_cuda = False52 if torch.cuda.is_available():53 has_cuda = True54 path = Path(__file__).parent55 installation_file = str(path / "install_bladedisc.sh")56 subprocess.Popen(["bash", installation_file, str(has_cuda).lower()])57def install_torch_tensor_rt():58 """Helper function for installing Torch-TensorRT.59 The function will install the software only if a cuda driver is available.60 """61 if not torch.cuda.is_available():62 raise RuntimeError(63 "Torch-TensorRT can run just on Nvidia machines. 
"64 "No available cuda driver has been found."65 )66 elif not check_module_version(torch, min_version="1.12.0"):67 raise RuntimeError(68 "Torch-TensorRT can be installed only from Pytorch 1.12. "69 "Please update your Pytorch version."70 )71 # Verify that TensorRT is installed, otherwise install it72 try:73 import tensorrt # noqa F40174 except ImportError:75 install_tensor_rt()76 # cmd = [77 # "pip3",78 # "install",79 # "torch-tensorrt>=1.2.0",80 # "-f",81 # "https://github.com/pytorch/TensorRT/releases",82 # ]83 cmd = [84 "pip3",85 "install",86 "torch-tensorrt",87 "--find-links",88 "https://github.com/pytorch/TensorRT/releases/expanded_assets/v1.2.0",89 ]90 subprocess.run(cmd)91def install_tf2onnx():92 cmd = ["pip3", "install", "tf2onnx>=1.8.4"]93 subprocess.run(cmd)94def install_tensorflow():95 # Tensorflow 2.10 for now it's not supported96 # Will be supported when tf2onnx library will support flatbuffers >= 2.x97 cmd = ["pip3", "install", "tensorflow>=2.7.0,<2.10"]98 subprocess.run(cmd)99def install_tensor_rt():100 """Helper function for installing TensorRT.101 The function will install the software only if a cuda driver is available.102 """103 if not torch.cuda.is_available():104 raise RuntimeError(105 "TensorRT can run just on Nvidia machines. "106 "No available cuda driver has been found."107 )108 path = Path(__file__).parent109 installation_file = str(path / "install_tensor_rt.sh")110 subprocess.run(["bash", installation_file])111def install_openvino(with_optimization: bool = True):112 """Helper function for installing the OpenVino compiler.113 This function just works on intel machines.114 Args:115 with_optimization (bool): Flag for installing the full openvino engine116 or limiting the installation to the tools need for inference117 models.118 """119 processor = cpuinfo.get_cpu_info()["brand_raw"].lower()120 if "intel" not in processor:121 raise RuntimeError(122 f"Openvino can run just on Intel machines. 
"123 f"You are trying to install it on {processor}"124 )125 openvino_version = "openvino-dev" if with_optimization else "openvino"126 cmd = ["pip3", "install", f"{openvino_version}[onnx]"]127 subprocess.run(cmd)128 # Reinstall updated versions of libraries that were downgraded by openvino129 cmd = ["pip3", "install", "onnx>=1.12"]130 subprocess.run(cmd)131 cmd = ["pip3", "install", "scipy>=1.7.3"]132 subprocess.run(cmd)133def install_onnxruntime():134 """Helper function for installing the right version of onnxruntime."""135 distribution_name = "onnxruntime"136 if torch.cuda.is_available():137 distribution_name = f"{distribution_name}-gpu"138 if _get_os() == "Darwin" and get_cpu_arch() == "arm":139 cmd = ["conda", "install", "-y", distribution_name]140 else:141 cmd = ["pip3", "install", distribution_name]142 subprocess.run(cmd)143 # install requirements for onnxruntime.transformers144 cmd = ["pip3", "install", "coloredlogs", "sympy"]145 subprocess.run(cmd)146def install_deepsparse():147 """Helper function for installing DeepSparse."""148 python_minor_version = sys.version_info.minor149 cmd = ["apt-get", "install", f"python3.{python_minor_version}-venv"]150 subprocess.run(cmd)151 cmd = ["pip3", "install", "deepsparse"]152 subprocess.run(cmd)153def install_intel_neural_compressor():154 """Helper function for installing Intel Neural Compressor."""155 cmd = ["pip3", "install", "neural-compressor"]156 subprocess.run(cmd)157def install_onnx_simplifier():158 """Helper function for installing ONNX simplifier."""159 if get_cpu_arch() != "arm":160 # Install onnx simplifier161 cmd = ["pip3", "install", "onnxsim"]...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, through following best practices, to diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run autotest automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful