How to use hot_add_disk_serial method in lisa

Best Python code snippet using lisa_python

storage.py

Source: storage.py (GitHub)


        ...
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskStandardHDDLRS()),
    )
    def hot_add_disk_serial(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_serial(
            log, node, DiskType.StandardHDDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that standard ssd data disks can
        be added serially while the vm is running. The test steps are the same as
        `hot_add_disk_serial`.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskStandardSSDLRS()),
    )
    def hot_add_disk_serial_standard_ssd(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_serial(
            log, node, DiskType.StandardSSDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that premium ssd data disks can
        be added serially while the vm is running. The test steps are the same as
        `hot_add_disk_serial`.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskPremiumSSDLRS()),
    )
    def hot_add_disk_serial_premium_ssd(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_serial(
            log, node, DiskType.PremiumSSDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that standard HDD data disks can
        be added in one go (parallel) while the vm is running.
        Steps:
        1. Get maximum number of data disks for the current vm_size.
        2. Get the number of data disks already added to the vm.
        3. Add maximum number of data disks to the VM in parallel.
        4. Verify that the added disks are available in the OS.
        5. Remove the disks from the vm in parallel.
        6. Verify that the disks are removed from the OS.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskStandardHDDLRS()),
    )
    def hot_add_disk_parallel(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_parallel(
            log, node, DiskType.StandardHDDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that standard ssd data disks can
        be added in parallel while the vm is running. The test steps are the same as
        `hot_add_disk_parallel`.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskStandardSSDLRS()),
    )
    def hot_add_disk_parallel_standard_ssd(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_parallel(
            log, node, DiskType.StandardSSDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that premium ssd data disks can
        be added in parallel while the vm is running. The test steps are the same as
        `hot_add_disk_parallel`.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(disk=DiskPremiumSSDLRS()),
    )
    def hot_add_disk_parallel_premium_ssd(self, log: Logger, node: Node) -> None:
        self._hot_add_disk_parallel(
            log, node, DiskType.PremiumSSDLRS, self.DEFAULT_DISK_SIZE_IN_GB
        )

    @TestCaseMetadata(
        description="""
        This test case will verify that an Azure NFS share can be mounted on the guest successfully.
        """,
        timeout=TIME_OUT,
        requirement=simple_requirement(supported_features=[Nfs]),
        priority=2,
    )
    def verify_azure_file_share_nfs(self, log: Logger, node: Node) -> None:
        nfs = node.features[Nfs]
        mount_dir = "/mount/azure_share"
        nfs.create_share()
        storage_account_name = nfs.storage_account_name
        mount_nfs = f"{storage_account_name}.file.core.windows.net"
        server_shared_dir = f"{nfs.storage_account_name}/{nfs.file_share_name}"
        try:
            node.tools[NFSClient].setup(
                mount_nfs,
                server_shared_dir,
                mount_dir,
            )
        except Exception as identifier:
            raise LisaException(
                f"fail to mount {server_shared_dir} into {mount_dir}: "
                f"{identifier.__class__.__name__}: {identifier}."
            )
        finally:
            nfs.delete_share()
            node.tools[NFSClient].stop(mount_dir)

    def after_case(self, log: Logger, **kwargs: Any) -> None:
        node: Node = kwargs["node"]
        disk = node.features[Disk]
        # cleanup any disks added as part of the test
        # If the cleanup operation fails, mark node to be recycled
        try:
            disk.remove_data_disk()
        except Exception:
            raise BadEnvironmentStateException

    def _hot_add_disk_serial(
        self, log: Logger, node: Node, type: DiskType, size: int
    ) -> None:
        disk = node.features[Disk]
        lsblk = node.tools[Lsblk]
        # get max data disk count for the node
        assert node.capability.disk
        assert isinstance(node.capability.disk.max_data_disk_count, int)
        max_data_disk_count = node.capability.disk.max_data_disk_count
        log.debug(f"max_data_disk_count: {max_data_disk_count}")
        # get the number of data disks already added to the vm
        assert isinstance(node.capability.disk.data_disk_count, int)
        current_data_disk_count = node.capability.disk.data_disk_count
        log.debug(f"current_data_disk_count: {current_data_disk_count}")
        # disks to be added to the vm...
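The listing stops just after `_hot_add_disk_serial` has computed `max_data_disk_count` and `current_data_disk_count`. The sketch below illustrates how such a serial hot-add loop typically continues: attach one data disk at a time, confirm each new disk is visible to the OS through `lsblk`, then detach everything and confirm the baseline is restored. This is an illustrative sketch, not the project's verbatim code; it assumes the `Disk` feature exposes `add_data_disk(count, disk_type, size_in_gb)` and that `Lsblk.get_disks(force_run=True)` re-reads the block devices, which matches recent LISA sources but should be verified against the version you are running.

        # Illustrative continuation of _hot_add_disk_serial (a sketch, not the
        # upstream implementation). Reuses disk, lsblk, max_data_disk_count,
        # current_data_disk_count, type and size from the method body above.
        # Assumes "from assertpy import assert_that" at module level, as is
        # common in LISA test suites.

        # number of free data-disk slots left on this VM size
        disks_to_add = max_data_disk_count - current_data_disk_count

        # baseline view of the block devices before any hot-add
        baseline = len(lsblk.get_disks(force_run=True))  # assumed Lsblk API
        added_disks = []

        # add one disk at a time and confirm each one shows up in the OS
        for i in range(disks_to_add):
            added_disks += disk.add_data_disk(1, type, size)  # assumed Disk feature API
            assert_that(len(lsblk.get_disks(force_run=True))).described_as(
                "lsblk should report one more disk after each hot-add"
            ).is_equal_to(baseline + i + 1)

        # detach everything again and confirm the OS view returns to the baseline
        disk.remove_data_disk(added_disks)
        assert_that(len(lsblk.get_disks(force_run=True))).described_as(
            "lsblk should report the original disk count after removal"
        ).is_equal_to(baseline)

A parallel variant would differ mainly in adding all remaining disks with a single call and verifying the count once, which is what the `hot_add_disk_parallel` descriptions above outline.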


