How to use the run_in_parallel method in lisa

Below are Python code snippets, collected from open-source projects on GitHub, that define or use a run_in_parallel helper.
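All three snippets follow the same basic pattern: hand a batch of callables (or argument tuples) to worker threads or processes, wait for all of them to finish, and surface any failure. A minimal, generic sketch of that pattern is shown here; the fan_out name and the use of concurrent.futures are illustrative assumptions, not taken from any of the snippets below.

from concurrent.futures import ThreadPoolExecutor

def fan_out(tasks):
    """Run zero-argument callables concurrently and return their results in order."""
    with ThreadPoolExecutor() as pool:
        futures = [pool.submit(task) for task in tasks]
        # .result() re-raises any exception thrown inside a worker.
        return [future.result() for future in futures]

if __name__ == "__main__":
    print(fan_out([lambda: 1 + 1, lambda: 2 * 3]))  # [2, 6]

Each snippet below specializes this idea for its own use case: a training-time debugger worker thread, a pyOCD hardware test, and batched subprocess jobs.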

debugger.py

Source: debugger.py on GitHub


#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Debugger to save results during training.
"""

from __future__ import annotations

import threading
from queue import Queue
from typing import Optional

from torch import Tensor

from torchkit.core.type import Callable
from torchkit.core.utils import console

__all__ = [
    "Debugger"
]


# MARK: - Debugger

class Debugger:
    """

    Attributes:
        every_n_epochs (int):
            Number of epochs between debugging. To disable, set
            `every_n_epochs=0`. Default: `1`.
        run_in_parallel (bool):
            If `True`, runs the debugging process in a separate thread.
            Default: `True`.
        queue_size (int):
            Debug queue size. It should equal the value of `save_max_n`.
            Default: `20`.
        save_max_n (int):
            Maximum debugging items to be kept. Default: `20`.
        save_to_subdir (bool):
            Save all debug images of the same epoch to a sub-directory named
            after the epoch number. Default: `True`.
        image_quality (int):
            Image quality to be saved. Default: `95`.
        verbose (bool):
            If `True`, shows the results on the screen. Default: `False`.
        show_max_n (int):
            Maximum debugging items to be shown. Default: `8`.
        show_func (Callable, optional):
            Function to visualize the debug results. Default: `None`.
        wait_time (float):
            Pause before showing the next image. Default: `0.001`.
    """

    # MARK: Magic Functions

    def __init__(
        self,
        every_n_epochs : int                = 1,
        run_in_parallel: bool               = True,
        queue_size     : Optional[int]      = 20,
        save_max_n     : int                = 20,
        save_to_subdir : bool               = True,
        image_quality  : int                = 95,
        verbose        : bool               = False,
        show_max_n     : int                = 8,
        show_func      : Optional[Callable] = None,
        wait_time      : float              = 0.001,
        *args, **kwargs
    ):
        super().__init__()
        self.every_n_epochs  = every_n_epochs
        self.run_in_parallel = run_in_parallel
        self.queue_size      = queue_size
        self.save_max_n      = save_max_n
        self.save_to_subdir  = save_to_subdir
        self.image_quality   = image_quality
        self.verbose         = verbose
        self.show_max_n      = show_max_n
        self.show_func       = show_func
        self.wait_time       = wait_time
        self.debug_queue     = None
        self.thread_debugger = None
        # self.init_thread()

    # MARK: Configure

    def init_thread(self):
        if self.run_in_parallel:
            self.debug_queue     = Queue(maxsize=self.queue_size)
            self.thread_debugger = threading.Thread(
                target=self.show_results_parallel
            )

    # MARK: Run

    def run(
        self,
        input   : Optional[Tensor] = None,
        target  : Optional[Tensor] = None,
        pred    : Optional[Tensor] = None,
        filepath: Optional[str]    = None,
    ):
        """Run the debugger process."""
        if self.show_func:
            if self.thread_debugger:
                self.debug_queue.put([input, target, pred, filepath])
            else:
                self.show_results(
                    input=input, target=target, pred=pred, filepath=filepath
                )

    def run_routine_start(self):
        """Perform operations when the run routine starts."""
        self.init_thread()
        if self.thread_debugger and not self.thread_debugger.is_alive():
            self.thread_debugger.start()

    def run_routine_end(self):
        """Perform operations when the run routine ends."""
        if self.thread_debugger and self.thread_debugger.is_alive():
            self.debug_queue.put([None, None, None, None])

    def is_alive(self) -> bool:
        """Return whether the thread is alive."""
        if self.thread_debugger:
            return self.thread_debugger.is_alive()
        return False

    # MARK: Visualize

    def show_results(
        self,
        input   : Optional[Tensor] = None,
        target  : Optional[Tensor] = None,
        pred    : Optional[Tensor] = None,
        filepath: Optional[str]    = None,
        *args, **kwargs
    ):
        self.show_func(
            input         = input,
            target        = target,
            pred          = pred,
            filepath      = filepath,
            image_quality = self.image_quality,
            verbose       = self.verbose,
            show_max_n    = self.show_max_n,
            wait_time     = self.wait_time,
            *args, **kwargs
        )

    def show_results_parallel(self):
        """Draw `result` in a separate thread."""
        while True:
            (input, target, pred, filepath) = self.debug_queue.get()
            if input is None:
                break
            self.show_results(
                input=input, target=target, pred=pred, filepath=filepath
            )

        # Stop debugger thread
        self.thread_debugger.join()

    # MARK: Utils

    def print(self):
        ...
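In this class, the run_in_parallel flag only decides whether init_thread builds debug_queue and a worker thread; when it does, run enqueues each debug item and show_results_parallel drains the queue until it sees the [None, None, None, None] sentinel pushed by run_routine_end. Below is a minimal usage sketch, assuming the Debugger class above is importable and substituting a hypothetical print-based show_func for a real visualizer.

import torch  # the Debugger passes torch Tensors through the queue

def my_show_func(input, target, pred, filepath, image_quality,
                 verbose, show_max_n, wait_time, *args, **kwargs):
    # Hypothetical visualizer; a real one would render or save images.
    print(f"debug: pred shape={tuple(pred.shape)}, filepath={filepath}")

debugger = Debugger(run_in_parallel=True, queue_size=20, show_func=my_show_func)
debugger.run_routine_start()            # creates the queue and starts the worker thread
for step in range(3):
    batch = torch.rand(1, 3, 8, 8)
    debugger.run(input=batch, target=batch, pred=batch,
                 filepath=f"step_{step}.png")
debugger.run_routine_end()              # enqueues the sentinel so the worker loop exits

Note that input must not be None when run_in_parallel is enabled, because show_results_parallel treats a None input as the stop sentinel. Also, the final self.thread_debugger.join() executes inside the worker thread itself, which raises a RuntimeError; a clean shutdown would normally join the thread from the caller instead.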


parallel_test.py

Source: parallel_test.py on GitHub


...
from pyocd.core.helpers import ConnectHelper
from pyocd.probe.pydapaccess import DAPAccess
import threading
import multiprocessing

def run_in_parallel(function, args_list):
    """Create and run a thread in parallel for each element in args_list.

    Wait until all threads finish executing. Throw an exception if an
    exception occurred on any of the threads.
    """
    def _thread_helper(idx, func, args):
        """Run the function and set result to True if there was no error."""
        func(*args)
        result_list[idx] = True

    result_list = [False] * len(args_list)
    thread_list = []
    for idx, args in enumerate(args_list):
        thread = threading.Thread(target=_thread_helper,
                                  args=(idx, function, args))
        thread.start()
        thread_list.append(thread)
    for thread in thread_list:
        thread.join()
    for result in result_list:
        if result is not True:
            raise Exception("Running in thread failed")

def run_in_processes(function, args_list):
    """Create and run a process in parallel for each element in args_list.

    Wait until all processes finish executing. Throw an exception if an
    exception occurred on any of the processes.
    """
    process_list = []
    for args in args_list:
        process = multiprocessing.Process(target=function, args=args)
        process.start()
        process_list.append(process)
    error_during_run = False
    for process in process_list:
        process.join()
        if process.exitcode != 0:
            error_during_run = True
    if error_during_run:
        raise Exception("Running in process failed")

def list_boards(id_list):
    """List all connected DAPLink boards repeatedly.

    Assert that they are the same as the id list passed in.
    """
    for _ in range(0, 20):
        device_list = DAPAccess.get_connected_devices()
        found_id_list = [device.get_unique_id() for device in device_list]
        found_id_list.sort()
        assert id_list == found_id_list, "Expected %s, got %s" % \
            (id_list, found_id_list)

def search_and_lock(board_id):
    """Repeatedly lock a board with the given ID."""
    for _ in range(0, 20):
        device = DAPAccess.get_device(board_id)
        device.open()
        device.close()
        with ConnectHelper.session_with_chosen_probe(board_id=board_id):
            pass

def open_already_opened(board_id):
    """Open a device that is already open to verify it gives an error."""
    device = DAPAccess.get_device(board_id)
    try:
        device.open()
        assert False
    except DAPAccess.DeviceError:
        pass

def parallel_test():
    """Test that devices can be found and opened in parallel."""
    device_list = DAPAccess.get_connected_devices()
    id_list = [device.get_unique_id() for device in device_list]
    id_list.sort()
    if len(id_list) < 2:
        print("Need at least 2 boards to run the parallel test")
        exit(-1)
    # Goal of this file is to test that:
    # - The process of listing available boards does not interfere
    #   with other processes enumerating, opening, or using boards
    # - Opening and using a board does not interfere with other
    #   processes which are enumerating, opening, or using boards,
    #   as long as it is not the current board
    print("Listing board from multiple threads at the same time")
    args_list = [(id_list,) for _ in range(5)]
    run_in_parallel(list_boards, args_list)
    print("Listing board from multiple processes at the same time")
    run_in_processes(list_boards, args_list)
    print("Opening same board from multiple threads at the same time")
    device = DAPAccess.get_device(id_list[0])
    device.open()
    open_already_opened(id_list[0])
    args_list = [(id_list[0],) for _ in range(5)]
    run_in_parallel(open_already_opened, args_list)
    device.close()
    print("Opening same board from multiple processes at the same time")
    device = DAPAccess.get_device(id_list[0])
    device.open()
    open_already_opened(id_list[0])
    args_list = [(id_list[0],) for _ in range(5)]
    run_in_processes(open_already_opened, args_list)
    device.close()
    print("Opening different boards from different threads")
    args_list = [(board_id,) for board_id in id_list]
    run_in_parallel(search_and_lock, args_list)
    print("Opening different boards from different processes")
    run_in_processes(search_and_lock, args_list)
    print("Test passed")

if __name__ == "__main__":
    multiprocessing.set_start_method('spawn')
    ...
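The run_in_parallel helper above is self-contained and reusable outside pyOCD: each argument tuple gets its own thread, and because _thread_helper only flips result_list[idx] to True after func returns, any exception leaves the flag at False and is reported once every thread has been joined. A standalone usage sketch, assuming run_in_parallel from the snippet is in scope; check_device and its arguments are made up for illustration.

import time

def check_device(name, delay):
    # Hypothetical stand-in for real work such as probing a board.
    time.sleep(delay)
    print(f"checked {name}")

# One tuple per thread; each tuple is unpacked as the function's arguments.
args_list = [("board-0", 0.1), ("board-1", 0.1), ("board-2", 0.1)]
run_in_parallel(check_device, args_list)  # raises if any thread failed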


subsplit.py

Source: subsplit.py on GitHub


#!/bin/python3
# Script to run cvat_vsm on a directory of Sentinel-2 products.
#
# Copyright 2021 KappaZeta Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import argparse
import subprocess

def subsplit(path_src):
    """
    Process a Sentinel-2 product.
    """
    subprocess.run(["cm_vsm", "-d", path_src])

def main():
    parser = argparse.ArgumentParser(description='Subsplit all S2 products in .SAFE directories')
    parser.add_argument('s2_path', type=str, help='Path to the directory with S2 products')
    parser.add_argument('-j', '--num-jobs', type=int, default=1, help='Number of jobs to process in parallel')
    args = parser.parse_args()

    # TODO:: Turn into command-line arguments.
    run_in_parallel = True

    paths = []
    # Map reduced product names to shapefile paths.
    for root, dirs, files in os.walk(args.s2_path):
        if root.endswith(".SAFE"):
            paths.append(root)
            print(root)
            if not run_in_parallel:
                subsplit(root)

    # Run all splittings in parallel.
    if run_in_parallel:
        commands = [["cm_vsm", "-d", p] for p in paths]
        for i in range(0, len(commands), args.num_jobs):
            cmds = commands[i:(i + args.num_jobs)]
            procs = [subprocess.Popen(p) for p in cmds]
            for p in procs:
                p.wait()

if __name__ == "__main__":
    ...
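The parallel branch above uses a simple batching pattern: slice the command list into chunks of num_jobs, start one subprocess.Popen per command in the chunk, and wait for the whole chunk to finish before launching the next. The same pattern in isolation is sketched below, with illustrative python -c commands standing in for the real cm_vsm invocations so it can be tried anywhere.

import subprocess
import sys

# Illustrative commands; the real script builds ["cm_vsm", "-d", path] entries.
commands = [[sys.executable, "-c", f"print('processing product {i}')"] for i in range(7)]
num_jobs = 3

# Run at most num_jobs subprocesses at a time, waiting for each batch to finish.
for i in range(0, len(commands), num_jobs):
    batch = commands[i:i + num_jobs]
    procs = [subprocess.Popen(cmd) for cmd in batch]
    for proc in procs:
        proc.wait()

One limitation of this chunked approach (shared with the original script) is that a single slow job stalls its whole batch; a process pool would keep all num_jobs slots busy instead.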


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run lisa automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

