Source: qatrack_file_upload.py
from datetime import datetime
from pathlib import Path

from qcpump.pumps.base import STRING, BasePump, DIRECTORY, BOOLEAN, MULTCHOICE
from qcpump.pumps.common.qatrack import QATrackFetchAndPostTextFile, QATrackFetchAndPostBinaryFile


class BaseQATrackGenericUploader:

    TEST_LIST_CONFIG = {
        'name': "Test List",
        'multiple': False,
        'dependencies': ["QATrack+ API"],
        'validation': 'validate_test_list',
        'fields': [
            {
                'name': 'name',
                'type': STRING,
                'required': True,
                'help': "Enter the name of the Test List you want to upload data to.",
            },
            {
                'name': 'slug',
                'label': "Test Macro Name",
                'type': STRING,
                'required': True,
                'help': "Enter the macro name of the Upload test in this test list.",
                'default': 'upload',
            },
        ]
    }

    FILE_TYPE_CONFIGS = {
        'name': 'File Types',
        'fields': [
            {
                'name': 'recursive',
                'type': BOOLEAN,
                'required': True,
                'default': False,
                'help': "Should files from subdirectories be included?",
            },
            {
                'name': 'pattern',
                'type': STRING,
                'required': True,
                'default': "*",
                'help': (
                    "Enter a file globbing pattern (e.g. 'some-name-*.txt') to only "
                    "include certain files. Use '*' to include all files."
                ),
            },
            {
                'name': 'ignore pattern',
                'type': STRING,
                'required': True,
                'default': "",
                'help': (
                    "Enter a file globbing pattern (e.g. 'some-name-*.txt') to ignore "
                    "certain files. Leave blank to not exclude any files."
                ),
            },
        ],
    }

    DIRECTORY_CONFIG = {
        'name': 'Directories',
        'multiple': True,
        'validation': 'validate_source_dest',
        'dependencies': ["QATrack+ API"],
        'fields': [
            {
                'name': 'unit name',
                'label': "QATrack+ Unit Name",
                'type': MULTCHOICE,
                'required': True,
                'help': "Select the name of the unit in the QATrack+ database",
                'choices': 'get_qatrack_unit_choices',
            },
            {
                'name': 'source',
                'type': DIRECTORY,
                'required': True,
                'help': "Enter the root directory you want to read files from.",
            },
            {
                'name': 'destination',
                'type': DIRECTORY,
                'required': True,
                'help': (
                    "Enter the target directory that you want to move files to after they are uploaded. "
                    "(Leave blank if you don't want the files to be moved)"
                ),
            },
        ],
    }

    def validate_source_dest(self, values):
        """Ensure that the source directory is set and exists."""
        valid = values['source'] and Path(values['source']).is_dir()
        msg = "OK" if valid else "You must set a valid source directory"
        return valid, msg

    def validate_test_list(self, values):
        """Ensure a test list name and macro name are given."""
        valid = bool(values['name'] and values['slug'])
        msgs = []
        if not values['name']:
            msgs.append("You must set a test list name")
        if not values['slug']:
            msgs.append("You must set a test macro name")
        return valid, "OK" if valid else '\n'.join(msgs)

    def fetch_records(self):
        searcher_config = self.get_config_values("File Types")[0]
        records = []
        self.move_to = {}
        for unit_dir in self.get_config_values("Directories"):
            path_searcher = searcher_config.copy()
            path_searcher.update(unit_dir)
            from_dir = path_searcher['source']
            to_dir = path_searcher['destination']
            paths = self.get_paths(path_searcher)
            for path in paths:
                move_to = Path(str(path).replace(from_dir, to_dir)) if to_dir else None
                records.append((unit_dir['unit name'], path, move_to))
        return records

    def post_process(self, record):
        unit, path, move_to = record
        try:
            move_to.parent.mkdir(parents=True, exist_ok=True)
            path.replace(move_to)
            msg = f"Moved {path} to {move_to}"
        except Exception as e:
            msg = f"Failed to move {path} to {move_to}: {e}"
        self.log_info(msg)

    def get_paths(self, mover):
        """Get a listing of all files in our source directory and filter them based on our config options."""
        globber = self.construct_globber(mover['pattern'], mover['recursive'])
        self.log_debug(f"Getting paths with globber: '{globber}' and mover: {mover}")
        all_paths = Path(mover['source']).glob(globber)
        return self.filter_paths(all_paths, mover['ignore pattern'])

    def construct_globber(self, pattern, recursive):
        """Construct a globber for reading from our source directory."""
        return f"**/{pattern}" if recursive else pattern

    def filter_paths(self, paths, ignore_pattern):
        """Filter out any paths that match our ignore pattern."""
        paths = (p for p in paths if not p.is_dir())
        if ignore_pattern in ["", None]:
            return list(paths)
        return [p for p in paths if not p.match(f"*/{ignore_pattern}")]

    def test_list_for_record(self, record):
        """Use the same test list name for all files."""
        return self.get_config_value("Test List", "name")

    def qatrack_unit_for_record(self, record):
        """Accept a record to process and return a QATrack+ Unit name."""
        unit, path, move_to = record
        return unit

    def id_for_record(self, record):
        unit, path, move_to = record
        modified = datetime.fromtimestamp(path.stat().st_mtime).isoformat()
        return f"QCPump/GenericTextFileUploader/{unit}/{modified}/{path.stem}"

    def slug_and_filename_for_record(self, record):
        unit, path, move_to = record
        slug = self.get_config_value("Test List", "slug")
        return slug, path.stem


class QATrackGenericTextFileUploader(BaseQATrackGenericUploader, QATrackFetchAndPostTextFile, BasePump):

    DISPLAY_NAME = "QATrack+ File Upload: Generic Text File"
    HELP_URL = "https://qcpump.qatrackplus.com/en/stable/pumps/qatrack_file_upload.html"

    CONFIG = [
        QATrackFetchAndPostTextFile.QATRACK_API_CONFIG,
        BaseQATrackGenericUploader.TEST_LIST_CONFIG,
        BaseQATrackGenericUploader.FILE_TYPE_CONFIGS,
        BaseQATrackGenericUploader.DIRECTORY_CONFIG,
    ]


class QATrackGenericBinaryFileUploader(BaseQATrackGenericUploader, QATrackFetchAndPostBinaryFile, BasePump):

    DISPLAY_NAME = "QATrack+ File Upload: Generic Binary File"
    HELP_URL = "https://qcpump.qatrackplus.com/en/stable/pumps/qatrack_file_upload.html"

    CONFIG = [
        QATrackFetchAndPostTextFile.QATRACK_API_CONFIG,
        BaseQATrackGenericUploader.TEST_LIST_CONFIG,
        BaseQATrackGenericUploader.FILE_TYPE_CONFIGS,
        BaseQATrackGenericUploader.DIRECTORY_CONFIG,
    ]
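To see how the 'pattern', 'ignore pattern', and 'recursive' options interact, here is a minimal standalone sketch of the same globbing logic using only pathlib. The source directory and file patterns below are hypothetical, chosen just for illustration:

from pathlib import Path

def construct_globber(pattern, recursive):
    # Recursive searches prefix "**/" so glob() descends into subdirectories.
    return f"**/{pattern}" if recursive else pattern

def filter_paths(paths, ignore_pattern):
    # Drop directories first, then anything matching the ignore pattern.
    paths = (p for p in paths if not p.is_dir())
    if ignore_pattern in ["", None]:
        return list(paths)
    return [p for p in paths if not p.match(f"*/{ignore_pattern}")]

# Hypothetical usage: include everything under /data/qa, but skip *.bak files.
source = Path("/data/qa")
globber = construct_globber("*", recursive=True)
for p in filter_paths(source.glob(globber), "*.bak"):
    print(p)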
Source: synthesizer.py
import gc  # needed by synthesis_at's periodic gc.collect() (missing in the original)
import time
import traceback
from concurrent import futures

import numpy as np

from optable.synthesis import path_searcher as path_searcher_mod
import optable.synthesis.manipulation_candidate as mc_mod
from optable import _core


class Synthesizer(object):

    def __init__(self, dataset, timer, priority_perturbation=0):
        self.priority_perturbation = priority_perturbation
        self.timer = timer
        max_depth = max([3, dataset.max_depth + 1])
        path_searcher = path_searcher_mod.PathSearcher(max_depth)
        self.__paths = path_searcher.search(dataset)
        manipulations = []
        for path in self.__paths:
            for manipulation_candidate in mc_mod.manipulation_candidates:
                manipulations += manipulation_candidate.search(path, dataset)
        self.__manipulations = manipulations
        self.sort()
        self.__feature_num = None
        self.__timeout = None
        self.__start_time = None
        self.__dataset = dataset

    @property
    def manipulations(self):
        return self.__manipulations

    @property
    def paths(self):
        return self.__paths

    def sort(self):
        priorities = {manip: manip.priority
                      + self.priority_perturbation * np.random.uniform()
                      for manip in self.__manipulations}
        self.__manipulations = \
            [k for k, v in sorted(priorities.items(), key=lambda x: x[1])]

    def synthesis_at(self, index):
        if (time.time() - self.__start_time) > self.__timeout:
            return
        if self.timer.memory_usage > 13:
            return
        if self.__dataset.tables['main'].new_data_size >= self.__feature_num:
            return
        manipulation = self.__manipulations[index]
        try:
            manipulation.synthesis()
        except Exception:
            traceback.print_exc()
        if index % 10 == 0:
            self.timer.print("{} synthesis finished!".format(index))
            self.timer.print_memory_usage()
        if index % 100 == 0:
            _core.malloc_trim(0)
            gc.collect()

    def synthesis(self, feature_num, timeout):
        self.__feature_num = feature_num
        self.__timeout = timeout
        self.__start_time = time.time()
        self.timer.print("{} synthesis start".format(feature_num))
        with futures.ThreadPoolExecutor(max_workers=4) as executor:
            # Consume the iterator so any exceptions raised in workers surface here.
            list(executor.map(
                self.synthesis_at, list(range(len(self.__manipulations)))))
        if (time.time() - self.__start_time) > self.__timeout:
            self.timer.print("timeout")
        self.__feature_num = None
        self.__timeout = None
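Synthesizer.sort() ranks candidate manipulations by priority plus a random perturbation, so repeated runs with priority_perturbation > 0 explore slightly different orderings while still favouring low-priority-value candidates first. A minimal standalone sketch of that perturbed ordering follows; the Manip dataclass and its priority values are made up for illustration:

from dataclasses import dataclass
import numpy as np

@dataclass(frozen=True)
class Manip:
    name: str
    priority: float

def perturbed_sort(manips, priority_perturbation=0.0, seed=None):
    rng = np.random.default_rng(seed)
    # Lower perturbed priority sorts first, mirroring Synthesizer.sort().
    priorities = {m: m.priority + priority_perturbation * rng.uniform()
                  for m in manips}
    return [m for m, _ in sorted(priorities.items(), key=lambda kv: kv[1])]

manips = [Manip("agg_mean", 2.0), Manip("join_count", 1.0), Manip("target_enc", 1.5)]
print([m.name for m in perturbed_sort(manips, priority_perturbation=1.0, seed=0)])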
Source: lab05.py
import random

from Lab05.classes.network import Network
from Lab05.classes.connection import Connection


def path_searcher(N, strat, node_list):
    # finding paths based on best latency:
    connections = list()
    power = 0.001
    for i in node_list:
        # REMOVED POWER
        connections.append(Connection(i[0], i[1], power))
    used_paths = N.stream(connections)
    path_cnt = 0
    for i in connections:
        print("\nConnection " + strat + ": " + str(i.input + "->" + i.output), end='')
        used_path = 'None'
        if i.snr != 0:
            used_path = used_paths[path_cnt]
            path_cnt += 1
        print("\t\t\tBest available latency path: " + used_path)
        print("Latency: " + str(i.latency), end='')
        print("\t\tSNR: " + str(i.snr))
        print("\t\tBit rate: " + str(i.bit_rate))
    # freeing lines
    for i in N.lines.values():
        for j in range(N.number_of_channels):
            i.set_state(j, 1)
    N.route_space.to_csv('used_paths_' + strat + '_lat.csv')
    N.reset_route_space()
    # finding paths based on best snr:
    connections = list()
    for i in node_list:
        connections.append(Connection(i[0], i[1], power))
    used_paths = N.stream(connections, 'snr')
    path_cnt = 0
    for i in connections:
        print("\nConnection " + strat + ": " + str(i.input + "->" + i.output), end='')
        used_path = 'None'
        if i.snr != 0:
            used_path = used_paths[path_cnt]
            path_cnt += 1
        print("\t\t\tBest available SNR path found: " + used_path)
        print("Latency: " + str(i.latency), end='')
        print("\t\tSNR: " + str(i.snr))
        print("\t\tBit rate: " + str(i.bit_rate))
    N.route_space.to_csv('used_paths_' + strat + '_snr.csv')


if __name__ == '__main__':
    N_fixed = Network(10, 'json_files/nodes_fixed.json')
    N_flex = Network(10, 'json_files/nodes_flex.json')
    N_shannon = Network(10, 'json_files/nodes_shannon.json')
    N_fixed.connect()
    N_flex.connect()
    N_shannon.connect()
    # N.draw()
    nodes = list(N_fixed.nodes.keys())  # random.sample() requires a sequence, not a dict view
    node_list = list()
    for i in range(100):
        node_list.append(random.sample(nodes, 2))
    # Fixed-rate
    path_searcher(N_fixed, 'fixed_rate', node_list)
    # Flex-rate
    path_searcher(N_flex, 'flex_rate', node_list)
    # Shannon
    path_searcher(N_shannon, 'shannon', node_list)
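One detail worth noting: random.sample() requires a sequence, which is why the node keys are wrapped in list() above; passing a dict view was deprecated in Python 3.9 and raises TypeError from 3.11 on. A quick standalone check with made-up node labels:

import random

# Made-up node labels standing in for N_fixed.nodes.keys().
nodes = list({"A": 0, "B": 0, "C": 0, "D": 0, "E": 0}.keys())
random.seed(0)  # deterministic for the example
node_list = [random.sample(nodes, 2) for _ in range(3)]
print(node_list)  # three [input, output] pairs; the two endpoints in a pair are always distinct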
