How to use prepare_metadata method in avocado

Best Python code snippet using avocado_python

dso_map.py

Source:dso_map.py Github

copy

Full Screen

# ======== INPUT SETTINGS (module-level switches read by prepare_metadata) ====
write_case_config = True  # Set True to update the QMax values in the system_case_config FNCS array
find_county = False  # Set True to look up county from latitude/longitude via the FCC API
write_industrials = False  # Set True if you want to write out the industrial load tape.
# ======== END INPUT SETTINGS ========================


def prepare_metadata(node, end_row, feeder_mode, high_renewables_case, DSO_Load_Threshold):
    """Build the per-DSO metadata JSON for one bus system from bus_mapping.xlsx.

    Reads per-bus attributes from sheet '<node>BusValues', merges them into
    'metadata-general.json' and writes '<node>[-hi]-metadata-<feeder_mode>.json'.
    Depending on the module-level switches it also rewrites the FNCS 'DSO'
    array in the system case config and emits an industrial load tape.

    :param node: bus-system prefix for sheet/file names (e.g. '8' or '200')
    :param end_row: one past the last spreadsheet row to read (data starts at row 2)
    :param feeder_mode: feeder population: 'lean', 'stub', 'skinny' or 'slim'
    :param high_renewables_case: True selects the high-renewables max-load
        column and the '-hi-' / '_hi_' output file names
    :param DSO_Load_Threshold: 'File' to take each bus's simulate flag from the
        spreadsheet, otherwise an average-load threshold in MW
    :raises ValueError: if feeder_mode (plus utility type / climate zone) does
        not select a feeder definition for some bus
    """
    sheet_name = node + 'BusValues'
    if high_renewables_case:
        case_file = node + "-hi-metadata-" + feeder_mode
        config_file = node + "_hi_system_case_config"
    else:
        case_file = node + "-metadata-" + feeder_mode
        config_file = node + "_system_case_config"

    county_fh = None
    if find_county:
        # Keep the handle so the county CSV can be closed (was leaked before).
        county_fh = open("county.csv", "w")
        out = csv.writer(county_fh, delimiter=',', lineterminator='\n')
    if write_industrials:
        dso_list = []
        indust_load_list = []
    use_dso_list = []

    # data_path / case_path and the xlrd, requests, pd imports come from the
    # top of this file (outside this view).
    book = xlrd.open_workbook(data_path + 'bus_mapping.xlsx')
    sheet = book.sheet_by_name(sheet_name)
    with open(data_path + 'metadata-general.json') as json_file:
        data = json.load(json_file)

    # Rows 0-1 are headers; column indices follow the bus_mapping.xlsx layout.
    for irow in range(2, end_row):
        busid = int(sheet.cell(irow, 4).value)
        busname = sheet.cell(irow, 6).value
        climatezone = int(sheet.cell(irow, 7).value)
        ashrae_zone = sheet.cell(irow, 10).value
        blm_zone = int(sheet.cell(irow, 11).value)
        latitude = sheet.cell(irow, 0).value
        longitude = sheet.cell(irow, 1).value
        utiltype = sheet.cell(irow, 8).value
        if utiltype == 'Town':
            utiltype = 'Suburban'  # 'Town' buses are treated as suburban
        ownership_type = sheet.cell(irow, 9).value
        peakseason = sheet.cell(irow, 14).value
        county = sheet.cell(irow, 34).value
        roof_top_PV_MW = sheet.cell(irow, 38).value
        # Column 40 holds the high-renewables max load, column 39 the base one.
        if high_renewables_case:
            max_load = sheet.cell(irow, 40).value
        else:
            max_load = sheet.cell(irow, 39).value
        congestion_factor = sheet.cell(irow, 41).value
        bus_simulated = sheet.cell(irow, 42).value
        total_average_load = sheet.cell(irow, 16).value
        if total_average_load == 0:
            total_average_load = 0.001  # avoid divide-by-zero in the mixes below
        res_average_load = sheet.cell(irow, 19).value
        comm_average_load = sheet.cell(irow, 20).value
        indust_average_load = sheet.cell(irow, 21).value
        res_customers = int(sheet.cell(irow, 25).value)
        comm_customers = int(sheet.cell(irow, 26).value)
        indust_customers = int(sheet.cell(irow, 27).value)
        total_customers = res_customers + comm_customers + indust_customers
        # Need to have non-zero residential customers for prepare case to run
        if total_customers == 0:
            total_customers = 1
            res_customers = 1

        if write_industrials:
            dso_list.append("Bus" + str(busid))
            indust_load_list.append(round(indust_average_load, 1))

        if find_county:
            # Find county by lat/long via https://geo.fcc.gov/api/census/
            response = requests.get("https://geo.fcc.gov/api/census/area?lat=" + str(latitude) +
                                    "&lon=" + str(longitude) + "&format=json")
            test = json.loads(response.content.decode("utf-8"))
            county = test['results'][0]['county_name']
            out.writerow([latitude, longitude, county])

        # Select the GridLAB-D feeder set and model-home count for this bus.
        feeders = None
        num_gld_homes = None
        if feeder_mode == 'lean':
            if utiltype == "Rural":
                feeders = {"feeder1": {"name": "R5-12.47-5", "ercot": False}}
                num_gld_homes = 1539
            elif utiltype == "Suburban":
                feeders = {"feeder1": {"name": "R5-12.47-5", "ercot": False}}
                num_gld_homes = 1539
            elif utiltype == "Urban":
                if climatezone == 3:
                    feeders = {"feeder1": {"name": "R4-12.47-1", "ercot": False},
                               "feeder2": {"name": "R5-12.47-1", "ercot": False}}
                    num_gld_homes = 1525
                if climatezone == 4:
                    feeders = {"feeder1": {"name": "R4-12.47-1", "ercot": False},
                               "feeder2": {"name": "R4-12.47-2", "ercot": False}}
                    num_gld_homes = 893
                if climatezone == 5:
                    feeders = {"feeder1": {"name": "R5-12.47-1", "ercot": False},
                               "feeder2": {"name": "R5-12.47-2", "ercot": False}}
                    num_gld_homes = 1308
        elif feeder_mode == 'stub':
            feeders = {"feeder1": {"name": "GC-12.47-1", "ercot": False}}
            num_gld_homes = 0.1
        elif feeder_mode == 'skinny':
            feeders = {"feeder1": {"name": "R4-25.00-1", "ercot": False}}
            num_gld_homes = 168
        elif feeder_mode == 'slim':
            if busid == 2:
                feeders = {"feeder1": {"name": "R4-12.47-1", "ercot": False}}
                num_gld_homes = 523
            else:
                feeders = {"feeder1": {"name": "GC-12.47-1", "ercot": False}}
                num_gld_homes = 0.1
        if feeders is None:
            # Previously an unmatched combination fell through to a NameError
            # when feeders/num_gld_homes were used below; fail loudly instead.
            raise ValueError("No feeder defined for mode %r, utility %r, climate zone %r"
                             % (feeder_mode, utiltype, climatezone))

        if write_case_config:
            # [bus id, name, gld_scale, Pnom, Qnom, curve_scale, curve_skew, Pinit, Qinit]
            if bus_simulated:
                use_dso_list.append([busid, "Substation_" + str(busid),
                                     (res_customers / num_gld_homes),
                                     max_load * congestion_factor, 0, 0.5, 0, total_average_load, 0])
            else:
                use_dso_list.append([busid, "Substation_" + str(busid),
                                     (res_customers / num_gld_homes),
                                     0, 0, 0.5, 0, total_average_load, 0])

        # Decide whether this DSO is instantiated: from the spreadsheet flag
        # ('File') or by comparing average load against the numeric threshold.
        if DSO_Load_Threshold == 'File':
            dso_simulate = bool(bus_simulated)
        else:
            dso_simulate = total_average_load >= DSO_Load_Threshold

        data["DSO_" + str(busid)] = {
            "bus_number": busid,
            "used": dso_simulate,  # if True the DSO will be instantiated in prepare case and simulated
            "name": busname,
            "climate_zone": climatezone,
            "county": county,
            "latitude": latitude,
            "longitude": longitude,
            "time_zone_offset": -6,
            "utility_type": utiltype,
            "ownership_type": ownership_type,
            "ashrae_zone": ashrae_zone,
            "blm_zone": blm_zone,
            "peak_season": peakseason,
            "substation": "Substation_" + str(busid),
            "random_seed": busid,
            "feeders": feeders,
            "RCI energy mix": {
                "residential": round(res_average_load / total_average_load, 4),
                "commercial": round(comm_average_load / total_average_load, 4),
                "industrial": round(indust_average_load / total_average_load, 4)
            },
            "number_of_customers": total_customers,
            "number_of_gld_homes": num_gld_homes,
            "comm_customers_per_bldg": 2.09,
            "number_of_substations": 1,
            "MVA_growth_rate": 0.01,
            "weather_file": "weather_Bus_" + str(busid) + "_" + str(latitude) + "_" + str(longitude) + ".dat",
            "RCI customer count mix": {
                "residential": round(res_customers / total_customers, 4),
                "commercial": round(comm_customers / total_customers, 4),
                "industrial": round(indust_customers / total_customers, 4)
            },
            "average_load_MW": total_average_load,
            "rooftop_pv_rating_MW": roof_top_PV_MW,
            "winter_peak_MW": 0,
            "summer_peak_MW": 0,
            "capacity_limit_MW": 0,
            "scaling_factor": res_customers / num_gld_homes,
            "total_other_O&M": 0,
            "bilaterals": {
                "price": 11.11,
                "load_fraction": 0.11
            },
            "DSO_system_energy_fraction": 0.11
        }

    if county_fh is not None:
        county_fh.close()

    # -1 presumably discounts a single non-DSO key carried over from
    # metadata-general.json — TODO confirm against that file.
    print("\n=== {0:d} DSOs Defined in Metadata File =====".format(len(data) - 1))

    # Write the merged metadata back out in the original data-file format.
    with open(data_path + case_file + '.json', 'w') as outfile:
        json.dump(data, outfile, indent=2)

    # Write out the FNCS DSO array in the system case config.
    if write_case_config:
        with open(case_path + config_file + '.json') as caseconfig_file:
            case_data = json.load(caseconfig_file)
        case_data['DSO'] = use_dso_list
        for i in range(len(case_data['bus'])):
            # Pad legacy 13-entry bus rows with the two newer zero columns.
            if len(case_data['bus'][i]) == 13:
                case_data['bus'][i].append(0)
                case_data['bus'][i].append(0)
        with open(case_path + config_file + '.json', 'w') as caseoutfile:
            json.dump(case_data, caseoutfile, indent=2)

    # Write out the industrial load file: a constant tape at 5-minute resolution.
    if write_industrials:
        days = 35
        num_stamps = days * 24 * 12  # was a hard-coded duplicate of `days`
        time_stamps = [n * 300 for n in range(num_stamps)]
        array = [indust_load_list for i in range(len(time_stamps))]
        indust_df = pd.DataFrame(array,
                                 index=time_stamps,
                                 columns=dso_list)
        indust_df.index.name = 'seconds'
        indust_df.to_csv(data_path + '/200_indust_p.csv')


# prepare_metadata(node, end_row, feeder_mode, high_renewables_case, DSO_Load_Threshold)
prepare_metadata('8', 10, 'lean', True, 0)
prepare_metadata('8', 10, 'lean', False, 0)
prepare_metadata('200', 202, 'lean', False, 'File')
prepare_metadata('200', 202, 'lean', True, 'File')

Full Screen

Full Screen

pipeline_manager.py

Source:pipeline_manager.py Github

copy

Full Screen

        # NOTE(review): this chunk begins inside an enclosing pipeline-manager
        # class whose header (and the start of its train_d method) lies above
        # this view; the next line is the tail of that train_d method.
        train_d(self.config_file_d, self.profiler)

    def train_r(self):
        """Profile and run recognition-model training with the recognition config."""
        self.profiler = Profiler(self.config_file_r)
        train_r(self.config_file_r, self.profiler)

    def prepare_metadata(self):
        """Profile and run dataset metadata preparation (uses self.config_dataset)."""
        self.profiler = Profiler(self.config_file_d)
        prepare_metadata(self.config_dataset, self.profiler)

    def test_d(self):
        """Profile and run detection-model testing."""
        self.profiler = Profiler(self.config_file_d)
        test_d(self.config_file_d, self.profiler)

    def test_r(self):
        """Profile and run recognition-model testing."""
        self.profiler = Profiler(self.config_file_r)
        test_r(self.config_file_r, self.profiler)

    def test_one_d(self, path=None, out_path=None):
        """Profile detection on a single image (paths forwarded to test_one_d)."""
        self.profiler = Profiler(self.config_file_d)
        test_one_d(self.config_file_d, path, out_path, self.profiler)

    def test_one_r(self, path, out_path):
        """Profile recognition on a single image."""
        self.profiler = Profiler(self.config_file_r)
        test_one_r(self.config_file_r, path, out_path, self.profiler)

    def test_one_rd(self, path=None, out_path=None):
        """Profile combined recognition+detection on a single image."""
        self.profiler = Profiler(self.config_file_d)
        test_one_rd(self.config_file_r, self.config_file_d, path, out_path, self.profiler)

    def test_entire_folder_d(self, path, out_path):
        """Profile detection over every image in a folder tree."""
        self.profiler = Profiler(self.config_file_d)
        test_entire_folder_d(self.config_file_d, path, out_path, self.profiler)

    def test_entire_folder_r(self, path, out_path):
        """Profile recognition over every image in a folder."""
        self.profiler = Profiler(self.config_file_r)
        test_entire_folder_r(self.config_file_r, path, out_path, self.profiler)

    def test_entire_folder_rd(self, path, out_path):
        """Profile combined recognition+detection over a folder tree."""
        self.profiler = Profiler(self.config_file_d)
        test_entire_folder_rd(self.config_file_r, self.config_file_d, path, out_path, self.profiler)


def train_d(config, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TrainTestD import TrainTestD
    driver = profiler(TrainTestD, config, 'train', profiler, profiler_type='once')
    success = profiler(driver.train_d, profiler_type='once')
    log.info(success)
    profiler.dump()
    profiler.plot()


def train_r(config, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TrainTestR import TrainTestR
    driver = profiler(TrainTestR, config, 'train', profiler, profiler_type='once')
    success = profiler(driver.train_r, profiler_type='once')
    log.info(success)
    profiler.dump()
    profiler.plot()


def test_d(config, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TrainTestD import TrainTestD
    driver = profiler(TrainTestD, config, 'test', profiler, profiler_type='once')  # , profiler_type='single'
    profiler(driver.start_testing, profiler_type='once')
    profiler(driver.test_d, profiler_type='once')


def test_r(config, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TrainTestR import TrainTestR
    driver = profiler(TrainTestR, config, 'test', profiler, profiler_type='once')  # , profiler_type='single'
    profiler(driver.start_testing, profiler_type='once')
    profiler(driver.test_r, profiler_type='once')


def test_one_r(config, path, out_path, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param path: Input Image path
    :param out_path: Output Image Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TestOneImageR import TestOneImageRClass
    driver = profiler(TestOneImageRClass, config, 'test_one', profiler, profiler_type='once')
    profiler(driver.test_one_image_r, path, out_path, profiler_type='once')


def test_entire_folder_r(config_file_r, path, out_path, profiler):
    """
    Have a look at main.py for description
    :param config_file_r: dynamic variables taken from the folder configs
    :param path: Input Folder path
    :param out_path: Output Folder Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TestOneImageR import TestOneImageRClass
    driver = profiler(TestOneImageRClass, config_file_r, 'test_one', profiler, profiler_type='once')
    # skip hidden files (names beginning with '.')
    images = [i for i in sorted(os.listdir(path)) if '.' != i[0]]
    for image in images:
        print("Recognising text in image:", image)
        profiler(driver.test_one_image_r, path + '/' + image, out_path, profiler_type='once')


def test_one_rd(config_r, config_d, path, out_path, profiler):
    """
    Have a look at main.py for description
    :param config_r: dynamic variables taken from the folder configs(recognition)
    :param config_d: dynamic variables taken from the folder configs(detection)
    :param path: Input Image path
    :param out_path: Output Image Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TestOneImageRD import TestOneImageRDClass
    driver = profiler(TestOneImageRDClass, config_r, config_d, 'test_one', profiler, profiler_type='once')
    contours, text = profiler(driver.test_one_image_rd, path, out_path, profiler_type='once')
    import pickle
    # persist the detected contours and recognised text next to the image output
    pickle.dump([contours, text], open(out_path + '/output.pkl', 'wb'))
    return contours, text


def test_entire_folder_rd(config_r, config_d, ipath, opath, profiler):
    """
    Have a look at main.py for description
    :param config_r: dynamic variables taken from the folder configs(recognition)
    :param config_d: dynamic variables taken from the folder configs(detection)
    :param ipath: Input Image path
    :param opath: Output Image Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    def gen_rd(model, path, out, out_label):
        """
        A Recursive function to be called to generate output which is described in main.py
        :param model: model object which contains functions to detect and recognise
        :param path: Current Folder path
        :param out: Current Output path
        :param out_label: Current Label path
        """
        all_files = os.listdir(path)
        # all_files is a list with all the files(could be a directory) in the directory 'path'
        # creates input and output directories, and checks that image format is correct
        for file_i in all_files:
            if file_i.split('.')[-1].lower() in ['jpeg', 'png', 'jpg']:
                log.info(path + '/' + file_i)
                # only process images whose output and label pickle don't both exist yet
                if not (os.path.exists(out + '/' + file_i) and os.path.exists(
                        out_label + '/' + '.'.join(file_i.split('.')[:-1]) + '.pkl')):
                    contours, text = model.test_one_image_rd(path + '/' + file_i, out + '/' + file_i)
                    with open(out_label + '/' + '.'.join(file_i.split('.')[:-1]) + '.pkl', 'wb') as f:
                        pickle.dump([contours, text], f)
            elif os.path.isdir(path + '/' + file_i):
                if not os.path.exists(out + '/' + file_i):
                    os.mkdir(out + '/' + file_i)
                if not os.path.exists(out_label + '/' + file_i):
                    os.mkdir(out_label + '/' + file_i)
                gen_rd(model, path + '/' + file_i, out + '/' + file_i, out_label + '/' + file_i)
        # if the file in the list all_files is a directory, call the function again
        # creates output directory if it doesn't exist, calls gen_rd
    first_out_label = opath + '_label'
    log.info(ipath, opath, first_out_label)
    from .Dlmodel.TestOneImageRD import TestOneImageRDClass
    if not os.path.exists(opath):
        os.mkdir(opath)
    if not os.path.exists(first_out_label):
        os.mkdir(first_out_label)
    driver = profiler(TestOneImageRDClass, config_r, config_d, 'test_one', profiler, profiler_type='once')
    profiler(gen_rd, driver, ipath, opath, first_out_label, profiler_type='once')


def test_one_d(config, path, out_path, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param path: Input Image path
    :param out_path: Output Image Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    from .Dlmodel.TestOneImageD import TestOneImageDClass
    driver = profiler(TestOneImageDClass, config, 'test_one', profiler, profiler_type='once')
    profiler(driver.test_one_image_d, path, out_path, profiler_type='once')


def test_entire_folder_d(config, ipath, opath, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param ipath: Input Image path
    :param opath: Output Image Path with filename as text_predicted
    :param profiler: A class to generate statistics of time usage of each function
    """
    def gen_d(model, path, out):
        """
        A Recursive function to be called to generate output which is described in main.py
        :param model: model object which contains functions to detect and recognise
        :param path: Current Folder path
        :param out: Current Output path
        """
        all_files = os.listdir(path)
        # all_files is a list with all the files(could be a directory) in the directory 'path'
        # creates input and output directories, and checks that image format is correct
        for file_i in all_files:
            if file_i.split('.')[-1].lower() in ['jpeg', 'png', 'jpg', 'gif']:
                log.info(path + '/' + file_i)
                if not os.path.exists(out + '/' + file_i):
                    model.test_one_image_d(path + '/' + file_i, out + '/' + file_i)
            elif os.path.isdir(path + '/' + file_i):
                if not os.path.exists(out + '/' + file_i):
                    os.mkdir(out + '/' + file_i)
                gen_d(model, path + '/' + file_i, out + '/' + file_i)
        # if the file in the list all_files is a directory, call the function again
        # creates output directory if it doesn't exist, calls gen
    print(ipath, opath)
    from .Dlmodel.TestOneImageD import TestOneImageDClass
    if not os.path.exists(opath):
        os.mkdir(opath)
    driver = profiler(TestOneImageDClass, config, 'test_one', profiler=profiler, profiler_type='once')
    profiler(gen_d, driver, ipath, opath, profiler_type='once')


def prepare_metadata(config, profiler):
    """
    Have a look at main.py for description
    :param config: dynamic variables taken from the folder configs
    :param profiler: A class to generate statistics of time usage of each function
    """
    # Pick the Meta class matching each dataset named in the train/test config.
    datasets = set(config['dataset_train'] + config['dataset_test'])
    for d_name in datasets:
        if d_name == 'COCO':
            from .prepare_metadata.prepare_metadata import MetaCoco as Meta
        elif d_name == 'IC13':
            from .prepare_metadata.prepare_metadata import MetaIC13 as Meta
        elif d_name == 'IC15':
            from .prepare_metadata.prepare_metadata import MetaIC15 as Meta
        elif d_name == 'SYNTH':
            # NOTE(review): source view is truncated at this branch.

Full Screen

Full Screen

prepare_kinetics_metadata.py

Source:prepare_kinetics_metadata.py Github

copy

Full Screen

import os
import argparse
import pickle


def prepare_metadata(split='train', force=False):
    """Load or build the cached torchvision metadata for a Kinetics-400 split.

    :param split: split name, used in the cache file name under ./data/.
    :param force: when True, rebuild the cache even if the pickle exists.
    :return: the metadata object, whether loaded from cache or freshly built.
    """
    print(f'preparing kinetics split {split}...')
    metadata_filename = f'./data/kinetics400_{split}_metadata.pkl'

    # Fast path: reuse the cached pickle unless a rebuild is forced.
    if not force and os.path.exists(metadata_filename):
        with open(metadata_filename, 'rb') as f:
            metadata = pickle.load(f)
        return metadata

    # Import lazily: only the scan path needs torchvision, so the cached
    # path does not pay for the heavy import.
    from torchvision.datasets.kinetics import Kinetics

    dataset_root = '~/workspace/dataset/kinetics/k400'
    dataset_root = os.path.expanduser(dataset_root)
    kinetics400_ds = Kinetics(
        root=dataset_root,
        frames_per_clip=16,
        split=split,
        num_workers=16,
        frame_rate=2,
    )

    with open(metadata_filename, 'wb') as f:
        pickle.dump(kinetics400_ds.metadata, f)
    # Fix: the fresh-build path previously returned None while the cached
    # path returned the metadata; return it consistently.
    return kinetics400_ds.metadata


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--split', choices=['train', 'val', 'all'])
    parser.add_argument('--force', action='store_true')
    args = parser.parse_args()
    force = args.force

    if args.split == 'all':
        prepare_metadata(split='train', force=force)
        prepare_metadata(split='val', force=force)
    else:
        # NOTE(review): the source view is truncated here; preparing the single
        # requested split is the presumed behavior — confirm against full file.
        prepare_metadata(split=args.split, force=force)

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run avocado automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful