How to use the file_name method in avocado

Best Python code snippets using avocado_python

create_training_data_with_rotation.py

Source: create_training_data_with_rotation.py (GitHub)


'''
Created on Nov 27, 2015
@author: kannanharidas
'''
import Image
import os
from random import randint
from fileinput import filename
import csv
import shutil

# REFERENCE: http://effbot.org/imagingbook/image.htm
def super_impose_image_for_testing(image_file, rotate, filepath_to_save, file_name, type):
    # background = Image.open(testing_files_path+file_name)
    if type == 0:
        background = Image.open(testing_files_path + file_name)
    else:
        background = Image.open(training_files_path + file_name)
    foreground = Image.open(image_file)
    new_foreground = foreground.convert('RGBA')
    # size = new_foreground.size
    # angle_rand = randint(0,340)
    # print "size = "+str(new_foreground.size)
    new_foreground_rotate = new_foreground.rotate(rotate)
    # new_foreground.rotate(180).show()
    print file_name + " modified!"
    x, y = foreground.size
    x_rand = randint(0, (384 - x))
    y_rand = randint(0, (256 - y))
    background.paste(new_foreground_rotate, (x_rand, y_rand, x_rand + x, y_rand + y), new_foreground_rotate)
    background.save(filepath_to_save)
    # test_files_with_sparky = test_files_with_sparky +1
    testing_files_label[file_name] = 1

def write_csv(file_path_to_save, values):
    training_files_labels_file = open(file_path_to_save, "wb")
    writer1 = csv.writer(training_files_labels_file)
    # testing_files_labels_file = open(testing_filepath+"testing_files_labels.csv","wb")
    # writer2 = csv.writer(testing_files_labels_file)
    for key, value in values.iteritems():
        writer1.writerow([key, value])
    # for key, value in values_testing.iteritems():
    #     writer2.writerow([key,value])
    training_files_labels_file.close()
    # testing_files_labels_file.close()

########################################################################################################################
def randomize_spary_superimposition(filepath_to_save, file_name, type):
    rand_int_for_sparky = randint(0, 20)
    if rand_int_for_sparky % 4 == 0:
        super_impose_image_for_testing("rsz_sparky.gif", 0, filepath_to_save, file_name, type)
    elif rand_int_for_sparky % 4 == 1:
        super_impose_image_for_testing("sparky_large.gif", 0, filepath_to_save, file_name, type)
    elif rand_int_for_sparky % 4 == 2:
        super_impose_image_for_testing("rsz_sparky.gif", randint(0, 340), filepath_to_save, file_name, type)
    elif rand_int_for_sparky % 4 == 3:
        super_impose_image_for_testing("sparky_large.gif", randint(0, 340), filepath_to_save, file_name, type)

def testing_data_full_random():
    for file_name in test_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                randomize_spary_superimposition(testing_files_path_with_random_sparky + file_name, file_name, 0)
            else:
                shutil.copyfile(testing_files_path + file_name, testing_files_path_with_random_sparky + file_name)
                testing_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(testing_files_path_with_random_sparky + "testing_files_labels.csv", testing_files_label)
    for file_name in train_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                randomize_spary_superimposition(training_files_path_with_random_sparky + file_name, file_name, 1)
            else:
                shutil.copyfile(training_files_path + file_name, training_files_path_with_random_sparky + file_name)
                training_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(training_files_path_with_random_sparky + "training_files_labels.csv", training_files_label)

########################################################################################################################
def small_sparky_superimposition(filepath_to_save, file_name, type):
    super_impose_image_for_testing("rsz_sparky.gif", 0, filepath_to_save, file_name, type)

def output_data_small_sparky():
    for file_name in test_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                small_sparky_superimposition(testing_files_path_with_small_sparky + file_name, file_name, 0)
            else:
                shutil.copyfile(testing_files_path + file_name, testing_files_path_with_small_sparky + file_name)
                testing_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(testing_files_path_with_small_sparky + "testing_files_labels.csv", testing_files_label)
    for file_name in train_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                small_sparky_superimposition(training_files_path_with_small_sparky + file_name, file_name, 1)
            else:
                shutil.copyfile(training_files_path + file_name, training_files_path_with_small_sparky + file_name)
                training_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(training_files_path_with_small_sparky + "training_files_labels.csv", training_files_label)

########################################################################################################################
def large_sparky_superimposition(filepath_to_save, file_name, type):
    super_impose_image_for_testing("sparky_large.gif", 0, filepath_to_save, file_name, type)

def output_data_large_sparky():
    for file_name in test_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                large_sparky_superimposition(testing_files_path_with_large_sparky + file_name, file_name, 0)
            else:
                shutil.copyfile(testing_files_path + file_name, testing_files_path_with_large_sparky + file_name)
                testing_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(testing_files_path_with_large_sparky + "testing_files_labels.csv", testing_files_label)
    for file_name in train_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                large_sparky_superimposition(training_files_path_with_large_sparky + file_name, file_name, 1)
            else:
                shutil.copyfile(training_files_path + file_name, training_files_path_with_large_sparky + file_name)
                training_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(training_files_path_with_large_sparky + "training_files_labels.csv", training_files_label)

########################################################################################################################
def small_sparky_with_angle_superimposition(filepath_to_save, file_name, type):
    super_impose_image_for_testing("rsz_sparky.gif", randint(0, 340), filepath_to_save, file_name, type)

def output_data_small_sparky_with_angle():
    for file_name in test_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                small_sparky_with_angle_superimposition(testing_files_path_with_small_sparky_rotate + file_name, file_name, 0)
            else:
                shutil.copyfile(testing_files_path + file_name, testing_files_path_with_small_sparky_rotate + file_name)
                testing_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(testing_files_path_with_small_sparky_rotate + "testing_files_labels.csv", testing_files_label)
    for file_name in train_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                small_sparky_with_angle_superimposition(training_files_path_with_small_sparky_rotate + file_name, file_name, 1)
            else:
                shutil.copyfile(training_files_path + file_name, training_files_path_with_small_sparky_rotate + file_name)
                training_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(training_files_path_with_small_sparky_rotate + "training_files_labels.csv", training_files_label)

########################################################################################################################
def large_sparky_with_angle_superimposition(filepath_to_save, file_name, type):
    super_impose_image_for_testing("sparky_large.gif", randint(0, 340), filepath_to_save, file_name, type)

def output_data_large_sparky_with_angle():
    testing_files_label = {}
    training_files_label = {}
    for file_name in test_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                large_sparky_with_angle_superimposition(testing_files_path_with_large_sparky_rotate + file_name, file_name, 0)
            else:
                shutil.copyfile(testing_files_path + file_name, testing_files_path_with_large_sparky_rotate + file_name)
                testing_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(testing_files_path_with_large_sparky_rotate + "testing_files_labels.csv", testing_files_label)
    for file_name in train_file_names:
        try:
            rand_int = randint(0, 20)
            if rand_int % 2 == 0:
                large_sparky_with_angle_superimposition(training_files_path_with_large_sparky_rotate + file_name, file_name, 1)
            else:
                shutil.copyfile(training_files_path + file_name, training_files_path_with_large_sparky_rotate + file_name)
                training_files_label[file_name] = 0
        except Exception as detail:
            print "Something bad has happened!!! This is the error ==> ", detail
    write_csv(training_files_path_with_large_sparky_rotate + "training_files_labels.csv", training_files_label)

########################################################################################################################
if __name__ == '__main__':
    pass

testing_files_label = {}
training_files_label = {}
testing_files_path = "./../Test_Images/Testing_input/"
training_files_path = "./../Test_Images/Training_input/"
# Training file paths
training_files_path_with_small_sparky = "./../Test_Images/Output_files/Small_sparky/Training/"
training_files_path_with_large_sparky = "./../Test_Images/Output_files/Large_sparky/Training/"
training_files_path_with_small_sparky_rotate = "./../Test_Images/Output_files/Small_sparky_rotate/Training/"
training_files_path_with_large_sparky_rotate = "./../Test_Images/Output_files/Large_sparky_rotate/Training/"
training_files_path_with_random_sparky = "./../Test_Images/Output_files/Random_sparky/Training/"
# Testing file paths
testing_files_path_with_small_sparky = "./../Test_Images/Output_files/Small_sparky/Testing/"
testing_files_path_with_large_sparky = "./../Test_Images/Output_files/Large_sparky/Testing/"
testing_files_path_with_small_sparky_rotate = "./../Test_Images/Output_files/Small_sparky_rotate/Testing/"
testing_files_path_with_large_sparky_rotate = "./../Test_Images/Output_files/Large_sparky_rotate/Testing/"
testing_files_path_with_random_sparky = "./../Test_Images/Output_files/Random_sparky/Testing/"
test_file_names = os.listdir(testing_files_path)
train_file_names = os.listdir(training_files_path)
test_files_with_sparky = 0
train_files_with_sparky = 0
print "large sparky with angle - start"
output_data_large_sparky_with_angle()
print "large sparky with angle - end"
testing_files_label = {}
training_files_label = {}
print "small sparky with angle - start"
output_data_small_sparky_with_angle()
print "small sparky with angle - end"
testing_files_label = {}
training_files_label = {}
print "large sparky - start"
output_data_large_sparky()
print "large sparky - end"
testing_files_label = {}
training_files_label = {}
print "small sparky - start"
output_data_small_sparky()
print "small sparky - end"
testing_files_label = {}
training_files_label = {}
print "random sparky - start"
testing_data_full_random()
print "random sparky - end"
# for file_name in train_file_names:
#     try:
#         rand_int = randint(0,9)
#         if rand_int % 2 == 0 :
#             background = Image.open(training_files_path+file_name)
#             foreground = Image.open("rsz_sparky.gif")
#             new_foreground = foreground.convert('RGBA')
#             x,y = foreground.size
#             x_rand = randint(0,374)
#             y_rand = randint (0,241)
#             background.paste(new_foreground, (x_rand, y_rand ,x_rand+10 ,y_rand+15), new_foreground)
#             background.save(training_files_path+file_name)
#             train_files_with_sparky = train_files_with_sparky +1
#             training_files_label[file_name] = 1
#         else:
#             training_files_label[file_name] = 0
#     except:
#         pass
# training_files_labels_file = open("training_files_labels.csv","wb")
# writer1 = csv.writer(training_files_labels_file)
# testing_files_labels_file = open("testing_files_labels.csv","wb")
# writer2 = csv.writer(testing_files_labels_file)
# training_files_labels_file.close()
...
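The core of the snippet above is the call to paste() with the rotated RGBA foreground passed both as the image to paste and as its own alpha mask, placed at a random offset so it stays inside the background. A minimal, self-contained sketch of that same technique in Python 3 with Pillow is shown below; the file names background.jpg, sparky.png, and augmented.jpg are hypothetical placeholders, and the canvas size is simply whatever the background image provides.

# Minimal sketch of the paste-with-alpha-mask technique used above (Pillow, Python 3).
# "background.jpg" and "sparky.png" are hypothetical placeholder files.
from random import randint
from PIL import Image

def superimpose(background_path, foreground_path, out_path, angle=0):
    background = Image.open(background_path)
    foreground = Image.open(foreground_path).convert('RGBA')
    rotated = foreground.rotate(angle)  # rotation preserves the alpha channel
    w, h = rotated.size
    # pick a random top-left corner so the pasted image stays inside the background
    x = randint(0, max(background.width - w, 0))
    y = randint(0, max(background.height - h, 0))
    # passing `rotated` as the third argument uses its alpha channel as the paste mask
    background.paste(rotated, (x, y), rotated)
    background.save(out_path)

superimpose("background.jpg", "sparky.png", "augmented.jpg", angle=randint(0, 340))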


parser.py

Source: parser.py (GitHub)


import os
import logging
import json

# TODO Set logger
logging.basicConfig(
    level = logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s,"
)
LOGGER = logging.getLogger()

# Path verification
'''def gen_path(file_name):
    if file_name == '':
        raise EnvironmentError("No path were specified")
    else:
        file_name = "text"
        strip = file_name.split("/")
        directory = file_name.replace(strip[-1], "")
        if not os._exists(file_name):
            os.mkdir(directory)'''

def _path_correction(path):
    if path.__contains__("\\"):
        return path.replace("\\", "/")
    else:
        return path

def write_file(file_name: str, data: dict):
    # gen_path(file_name)
    file_name = _path_correction(file_name)
    # Get dict
    if not type(data) == dict:
        raise TypeError("Argument: \"data\" must be dictionary type.\nwrite_file(\"file_name\", \"overwrite\" = True, \"data\" = { }")
    elif data.__len__ == 0:
        raise ValueError("\"data\" must not be empty, for erasing data use clear() instead")
    else:
        if os._exists(file_name):
            os.remove(file_name)
        jstream = open(file_name, "w+")
        json.dump(data, jstream)
        jstream.close()
        # Logger
        log = file_name.split("/")
        LOGGER.info(f"{log[-1]} successfully created")

def read_file(file_name: str):
    jstream = open(file_name, "r")
    dt = jstream.read()
    jstream.close()

    data = json.loads(dt)
    return data

# file_name = Name of the file, data = tuple to filter, returns a tuple
def get_data(file_name: str, data: tuple, as_dict = False):
    DATA = read_file(file_name)
    if as_dict:
        if data.count == 0:
            return DATA
        else:
            dData = dict()
            keys = tuple(DATA.keys())
            values = tuple(DATA.values())
            for x in keys:
                dData[x] = values[keys.index(x)]

            return dData
    else:
        if data.count == 0:
            return tuple(DATA.values())
        else:
            lData = list()
            for x in data:
                lData.append(DATA[x])
            return tuple(lData)

def update_file(file_name: str, data: dict):
    file_name = _path_correction(file_name)
    if file_name == "":
        raise ValueError("\"file_name\" must not be empty")
    elif data.__len__() == 0:
        raise ValueError("\"data\" must contain data to update")
    else:
        try:
            DATA = read_file(file_name)
        except:
            LOGGER.warning(f"No {file_name} were found, a new file will be created")
            DATA = dict()
        finally:
            keys = tuple(data.keys())
            for x in keys:
                DATA[x] = data[x]
            write_file(file_name, DATA)
            # Logging
            log = file_name.split("/")
            LOGGER.info(f"{log[-1]} updated successfully")

def delete_data(file_name: str):
    file_name = _path_correction(file_name)
    if file_name == "":
        raise ValueError("\"file_name\" must not be empty")
    else:
        os.remove(file_name)
        log = file_name.split("/")
        LOGGER.info(f"{log[-1]} was removed")

def delete_many(*file_names: str):
    for x in file_names:
        if not type(x) == str:
            LOGGER.error(f"{x} is not a string")
        else:
...
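Every helper in parser.py takes file_name as its first argument and normalizes it with _path_correction before touching the filesystem. Below is a short, hedged usage sketch, assuming the module above is saved as parser.py next to the calling script; the file name config.json and the dictionary contents are made-up example values, not part of the original source.

# Hypothetical usage of the parser.py helpers shown above; "config.json" is a placeholder name.
import parser

parser.write_file("config.json", {"host": "localhost", "port": 8080})  # create the JSON file
parser.update_file("config.json", {"port": 9090, "debug": True})       # merge in new keys
settings = parser.read_file("config.json")                             # read it back as a dict
print(settings["port"])                                                # -> 9090
parser.delete_data("config.json")                                      # remove the file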


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites for your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run avocado automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!!

Get 100 minutes of automation testing FREE!!

