How to use the get_filelist method in autotest

Best Python code snippets using autotest_python

pcio.py

Source: pcio.py (GitHub)

...
# Imports inferred from the calls below (lines 1-10 of the file are not shown):
import os
import numpy as np
import open3d
from hashlib import md5

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.dirname(BASE_DIR)
DATA_DIR = os.path.join(PROJECT_DIR, 'data')

def load_ycb_pcd(filedir, filelist):
    forder_1 = get_filelist(filedir, 'filelist')
    box_forder = []
    cylinder_forder = []
    sphere_forder = []
    box_forder.extend(get_filelist(forder_1[0], filelist))
    box_object_list = get_objectlist(forder_1[0], filelist)

    cylinder_forder.extend(get_filelist(forder_1[1], filelist))
    cylinder_object_list = get_objectlist(forder_1[1], filelist)

    sphere_forder.extend(get_filelist(forder_1[2], filelist))
    sphere_object_list = get_objectlist(forder_1[2], filelist)
    box_list = []
    cylinder_list = []
    sphere_list = []
    object_total = []
    for i in range(len(box_forder)):
        box_list.extend(get_filelist(box_forder[i], 'filelist'))
        object_total.extend([box_object_list[i]] * 50)
    for i in range(len(cylinder_forder)):
        cylinder_list.extend(get_filelist(cylinder_forder[i], 'filelist'))
        object_total.extend([cylinder_object_list[i]] * 50)
    for i in range(len(sphere_forder)):
        sphere_list.extend(get_filelist(sphere_forder[i], 'filelist'))
        object_total.extend([sphere_object_list[i]] * 50)
    label = np.full(len(box_list), 0, dtype=int)
    label = np.append(label, (np.full(len(cylinder_list), 1, dtype=int)), axis=0)
    label = np.append(label, (np.full(len(sphere_list), 2, dtype=int)), axis=0)
    pcd_list = []
    pcd_list.extend(box_list)
    pcd_list.extend(cylinder_list)
    pcd_list.extend(sphere_list)
    np_pcd = []
    for i in range(len(pcd_list)):
        np_pcd.append(load_pcd(pcd_list[i]))
    data = np.array(np_pcd)
    return data, label, np.array(object_total)

# def load_partial_ycb_pcd(filedir, filelist):
#     ...  (commented-out copy of load_ycb_pcd above, omitted here)

def load_shapes_pcd(filedir, filelist):
    forder_1 = get_filelist(filedir, filelist)
    box_list = []
    cylinder_list = []
    sphere_list = []
    for i in range(len(forder_1)):
        box_list.extend(get_filelist(forder_1[i] + 'box/', 'filelist'))
        cylinder_list.extend(get_filelist(forder_1[i] + 'cylinder/', 'filelist'))
        sphere_list.extend(get_filelist(forder_1[i] + 'sphere/', 'filelist'))

    # print(cylinder_list)
    label = np.full(len(box_list), 0, dtype=int)
    label = np.append(label, (np.full(len(cylinder_list), 1, dtype=int)), axis=0)
    label = np.append(label, (np.full(len(sphere_list), 2, dtype=int)), axis=0)
    pcd_list = []
    pcd_list.extend(box_list)
    pcd_list.extend(cylinder_list)
    pcd_list.extend(sphere_list)
    np_pcd = []
    for i in range(len(pcd_list)):
        np_pcd.append(load_pcd(pcd_list[i]))
    data = np.array(np_pcd)
    return data, label

def load_oneforder_pcd(filedir, filelist='filelist', sample_point=1000):
    forder_1 = get_filelist(filedir, filelist)
    forder_2 = []
    forder_2.extend(get_filelist(forder_1[0], 'filelist'))
    label = np.full(len(get_filelist(forder_1[0], 'filelist')), 0, dtype=int)
    for i in range(1, len(forder_1)):
        forder_2.extend(get_filelist(forder_1[i], 'filelist'))
        label = np.append(label, (np.full(len(get_filelist(forder_1[i], 'filelist')), i, dtype=int)), axis=0)
    np_pcd = []
    for i in range(len(forder_2)):
        sample_pcd = load_pcd(forder_2[i])
        # np.random.shuffle(sample_pcd)
        np_pcd.append(sample_pcd)
    data = np.array(np_pcd)
    return data, label

def load_threeforder_pcd(filedir, filelist1='filelist', filelist2='filelist', filelist3='filelist', sample_point=1000):
    label = []
    np_pcd = []
    ######################
    # first layer folder:
    folder1 = get_filelist(filedir, filelist1)
    for i, fd1 in enumerate(folder1):
        # print('layer_1:', i)
        # print(fd1)
        ######################
        # second layer folder:
        folder2 = get_filelist(fd1, filelist2)
        for j, fd2 in enumerate(folder2):
            # print('layer_2:', j)
            # print(fd2)
            ######################
            # third layer folder:
            folder3 = get_filelist(fd2, filelist3)
            for k, fd3 in enumerate(folder3):
                # print('layer_3', k)
                # print(fd3)
                np_pcd.append(load_pcd(fd3))
                label.append(i)
    # np_pcd = np.array(np_pcd)
    # print(label)
    # print(np.array(np_pcd).shape)
    return (np.array(np_pcd), label)

def load_npy(file_dir):
    data = np.load(os.path.join(file_dir, 'data.npy'))
    label = np.loadtxt(os.path.join(file_dir, 'label.dat'), dtype=int)
    # if check_npy_md5(file_dir, data):
    #     return data, label
    return data, label

def load_partid_dat(file_dir, filelist, sample_times=10, sample_point=1000):
    forder_1 = get_filelist(file_dir, 'filelist')
    forder_2 = []
    forder_2.extend(get_filelist(forder_1[0], filelist))
    label = np.full(len(get_filelist(forder_1[0], filelist)) * sample_times, 0, dtype=int)
    for i in range(1, len(forder_1)):
        forder_2.extend(get_filelist(forder_1[i], filelist))
        label = np.append(label, (np.full(len(get_filelist(forder_1[i], filelist)) * sample_times, i, dtype=int)), axis=0)
    np_pcd = []
    for i in range(len(forder_2)):
        sample_pcd = np.loadtxt(forder_2[i][:-3] + '.dat')
        np.random.shuffle(sample_pcd)
        idx_begin = 0
        for j in range(sample_times):
            idx_end = idx_begin + sample_point
            np_pcd.append(sample_pcd[idx_begin:idx_end])
            idx_begin = idx_end + 1
    data = np.array(np_pcd)
    data = np.load(os.path.join(file_dir, 'data.npy'))
    label = np.loadtxt(os.path.join(file_dir, 'label.dat'), dtype=int)
    if check_npy_md5(file_dir, data):
        return data, label
    return data, label

def load_objectlist(file_dir):
    object_list = np.loadtxt(os.path.join(file_dir, 'object_list.dat'), dtype=str)
    return object_list

def save_npy(data, label, file_dir):
    np.save(os.path.join(file_dir, 'data.npy'), data)
    np.savetxt(os.path.join(file_dir, 'label.dat'), label, fmt='%d')
    # m = md5()
    # m.update(data.data)
    # md5_v = m.hexdigest()

    # f = open(os.path.join(file_dir, 'md5.dat'), 'w+')
    # f.write(md5_v)
    # f.close()
    return

def check_npy_md5(file_dir, data):
    m_input = md5()
    m_input.update(data.data)
    md5_input = m_input.hexdigest()

    f = open(os.path.join(file_dir, 'md5.dat'), 'r')
    if f.mode == 'r':
        md5_abs = f.read()
        if md5_input == md5_abs:
            print("Good md5 value of input data")
            return True
    print("Wrong md5 value of input data")
    return False

def move_to_origin(np_pcd):
    mx = np.mean(np_pcd, axis=0)
    np_pcd = np_pcd - mx
    return np_pcd

def move_to_origin_batch(np_pcd):
    mv_pcd = []
    for i in np_pcd:
        mv_pcd.append(move_to_origin(i))
    return np.array(mv_pcd)

def move_to_ws(np_pcd, x=0.1, y=0.1, z=0.1):
    bbox_ = bbox_nppcd(np_pcd)
    move_ = (bbox_[0] - x, bbox_[2] - y, bbox_[4] - z)
    np_pcd = np_pcd - move_
    return np_pcd

def move_to_ws_batch(np_pcd, x=0.1, y=0.1, z=0.1):
    mv_pcd = []
    for i in np_pcd:
        mv_pcd.append(move_to_ws(i, x, y, z))
    return np.array(mv_pcd)

def norm_nppcd(np_pcd, scale=1.0):
    box_ = bbox_nppcd(np_pcd)
    max_ = np.sqrt(box_[1]**2 + box_[3]**2 + box_[5]**2) * scale
    return np_pcd / max_

def norm_nppcd_batch(np_pcd, scale=1.0):
    norm_pcd = []
    for pcd in np_pcd:
        norm_pcd.append(norm_nppcd(pcd, scale))
    return np.array(norm_pcd)

def rotate_point_cloud(batch_data):
    """ Randomly rotate the point clouds to augment the dataset.
        Rotation is per shape, about the up axis.
        Input:
          BxNx3 array, original batch of point clouds
        Return:
          BxNx3 array, rotated batch of point clouds
    """
    rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
    for k in range(batch_data.shape[0]):
        rotation_angle = np.random.uniform() * 2 * np.pi
        cosval = np.cos(rotation_angle)
        sinval = np.sin(rotation_angle)
        rotation_matrix = np.array([[cosval, 0, sinval],
                                    [0, 1, 0],
                                    [-sinval, 0, cosval]])
        shape_pc = batch_data[k, ...]
        rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
    return rotated_data

def bbox_nppcd(np_pcd):
    bbox = np.min(np_pcd[:, 0]), np.max(np_pcd[:, 0]), np.min(np_pcd[:, 1]), np.max(np_pcd[:, 1]), np.min(np_pcd[:, 2]), np.max(np_pcd[:, 2])
    return bbox

def load_pcd(filename):
    pcd = open3d.io.read_point_cloud(filename)
    np_pcd = np.asarray(pcd.points)
    return np_pcd

def load_open3d_pcd(filename):
    pcd = open3d.io.read_point_cloud(filename)
    return pcd

def save_pcd(np_pcd, filename, filedir):
    if not os.path.exists(filedir):
        os.makedirs(filedir)
    # pcd = open3d.PointCloud()
    # pcd.points = open3d.Vector3dVector(np_pcd)
    # open3d.write_point_cloud('./' + filedir + '/' + filename + '.pcd', pcd)
    save_dir = os.path.join(filedir, filename + '.pcd')
    save_nppcd(np_pcd, save_dir)
    return

def np_to_pcd(np_pcd):
    pcd = open3d.geometry.PointCloud()
    pcd.points = open3d.utility.Vector3dVector(np_pcd)
    return pcd

def nppcd_to_open3d(np_pcd):
    pcd = open3d.geometry.PointCloud()
    pcd.points = open3d.utility.Vector3dVector(np_pcd)
    return pcd

def save_pcd_dir(np_pcd, filename, filedir):
    if not os.path.exists(filedir):
        os.makedirs(filedir)
    save_dir = os.path.join(filedir, str(filename) + '.pcd')
    save_nppcd(np_pcd, save_dir)
    return

def save_nppcd(np_pcd, save_dir, save_normals=False, save_xyz=False):
    # if not os.path.exists(filedir):
    #     os.makedirs(filedir)
    # filename = str(filename) + '.pcd'
    # save_dir = os.path.join(filedir, filename)
    f = open(save_dir, 'w+')
    if not save_xyz:
        f.write("# .PCD v0.7 - Point Cloud Data file format\n")
        f.write("VERSION 0.7\n")
        if save_normals:
            f.write("FIELDS x y z normal_x normal_y normal_z\n")
            f.write("SIZE 4 4 4 4 4 4\n")
            f.write("TYPE F F F F F F\n")
            f.write("COUNT 1 1 1 1 1 1\n")
        else:
            f.write("FIELDS x y z\n")
            f.write("SIZE 4 4 4\n")
            f.write("TYPE F F F\n")
            f.write("COUNT 1 1 1\n")
        f.write("WIDTH " + str(np_pcd.shape[0]) + "\n")
        f.write("HEIGHT 1\n")
        f.write("VIEWPOINT 0 0 0 1 0 0 0\n")
        f.write("POINTS " + str(np_pcd.shape[0]) + "\n")
        f.write("DATA ascii\n")
        f.close()
        save_points(np_pcd, save_dir)
    else:
        f.close()
        save_points(np_pcd, save_dir)

def save_points(np_pcd, save_dir):
    f_ = open(save_dir, 'a')
    for point in np_pcd:
        f_.write(str(np.float32(point[0])) + ' ' + str(np.float32(point[1])) + ' ' + str(np.float32(point[2])) + '\n')
    f_.close()

def show_nppcd(np_pcd):
    pcd = open3d.geometry.PointCloud()
    pcd.points = open3d.utility.Vector3dVector(np_pcd)
    open3d.visualization.draw_geometries_with_vertex_selection([pcd])
    # open3d.visualization.VisualizerWithKeyCallback([pcd])
    return

def show_nppcd_list(np_pcd_list):
    pcd_list = []
    for i in range(len(np_pcd_list)):
        pcd = open3d.geometry.PointCloud()
        pcd.points = open3d.utility.Vector3dVector(np_pcd_list[i])
        pcd_list.append(pcd)
    open3d.visualization.draw_geometries(pcd_list)
    # open3d.visualization.VisualizerWithKeyCallback(pcd_list)
    # print((pcd_list))

def get_filelist(filedir, listname='filelist'):
    # Read the plain-text index file <listname> inside <filedir> and return its
    # entries (one per line) joined onto <filedir>.
    filelist = []
    for line in open(os.path.join(filedir, listname)):
        # if os.path.join(filedir, line.rstrip()) == '/':
        #     return filelist
        filelist.append(os.path.join(filedir, line.rstrip()))
    return filelist

def get_shapelist(filedir):
    shapelist = []
    for line in open(filedir + "/shapelist"):
        shapelist.append(line.rstrip())
    return shapelist

def get_filename(filedir, listname='filelist'):
    filename = []
    for line in open(os.path.join(filedir, listname)):
        filename.append(line.rstrip())
    return filename

def get_listshape(filelist):
    listshape = np.array(np.shape(filelist))
    return listshape

def get_objectlist(filedir, listname):
    objectlist = []
    for line in open(filedir + "/" + listname):
        a = line.rstrip('\n')
        a = a[:-4]
        objectlist.append(a)
    return objectlist

def load_data(file_dir, listname):
    filelist_1 = get_filelist(file_dir, listname)
    filelist_2 = []
    data = []
    label = []
    for idx in range(len(filelist_1)):
        filelist_2.append(get_filelist(filelist_1[idx], 'filelist'))

    for data_type in range(len(filelist_1)):
        for object_class in range(len(filelist_2[data_type])):
            filelist_3 = (get_filelist(filelist_2[data_type][object_class], 'filelist'))
            for object_idx in range(len(filelist_3)):
                label.append(object_class)
                data.append(load_pcd(filelist_3[object_idx]))

    data = np.array(data, np.float64)
    label = np.array(label)
    print("***************************************")
    print("Loaded data shape", data.shape)
    print("Loaded label shape", label.shape)
    print("***************************************")
    return data, label

def load_data_ycb(file_dir, listname):
    filelist_1 = get_filelist(file_dir, listname)
    filelist_2 = []
    filelist_3 = []
    labellist = []
    for idx in range(len(filelist_1)):
        filelist_2.append(get_filelist(filelist_1[idx], 'filelist'))
    for i in range(len(filelist_1)):
        for j in range(len(filelist_2[i])):
            filelist_3.append(get_filelist(filelist_2[i][j], 'filelist'))
            labellist.append(i)
    data = []
    label = []
    for i in range(len(filelist_3)):
        for j in range(len(filelist_3[i])):
            data.append(load_pcd(filelist_3[i][j]))
            if i < 12:
                label.append(0)
            elif i < 24:
                label.append(1)
            else:
                label.append(2)

    data = np.array(data, np.float64)
    label = np.array(label)
    print("***************************************")
    print("Loaded data shape", data.shape)
    print("Loaded label shape", label.shape)
    print("***************************************")
    return data, label

def sample_numbatch(np_pcd, num_batch):
    idx = np.arange(0, np_pcd.shape[0])
    np.random.shuffle(idx)
    rand_pcd = np_pcd[idx]
    pcd_size = np_pcd.shape[0]
    batchsize = pcd_size // num_batch
    out_pcd = []
    for batch_idx in range(num_batch):
        start_idx = batch_idx * batchsize
        end_idx = (batch_idx + 1) * batchsize
        out_pcd.append(rand_pcd[start_idx:end_idx, :])
    return np.array(out_pcd)

def sample_numpoint(np_pcd, num_point, num_batch):
    idx = np.arange(0, np_pcd.shape[0])
    np.random.shuffle(idx)
    rand_pcd = np_pcd[idx]
    pcd_size = np_pcd.shape[0]
    batchsize = num_point
    out_pcd = []
    if (pcd_size // num_point) < num_batch:
        sub_batch = pcd_size // num_point
        num_sample = num_batch // (pcd_size // num_point)
        for sample_idx in range(num_sample + 1):
            np.random.shuffle(idx)
            rand_pcd = np_pcd[idx]
            for batch_idx in range(sub_batch):
                start_idx = batch_idx * batchsize
                end_idx = (batch_idx + 1) * batchsize
                out_pcd.append(rand_pcd[start_idx:end_idx, :])
    else:
        for batch_idx in range(num_batch):
            start_idx = batch_idx * batchsize
            end_idx = (batch_idx + 1) * batchsize
            out_pcd.append(rand_pcd[start_idx:end_idx, :])
    return np.array(out_pcd)

## Sample ycb point cloud files (.ply) in folder ../data/ycb/ into a fixed number of batches.
## The data folder is expected to contain just two layers.
def exe_sample_fix_numbatch_2layers(num_batch=100, data_forder='ycb/'):
    data_dir = DATA_DIR
    data_dir = os.path.join(data_dir, data_forder)
    filelist = get_filelist(data_dir)
    shapelist = get_shapelist(data_dir)
    filedir = []
    filename = []
    for idx in range(len(filelist)):
        filedir.append(get_filelist(filelist[idx]))
        filename.append(get_filename(filelist[idx]))

    filelist_shape = get_listshape(filedir)
    for i in range(filelist_shape[0]):
        for j in range(filelist_shape[1]):
            pcd = load_pcd(filedir[i][j] + '.ply')
            sample = sample_numbatch(pcd, num_batch)
            for k in range(num_batch // 2):
                save_pcd(sample[k], str(k), shapelist[i] + '/' + filename[i][j])
            for k in range(num_batch // 2):
                save_pcd(sample[k + 50], str(k), 'test/' + shapelist[i] + '/' + filename[i][j])

## Sample ycb point cloud files (.ply) in folder ../data/ycb/ into batches of a fixed number of points.
## The data folder is expected to contain just two layers.
def exe_sample_fix_numpoint_2layers(num_point=1000, num_batch=2, data_forder='ycb/'):
    data_dir = DATA_DIR
    data_dir = os.path.join(data_dir, data_forder)
    filelist = get_filelist(data_dir)
    shapelist = get_shapelist(data_dir)
    filedir = []
    filename = []
    for idx in range(len(filelist)):
        filedir.append(get_filelist(filelist[idx]))
        filename.append(get_filename(filelist[idx]))

    filelist_shape = get_listshape(filedir)
    for i in range(filelist_shape[0]):
        for j in range(filelist_shape[1]):
            pcd = load_pcd(filedir[i][j] + '.ply')
            sample = sample_numpoint(pcd, num_point, num_batch)
            for k in range(num_batch // 2):
                save_pcd(move_to_origin(sample[k]), str(k), shapelist[i] + '/' + filename[i][j])
            for k in range(num_batch // 2):
                save_pcd(move_to_origin(sample[k + num_batch // 2]), str(k), 'test/' + shapelist[i] + '/' + filename[i][j])

if __name__ == "__main__":
    DATA_DIR = os.path.join(DATA_DIR, 'shapes')
    DATA_DIR = os.path.join(DATA_DIR, 'shapes_meter')
    # filelist = 'filelist'
    # data, label = load_shapes_pcd(DATA_DIR, filelist)
    data2, label2 = load_npy(DATA_DIR)
    # The remainder of the original __main__ block is commented-out scratch code:
    # comparing the md5 of the loaded array against md5.dat, calling
    # exe_sample_fix_numpoint_2layers(), walking the ycb folder tree with
    # get_filelist/get_filename, and visualising samples, e.g.:
    # pcd = load_pcd('./box/026_sponge/1.pcd')
    # show_nppcd(pcd)
...
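In this module, get_filelist(filedir, listname='filelist') simply reads a plain-text index file inside filedir and returns every listed entry joined onto filedir, so nested folders are walked by chaining calls. Below is a minimal usage sketch of that pattern; the import path and the data/shapes directory layout are illustrative assumptions, not part of the snippet above.

    # Usage sketch (assumed layout): data/shapes/filelist lists one sub-folder per line,
    # and each sub-folder has its own 'filelist' naming the .pcd files it contains.
    import os
    from pcio import get_filelist, load_pcd  # hypothetical import path

    root = os.path.join('data', 'shapes')
    shape_dirs = get_filelist(root, 'filelist')      # first level: shape folders
    for shape_dir in shape_dirs:
        for pcd_path in get_filelist(shape_dir):     # second level: .pcd files
            points = load_pcd(pcd_path)              # (N, 3) numpy array of points
            print(pcd_path, points.shape)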

preprocess.py

Source: preprocess.py (GitHub)

import os
import numpy as np
from tqdm import tqdm
from torch import from_numpy, flip, cat

def get_filelist(path) -> list:
    """ Gets the list of files from the directories one level below the 'path' directory.
    :return: list of file paths """
    retlist = []
    # Get the list of subdirectory names
    dirlist = os.listdir(path)
    # Get the list of files from the subdirectories
    for directory in dirlist:
        templist = os.listdir(os.path.join(path, directory))
        retlist += list(map(lambda x: os.path.join(path, directory, x), templist))
    return retlist

def get_sampled_data(dirpath, n_loads=None) -> list:
    """ Loads mel-frequency spectrogram samples [80, 80].
    :n_loads - number of files to load from the 'dirpath' directory
    :return: list of samples """
    # Load n_loads files, or load all
    if n_loads:
        files = get_filelist(dirpath)[:n_loads]
    else:
        files = get_filelist(dirpath)
    # List to return
    samples = []
    # Used for the progress bar
    total = len(files)
    loop = tqdm(enumerate(files), total=total, leave=True)
    # This string is only for nicer console output
    str_data_info = os.path.split(os.path.split(dirpath)[0])[1] + '\\' + os.path.split(dirpath)[1]
    for num, path in loop:
        # Load the sample from file
        # samples.append(np.load(path).astype(np.single))
        mel_data = np.load(path).astype(np.single)
        samples += get_window_samples(mel_data, 80)

        # Update the progress bar
        loop.set_description(f'Loading {str_data_info}')
    return samples

def get_sample(mel_data, idx, window):
    mirrored_rows = 0
    if idx < window:
        mel_data = cat([flip(mel_data[:, :, 1:window - idx], dims=[2]), mel_data], dim=2)
        mirrored_rows = window - idx
    sample = mel_data[:, :, idx - window + mirrored_rows:idx + 1 + mirrored_rows]
    return sample

def get_window_samples(mel_data, window):
    data_shape = mel_data.shape
    if mel_data.shape[0] % window:
        to_mirror = window - mel_data.shape[0] % window
        mel_data = np.concatenate((mel_data, np.flip(mel_data[-to_mirror - 1:-1], axis=0)), axis=0)
    return list(mel_data.reshape(-1, window, data_shape[1]))

def get_raw_data(dirpath, n_loads=None) -> list:
    """ Loads mel-frequency spectrograms [N, 80].
    :n_loads - number of files to load from the 'dirpath' directory
    :return: list of samples """
    # Load n_loads files, or load all
    if n_loads:
        files = get_filelist(dirpath)[:n_loads]
    else:
        files = get_filelist(dirpath)
    # List to return
    samples = []
    # Used for the progress bar
    total = len(files)
    loop = tqdm(enumerate(files), total=total, leave=True)
    # This string is only for nicer console output
    str_data_info = os.path.split(os.path.split(dirpath)[0])[1] + '\\' + os.path.split(dirpath)[1]
    for num, path in loop:
        # Load the data from file
        samples.append(np.load(path).astype(np.single))
        # Update the progress bar
        loop.set_description(f'Loading {str_data_info}')
...
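Unlike the pcio.py version, this get_filelist takes only a root path and returns every file exactly one directory level below it; get_sampled_data and get_raw_data then np.load each of those files. A minimal sketch of calling it, where the dataset/train folder split into per-class subdirectories of .npy spectrograms is an assumed example layout:

    # Usage sketch (assumed layout): dataset/train/<class_name>/<clip>.npy
    import os
    from preprocess import get_filelist, get_raw_data  # hypothetical import path

    files = get_filelist(os.path.join('dataset', 'train'))
    print(len(files), 'spectrogram files found')

    # get_raw_data wraps the same listing and loads each file as float32
    samples = get_raw_data(os.path.join('dataset', 'train'), n_loads=10)
    print(samples[0].shape)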

filelist.py

Source: filelist.py (GitHub)

def get_filelist(url):
    if url.startswith('rsync'):
        import mb.crawlers.rsync
        return mb.crawlers.rsync.get_filelist(url)
    elif url.startswith('http'):
        import mb.crawlers.http
        return mb.crawlers.http.get_filelist(url)
    elif url.startswith('ftp'):
        import mb.crawlers.ftp
        return mb.crawlers.ftp.get_filelist(url)
    else:
        import sys
...
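This get_filelist is a thin dispatcher: it inspects the URL scheme and forwards the call to the matching mb.crawlers backend (rsync, http, or ftp). A hedged usage sketch follows; the mirror URL is made up, and the assumption that each backend returns a list of entries is not shown in the snippet itself.

    # Usage sketch: the same call works for any supported scheme.
    from filelist import get_filelist  # hypothetical import path

    for url in ('rsync://mirror.example.org/pub/',
                'http://mirror.example.org/pub/',
                'ftp://mirror.example.org/pub/'):
        entries = get_filelist(url)  # delegated to mb.crawlers.<scheme>.get_filelist
        print(url, '->', len(entries), 'entries')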

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

