How to use the root_path method in Slash

Best Python code snippets using slash
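Every snippet below uses the same idea: a single root_path (a string or pathlib.Path) anchors every other path the code touches, so relative names resolve against it and the whole tree can be created and torn down as a unit. As a minimal, hypothetical sketch of that pattern (the helper name below is illustrative only, not part of Slash or of the projects quoted further down):

import shutil
from datetime import datetime
from pathlib import Path
from random import randint


def make_unique_root(base: str) -> Path:
    """Create a timestamped, collision-resistant root directory under base."""
    name = f"run-{datetime.now():%Y-%m-%d-%H-%M-%S-%f}-{randint(0, 1000)}"
    root_path = Path(base) / name
    root_path.mkdir(parents=True)
    return root_path


root = make_unique_root("/tmp")
(root / "data").mkdir()                           # everything lives under the root
(root / "data" / "hello.txt").write_text("ciao")
shutil.rmtree(root)                               # tear the whole tree down at once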

test_storage_disk.py

Source: test_storage_disk.py (GitHub)


from os import rmdir, makedirs, remove
from os.path import isdir, isfile, getmtime, getsize
from shutil import rmtree
from pathlib import Path
from datetime import datetime
from numpy.random import randint
from pytest import raises
from sdaab.disk.storage_disk import StorageDisk
from sdaab.utils.get_config import dict_config


def generate_folder_path(dict_config=dict_config):
    # Create a unique, timestamped root folder under the configured ROOT_PATH.
    assert dict_config["ENV"] == "TESTING"
    root_path = Path(dict_config["DISK"]["ROOT_PATH"] + \
        "/sdaab-" + datetime.now().strftime("%Y-%m-%d-%H-%M-%S-%f-") + \
        str(randint(0, 1000)))
    makedirs(root_path)
    assert isdir(root_path)
    return root_path


def remove_folder(path):
    assert isdir(path)
    rmtree(path)


def test_storage_disk_init():
    root_path = generate_folder_path()
    r = False  # set only when the expected exception fires
    try:
        s = StorageDisk(root_path="/this/folder/does/not/exist")
    except Exception as e:
        print(e)
        r = True
    assert r
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    assert s.get_type() == "DISK"

    r = False
    try:
        s = StorageDisk(root_path="this/folder/does/not/exist")
    except Exception as e:
        print(e)
        r = True
    assert r
    remove_folder(root_path)


def test_storage_disk_mkdir():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()

    s.mkdir("/tmp1")
    assert isdir(root_path / "tmp1")

    s.mkdir("tmp2")
    assert isdir(root_path / "tmp2")

    tmp_folders = [x.name for x in root_path.iterdir()]
    assert sorted(tmp_folders) == ["tmp1", "tmp2"]

    tmp_mtime = getmtime(root_path / "tmp2")
    try:
        s.mkdir("tmp2")
    except Exception as e:
        print(e)
    assert tmp_mtime == getmtime(root_path / "tmp2")
    s.mkdir("tmp/tmp3")
    assert isdir(root_path / "tmp/tmp3")
    assert isdir(root_path / "tmp")
    remove_folder(root_path)


def test_storage_disk_get_type():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    assert s.get_type() == "DISK"
    r = False
    try:
        s = StorageDisk(root_path="this/folder/does/not/exist")
    except Exception as e:
        print(e)
        r = True
    assert r
    remove_folder(root_path)


def test_storage_disk_mkdir_cd_pwd():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    s.mkdir("level1/level2")
    s.mkdir("/level1/level2/level3")
    assert isdir(root_path / "level1/level2/level3")
    s.cd("level1")
    assert s.pwd() == "/level1"
    s.cd("level2")
    assert s.pwd() == "/level1/level2"
    s.cd("/level1/level2/level3")
    assert s.pwd() == "/level1/level2/level3"
    s.mkdir("level4")
    assert isdir(root_path / "level1/level2/level3/level4")
    s.cd("..")
    assert s.pwd() == "/level1/level2"
    s.cd("../..")
    assert s.pwd() == "/"
    try:
        s.cd("../..")  # cannot climb above the root
    except Exception as e:
        print(e)
    assert s.pwd() == "/"
    s.mkdir("tmp")
    assert isdir(root_path / "tmp")
    remove_folder(root_path)


def test_storage_disk_cd_ls_exists():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "level1/level2")
    Path(root_path / "level1/level2/level2.txt").touch()
    Path(root_path / "level1/level1.txt").touch()
    assert s.ls() == ["level1"]
    assert sorted(s.ls("level1")) == ["level1.txt", "level2"]
    s.cd("level1")
    assert sorted(s.ls()) == ["level1.txt", "level2"]
    assert s.ls("/level1/level2") == ["level2.txt"]
    assert s.exists("level1.txt")
    assert s.exists("/level1/level2/level2.txt")
    makedirs(root_path / "level1/level2/level3")
    assert s.exists("/level1/level2/level3")
    assert s.ls("/level1/level2/level3") == []
    r = None
    try:
        s.ls("/folder/that/does/not/exist")
    except Exception as e:
        print(e)
        r = None
    assert r is None
    s.cd("/level1/level2/level3")
    assert not s.exists("level4.txt")
    assert not s.exists("level4")
    assert s.exists("/level1/level2/level3")
    remove_folder(root_path)


def test_storage_disk_upload():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "level1/level2")
    Path(root_path / "level1/level2/level2.txt").touch()
    Path(root_path / "level1/level1.txt").touch()
    Path(root_path / "level0.txt").touch()
    makedirs(root_path / "uploaded")
    s.upload(root_path / "level0.txt", "/uploaded/uploaded_level0.txt")
    s.upload(root_path / "level1/level1.txt", "/uploaded/uploaded_level1.txt")
    s.cd("uploaded")
    assert s.pwd() == "/uploaded"
    s.upload(root_path / "level1/level2/level2.txt", "uploaded_level2.txt")
    assert isfile(root_path / "uploaded/uploaded_level0.txt")
    assert isfile(root_path / "uploaded/uploaded_level1.txt")
    assert isfile(root_path / "uploaded/uploaded_level2.txt")
    assert getsize(root_path / "uploaded/uploaded_level0.txt") \
        == getsize(root_path / "level0.txt")
    assert getsize(root_path / "uploaded/uploaded_level1.txt") \
        == getsize(root_path / "level1/level1.txt")
    assert getsize(root_path / "uploaded/uploaded_level2.txt") \
        == getsize(root_path / "level1/level2/level2.txt")
    s.cd("/level1")
    try:
        s.upload("level2", "/uploaded/uploaded_level2")  # folders are rejected
    except Exception as e:
        print(e)
    assert not isdir(root_path / "uploaded/uploaded_level2")
    assert not isfile(root_path / "uploaded/uploaded_level2")
    remove_folder(root_path)


def test_storage_disk_download():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "level1/level2")
    Path(root_path / "level1/level2/level2.txt").touch()
    Path(root_path / "level1/level1.txt").touch()
    Path(root_path / "level0.txt").touch()
    makedirs(root_path / "downloaded")
    s.download("level0.txt", root_path / "downloaded/downloaded_level0.txt")
    s.download("/level1/level1.txt", \
        root_path / "downloaded/downloaded_level1.txt")
    s.cd("level1")
    s.download("level2/level2.txt", \
        root_path / "downloaded/downloaded_level2.txt")
    assert isfile(root_path / "downloaded/downloaded_level0.txt")
    assert isfile(root_path / "downloaded/downloaded_level1.txt")
    assert isfile(root_path / "downloaded/downloaded_level2.txt")
    assert getsize(root_path / "downloaded/downloaded_level0.txt") \
        == getsize(root_path / "level0.txt")
    assert getsize(root_path / "downloaded/downloaded_level1.txt") \
        == getsize(root_path / "level1/level1.txt")
    assert getsize(root_path / "downloaded/downloaded_level2.txt") \
        == getsize(root_path / "level1/level2/level2.txt")
    s.cd("/level1")
    try:
        s.download("level2", "/downloaded/downloaded_level2")
    except Exception as e:
        print(e)
    assert not isdir(root_path / "downloaded/downloaded_level2")
    assert not isfile(root_path / "downloaded/downloaded_level2")
    remove_folder(root_path)


def test_storage_disk_size_rm():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "folder")
    assert s.size("folder") == 0
    with open(root_path / "folder/text.txt", "a") as f:
        f.write("ciao")
    assert s.size("folder") == getsize(root_path / "folder/text.txt")
    assert s.size("/folder/text.txt") == getsize(root_path / "folder/text.txt")
    with open(root_path / "folder/text_2.txt", "a") as f:
        f.write("buongiorno")
    assert s.size("folder") == \
        getsize(root_path / "folder/text.txt") + \
        getsize(root_path / "folder/text_2.txt")
    s.rm("/folder/text.txt")
    assert not isfile(root_path / "folder/text.txt")

    s.rm("folder/")
    assert not isdir(root_path / "folder")
    remove_folder(root_path)


def test_storage_disk_upload_download_memory():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    my_variable = 1102
    s.upload_from_memory(my_variable, "v1")
    s.upload_from_memory(my_variable, "/v2")
    makedirs(root_path / "level1")
    s.upload_from_memory(my_variable, "level1/v3")
    s.upload_from_memory(my_variable, "/level1/v4")
    s.cd("level1")
    s.upload_from_memory(my_variable, "v5")
    s.upload_from_memory(my_variable, "v6")
    v1 = s.download_to_memory("/v1")
    v2 = s.download_to_memory("../v2")
    v3 = s.download_to_memory("/level1/v3")
    v4 = s.download_to_memory("v4")
    s.cd("/")
    v5 = s.download_to_memory("level1/v5")
    v6 = s.download_to_memory("/level1/v6")
    try:
        s.upload_from_memory(my_variable, "/level1/level2/v10")
    except Exception as e:
        print(e)
    assert not isfile(root_path / "level1/level2/v10")
    try:
        v10 = s.download_to_memory("level1/level2/v10")
    except Exception as e:
        v10 = None
    assert my_variable == v1
    assert v1 == v2
    assert v2 == v3
    assert v3 == v4
    assert v4 == v5
    assert v5 == v6
    assert v10 is None
    remove_folder(root_path)


def test_storage_disk_rename():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "level1")
    Path(root_path / "name0").touch()
    Path(root_path / "level1/name1").touch()
    try:
        s.rename("name0", "level1/name0")  # rename cannot move across folders
    except Exception as e:
        print(e)
    assert isfile(root_path / "name0")
    s.rename("name0", "new_name0")
    assert isfile(root_path / "new_name0")
    assert not isfile(root_path / "name0")
    s.rename("/level1", "/new_level1")
    assert isdir(root_path / "new_level1")
    assert not isdir(root_path / "level1")
    s.rename("/new_level1/name1", "new_level1/new_name1")
    assert isfile(root_path / "new_level1/new_name1")
    assert not isfile(root_path / "new_level1/name1")
    remove_folder(root_path)


def test_storage_disk_mv():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "folder1")
    makedirs(root_path / "folder2")
    Path(root_path / "file0").touch()
    Path(root_path / "folder1/file0").touch()
    with open(root_path / "file0", "a") as f:
        f.write("ciao")
    s.mv("file0", "folder2/file0")
    assert isfile(root_path / "folder2/file0")
    s.mv("folder2/file0", "/file0")
    assert isfile(root_path / "file0")
    try:
        s.mv("file0", "folder1/file0")  # destination already exists
    except Exception as e:
        print(e)
    assert isfile(root_path / "file0")
    assert getsize(root_path / "folder1/file0") != getsize(root_path / "file0")
    s.mv("file0", "/folder2/file0")
    assert isfile(root_path / "folder2/file0")
    s.mv("/folder2/file0", "file0")
    assert isfile(root_path / "file0")
    s.cd("folder2")
    s.mv("/file0", "file0")
    assert isfile(root_path / "folder2/file0")
    s.mv("file0", "/file0")
    s.cd("/")
    s.mv("file0", "folder2/file0000")
    assert isfile(root_path / "folder2/file0000")
    s.mv("folder2/file0000", "/file0")
    s.mv("folder1", "/folder2/folder1111")
    assert isdir(root_path / "folder2/folder1111")
    assert isfile(root_path / "folder2/folder1111/file0")
    assert not isdir(root_path / "folder1")
    remove_folder(root_path)


def test_storage_disk_cp():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "folder1")
    makedirs(root_path / "folder2")
    Path(root_path / "file0").touch()
    Path(root_path / "folder1/file0").touch()
    with open(root_path / "file0", "a") as f:
        f.write("ciao")
    s.cp("file0", "folder2/file0")
    assert isfile(root_path / "folder2/file0")
    assert isfile(root_path / "file0")
    assert getsize(root_path / "folder2/file0") == getsize(root_path / "file0")
    remove(root_path / "folder2/file0")
    try:
        s.cp("file0", "folder1/file0")  # destination already exists
    except Exception as e:
        print(e)
    assert isfile(root_path / "file0")
    assert getsize(root_path / "folder1/file0") != getsize(root_path / "file0")
    s.cp("file0", "/folder2/file0")
    assert isfile(root_path / "folder2/file0")
    assert isfile(root_path / "file0")
    remove(root_path / "folder2/file0")
    s.cd("folder2")
    s.cp("/file0", "file0")
    assert isfile(root_path / "folder2/file0")
    remove(root_path / "folder2/file0")
    s.cd("/")
    s.cp("file0", "folder2/file0000")
    assert isfile(root_path / "folder2/file0000")
    remove(root_path / "folder2/file0000")
    try:
        s.cp("file0", "/folder1/file0")
    except Exception as e:
        print(e)
    assert getsize(root_path / "file0") != \
        getsize(root_path / "folder1/file0")
    remove(root_path / "folder1/file0")
    s.cp("file0", "/folder1/file0")
    assert getsize(root_path / "file0") == \
        getsize(root_path / "folder1/file0")
    s.cp("folder1", "/folder2/folder1111")
    assert isdir(root_path / "folder2/folder1111")
    assert isfile(root_path / "folder2/folder1111/file0")
    assert isdir(root_path / "folder1")
    assert getsize(root_path / "folder2/folder1111/file0") == \
        getsize(root_path / "folder1/file0")
    remove_folder(root_path)


def test_storage_disk_append():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    makedirs(root_path / "folder")
    Path(root_path / "folder/file.txt").touch()
    s.append("/folder/file.txt", "ciao")
    s.append("folder/file.txt", "ciao")
    s.cd("folder")
    s.append("file.txt", "ciao")
    with open(root_path / "folder/file.txt", "r") as f:
        assert f.read() == "ciaociaociao"

    try:
        s.append("/folder/file_not_found", "ciao")
    except Exception as e:
        print(e)
    assert not isfile(root_path / "folder/file_not_found")
    remove_folder(root_path)


def test_storage_disk_tmp():
    root_path = generate_folder_path()
    assert isdir(root_path)
    s = StorageDisk(root_path=root_path)
    assert s.initialized()
    # Do your stuff...

create_data.py

Source: create_data.py (GitHub)


import argparse
from os import path as osp
from tools.data_converter import indoor_converter as indoor
from tools.data_converter import kitti_converter as kitti
from tools.data_converter import lyft_converter as lyft_converter
from tools.data_converter import nuscenes_converter as nuscenes_converter
from tools.data_converter.create_gt_database import create_groundtruth_database


def kitti_data_prep(root_path, info_prefix, version, out_dir):
    """Prepare data related to Kitti dataset.

    Related data consists of '.pkl' files recording basic infos,
    2D annotations and groundtruth database.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        version (str): Dataset version.
        out_dir (str): Output directory of the groundtruth database info.
    """
    kitti.create_kitti_info_file(root_path, info_prefix)
    kitti.create_reduced_point_cloud(root_path, info_prefix)
    info_train_path = osp.join(root_path, f'{info_prefix}_infos_train.pkl')
    info_val_path = osp.join(root_path, f'{info_prefix}_infos_val.pkl')
    info_trainval_path = osp.join(root_path,
                                  f'{info_prefix}_infos_trainval.pkl')
    info_test_path = osp.join(root_path, f'{info_prefix}_infos_test.pkl')
    kitti.export_2d_annotation(root_path, info_train_path)
    kitti.export_2d_annotation(root_path, info_val_path)
    kitti.export_2d_annotation(root_path, info_trainval_path)
    kitti.export_2d_annotation(root_path, info_test_path)
    create_groundtruth_database(
        'KittiDataset',
        root_path,
        info_prefix,
        f'{out_dir}/{info_prefix}_infos_train.pkl',
        relative_path=False,
        mask_anno_path='instances_train.json',
        with_mask=(version == 'mask'))


def nuscenes_data_prep(root_path,
                       info_prefix,
                       version,
                       dataset_name,
                       out_dir,
                       max_sweeps=10):
    """Prepare data related to nuScenes dataset.

    Related data consists of '.pkl' files recording basic infos,
    2D annotations and groundtruth database.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        version (str): Dataset version.
        dataset_name (str): The dataset class name.
        out_dir (str): Output directory of the groundtruth database info.
        max_sweeps (int): Number of input consecutive frames. Default: 10
    """
    nuscenes_converter.create_nuscenes_infos(
        root_path, info_prefix, version=version, max_sweeps=max_sweeps)
    if version == 'v1.0-test':
        info_test_path = osp.join(root_path, f'{info_prefix}_infos_test.pkl')
        nuscenes_converter.export_2d_annotation(
            root_path, info_test_path, version=version)
        return
    info_train_path = osp.join(root_path, f'{info_prefix}_infos_train.pkl')
    info_val_path = osp.join(root_path, f'{info_prefix}_infos_val.pkl')
    nuscenes_converter.export_2d_annotation(
        root_path, info_train_path, version=version)
    nuscenes_converter.export_2d_annotation(
        root_path, info_val_path, version=version)
    create_groundtruth_database(dataset_name, root_path, info_prefix,
                                f'{out_dir}/{info_prefix}_infos_train.pkl')


def lyft_data_prep(root_path,
                   info_prefix,
                   version,
                   dataset_name,
                   out_dir,
                   max_sweeps=10):
    """Prepare data related to Lyft dataset.

    Related data consists of '.pkl' files recording basic infos,
    and 2D annotations.
    Although the ground truth database is not used in Lyft, it can also be
    generated like nuScenes.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        version (str): Dataset version.
        dataset_name (str): The dataset class name.
        out_dir (str): Output directory of the groundtruth database info.
            Not used here if the groundtruth database is not generated.
        max_sweeps (int): Number of input consecutive frames. Default: 10
    """
    lyft_converter.create_lyft_infos(
        root_path, info_prefix, version=version, max_sweeps=max_sweeps)
    if version == 'v1.01-test':
        return
    train_info_name = f'{info_prefix}_infos_train'
    val_info_name = f'{info_prefix}_infos_val'
    info_train_path = osp.join(root_path, f'{train_info_name}.pkl')
    info_val_path = osp.join(root_path, f'{val_info_name}.pkl')
    lyft_converter.export_2d_annotation(
        root_path, info_train_path, version=version)
    lyft_converter.export_2d_annotation(
        root_path, info_val_path, version=version)


def scannet_data_prep(root_path, info_prefix, out_dir, workers):
    """Prepare the info file for scannet dataset.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        out_dir (str): Output directory of the generated info file.
        workers (int): Number of threads to be used.
    """
    indoor.create_indoor_info_file(
        root_path, info_prefix, out_dir, workers=workers)


def s3dis_data_prep(root_path, info_prefix, out_dir, workers):
    """Prepare the info file for s3dis dataset.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        out_dir (str): Output directory of the generated info file.
        workers (int): Number of threads to be used.
    """
    indoor.create_indoor_info_file(
        root_path, info_prefix, out_dir, workers=workers)


def sunrgbd_data_prep(root_path, info_prefix, out_dir, workers):
    """Prepare the info file for sunrgbd dataset.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        out_dir (str): Output directory of the generated info file.
        workers (int): Number of threads to be used.
    """
    indoor.create_indoor_info_file(
        root_path, info_prefix, out_dir, workers=workers)


def waymo_data_prep(root_path,
                    info_prefix,
                    version,
                    out_dir,
                    workers,
                    max_sweeps=5):
    """Prepare the info file for waymo dataset.

    Args:
        root_path (str): Path of dataset root.
        info_prefix (str): The prefix of info filenames.
        out_dir (str): Output directory of the generated info file.
        workers (int): Number of threads to be used.
        max_sweeps (int): Number of input consecutive frames. Default: 5
            Here we store pose information of these frames for later use.
    """
    from tools.data_converter import waymo_converter as waymo
    splits = ['training', 'validation', 'testing']
    for i, split in enumerate(splits):
        load_dir = osp.join(root_path, 'waymo_format', split)
        if split == 'validation':
            save_dir = osp.join(out_dir, 'kitti_format', 'training')
        else:
            save_dir = osp.join(out_dir, 'kitti_format', split)
        converter = waymo.Waymo2KITTI(
            load_dir,
            save_dir,
            prefix=str(i),
            workers=workers,
            test_mode=(split == 'test'))
        converter.convert()
    # Generate waymo infos
    out_dir = osp.join(out_dir, 'kitti_format')
    kitti.create_waymo_info_file(out_dir, info_prefix, max_sweeps=max_sweeps)
    create_groundtruth_database(
        'WaymoDataset',
        out_dir,
        info_prefix,
        f'{out_dir}/{info_prefix}_infos_train.pkl',
        relative_path=False,
        with_mask=False)


parser = argparse.ArgumentParser(description='Data converter arg parser')
parser.add_argument('dataset', metavar='kitti', help='name of the dataset')
parser.add_argument(
    '--root-path',
    type=str,
    default='./data/kitti',
    help='specify the root path of dataset')
parser.add_argument(
    '--version',
    type=str,
    default='v1.0',
    required=False,
    help='specify the dataset version, no need for kitti')
parser.add_argument(
    '--max-sweeps',
    type=int,
    default=10,
    required=False,
    help='specify sweeps of lidar per example')
parser.add_argument(
    '--out-dir',
    type=str,
    default='./data/kitti',
    required=False,
    help='name of info pkl')
parser.add_argument('--extra-tag', type=str, default='kitti')
parser.add_argument(
    '--workers', type=int, default=4, help='number of threads to be used')
args = parser.parse_args()

if __name__ == '__main__':
    if args.dataset == 'kitti':
        kitti_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=args.version,
            out_dir=args.out_dir)
    elif args.dataset == 'nuscenes' and args.version != 'v1.0-mini':
        train_version = f'{args.version}-trainval'
        nuscenes_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=train_version,
            dataset_name='NuScenesDataset',
            out_dir=args.out_dir,
            max_sweeps=args.max_sweeps)
        test_version = f'{args.version}-test'
        nuscenes_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=test_version,
            dataset_name='NuScenesDataset',
            out_dir=args.out_dir,
            max_sweeps=args.max_sweeps)
    elif args.dataset == 'nuscenes' and args.version == 'v1.0-mini':
        train_version = f'{args.version}'
        nuscenes_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=train_version,
            dataset_name='NuScenesDataset',
            out_dir=args.out_dir,
            max_sweeps=args.max_sweeps)
    elif args.dataset == 'lyft':
        train_version = f'{args.version}-train'
        lyft_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=train_version,
            dataset_name='LyftDataset',
            out_dir=args.out_dir,
            max_sweeps=args.max_sweeps)
        test_version = f'{args.version}-test'
        lyft_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=test_version,
            dataset_name='LyftDataset',
            out_dir=args.out_dir,
            max_sweeps=args.max_sweeps)
    elif args.dataset == 'waymo':
        waymo_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            version=args.version,
            out_dir=args.out_dir,
            workers=args.workers,
            max_sweeps=args.max_sweeps)
    elif args.dataset == 'scannet':
        scannet_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            out_dir=args.out_dir,
            workers=args.workers)
    elif args.dataset == 's3dis':
        s3dis_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            out_dir=args.out_dir,
            workers=args.workers)
    elif args.dataset == 'sunrgbd':
        sunrgbd_data_prep(
            root_path=args.root_path,
            info_prefix=args.extra_tag,
            out_dir=args.out_dir,
            ...
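The parser above is the only place root_path enters the script, and argparse maps the --root-path flag to args.root_path. A small self-contained check of how the relevant flags resolve (mirroring just three of the script's options, not the full parser):

import argparse

parser = argparse.ArgumentParser(description='Data converter arg parser')
parser.add_argument('dataset', metavar='kitti', help='name of the dataset')
parser.add_argument('--root-path', type=str, default='./data/kitti')
parser.add_argument('--extra-tag', type=str, default='kitti')

args = parser.parse_args(['kitti', '--root-path', './data/kitti'])
print(args.dataset, args.root_path, args.extra_tag)  # kitti ./data/kitti kitti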


directory_functions.py

Source: directory_functions.py (GitHub)


import os
import datetime
import contextlib


def ReadConfig(config):
    try:
        # Lock file can not exist
        assert not (os.path.isfile(config["root_path"] + "config.lock"))
        # Create lock
        lock = open(config["root_path"] + "config.lock", "w")
        lock.close()
        # Load config files into arrays / objects
        configFile = open(config["root_path"] + "config.conf", "r")
        fileContent = configFile.read()
        # Splitting up content into lines
        configLines = fileContent.split("\n")
        for line in configLines:
            # line.split(" = ")[0] : the config name
            # line.split(" = ")[1] : the config parameter
            # EOF or wrong config entry
            if " = " not in line:
                break
            if (line.split(" = ")[1]).isnumeric():
                config[str(line.split(" = ")[0])] = int(line.split(" = ")[1])
            else:
                config[str(line.split(" = ")[0])] = line.split(" = ")[1]
        # Print the config dictionary
        print("Edit config.conf to change the settings. Current config:")
        for entry in config:
            print(" " + entry + " = " + str(config[entry]))
        print("\n\n")
        configFile.close()
        # Delete lock
        os.remove(config["root_path"] + "config.lock")
    except Exception as error:
        print("WARNING: Lock file exists. File can not be opened. (config.conf) Error message: " + str(error))
        with contextlib.suppress(FileNotFoundError):
            os.remove(config["root_path"] + "config.lock")


def ReadLists(config, vidsList, mp3List, clipsList):
    try:
        # Make sure that files are not being written at the moment
        assert not os.path.isfile(config["root_path"] + "vids.lock")
        assert not os.path.isfile(config["root_path"] + "mp3.lock")
        assert not os.path.isfile(config["root_path"] + "clips.lock")
        # Check which files we already scanned. We store them in 2 dat files, 'vids.dat', 'mp3.dat'
        # Create locks
        lock1 = open(config["root_path"] + "vids.lock", "w")
        lock2 = open(config["root_path"] + "mp3.lock", "w")
        lock3 = open(config["root_path"] + "clips.lock", "w")
        lock1.close()
        lock2.close()
        lock3.close()
        vidsDat = open(config["root_path"] + "vids.dat", "r")
        mp3Dat = open(config["root_path"] + "mp3.dat", "r")
        clipsDat = open(config["root_path"] + "clips.dat", "r")
        vidsContent = vidsDat.read()
        mp3Content = mp3Dat.read()
        clipsContent = clipsDat.read()
        # Now we should have the content of all three files
        # Splitting up content into lines
        vidsLines = vidsContent.split("\n")
        mp3Lines = mp3Content.split("\n")
        clipsLines = clipsContent.split("\n")
        # Processing the vids file content. File name and render-number are separated by TAB
        for line in vidsLines:
            # print("line", line.split("\t")[1])
            if "\t" in line:
                vidsList[line.split("\t")[0]] = int(line.split("\t")[1])
            else:
                print("This line is not processable (from vids.dat) (Probably this is an empty line) ")
        # Processing the mp3 file content. File name and render-number are separated by TAB
        for line in mp3Lines:
            if "\t" in line:
                mp3List[line.split("\t")[0]] = int(line.split("\t")[1])
            else:
                print("This line is not processable (from mp3.dat) (Probably this is an empty line) ")
        # Processing the clips file content. File name and name of mp3 are separated by TAB
        for line in clipsLines:
            if "\t" in line:
                clipsList[line.split("\t")[0]] = line.split("\t")[1]
            else:
                print("This line is not processable (from clips.dat) (Probably this is an empty line) ")
        # At this point, we should have the file contents stored in 3 dictionaries
        print("At this point, we should have the file contents stored in 3 dictionaries")
        print("vids", vidsList)
        print("mp3", mp3List)
        print("clips", clipsList)
        vidsDat.close()
        mp3Dat.close()
        clipsDat.close()
        # Delete locks
        os.remove(config["root_path"] + "vids.lock")
        os.remove(config["root_path"] + "mp3.lock")
        os.remove(config["root_path"] + "clips.lock")
    except Exception as error:
        print("WARNING: One or more lock file exist for vids.dat, mp3.dat, clips.dat. Most likely these files are being written at the moment. Files can not be opened. ReadLists() will skip. Error message: " + str(error))
        with contextlib.suppress(FileNotFoundError):
            os.remove(config["root_path"] + "vids.lock")
            os.remove(config["root_path"] + "mp3.lock")
            os.remove(config["root_path"] + "clips.lock")


def WriteConfig(config):
    try:
        # Lock file can not exist
        assert not (os.path.isfile(config["root_path"] + "config.lock"))
        # Create lock
        lock = open(config["root_path"] + "config.lock", "w")
        lock.close()
        # Open config file, save config object to file
        configFile = open(config["root_path"] + "config.conf", "w")
        for key in config:
            configFile.write(key + " = " + str(config[key]) + "\n")
        configFile.close()
        # Delete lock
        os.remove(config["root_path"] + "config.lock")
        return True
    except Exception as error:
        print("WARNING: Lock file exists. Changes were not written to prevent data loss. (config.conf) Error message: " + str(error))
        return False


def WriteLists(config, vidsList, mp3List, clipsList):
    allSuccessfull = True
    # Write vids
    try:
        # Lock file can not exist
        assert not (os.path.isfile(config["root_path"] + "vids.lock"))
        # Create lock
        lock = open(config["root_path"] + "vids.lock", "w")
        lock.close()
        # Open vids.dat file, overwrite content
        vidsFile = open(config["root_path"] + "vids.dat", "w")
        for key in vidsList:
            vidsFile.write(key + "\t" + str(vidsList[key]) + "\n")
        vidsFile.close()
        # Delete lock
        os.remove(config["root_path"] + "vids.lock")
    except Exception as error:
        print("WARNING: Lock file exists. Changes were not written to prevent data loss. (vids.dat) Error message: " + str(error))
        with contextlib.suppress(FileNotFoundError):
            os.remove(config["root_path"] + "vids.lock")
        allSuccessfull = False
    # Write mp3
    try:
        # Lock file can not exist
        assert not (os.path.isfile(config["root_path"] + "mp3.lock"))
        # Create lock
        lock = open(config["root_path"] + "mp3.lock", "w")
        lock.close()
        # Open mp3.dat file, overwrite content
        mp3File = open(config["root_path"] + "mp3.dat", "w")
        for key in mp3List:
            mp3File.write(key + "\t" + str(mp3List[key]) + "\n")
        mp3File.close()
        # Delete lock
        os.remove(config["root_path"] + "mp3.lock")
    except Exception as error:
        print("WARNING: Lock file exists. Changes were not written to prevent data loss. (mp3.dat) Error message: " + str(error))
        with contextlib.suppress(FileNotFoundError):
            os.remove(config["root_path"] + "mp3.lock")
        allSuccessfull = False
    # Write clips
    try:
        # Lock file can not exist
        assert not (os.path.isfile(config["root_path"] + "clips.lock"))
        # Create lock
        lock = open(config["root_path"] + "clips.lock", "w")
        lock.close()
        # Open clips.dat file, overwrite content
        print("What is the content of clipsList?")
        print(clipsList)
        clipsFile = open(config["root_path"] + "clips.dat", "w")
        for key in clipsList:
            clipsFile.write(key + "\t" + clipsList[key] + "\n")
        clipsFile.close()
        # Delete lock
        os.remove(config["root_path"] + "clips.lock")
    except Exception as error:
        print("WARNING: Lock file exists. Changes were not written to prevent data loss. (clips.dat) Error message: " + str(error))
        with contextlib.suppress(FileNotFoundError):
            os.remove(config["root_path"] + "clips.lock")
        allSuccessfull = False
    return allSuccessfull


def CheckNewFiles(config, vidsList, mp3List):
    # Log file
    mainLog = open(config["root_path"] + "logs/main.log", "a+")
    now = str(datetime.datetime.now()).rsplit(".", 1)[0]
    # Scan the folders for files
    currentVids = os.listdir(config["vids_path"])
    currentMp3 = os.listdir(config["mp3_path"])
    # We need to close the files at the end, if open
    isVidsFileOpen = False
    isMp3FileOpen = False
    # Only open files if there are new elements.
    if len(currentVids) != len(vidsList):
        vidsDat = open(config["root_path"] + "vids.dat", "a+")
        isVidsFileOpen = True
    if len(currentMp3) != len(mp3List):
        mp3Dat = open(config["root_path"] + "mp3.dat", "a+")
        isMp3FileOpen = True
    # Decide which files are new
    for entry in currentVids:
        if entry in vidsList or entry == ".gitkeep":
            # We don't need to do anything, file already is on our list
            # If we find ".gitkeep", we skip
            continue
        else:
            # New video has been rendered 0 times in clips
            vidsList[entry] = 0
            # Save to file
            vidsDat.write(entry + "\t" + "0\n")
    for entry in currentMp3:
        if entry in mp3List or entry == ".gitkeep":
            # We don't need to do anything, file already is on our list
            # If we find ".gitkeep", we skip
            continue
        else:
            # New mp3 has been rendered 0 times in clips
            mp3List[entry] = 0
            # Save to file
            mp3Dat.write(entry + "\t" + "0\n")
    # Close files, if open
    if isMp3FileOpen:
        mp3Dat.close()
        mainLog.write(now + " Wrote new elements to mp3.dat\n")
    if isVidsFileOpen:
        vidsDat.close()
        mainLog.write(now + " Wrote new elements to vids.dat\n")
    ...
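Every function above repeats the same lock dance: assert the .lock file is absent, create it, do the work, remove it, and clean up in the except branch. Here is a hedged sketch of that pattern factored into a context manager (the helper is ours, not part of this module, and like the original it is only an advisory, race-prone lock):

import contextlib
import os


@contextlib.contextmanager
def file_lock(root_path, name):
    # Fail fast if the lock exists; otherwise hold it for the block's duration.
    lock_path = os.path.join(root_path, name + ".lock")
    if os.path.isfile(lock_path):
        raise RuntimeError("lock file exists: " + lock_path)
    open(lock_path, "w").close()
    try:
        yield
    finally:
        with contextlib.suppress(FileNotFoundError):
            os.remove(lock_path)


# Usage, mirroring WriteConfig:
# with file_lock(config["root_path"], "config"):
#     ... write config.conf ...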


pretrain.py

Source: pretrain.py (GitHub)


from . import encoder
from . import model
from . import framework
import torch
import os
import sys
import json
import numpy as np
import logging

root_url = "https://thunlp.oss-cn-qingdao.aliyuncs.com/"
default_root_path = os.path.join(os.getenv('HOME'), '.opennre')


def check_root(root_path=default_root_path):
    if not os.path.exists(root_path):
        os.mkdir(root_path)
        os.mkdir(os.path.join(root_path, 'benchmark'))
        os.mkdir(os.path.join(root_path, 'pretrain'))
        os.mkdir(os.path.join(root_path, 'pretrain/nre'))


def download_wiki80(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'benchmark/wiki80')):
        os.mkdir(os.path.join(root_path, 'benchmark/wiki80'))
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki80') + ' ' + root_url + 'opennre/benchmark/wiki80/wiki80_rel2id.json')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki80') + ' ' + root_url + 'opennre/benchmark/wiki80/wiki80_train.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki80') + ' ' + root_url + 'opennre/benchmark/wiki80/wiki80_val.txt')


def download_tacred(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'benchmark/tacred')):
        os.mkdir(os.path.join(root_path, 'benchmark/tacred'))
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/tacred') + ' ' + root_url + 'opennre/benchmark/tacred/tacred_rel2id.json')
        logging.info('Due to copyright limits, we only provide rel2id for TACRED. Please download TACRED manually and convert the data to OpenNRE format if needed.')


def download_nyt10(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'benchmark/nyt10')):
        os.mkdir(os.path.join(root_path, 'benchmark/nyt10'))
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/nyt10') + ' ' + root_url + 'opennre/benchmark/nyt10/nyt10_rel2id.json')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/nyt10') + ' ' + root_url + 'opennre/benchmark/nyt10/nyt10_train.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/nyt10') + ' ' + root_url + 'opennre/benchmark/nyt10/nyt10_test.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/nyt10') + ' ' + root_url + 'opennre/benchmark/nyt10/nyt10_val.txt')


def download_wiki_distant(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'benchmark/wiki_distant')):
        os.mkdir(os.path.join(root_path, 'benchmark/wiki_distant'))
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki_distant') + ' ' + root_url + 'opennre/benchmark/wiki_distant/wiki_distant_rel2id.json')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki_distant') + ' ' + root_url + 'opennre/benchmark/wiki_distant/wiki_distant_train.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki_distant') + ' ' + root_url + 'opennre/benchmark/wiki_distant/wiki_distant_test.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/wiki_distant') + ' ' + root_url + 'opennre/benchmark/wiki_distant/wiki_distant_val.txt')


def download_semeval(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'benchmark/semeval')):
        os.mkdir(os.path.join(root_path, 'benchmark/semeval'))
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/semeval') + ' ' + root_url + 'opennre/benchmark/semeval/semeval_rel2id.json')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/semeval') + ' ' + root_url + 'opennre/benchmark/semeval/semeval_train.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/semeval') + ' ' + root_url + 'opennre/benchmark/semeval/semeval_test.txt')
        os.system('wget -P ' + os.path.join(root_path, 'benchmark/semeval') + ' ' + root_url + 'opennre/benchmark/semeval/semeval_val.txt')


def download_glove(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'pretrain/glove')):
        os.mkdir(os.path.join(root_path, 'pretrain/glove'))
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/glove') + ' ' + root_url + 'opennre/pretrain/glove/glove.6B.50d_mat.npy')
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/glove') + ' ' + root_url + 'opennre/pretrain/glove/glove.6B.50d_word2id.json')


def download_bert_base_uncased(root_path=default_root_path):
    check_root()
    if not os.path.exists(os.path.join(root_path, 'pretrain/bert-base-uncased')):
        os.mkdir(os.path.join(root_path, 'pretrain/bert-base-uncased'))
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/bert-base-uncased') + ' ' + root_url + 'opennre/pretrain/bert-base-uncased/config.json')
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/bert-base-uncased') + ' ' + root_url + 'opennre/pretrain/bert-base-uncased/pytorch_model.bin')
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/bert-base-uncased') + ' ' + root_url + 'opennre/pretrain/bert-base-uncased/vocab.txt')


def download_pretrain(model_name, root_path=default_root_path):
    ckpt = os.path.join(root_path, 'pretrain/nre/' + model_name + '.pth.tar')
    if not os.path.exists(ckpt):
        os.system('wget -P ' + os.path.join(root_path, 'pretrain/nre') + ' ' + root_url + 'opennre/pretrain/nre/' + model_name + '.pth.tar')


def download(name, root_path=default_root_path):
    if not os.path.exists(os.path.join(root_path, 'benchmark')):
        os.mkdir(os.path.join(root_path, 'benchmark'))
    if not os.path.exists(os.path.join(root_path, 'pretrain')):
        os.mkdir(os.path.join(root_path, 'pretrain'))
    if name == 'nyt10':
        download_nyt10(root_path=root_path)
    elif name == 'wiki_distant':
        download_wiki_distant(root_path=root_path)
    elif name == 'semeval':
        download_semeval(root_path=root_path)
    elif name == 'wiki80':
        download_wiki80(root_path=root_path)
    elif name == 'tacred':
        download_tacred(root_path=root_path)
    elif name == 'glove':
        download_glove(root_path=root_path)
    elif name == 'bert_base_uncased':
        download_bert_base_uncased(root_path=root_path)
    else:
        raise Exception('Cannot find corresponding data.')


def get_model(model_name, root_path=default_root_path):
    check_root()
    ckpt = os.path.join(root_path, 'pretrain/nre/' + model_name + '.pth.tar')
    if model_name == 'wiki80_cnn_softmax':
        download_pretrain(model_name, root_path=root_path)
        download('glove', root_path=root_path)
        download('wiki80', root_path=root_path)
        wordi2d = json.load(open(os.path.join(root_path, 'pretrain/glove/glove.6B.50d_word2id.json')))
        word2vec = np.load(os.path.join(root_path, 'pretrain/glove/glove.6B.50d_mat.npy'))
        rel2id = json.load(open(os.path.join(root_path, 'benchmark/wiki80/wiki80_rel2id.json')))
        sentence_encoder = encoder.CNNEncoder(token2id=wordi2d,
                                              max_length=40,
                                              word_size=50,
                                              position_size=5,
                                              hidden_size=230,
                                              blank_padding=True,
                                              kernel_size=3,
                                              padding_size=1,
                                              word2vec=word2vec,
                                              dropout=0.5)
        m = model.SoftmaxNN(sentence_encoder, len(rel2id), rel2id)
        m.load_state_dict(torch.load(ckpt, map_location='cpu')['state_dict'])
        return m
    elif model_name in ['wiki80_bert_softmax', 'wiki80_bertentity_softmax']:
        download_pretrain(model_name, root_path=root_path)
        download('bert_base_uncased', root_path=root_path)
        download('wiki80', root_path=root_path)
        rel2id = json.load(open(os.path.join(root_path, 'benchmark/wiki80/wiki80_rel2id.json')))
        if 'entity' in model_name:
            sentence_encoder = encoder.BERTEntityEncoder(
                max_length=80, pretrain_path=os.path.join(root_path, 'pretrain/bert-base-uncased'))
        else:
            sentence_encoder = encoder.BERTEncoder(
                max_length=80, pretrain_path=os.path.join(root_path, 'pretrain/bert-base-uncased'))
        m = model.SoftmaxNN(sentence_encoder, len(rel2id), rel2id)
        m.load_state_dict(torch.load(ckpt, map_location='cpu')['state_dict'])
        return m
    elif model_name in ['tacred_bert_softmax', 'tacred_bertentity_softmax']:
        download_pretrain(model_name, root_path=root_path)
        download('bert_base_uncased', root_path=root_path)
        download('tacred', root_path=root_path)
        rel2id = json.load(open(os.path.join(root_path, 'benchmark/tacred/tacred_rel2id.json')))
        if 'entity' in model_name:
            sentence_encoder = encoder.BERTEntityEncoder(
                max_length=80, pretrain_path=os.path.join(root_path, 'pretrain/bert-base-uncased'))
        else:
            sentence_encoder = encoder.BERTEncoder(
                max_length=80, pretrain_path=os.path.join(root_path, 'pretrain/bert-base-uncased'))
        m = model.SoftmaxNN(sentence_encoder, len(rel2id), rel2id)
        m.load_state_dict(torch.load(ckpt, map_location='cpu')['state_dict'])
        return m
    else:
        ...
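get_model above is the entry point that ties root_path to both the downloads and the checkpoint loading. A hedged usage sketch, assuming this module ships inside the upstream OpenNRE package (where get_model is re-exported at the top level):

import opennre  # assumes the OpenNRE package that contains this pretrain module

# The first call downloads GloVe, wiki80 and the checkpoint under ~/.opennre;
# pass root_path=... to keep everything in a different directory instead.
model = opennre.get_model('wiki80_cnn_softmax')
result = model.infer({
    'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson '
            'of the high king Áed Uaridnach.',
    'h': {'pos': (18, 46)},
    't': {'pos': (78, 91)},
})
print(result)  # e.g. ('father', 0.9...)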

