How to use the get_root_folder method in lisa

Best Python code snippet using lisa_python

read_utilities.py

Source:read_utilities.py Github

copy

Full Screen

# Reconstructed from a collapsed one-line scrape of read_utilities.py.
# NOTE(review): the file head (imports and the first part of read_arff) was
# truncated in the source; read_arff's signature and first branch condition
# below are inferred from its call sites and the surviving tail of its body --
# confirm against the repository.

def read_arff(filepath: str, mode: str = "scipy"):
    """
    Parse an .arff file.

    Args:
        filepath: str - path to the .arff file
        mode: str - "scipy" (default) uses scipy.io.arff.loadarff;
            "arff" uses the pure-python `arff` package
    Return:
        parsed arff content (loader-specific structure), or None for an
        unrecognized mode
    """
    if mode == "scipy":
        return scipy_arff.loadarff(filepath)
    elif mode == "arff":
        # `with` closes the handle (the original leaked an open file).
        with open(filepath, "r") as handle:
            return arff.load(handle)
    return None


def get_root_folder() -> str:
    """
    Get the project root folder (two directory levels above this file).

    Return:
        str: path to the root folder
    """
    return os.path.dirname(os.path.dirname(__file__))


def get_dataset_info(name: str) -> "dict | None":
    """
    Get information regarding a particular {name} dataset.

    Args:
        name: str - dataset name
    Return:
        dict or None: entry for `name` from dataset_info.json, or None
            (with a hint printed) when the dataset is not catalogued
    """
    data = load_dataset_info()
    if name in data:
        return data[name]
    print(
        f"Dataset {name} not present in the local catalogue. To update catalogue, run update_local_info()"
    )
    return None


def load_catalogue_info(root_folder: str = None) -> dict:
    """
    Load the content of catalogue_info.json.

    Contains information regarding datasets paths and configs.

    Args:
        root_folder: str - root folder; defaults to get_root_folder()
    Return:
        dict: catalogue_info.json content
    """
    if not root_folder:
        root_folder = get_root_folder()
    # `with` closes the handle (the original leaked an open file).
    with open(f"{root_folder}/jsons/catalogue_info.json") as f:
        return json.load(f)


def load_dataset_info(root_folder: str = None) -> dict:
    """
    Load the content of dataset_info.json.

    Contains information regarding datasets paths and configs.

    Args:
        root_folder: str - root folder; defaults to get_root_folder()
    Return:
        dict: dataset_info.json content
    """
    if not root_folder:
        root_folder = get_root_folder()
    with open(f"{root_folder}/jsons/dataset_info.json") as f:
        return json.load(f)


def load_description(name: str, path: str = "") -> "str | None":
    """
    Load a dataset description by name.

    Args:
        name: str - dataset name
        path: str - optional explicit file path; resolved from the local
            catalogue when empty
    Return:
        str or None: dataset description; None if the file isn't found in
            the dataset folder
    Errors:
        NotImplementedError (scipy.io arff limitation) is handled by
            falling back to the pure-python "arff" reader
        FileNotFoundError is handled by printing a hint and returning None
    """
    if not path:
        root_folder = get_root_folder()
        dataset_info = get_dataset_info(name)
        path = f'{root_folder}/{dataset_info["local_filepath"]}'
    try:
        data = read_arff(f"{path}")
        description = pd.DataFrame(data[1])
    except NotImplementedError:
        # scipy's arff reader doesn't support every arff feature.
        data = read_arff(f"{path}", mode="arff")
        description = pd.DataFrame(data["description"])
    except FileNotFoundError:
        print(
            f"Dataset {name} not present in the local catalogue. To update catalogue, run update_local_info()"
        )
        return None
    return description


def load(name: str, path: str = "") -> "pd.DataFrame | None":
    """
    Load a dataset by name.

    Args:
        name: str - dataset name
        path: str - optional explicit file path; resolved from the local
            catalogue when empty
    Return:
        df: pd.DataFrame or None - cluster dataset in a pandas DataFrame.
            None if the file isn't found in the dataset folder.
    Errors:
        NotImplementedError (scipy.io arff limitation) is handled by
            falling back to the pure-python "arff" reader
        FileNotFoundError is handled by printing a hint and returning None
    """
    if not path:
        root_folder = get_root_folder()
        dataset_info = get_dataset_info(name)
        path = f'{root_folder}/{dataset_info["local_filepath"]}'
    try:
        data = read_arff(f"{path}")
        df = pd.DataFrame(data[0])
        df.columns = df.columns.str.lower()
    except NotImplementedError:
        data = read_arff(f"{path}", mode="arff")
        df = pd.DataFrame(
            data["data"], columns=[x[0].lower() for x in data["attributes"]]
        )
    except FileNotFoundError:
        # FIX: this message was split mid-string by the scrape; restored to
        # the single-line message used by the sibling functions.
        print(
            f"Dataset {name} not present in the local catalogue. To update catalogue, run update_local_info()"
        )
        return None
    df = preprocessing_utilities.preprocessing(df)
    return df


def download(datasets: "str | List[str]", overwrite: bool = False):
    """
    Download dataset files from the github repo.

    Args:
        datasets: str or List[str] - one or several dataset names
        overwrite: bool - overwrite if dataset is present.
            NOTE(review): `overwrite` was never checked in the original
            either; kept unused for interface compatibility -- confirm intent.
    """
    if isinstance(datasets, str):
        datasets = [datasets]
    status_datasets = {
        "Dataset_not_found_in_catalogue": [],
        "Download_success": [],
        "Filepath_not_valid": [],
    }
    root_folder = get_root_folder()
    dataset_info = load_dataset_info(root_folder)
    for dataset in datasets:
        if dataset not in dataset_info:
            status_datasets["Dataset_not_found_in_catalogue"].append(dataset)
            continue
        github_path = dataset_info[dataset]["github_filepath"]
        r = requests.get(github_path, allow_redirects=True)
        if r.status_code != 200:
            status_datasets["Filepath_not_valid"].append(dataset)
            # BUG FIX: the original fell through here, writing the failed
            # response body to disk and recording a bogus Download_success.
            continue
        target = (
            f'{root_folder}/datasets/'
            f'{dataset_info[dataset]["name"]}.{dataset_info[dataset]["filetype"]}'
        )
        # `with` closes the handle (the original leaked an open file).
        with open(target, "w") as out:
            out.write(r.text)
        status_datasets["Download_success"].append(dataset)
    for status, names in status_datasets.items():
        if names:
            print(f"{status}: {names}")


def list_local_datasets() -> List:
    """
    List all datasets available locally.

    Return:
        List - catalogue names for files known to the catalogue, otherwise
            the raw filenames found in the local datasets folder
    """
    root_folder = get_root_folder()
    catalogue_info = load_catalogue_info(root_folder)
    dataset_info = load_dataset_info(root_folder)
    local_filepath_dict = {
        dataset_info[filename]["local_filepath"]: filename for filename in dataset_info
    }
    local_prefix = catalogue_info["PATH_TO_LOCAL"]
    # dict.get(key, x) == (dict[key] if key in dict else x), in one lookup.
    filenames = [
        local_filepath_dict.get(f"{local_prefix}/{x}", x)
        for x in os.listdir(f"{root_folder}/{local_prefix}")
    ]
    return filenames


def update_local_jsons():
    """
    Refresh local catalogue_info.json and dataset_info.json from github.

    Prints a per-file success/failure message.
    """
    root_folder = get_root_folder()
    # Pass root_folder so the helper doesn't recompute it.
    catalogue_info = load_catalogue_info(root_folder)
    github_path = catalogue_info["PATH_TO_GITHUB"]
    for file in ["catalogue_info.json", "dataset_info.json"]:
        r = requests.get(f"{github_path}/{file}", allow_redirects=True)
        if r.status_code == 200:
            with open(f"{root_folder}/jsons/{file}", "w") as outfile:
                json.dump(r.json(), outfile)
            print(f"{file} successfully updated")
        else:
            print(f"{file} wasn't updated")


if __name__ == "__main__":
    ...  # remainder truncated in the scraped source

Full Screen

Full Screen

constants.py

Source:constants.py Github

copy

Full Screen

# Reconstructed from a collapsed one-line scrape of constants.py.
# NOTE(review): line 1 of the original was truncated; `import os` is assumed
# from the os.path.join usage below -- confirm against the repository.
import os
from typing import Union, Dict, List

from .folders import get_root_folder

# Resolve the project root once instead of calling get_root_folder() per key.
_ROOT_FOLDER = get_root_folder()

# Project-wide constants: data-folder locations and scraping configuration.
CONSTANTS: Dict[str, Union[int, str, Dict, List]] = {
    'FOLDERS': {
        'CSV_BASE_FILEPATH': os.path.join(_ROOT_FOLDER, "data"),
        'EXTERNAL_CSV_BASE_FILEPATH': os.path.join(_ROOT_FOLDER, "data", "external"),
        'INTERIM_CSV_BASE_FILEPATH': os.path.join(_ROOT_FOLDER, "data", "interim"),
        'PROCESSED_CSV_BASE_FILEPATH': os.path.join(_ROOT_FOLDER, "data", "processed"),
        'RAW_CSV_BASE_FILEPATH': os.path.join(_ROOT_FOLDER, "data", "raw"),
    },
    'SCRAPING': {
        'LAST_YEAR': 2022,
        'N_PREVIOUS_YEARS': 32,
        'SEASON_SUMMARY_URL': "https://www.basketball-reference.com/leagues/NBA_{}.html",
        # Maps local folder names to basketball-reference table ids.
        'FOLDER_TO_ID_HASH': {
            'per_game_stats': 'per_game-team',
            'per_100_possesions_stats': 'per_poss-team',
            'season_total_stats': 'totals-team',
            'advanced_stats': 'advanced-team'
        },
    }
}


def get_scraping_constants():
    # Body truncated in the scraped source.
    ...

Full Screen

Full Screen

paths.py

Source:paths.py Github

copy

Full Screen

# Reconstructed from a collapsed one-line scrape of paths.py.
# Commented-out debug print() lines from the original were removed.
from pathlib import Path


def get_root_folder() -> Path:
    """Return the project root: two directory levels above this file."""
    return Path(__file__).parent.parent


def get_visualization_folder() -> Path:
    """Return the `visualization` folder under the project root."""
    return get_root_folder() / "visualization"


def get_data_folder():
    # Body truncated in the scraped source.
    ...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run lisa automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful