How to use the get_config_from_file method in toolium

Best Python code snippet using toolium_python

batchassist_test.py

Source:batchassist_test.py Github

copy

Full Screen

# --- reconstructed from a scraped snippet of batchassist_test.py ---
# NOTE(review): the page truncates this file; the opening of the helper that
# owned original lines 232-235 (before the snippet starts) and the end of
# test_shellrun() (after original line 448) are not shown. Truncated context:
#         with open(conffile) as f:
#             for line in f:
#                 confdict = json.loads(line)
#         assert refdict[key] == confdict


def get_config_from_file(conf_from_file):
    """Save a CLIconf for every target in *conf_from_file* and return the
    list of paths to the written config.json files."""
    configfilepaths = []
    targets = conf_from_file.get_targets()
    nontargetlist = []
    targets.sort()
    for target in targets:
        (
            minsize, maxsize, mpprimer, exception, target, path,
            intermediate, qc_gene, mfold, skip_download,
            assemblylevel, skip_tree, nolist,
            offline, ignore_qc, mfethreshold, customdb,
            blastseqs, probe, blastdbv5
        ) = conf_from_file.get_config(target)
        config = CLIconf(
            minsize, maxsize, mpprimer, exception, target, path,
            intermediate, qc_gene, mfold, skip_download,
            assemblylevel, nontargetlist, skip_tree,
            nolist, offline, ignore_qc, mfethreshold, customdb,
            blastseqs, probe, blastdbv5)
        config.save_config()
        cfilepath = os.path.join(
            config.path, config.target, "config", "config.json")
        configfilepaths.append(cfilepath)
    print(configfilepaths)
    return configfilepaths


def test_sys_exit(monkeypatch):
    # Both failing input mocks must abort the interactive Config() dialog.
    monkeypatch.setattr('builtins.input', fail_input)
    with pytest.raises(SystemExit):
        conf_from_file = Config()
    monkeypatch.setattr('builtins.input', fail_startinput)
    with pytest.raises(SystemExit):
        conf_from_file = Config()


# mockprimer_csv = os.path.join(path, "Summary", target, abbr + "_primer.csv")


def test_default_input(monkeypatch):
    monkeypatch.setattr('builtins.input', alldef_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "0")
    defdir = os.path.join("/", "Lactobacillus_curvatus")
    if os.path.isdir(defdir):
        shutil.rmtree(defdir)


def test_offline(monkeypatch):
    monkeypatch.setattr('builtins.input', offline_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "1")
    if os.path.isfile(configpath):
        os.remove(configpath)
    configpath = get_config_from_file(conf_from_file)[1]
    compare_configfiles(reference_dict, configpath, "2")
    if os.path.isfile(configpath):
        os.remove(configpath)
    # Repeat with the alternative offline answers.
    monkeypatch.setattr('builtins.input', offline_input2)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "1")
    if os.path.isfile(configpath):
        os.remove(configpath)
    configpath = get_config_from_file(conf_from_file)[1]
    compare_configfiles(reference_dict, configpath, "2")
    if os.path.isfile(configpath):
        os.remove(configpath)


def test_nodefault(monkeypatch):
    G.create_directory(tmpdir)
    dbpath_tmp = os.path.join(tmpdir, "customdb.fas.nsq")
    with open(dbpath_tmp, "w") as f:
        f.write(">mockDB")
    try:
        monkeypatch.setattr('builtins.input', nodef_input)
        conf_from_file = Config()
        configpath = get_config_from_file(conf_from_file)[0]
        compare_configfiles(reference_dict, configpath, "3")
        if os.path.isfile(configpath):
            os.remove(configpath)
        configpath = get_config_from_file(conf_from_file)[1]
        compare_configfiles(reference_dict, configpath, "4")
        if os.path.isfile(configpath):
            os.remove(configpath)
        monkeypatch.setattr('builtins.input', nodef_input2)
        conf_from_file = Config()
        configpath = get_config_from_file(conf_from_file)[0]
        compare_configfiles(reference_dict, configpath, "8")
        configpath = get_config_from_file(conf_from_file)[1]
        compare_configfiles(reference_dict, configpath, "9")
    finally:
        # Always drop the mock BLAST database, even on assertion failure.
        if os.path.isfile(dbpath_tmp):
            os.remove(dbpath_tmp)


def test_start_all(monkeypatch):
    monkeypatch.setattr('builtins.input', start_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "8")
    if os.path.isfile(configpath):
        os.remove(configpath)
    configpath = get_config_from_file(conf_from_file)[1]
    compare_configfiles(reference_dict, configpath, "9")
    if os.path.isfile(configpath):
        os.remove(configpath)


def test_start_unknown(monkeypatch):
    print("Start unknown")
    mock = batchassist_mock(start_oneinput, start_unknown)
    monkeypatch.setattr('builtins.input', mock.prompt_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "8")


def test_start_wrong(monkeypatch):
    print("Start wrong")
    mock = batchassist_mock(save_wronginput, start_wronginput)
    monkeypatch.setattr('builtins.input', mock.prompt_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "8")
    if os.path.isfile(configpath):
        os.remove(configpath)


def test_not_valid_input(monkeypatch):
    print("test not valid input")
    dbpath_tmp = os.path.join(tmpdir, "customdb.fas.nsq")
    with open(dbpath_tmp, "w") as f:
        f.write(">mockDB")
    try:
        mock = batchassist_mock(nodef_input, wrong_input)
        monkeypatch.setattr('builtins.input', mock.prompt_input)
        conf_from_file = Config()
        configpath = get_config_from_file(conf_from_file)[0]
        compare_configfiles(reference_dict, configpath, "3")
        if os.path.isfile(configpath):
            os.remove(configpath)
        configpath = get_config_from_file(conf_from_file)[1]
        compare_configfiles(reference_dict, configpath, "4")
        if os.path.isfile(configpath):
            os.remove(configpath)
        mock = batchassist_mock(nodef_input, wrong_input2)
        monkeypatch.setattr('builtins.input', mock.prompt_input)
        conf_from_file = Config()
        configpath = get_config_from_file(conf_from_file)[0]
        compare_configfiles(reference_dict, configpath, "7")
        if os.path.isfile(configpath):
            os.remove(configpath)
    finally:
        if os.path.isfile(dbpath_tmp):
            os.remove(dbpath_tmp)


def test_incomplete_config(monkeypatch):
    testfile = os.path.join(
        "/", "primerdesign", "test", "Lactobacillus_curvatus",
        "config", "config.json")
    incomplete_dict = {
        "target": "lactobacillus_curvatus", "path": "/primerdesign/test"}
    with open(testfile, "w") as f:
        f.write(json.dumps(incomplete_dict))
    monkeypatch.setattr('builtins.input', start_input)
    conf_from_file = Config()
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "5")


def prepare_tmp_db():
    t = os.path.join(testfiles_dir, "tmp_config.json")
    tmp_path = os.path.join(pipe_dir, "tmp_config.json")
    if os.path.isfile(tmp_path):
        os.remove(tmp_path)
    shutil.copy(t, tmp_path)


def remove_tmp_db():
    tmp_path = os.path.join(pipe_dir, "tmp_config.json")
    if os.path.isfile(tmp_path):
        os.remove(tmp_path)


def change_tmp_db():
    tmp_path = os.path.join(pipe_dir, "tmp_config.json")
    tmp_dict = {
        "new_run": {
            'modus': "continue", "targets": None,
            "path": "/primerdesign/test"},
        "email": "biologger@protonmail.com"}
    with open(tmp_path, "w") as f:
        f.write(json.dumps(tmp_dict))


def change_db_again():
    tmp_path = os.path.join(pipe_dir, "tmp_config.json")
    tmp_dict = {
        "new_run": {
            'modus': "continue",
            "targets": ["Lactobacillus_curvatus", "Lactobacillus_sunkii"],
            "path": "/primerdesign/test"},
        "email": "biologger@protonmail.com"}
    with open(tmp_path, "w") as f:
        f.write(json.dumps(tmp_dict))


def test_autorun():
    from speciesprimer import auto_run
    prepare_tmp_db()
    targets, conf_from_file, use_configfile = auto_run()
    assert targets == ["Lactobacillus_curvatus"]
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "6")
    change_tmp_db()
    targets, conf_from_file, use_configfile = auto_run()
    targets.sort()
    assert targets == ["Lactobacillus_curvatus", "Lactobacillus_helveticus"]
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "6")
    change_db_again()
    targets, conf_from_file, use_configfile = auto_run()
    assert targets == ["Lactobacillus_curvatus"]
    configpath = get_config_from_file(conf_from_file)[0]
    compare_configfiles(reference_dict, configpath, "6")
    remove_tmp_db()


def test_shellrun(monkeypatch):
    nt_nal = os.path.join("/", "blastdb", "nt.nal")
    if os.path.isfile(nt_nal):
        conf1 = os.path.join(
            "/", "primerdesign", "test", "Lactobacillus_curvatus",
            "config", "config.json")
        conf2 = os.path.join(
            "/", "primerdesign", "test", "Lactobacillus_helveticus",
            "config", "config.json")
        test = os.path.join("/", "primerdesign", "test")
        if os.path.isdir(test):
            shutil.rmtree(test)
        # ...truncated on the scraped page after original line 448

Full Screen

Full Screen

__main__.py

Source:__main__.py Github

copy

Full Screen

# --- reconstructed from a scraped snippet of the BIGSI __main__.py ---
# NOTE(review): the page truncates this file; imports and the start of the
# helper that owned original lines 65-67 are not shown. Truncated context:
#     for _, seq, threshold, score in l:
#         results.append(search_bigsi(bigsi, seq, threshold, score))
#     return results

API = hug.API("bigsi-%s" % str(__version__))


def get_config_from_file(config_file):
    """Resolve and load a YAML config: explicit path, else $BIGSI_CONFIG,
    else the built-in DEFAULT_CONFIG."""
    if config_file is None:
        if os.environ.get("BIGSI_CONFIG"):
            config_file = os.environ.get("BIGSI_CONFIG")
        else:
            return DEFAULT_CONFIG
    # NOTE(review): yaml.load with FullLoader can construct arbitrary Python
    # objects; yaml.safe_load would be safer for untrusted config files.
    with open(config_file, "r") as infile:
        config = yaml.load(infile, Loader=yaml.FullLoader)
    return config


def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    for i in range(0, len(l), n):
        yield l[i : i + n]


@hug.object(name="bigsi", version="0.1.1", api=API)
@hug.object.urls("/", requires=())
class bigsi(object):
    @hug.object.cli
    @hug.object.post("/insert", output_format=hug.output_format.pretty_json)
    def insert(self, config: hug.types.text, bloomfilter, sample):
        """Inserts a bloom filter into the graph
        e.g. bigsi insert ERR1010211.bloom ERR1010211
        """
        config = get_config_from_file(config)
        index = BIGSI(config)
        return insert(index=index, bloomfilter=bloomfilter, sample=sample)

    @hug.object.cli
    @hug.object.post("/bloom")
    def bloom(self, ctx, outfile, config=None):
        """Creates a bloom filter from a sequence file or cortex graph. (fastq,fasta,bam,ctx)
        e.g. index insert ERR1010211.ctx
        """
        config = get_config_from_file(config)
        # NOTE(review): `bf` is never used; the module-level bloom() is called
        # for its side effect of writing `outfile`.
        bf = bloom(
            config=config,
            outfile=outfile,
            kmers=extract_kmers_from_ctx(ctx, config["k"]),
        )

    @hug.object.cli
    @hug.object.post("/build", output_format=hug.output_format.pretty_json)
    def build(
        self,
        bloomfilters: hug.types.multiple = [],
        samples: hug.types.multiple = [],
        from_file: hug.types.text = None,
        config: hug.types.text = None,
    ):
        config = get_config_from_file(config)
        if from_file and bloomfilters:
            raise ValueError(
                "You can only specify blooms via from_file or bloomfilters, but not both"
            )
        elif from_file:
            # Read "<bloom>\t<sample>" rows.
            samples = []
            bloomfilters = []
            with open(from_file, "r") as tsvfile:
                reader = csv.reader(tsvfile, delimiter="\t")
                for row in reader:
                    bloomfilters.append(row[0])
                    samples.append(row[1])
        if samples:
            assert len(samples) == len(bloomfilters)
        else:
            samples = bloomfilters
        if config.get("max_build_mem_bytes"):
            max_memory_bytes = humanfriendly.parse_size(config["max_build_mem_bytes"])
        else:
            max_memory_bytes = None
        return build(
            config=config,
            bloomfilter_filepaths=bloomfilters,
            samples=samples,
            max_memory=max_memory_bytes,
        )

    @hug.object.cli
    @hug.object.post("/merge", output_format=hug.output_format.pretty_json)
    def merge(self, config: hug.types.text, merge_config: hug.types.text):
        config = get_config_from_file(config)
        merge_config = get_config_from_file(merge_config)
        index1 = BIGSI(config)
        index2 = BIGSI(merge_config)
        merge(index1, index2)
        return {"result": "merged %s into %s." % (merge_config, config)}

    @hug.object.cli
    @hug.object.post(
        "/search",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    @hug.object.get(
        "/search",
        examples="seq=ACACAAACCATGGCCGGACGCAGCTTTCTGA",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    def search(
        self,
        seq: hug.types.text,
        threshold: hug.types.float_number = 1.0,
        config: hug.types.text = None,
        score: hug.types.smart_boolean = False,
        format: hug.types.one_of(["json", "csv"]) = "json",
    ):
        config = get_config_from_file(config)
        bigsi = BIGSI(config)
        d = search_bigsi(bigsi, seq, threshold, score)
        if format == "csv":
            return d_to_csv(d)
        else:
            return json.dumps(d, indent=4)

    @hug.object.cli
    @hug.object.post(
        "/variant_search",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    @hug.object.get(
        "/variant_search",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    def variant_search(
        self,
        reference: hug.types.text,
        ref: hug.types.text,
        pos: hug.types.number,
        alt: hug.types.text,
        gene: hug.types.text = None,
        genbank: hug.types.text = None,
        config: hug.types.text = None,
        format: hug.types.one_of(["json", "csv"]) = "json",
    ):
        config = get_config_from_file(config)
        bigsi = BIGSI(config)
        if genbank and gene:
            d = BIGSIAminoAcidMutationSearch(bigsi, reference, genbank).search(
                gene, ref, pos, alt
            )
        elif genbank or gene:
            raise ValueError("genbank and gene must be supplied together")
        else:
            d = BIGSIVariantSearch(bigsi, reference).search(ref, pos, alt)
        d["citation"] = "http://dx.doi.org/10.1038/s41587-018-0010-1"
        if format == "csv":
            return d_to_csv(d)
        else:
            return json.dumps(d, indent=4)

    @hug.object.cli
    @hug.object.post(
        "/bulk_search",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    @hug.object.get(
        "/bulk_search",
        examples="seqfile=query.fasta",
        response_headers={"Access-Control-Allow-Origin": "*"},
        output=hug.output_format.text,
    )
    def bulk_search(
        self,
        fasta: hug.types.text,
        threshold: hug.types.float_number = 1.0,
        config: hug.types.text = None,
        score: hug.types.smart_boolean = False,
        format: hug.types.one_of(["json", "csv"]) = "json",
        stream: hug.types.smart_boolean = False,
    ):
        config = get_config_from_file(config)
        fasta = Fasta(fasta)
        if not stream:
            # Parallel path: fan the queries out over a process pool.
            _config = copy.copy(config)
            _config["nproc"] = 1
            csv_combined = ""
            nproc = config.get("nproc", 1)
            with multiprocessing.Pool(processes=nproc) as pool:
                args = [(_config, str(seq), threshold, score) for seq in fasta.values()]
                dd = pool.map_async(
                    search_bigsi_parallel, chunks(args, math.ceil(len(args) / nproc))
                ).get()
            dd = [item for sublist in dd for item in sublist]
            if format == "csv":
                return "\n".join([d_to_csv(d, False, False) for d in dd])
            else:
                return json.dumps(dd, indent=4)
        else:
            # Streaming path: query sequentially and print as we go.
            bigsi = BIGSI(config)
            csv_combined = ""
            for i, seq in enumerate(fasta.values()):
                seq = str(seq)
                d = {
                    "query": seq,
                    "threshold": threshold,
                    "results": bigsi.search(seq, threshold, score),
                    "citation": "http://dx.doi.org/10.1038/s41587-018-0010-1",
                }
                if format == "csv":
                    if i == 0:
                        with_header = True
                        carriage_return = False
                    elif i == len(fasta) - 1:
                        # NOTE(review): with_header is not reset here, so the
                        # last chunk reuses the previous iteration's value (it
                        # repeats the header for a 2-sequence FASTA) — looks
                        # like a latent bug; confirm before relying on CSV out.
                        carriage_return = True
                    else:
                        with_header = False
                        carriage_return = False
                    csv_result = d_to_csv(d, with_header, carriage_return)
                    csv_combined += csv_result
                    if stream:
                        print(csv_result)
                else:
                    if stream:
                        print(json.dumps(d))

    @hug.object.cli
    @hug.object.delete("/", output_format=hug.output_format.pretty_json)
    def delete(self, config: hug.types.text = None):
        config = get_config_from_file(config)
        get_storage(config).delete_all()


def main():
    API.cli()


if __name__ == "__main__":
    main()  # NOTE(review): call reconstructed; the scraped snippet truncates here.

Full Screen

Full Screen

main.py

Source:main.py Github

copy

Full Screen

import json


def get_config_from_file(filepath):
    """Load a JSON configuration file and return the parsed document.

    Parameters:
        filepath: path to a JSON file on disk.

    Returns:
        The deserialized JSON content (typically a dict).

    Raises:
        FileNotFoundError: if *filepath* does not exist.
        json.JSONDecodeError: if the file is not valid JSON.
    """
    try:
        with open(filepath, 'r') as j:
            return json.load(j)
    except FileNotFoundError:
        # Print a human-readable hint, then re-raise with a bare `raise`
        # (fix: `raise(e)` worked but the bare form keeps the traceback idiom).
        print("No configuration found")
        raise


def main():
    """Collect PI data, run QSEE analytics on it, and push results back to PI."""
    # Imported lazily so get_config_from_file() can be imported and reused
    # without the project-specific integrations being installed.
    from PIInterface import PIInterface
    from QSeeIntegrator import QSeeIntegrator

    # Initialize PI Interface and QSeeIntegrator configurations.
    pi_config = get_config_from_file("configurations/pi.json")
    qsee_config = get_config_from_file("configurations/qsee.json")

    # Collect PI data.
    pi_interface = PIInterface(pi_config)
    pi_data = pi_interface.collect_data(params=pi_config.get("StreamSetParameters"))

    # Transform PI data for the QSEE analytics POST API request.
    qsee_integrator = QSeeIntegrator(qsee_config)
    transform_data = qsee_integrator.transform_data(pi_data)
    for readings in transform_data:
        print("Performing QSEE analytics api call")
        resp = qsee_integrator.post_analysis(transform_data[readings])
        # Sample response loaded up front; currently only the live (200)
        # response is ever pushed, so this stays unused on the failure path.
        resp_data = get_config_from_file("configurations/sample_qsee_response.json")
        if resp.status_code == 200:
            resp_data = resp.json()
            print("Pushing analytics data to PI")
            # (fix: dropped the redundant `try: ... except Exception as e:
            # raise(e)` wrapper — it re-raised the same exception unchanged.)
            pi_interface.post_analytics_result(resp_data)
        else:
            pass  # TODO: handle/raise on non-200 analytics responses


if __name__ == "__main__":
    main()  # NOTE(review): call reconstructed; the scraped snippet truncates here.

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run toolium automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful