How to use _clean_up_files method in lisa

Best Python code snippets using lisa_python
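
The _clean_up_files idiom that the snippets below rely on is simple: every test that creates a temporary file or directory registers its path (on a list attribute, or via a helper function), and the teardown step walks those paths and deletes whatever still exists, using remove for files and rmtree for directories. The following is a minimal, self-contained sketch of that idiom distilled from the snippets on this page; the ExampleCleanUpTest class and its test are illustrative names only, not code from any of the quoted projects.

from os import close, remove
from os.path import exists, isdir
from shutil import rmtree
from tempfile import mkstemp
from unittest import TestCase, main


class ExampleCleanUpTest(TestCase):
    # Illustrative class: not part of any project quoted on this page.
    def setUp(self):
        # every temporary file or directory a test creates gets registered here
        self._clean_up_files = []

    def tearDown(self):
        # delete everything that was registered, directories and files alike
        for fp in self._clean_up_files:
            if exists(fp):
                if isdir(fp):
                    rmtree(fp)
                else:
                    remove(fp)

    def test_writes_a_temporary_file(self):
        fd, fp = mkstemp(suffix='_example.txt')
        close(fd)
        # register the path so tearDown removes it even if the test fails later
        self._clean_up_files.append(fp)
        with open(fp, 'w') as f:
            f.write('some data')
        self.assertTrue(exists(fp))


if __name__ == '__main__':
    main()

The deblur and qiita_client test suites below follow exactly this shape, registering both mkstemp files and mkdtemp output directories on self._clean_up_files so tearDown can dispose of them.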

test_deblur.py

Source: test_deblur.py (GitHub)


# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import main
from os import close, remove, chmod
from shutil import copyfile, rmtree
from tempfile import mkstemp, mkdtemp
from json import dumps, load
from os.path import exists, isdir, join
from os import environ
from qiita_client.testing import PluginTestCase
from qp_deblur import plugin
from qp_deblur.deblur import (
    deblur, generate_deblur_workflow_commands)


class deblurTests(PluginTestCase):
    def setUp(self):
        # this will allow us to see the full errors
        self.maxDiff = None
        plugin("https://localhost:8383", 'register', 'ignored')
        self.params = {
            'Positive filtering database': 'default',
            'Negative filtering database': 'default',
            'Mean per nucleotide error rate': 0.005,
            'Error probabilities for each Hamming distance': (
                '1, 0.06, 0.02, 0.02, 0.01, 0.005, 0.005, '
                '0.005, 0.001, 0.001, 0.001, 0.0005'),
            'Insertion/deletion (indel) probability': 0.01,
            'Maximum number of insertion/deletion (indel)': 3,
            'Sequence trim length (-1 for no trimming)': 100,
            'Minimum dataset-wide read threshold': 0,
            'Minimum per-sample read threshold': 2,
            'Threads per sample': 1, 'Jobs to start': 1,
            'Reference phylogeny for SEPP': 'Greengenes_13.8'}
        self._clean_up_files = []
        # to prevent test from timing out, we need to pre-populate the archive
        # with placements for Deblur fragments to avoid long running SEPP.
        # Therefore, we need a job-id to infer merging scheme which can only be
        # done after demultiplexing job is created. Thus, actual population
        # needs to be done within the test.
        self.features = dict()
        with open('support_files/sepp/placements.json', 'r') as f:
            for placement in load(f)['placements']:
                self.features[placement['nm'][0][0]] = \
                    dumps(placement['p'])
        # saving current value of PATH
        self.oldpath = environ['PATH']

    def tearDown(self):
        # restore eventually changed PATH env var
        environ['PATH'] = self.oldpath
        for fp in self._clean_up_files:
            if exists(fp):
                if isdir(fp):
                    rmtree(fp)
                else:
                    remove(fp)

    def test_generate_deblur_workflow_commands_error(self):
        with self.assertRaises(ValueError):
            generate_deblur_workflow_commands(
                ['fastq/s1.fastq', 'fastq/s1.fastq'],
                'output', self.params)

    def test_generate_deblur_workflow_commands(self):
        exp = ('deblur workflow --seqs-fp "fastq/s1.fastq" --output-dir '
               '"output" --error-dist "1, 0.06, 0.02, 0.02, 0.01, '
               '0.005, 0.005, 0.005, 0.001, 0.001, 0.001, 0.0005" '
               '--indel-max "3" --indel-prob "0.01" --jobs-to-start "1" '
               '--mean-error "0.005" --min-reads "0" --min-size "2" '
               '--threads-per-sample "1" '
               '--trim-length "100"')
        obs = generate_deblur_workflow_commands(
            ['fastq/s1.fastq'], 'output', self.params)
        self.assertEqual(obs, exp)

    def test_deblur_no_target_gene(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/no_sepp_seqs.demux', fp)
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': 'Metagenomic'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_fastq')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual(
            'deblur was developed only for amplicon sequencing data', msg)
        self.assertFalse(success)

    def test_deblur_no_tree(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/no_sepp_seqs.demux', fp)
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': 'ITS'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_fastq')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual("", msg)
        self.assertTrue(success)
        self.assertEqual("BIOM", ainfo[0].artifact_type)
        self.assertEqual(1, len(ainfo))
        self.assertEqual(
            [(join(out_dir, 'deblur_out', 'all.biom'), 'biom'),
             (join(out_dir, 'deblur_out', 'all.seqs.fa'),
              'preprocessed_fasta')], ainfo[0].files)

    def test_no_valid_values_platform_error(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {'description_prep': 'SKB7', 'platform': 'foo'},
            'SKB8.640193': {'description_prep': 'SKB8', 'platform': 'bar'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual('deblur is only valid for Illumina `platform`, '
                         'current values in the Preparation Information File: '
                         'bar, foo', msg)
        self.assertFalse(success)

    def test_no_platform_error(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {'description_prep': 'SKB7'},
            'SKB8.640193': {'description_prep': 'SKB8'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual('Preparation Information File does not have a '
                         'platform column, which is required', msg)
        self.assertFalse(success)

    def test_deblur(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_fastq')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual("", msg)
        self.assertTrue(success)
        self.assertEqual("BIOM", ainfo[0].artifact_type)
        self.assertEqual(
            [(join(out_dir, 'deblur_out', 'all.biom'), 'biom'),
             (join(out_dir, 'deblur_out', 'all.seqs.fa'),
              'preprocessed_fasta')], ainfo[0].files)

    def test_deblur_demux(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual("", msg)
        self.assertTrue(success)
        self.assertEqual("BIOM", ainfo[0].artifact_type)
        self.assertEqual("BIOM", ainfo[1].artifact_type)
        self.assertEqual(
            [(join(out_dir, 'deblur_out', 'deblured', 'all.biom'), 'biom'),
             (join(out_dir, 'deblur_out', 'deblured', 'all.seqs.fa'),
              'preprocessed_fasta')], ainfo[0].files)
        file_tree = join(out_dir, 'deblur_out',
                         'deblured', 'insertion_tree.relabelled.tre')
        self.assertEqual(
            [(join(out_dir, 'deblur_out', 'deblured', 'reference-hit.biom'),
              'biom'),
             (join(out_dir, 'deblur_out', 'deblured',
                   'reference-hit.seqs.fa'), 'preprocessed_fasta'),
             (file_tree, 'plain_text')
             ], ainfo[1].files)
        # finally we are gonna test that the patch is added to the tree
        with open(file_tree, 'r') as tree_fp:
            tree = tree_fp.read()
        self.assertTrue(tree.endswith("'k__Bacteria':0.0);\n"))

    def test_deblur_failingbin(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # create a fake deblur binary that will always fail
        fp_fake_deblur = join(out_dir, 'deblur')
        with open(fp_fake_deblur, 'w') as f:
            f.write('#!/bin/bash\nexit 123\n')
        chmod(fp_fake_deblur, 0o775)
        environ['PATH'] = '%s:%s' % (out_dir, self.oldpath)
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertFalse(success)
        self.assertEqual(ainfo, None)
        self.assertIn('Error running deblur', msg)

    def test_deblur_failing_guppy(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        # create a fake guppy binary that will always fail
        fp_fake_guppy = join(out_dir, 'guppy')
        with open(fp_fake_guppy, 'w') as f:
            f.write('#!/bin/bash\nexit 123\n')
        chmod(fp_fake_guppy, 0o775)
        environ['PATH'] = '%s:%s' % (out_dir, self.oldpath)
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertFalse(success)
        self.assertEqual(ainfo, None)
        self.assertIn('Error running guppy', msg)

    def test_deblur_keyerror(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # pre-populate archive with fragment placements
        # make sure that at least one sequence got no placements via SEPP
        self.features[('TACGGAGGGTGCAAGCGTTATCCGGATTCACTGGGTTTAAAGGGTGCGTAGGT'
                       'GGGTTGGTAAGTCAGTGGTGAAATCTCCGGGCTTAACTCGGAAACTG')] = ''
        self.qclient.patch(url="/qiita_db/archive/observations/",
                           op="add", path=jid,
                           value=dumps(self.features))
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertEqual("", msg)
        self.assertTrue(success)


class deblurTests_binaryfail(PluginTestCase):
    def setUp(self):
        # this will allow us to see the full errors
        self.maxDiff = None
        plugin("https://localhost:8383", 'register', 'ignored')
        self.params = {
            'Positive filtering database': 'default',
            'Negative filtering database': 'default',
            'Mean per nucleotide error rate': 0.005,
            'Error probabilities for each Hamming distance': (
                '1, 0.06, 0.02, 0.02, 0.01, 0.005, 0.005, '
                '0.005, 0.001, 0.001, 0.001, 0.0005'),
            'Insertion/deletion (indel) probability': 0.01,
            'Maximum number of insertion/deletion (indel)': 3,
            'Sequence trim length (-1 for no trimming)': 100,
            'Minimum dataset-wide read threshold': 0,
            'Minimum per-sample read threshold': 2,
            'Threads per sample': 1, 'Jobs to start': 1,
            'Reference phylogeny for SEPP': 'Greengenes_13.8'}
        self._clean_up_files = []
        # saving current value of PATH
        self.oldpath = environ['PATH']

    def tearDown(self):
        # restore eventually changed PATH env var
        environ['PATH'] = self.oldpath
        for fp in self._clean_up_files:
            if exists(fp):
                if isdir(fp):
                    rmtree(fp)
                else:
                    remove(fp)

    def test_deblur_failing_sepp(self):
        # generating filepaths
        fd, fp = mkstemp(suffix='_seqs.demux')
        close(fd)
        self._clean_up_files.append(fp)
        copyfile('support_files/filtered_5_seqs.demux', fp)
        # inserting new prep template
        prep_info_dict = {
            'SKB7.640196': {
                'description_prep': 'SKB7', 'platform': 'Illumina'},
            'SKB8.640193': {
                'description_prep': 'SKB8', 'platform': 'Illumina'}
        }
        data = {'prep_info': dumps(prep_info_dict),
                # magic #1 = testing study
                'study': 1,
                'data_type': '16S'}
        pid = self.qclient.post('/apitest/prep_template/', data=data)['prep']
        # inserting artifacts
        data = {
            'filepaths': dumps([(fp, 'preprocessed_demux')]),
            'type': "Demultiplexed",
            'name': "New demultiplexed artifact",
            'prep': pid}
        aid = self.qclient.post('/apitest/artifact/', data=data)['artifact']
        self.params['Demultiplexed sequences'] = aid
        data = {'user': 'demo@microbio.me',
                'command': dumps(['deblur', '2021.09', 'Deblur 2021.09']),
                'status': 'running',
                'parameters': dumps(self.params)}
        jid = self.qclient.post('/apitest/processing_job/', data=data)['job']
        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)
        # create a fake sepp binary that will always fail
        fp_fake_sepp = join(out_dir, 'run-sepp.sh')
        with open(fp_fake_sepp, 'w') as f:
            f.write('#!/bin/bash\nexit 123\n')
        chmod(fp_fake_sepp, 0o775)
        environ['PATH'] = '%s:%s' % (out_dir, self.oldpath)
        success, ainfo, msg = deblur(self.qclient, jid, self.params, out_dir)
        self.assertFalse(success)
        self.assertEqual(ainfo, None)
        self.assertIn('Error running run-sepp.sh', msg)


if __name__ == '__main__':
    ...


test_util.py

Source: test_util.py (GitHub)


# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import TestCase, main
from os import getcwd, close, remove
from os.path import exists, isdir
from shutil import rmtree
from tempfile import mkstemp
from qiita_client.util import system_call, get_sample_names_by_run_prefix


class UtilTests(TestCase):
    def setUp(self):
        self._clean_up_files = []

    def tearDown(self):
        for fp in self._clean_up_files:
            if exists(fp):
                if isdir(fp):
                    rmtree(fp)
                else:
                    remove(fp)

    def test_system_call(self):
        obs_out, obs_err, obs_val = system_call("pwd")
        self.assertEqual(obs_out, "%s\n" % getcwd())
        self.assertEqual(obs_err, "")
        self.assertEqual(obs_val, 0)

    def test_system_call_error(self):
        obs_out, obs_err, obs_val = system_call("IHopeThisCommandDoesNotExist")
        self.assertEqual(obs_out, "")
        self.assertTrue("not found" in obs_err)
        self.assertEqual(obs_val, 127)

    def test_get_sample_names_by_run_prefix(self):
        fd, fp = mkstemp()
        close(fd)
        with open(fp, 'w') as f:
            f.write(MAPPING_FILE)
        self._clean_up_files.append(fp)
        obs = get_sample_names_by_run_prefix(fp)
        exp = {'s3': 'SKB7.640196', 's2': 'SKD8.640184', 's1': 'SKB8.640193'}
        self.assertEqual(obs, exp)

    def test_get_sample_names_by_run_prefix_error(self):
        fd, fp = mkstemp()
        close(fd)
        with open(fp, 'w') as f:
            f.write(MAPPING_FILE_2)
        self._clean_up_files.append(fp)
        with self.assertRaises(ValueError):
            get_sample_names_by_run_prefix(fp)


MAPPING_FILE = (
    "#SampleID\tplatform\tbarcode\texperiment_design_description\t"
    "library_construction_protocol\tcenter_name\tprimer\trun_prefix\t"
    "instrument_model\tDescription\n"
    "SKB7.640196\tILLUMINA\tA\tA\tA\tANL\tA\ts3\tIllumina MiSeq\tdesc1\n"
    "SKB8.640193\tILLUMINA\tA\tA\tA\tANL\tA\ts1\tIllumina MiSeq\tdesc2\n"
    "SKD8.640184\tILLUMINA\tA\tA\tA\tANL\tA\ts2\tIllumina MiSeq\tdesc3\n"
)

MAPPING_FILE_2 = (
    "#SampleID\tplatform\tbarcode\texperiment_design_description\t"
    "library_construction_protocol\tcenter_name\tprimer\t"
    "run_prefix\tinstrument_model\tDescription\n"
    "SKB7.640196\tILLUMINA\tA\tA\tA\tANL\tA\ts3\tIllumina MiSeq\tdesc1\n"
    "SKB8.640193\tILLUMINA\tA\tA\tA\tANL\tA\ts1\tIllumina MiSeq\tdesc2\n"
    "SKD8.640184\tILLUMINA\tA\tA\tA\tANL\tA\ts1\tIllumina MiSeq\tdesc3\n"
)

if __name__ == '__main__':
    ...


test_saver.py

Source: test_saver.py (GitHub)


...
import glob
from subprocess import call


def test_saves_a_new_bi_season():
    season_name = "saver_test_1_season"
    _clean_up_files(season_name)
    _save_new_bisexual_season(season_name)
    assert (_files_are_present(season_name))
    _clean_up_files(season_name)


def test_saves_a_new_straight_season():
    season_name = "saver_test_2_season"
    _clean_up_files(season_name)
    _save_new_straight_season(season_name)
    assert (_files_are_present(season_name))
    _clean_up_files(season_name)


def test_saves_latest_results_of_bi_season():
    season_name = "bisexual_season_updater_1"
    _clean_up_files(season_name)
    _save_new_bisexual_season(season_name)
    updated_season = BisexualSeason(contestants=['A', 'B', 'C', 'D'],
                                    season_name=season_name,
                                    scenarios=[{('A', 'B'), ('C', 'D')}])
    Saver(updated_season).save()
    assert (_files_are_present(season_name, week=1))
    _clean_up_files(season_name)


def test_saves_latest_results_of_straight_season():
    season_name = "straight_season_updater_1"
    _clean_up_files(season_name)
    _save_new_straight_season(season_name)
    updated_season = StraightSeason(women=['A', 'B'],
                                    men=['C', 'D'],
                                    season_name=season_name,
                                    scenarios=[{('A', 'B'), ('C', 'D')}])
    Saver(updated_season).save()
    assert (_files_are_present(season_name, week=1))
    _clean_up_files(season_name)


def _files_are_present(season_name, week=0):
    week_files = _count_files_matching(season_name, "week{0}.csv".format(week))
    contestant_files = _count_files_matching(season_name, 'contestants.txt')
    men_files = _count_files_matching(season_name, 'men.txt')
    women_files = _count_files_matching(season_name, 'women.txt')
    return week_files == 1 and (contestant_files == 1 or
                                (men_files == 1 and women_files == 1))


def _count_files_matching(season_name, file):
    return len(glob.glob('{0}/{1}'.format(season_name, file)))


def _save_new_bisexual_season(season_name):
    season = BisexualSeason(
        contestants=['A', 'B', 'C', 'D'],
        season_name=season_name,
        scenarios=[{('A', 'B'), ('C', 'D')}, {('A', 'C'), ('B', 'D')}])
    Saver(season).save()


def _clean_up_files(season_name):
    call(['rm', '-rf', season_name])
    assert not _files_are_present(season_name)


def _save_new_straight_season(season_name):
    season = StraightSeason(
        women=['A', 'B'],
        men=['C', 'D'],
        season_name=season_name,
        scenarios=[{('A', 'B'), ('C', 'D')}, {('A', 'D'), ('C', 'B')}])
    saver = Saver(season)
    ...
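
Note that test_saver.py takes a different route from the two unittest-style suites above: instead of registering paths on a self._clean_up_files list for tearDown to delete, it defines a module-level _clean_up_files(season_name) helper that shells out to rm -rf and then asserts the season's files are gone, and each test calls that helper both before and after exercising Saver.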


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run lisa automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
