How to use the compare_files method in toolium

Best Python code snippets using toolium_python

xml_compare_test.py

Source: xml_compare_test.py (GitHub)


...
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertIsNone(xml_compare.compare_files(f1, f2))

    def test_doctype_version_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.1', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_doctype_encoding_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-16') + (get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>').encode('utf-16')
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_doctype_system_url_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test1.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test2.dtd') + '<foo><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_doctype_root_tag_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('baz', 'SYSTEM', 'test.dtd') + '<baz><bar /></baz>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_successful_comapre_with_param(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo a="1" b="c"><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo a="1" b="c"><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertIsNone(xml_compare.compare_files(f1, f2))

    def test_param_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo a="1"><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo b="c"><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_child_num_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_child_tag_mismatch(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /><car /></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertRaises(Exception, xml_compare.compare_files, (f1, f2))

    def test_text_compare(self):
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo>bar</foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo>bar</foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertIsNone(xml_compare.compare_files(f1, f2))

    def test_text_compare_with_whitespace(self):
        """
        Ensure that whitespace is ignored if ignore_whitespace is set to True.
        """
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo>bar</foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + """<foo>

bar
</foo>"""
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertIsNone(xml_compare.compare_files(f1, f2))

    def test_text_no_ignore_whitespace(self):
        """
        Ensure that whitespace is not ignored if ignore_whitespace is set to False.
        """
        # Basic document
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo>bar</foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + """<foo>

bar
</foo>"""
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        with self.assertRaises(Exception):
            xml_compare.compare_files(f1, f2, ignore_whitespace=False)

    def test_ignore_empty(self):
        """
        Ensure that empty tags are ignored if ignore_empty_tags is set to True.
        """
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        self.assertIsNone(xml_compare.compare_files(f1, f2))

    def test_no_ignore_empty(self):
        """
        Ensure that empty tags are not ignored if ignore_empty_tags is set to False.
        """
        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo><bar /></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)
        with self.assertRaises(Exception):
            xml_compare.compare_files(f1, f2, ignore_empty_tags=False)

    def test_custom_compare_function(self):
        def lower_case_compare(s1, s2):
            return s1.lower() == s2.lower()

        doc_1 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo a="HeLlO"></foo>'
        doc_2 = get_xml_declaration('1.0', 'utf-8') + get_xml_doctype('foo', 'SYSTEM', 'test.dtd') + '<foo a="hElLo"></foo>'
        f1 = file_obj_from_string(doc_1)
        f2 = file_obj_from_string(doc_2)

        with self.assertRaises(Exception):
            # Compare using the default compare function, which should fail
            xml_compare.compare_files(f1, f2)

        # Compare using the custom, lower-case compare function
        self.assertIsNone(xml_compare.compare_files(f1, f2, compare_function=lower_case_compare))
...
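
The tests above lean on three helpers that are not shown in the excerpt: get_xml_declaration, get_xml_doctype and file_obj_from_string. Their real definitions live elsewhere in the test module; the following is only a rough sketch of what such helpers might look like (the names are kept, the bodies are assumptions), included so the snippet can be read on its own.

# Hypothetical reconstructions of the helpers used above -- assumptions,
# not the actual definitions from xml_compare_test.py.
import io

def get_xml_declaration(version, encoding):
    # e.g. <?xml version="1.0" encoding="utf-8"?>
    return '<?xml version="%s" encoding="%s"?>' % (version, encoding)

def get_xml_doctype(root_tag, kind, dtd):
    # e.g. <!DOCTYPE foo SYSTEM "test.dtd">
    return '<!DOCTYPE %s %s "%s">' % (root_tag, kind, dtd)

def file_obj_from_string(doc):
    # The tests build both str and bytes documents, so return a matching buffer.
    if isinstance(doc, bytes):
        return io.BytesIO(doc)
    return io.StringIO(doc)

With helpers of this shape, each test builds two in-memory XML documents and expects xml_compare.compare_files either to return None when the documents match or to raise when they differ.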


run_eval_all.py

Source: run_eval_all.py (GitHub)


import argparse
import sys
import numpy as np
np.random.seed(1)
import os, time
import math
import re
import tempfile
from tempfile import mkdtemp
from subprocess import Popen, check_output
import pandas as pd
import gzip
from os.path import splitext, basename, exists, abspath, isfile, getsize

def filesize(filename):
    if os.path.isfile(filename):
        return getsize(filename)
    else:
        return -1

def test_sub_finished(filename, keyword):
    while not isfile(filename):
        time.sleep(60)
    while True:
        with open(filename) as f:
            all_lines = f.readlines()
            if len(all_lines) > 0:
                last = all_lines[-1]
                if keyword in last:
                    break
        time.sleep(60)

def main(argv=sys.argv):
    parser = argparse.ArgumentParser(description='iVariant v0.01.')
    parser.add_argument("-m", dest='run_mode', default="all",
                        help="annotation input")
    parser.add_argument("-i", dest='input', default="",
                        help="annotation input")
    args = parser.parse_args()
    old_path = abspath("./")
    env_path = './'
    gpu_path = './'
    eval_path = './'
    if args.input == '':
        fea_input = '%s/input/input.list' % (env_path)
    else:
        fea_input = args.input
    log_file = './tvar.log'
    cmd = 'rm -f %s' % (log_file)
    check_output(cmd, shell=True)
    #
    # if args.run_mode == 'pca' or args.run_mode == 'all':
    #     background_list = '/fs0/yangh8/DVAR/input/train.input'
    #     cmd1 = 'python TVar_cpu.py -m fea_train -i %s -t 8' % (background_list)
    #     print(cmd1)
    #     check_output(cmd1, shell=True)
    if args.run_mode == 'fea' or args.run_mode == 'all':
        for line in open(fea_input):
            cmd = 'python TVar_cpu.py -m fea -i %s -t 8' % (line.rstrip())
            # check_output(cmd, shell=True)
    # if args.run_mode == 'cv' or args.run_mode == 'all':
    #     cmds = []
    #     train_input = './input/train.list'
    #     for line in open(train_input):
    #         cmd = 'python TVar_cpu.py -m fea -i %s -t 8' % (line.rstrip())
    #         check_output(cmd, shell=True)
    #     cmd1 = "python TVar_gpu.py -m cv"
    #     cmds.append(cmd1)
    #     for cmd in cmds:
    #         check_output(cmd, shell=True)
    #     cmd = 'python TVar_cpu.py -m cv'
    #     check_output(cmd, shell=True)
    #     print("CV OK!")
    #     return
    if args.run_mode == 'train' or args.run_mode == 'all' or args.run_mode == 'train_score':
        cmds = []
        train_input = './input/train.list'
        for line in open(train_input):
            cmd = 'python TVar_cpu.py -m fea -i %s -t 8' % (line.rstrip())
            # check_output(cmd, shell=True)
        cmd1 = "python TVar_gpu.py -m train"
        # check_output(cmd1, shell=True)
    if args.run_mode == 'score' or args.run_mode == 'all' or args.run_mode == 'train_score':
        cmds = []
        for line in open(fea_input):
            cmd = "python TVar_gpu.py -m score -i %s" % (line.rstrip())
            cmds.append(cmd)
        # for cmd in cmds:
        #     check_output(cmd, shell=True)
    # if args.run_mode == 'rare' or args.run_mode == 'all' or args.run_mode == 'train_score':
    #     clinvar_input = './input/rare.input'
    #     for line in open(clinvar_input):
    #         cmd = 'python TVar_cpu.py -m fea -i %s -t 8' % (line.rstrip())
    #         #check_output(cmd, shell=True)
    #     cmds = []
    #     for line in open(clinvar_input):
    #         tissue = basename(line.rstrip()).replace("_rare_neg.gz", "")
    #         tissue = tissue.replace("_rare_pos.gz", "")
    #         cmd = "python TVar_gpu.py -m rare -n %s -i %s" % (tissue, line.rstrip())
    #         cmds.append(cmd)
    #     for cmd in cmds:
    #         check_output(cmd, shell=True)
    #     compare_files = []
    #     cmd = "rm -f ./eval/rare.log"
    #     check_output(cmd, shell=True)
    #     for line in open(clinvar_input):
    #         base_file = splitext(basename(line.rstrip()))[0]
    #         score_file = './score/' + base_file + '.tvar'
    #         compare_files.append(score_file)
    #         if len(compare_files) == 2:
    #             cmd = 'Rscript TVAR_gwas_test.R -p %s -q %s >> ./eval/rare.log' % (compare_files[0], compare_files[1])
    #             check_output(cmd, shell=True)
    #             compare_files.clear()
    #
    # if args.run_mode == 'gwas' or args.run_mode == 'all' or args.run_mode == 'train_score':
    #     clinvar_input = './input/gwas.input'
    #     for line in open(clinvar_input):
    #         cmd = 'python TVar_cpu.py -m fea -i %s -t 8' % (line.rstrip())
    #         #check_output(cmd, shell=True)
    #     cmds = []
    #     for line in open(clinvar_input):
    #         tissue = basename(line.rstrip()).replace("_gwas_neg.gz", "")
    #         tissue = tissue.replace("_gwas_pos.gz", "")
    #         cmd = "python TVar_gpu.py -m gwas -n %s -i %s" % (tissue, line.rstrip())
    #         cmds.append(cmd)
    #     for cmd in cmds:
    #         check_output(cmd, shell=True)
    #     compare_files = []
    #     cmd = ''
    #     check_output(cmd, shell=True)
    #     cmd = "rm -f ./eval/gwas.log"
    #     check_output(cmd, shell=True)
    #     for line in open(clinvar_input):
    #         base_file = splitext(basename(line.rstrip()))[0]
    #         score_file = './score/' + base_file + '.tvar'
    #         compare_files.append(score_file)
    #         if len(compare_files) == 2:
    #             cmd = 'Rscript TVAR_gwas_test.R -p %s -q %s >> ./eval/gwas.log' % (compare_files[0], compare_files[1])
    #             check_output(cmd, shell=True)
    #             compare_files.clear()
    # if args.run_mode == 'eval' or args.run_mode == 'all' or args.run_mode == 'merge':
    #     cmd = 'python remove_set.py'
    #     check_output(cmd, shell=True)
    #     cmd0 = 'python compare.py -m eval > four_sets.log'
    #     check_output(cmd0, shell=True)
    if args.run_mode == 'comp' or args.run_mode == 'all' or args.run_mode == 'train_score':
        clinvar_input = './input/rare.input'
        compare_files = []
        log_file = './eval/rare.log'
        cmd = 'rm -f %s' % (log_file)
        check_output(cmd, shell=True)
        for line in open(clinvar_input):
            base_file = splitext(basename(line.rstrip()))[0]
            score_file = './score/' + base_file + '.deepsea'
            compare_files.append(score_file)
            if len(compare_files) == 2:
                cmd = 'Rscript TVAR_gwas_test.R -p %s -q %s >> %s' % (compare_files[0], compare_files[1], log_file)
                check_output(cmd, shell=True)
                compare_files.clear()

if __name__ == "__main__":
...
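
Note that compare_files in this script is not a function at all: it is a plain list used to collect score files in pairs before handing each pair to an external R script. Pulled out on its own, the pairing pattern looks like the sketch below; the file names are illustrative placeholders, not values taken from the script's real input lists.

from subprocess import check_output

# Illustrative score files -- the real ones are derived from ./input/rare.input.
score_files = ['./score/tissueA_rare_pos.deepsea', './score/tissueA_rare_neg.deepsea']

compare_files = []
for score_file in score_files:
    compare_files.append(score_file)
    if len(compare_files) == 2:
        # Every two accumulated files are compared by the R script,
        # then the buffer is emptied for the next pair.
        cmd = 'Rscript TVAR_gwas_test.R -p %s -q %s >> ./eval/rare.log' % (
            compare_files[0], compare_files[1])
        check_output(cmd, shell=True)
        compare_files.clear()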


test_filesys.py

Source: test_filesys.py (GitHub)


...
        - Test two text files and verify the contents are different.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__diff1.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__diff1.txt')
        test = filesys.compare_files(file1=f1, file2=f2)
        utilities.assert_true(expected=False, test=test, msg=self._MSG1)

    def test01__compare_files__diff2(self):
        """Test the ``compare_files`` method, for different files.
        :Test:
        - Test two text files and verify the contents are different,
          although the file signatures are the same.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__diff2.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__diff2.txt')
        test = filesys.compare_files(file1=f1, file2=f2)
        utilities.assert_true(expected=False, test=test, msg=self._MSG1)

    def test01__compare_files__file_dir(self):
        """Test the ``compare_files`` method, for a file and a directory.
        :Test:
        - Pass a file and a directory into the testing method and verify
          the regular file signature test returns False.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__same.txt')
        f2 = self._DIR_RESRC
        test = filesys.compare_files(file1=f1, file2=f2)
        utilities.assert_true(expected=False, test=test, msg=self._MSG1)

    def test01__compare_files__line_endings(self):
        """Test the ``compare_files`` method, for the same file with different
        line endings.
        :Test:
        - Test two text files and verify the contents are the same, although
          having different line endings.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__dos.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__unix.txt')
        test = filesys.compare_files(file1=f1, file2=f2, contents_only=True)
        utilities.assert_true(expected=True, test=test, msg=self._MSG1)

    def test01__compare_files__same(self):
        """Test the ``compare_files`` method, for the same files.
        :Test:
        - Test two text files and verify the contents are the same.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__same.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__same.txt')
        test = filesys.compare_files(file1=f1, file2=f2)
        utilities.assert_true(expected=True, test=test, msg=self._MSG1)

    def test01__compare_files__sig_only__true(self):
        """Test the ``compare_files`` method, testing the signature only, for
        a True result.
        :Test:
        - Test the signature only for two files, expecting a True response.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__same.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__same.txt')
        test = filesys.compare_files(file1=f1, file2=f2, sig_only=True)
        utilities.assert_true(expected=True, test=test, msg=self._MSG1)

    def test01__compare_files__sig_only__false(self):
        """Test the ``compare_files`` method, testing the signature only, for
        a False result.
        :Test:
        - Test the signature only for two files, expecting a False response.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__diff1.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file2__diff1.txt')
        test = filesys.compare_files(file1=f1, file2=f2, sig_only=True)
        utilities.assert_true(expected=False, test=test, msg=self._MSG1)

    def test02__sig(self):
        """Test the ``_sig`` method, to verify the returned file signature.
        :Test:
        - Test a series of files, and verify the returned signatures are
          as expected.
        """
        f1 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__same.txt')
        f2 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__diff1.txt')
        f3 = os.path.join(self._DIR_RESRC, 'test_filesys__file1__dos.txt')
        files = [f1, f2, f3]
        exp = ((734, 32768, 33204), (734, 32768, 33204), (744, 32768, 33204))
        for f, e in zip(files, exp):
            with self.subTest(msg=f'{f=} {e=}'):
...
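
These filesys.compare_files tests exercise three behaviours: a signature-only check (sig_only=True), a contents-only check that tolerates different line endings (contents_only=True), and a default mode in which the files must match outright. The library's own implementation is not shown on this page; the sketch below is an assumption about what a function with that call signature could look like, not the real filesys code, and its notion of a "signature" is deliberately simplified.

import os

def compare_files(file1, file2, sig_only=False, contents_only=False):
    """Rough sketch: compare two files by a simple signature and/or their contents."""
    def _sig(path):
        # Simplified signature: (size, mode). The real library may include more fields.
        st = os.stat(path)
        return (st.st_size, st.st_mode)

    def _contents_match(p1, p2):
        # Text mode with universal newlines, so DOS and Unix line endings compare equal.
        with open(p1) as fa, open(p2) as fb:
            return fa.read() == fb.read()

    if not (os.path.isfile(file1) and os.path.isfile(file2)):
        return False  # e.g. one of the arguments is a directory
    if sig_only:
        return _sig(file1) == _sig(file2)
    if contents_only:
        return _contents_match(file1, file2)
    return _sig(file1) == _sig(file2) and _contents_match(file1, file2)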


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run toolium automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now!

Get 100 minutes of automation testing for free!

Next-Gen App & Browser Testing Cloud
