How to use the _clean method in ATX

Best Python code snippet using ATX
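The `_clean` helper that appears in the snippets below is a small test utility: given a path, it deletes the directory tree or file at that path if it exists, so each test starts from and ends with a clean working directory. A minimal, self-contained sketch of the pattern (the test body here is hypothetical and only illustrates the before/after usage):

import os
import shutil

def _clean(path):
    # Remove a directory tree or a single file if it exists; otherwise do nothing.
    if os.path.isdir(path):
        shutil.rmtree(path)
    elif os.path.isfile(path):
        os.remove(path)

def test_writes_model_json():
    # Hypothetical test: ensure no leftover file before the test, clean up after it.
    _clean('./model.json')
    with open('./model.json', 'w') as f:
        f.write('{}')
    assert os.path.exists('./model.json')
    _clean('./model.json')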

test_container.py

Source: test_container.py (GitHub)


...
import os
import numpy as np
import pytest
import shutil

def _clean(path):
    if os.path.isdir(path):
        shutil.rmtree(path)
    if os.path.isfile(path):
        os.remove(path)

x = Input(shape=(10,), dtype='float32', name='input')
y = Dense(2)(x)
model = Model(x, y)

### ModelContainer
def test_model_container():
    model_container = ModelContainer()
    with pytest.raises(NotImplementedError):
        model_container.build()
    with pytest.raises(NotImplementedError):
        model_container.train()
    with pytest.raises(NotImplementedError):
        model_container.evaluate(None, None)
    with pytest.raises(NotImplementedError):
        model_container.save_model_json(None)
    with pytest.raises(NotImplementedError):
        model_container.load_model_from_json(None)
    with pytest.raises(NotImplementedError):
        model_container.save_model_weights(None)
    with pytest.raises(NotImplementedError):
        model_container.load_model_weights(None)
    with pytest.raises(NotImplementedError):
        model_container.check_if_model_exists(None)
    with pytest.raises(NotImplementedError):
        model_container.get_available_intermediate_outputs()
    with pytest.raises(NotImplementedError):
        model_container.get_intermediate_output(None)

### KerasModelContainer
def test_init():
    _clean('./model.json')
    model_container = KerasModelContainer(model)
    assert len(model_container.model.layers) == 2
    assert model_container.model_name == "DCASEModelContainer"
    assert model_container.metrics == ['classification']
    model_container.save_model_json('./')
    model_container = KerasModelContainer(model_path='./')
    assert len(model_container.model.layers) == 2
    _clean('./model.json')

def test_load_model_from_json():
    _clean('./model.json')
    model_container = KerasModelContainer(model)
    model_container.save_model_json('./')
    model_container = KerasModelContainer()
    model_container.load_model_from_json('./')
    assert len(model_container.model.layers) == 2
    _clean('./model.json')

def test_save_model_from_json():
    _clean('./model.json')
    model_container = KerasModelContainer(model)
    model_container.save_model_json('./')
    assert os.path.exists('./model.json')
    _clean('./model.json')

def test_save_model_weights():
    weights_file = './best_weights.hdf5'
    _clean(weights_file)
    model_container = KerasModelContainer(model)
    model_container.save_model_weights('./')
    assert os.path.exists(weights_file)
    _clean(weights_file)

# NOTE: this second definition shadows the test_save_model_weights above;
# it actually exercises load_model_weights.
def test_save_model_weights():
    weights_file = './best_weights.hdf5'
    _clean(weights_file)
    model_container = KerasModelContainer(model)
    model_container.save_model_weights('./')
    weights = model_container.model.layers[1].get_weights()
    model_container.model.layers[1].set_weights([np.zeros((10, 2)), np.zeros(2)])
    model_container.load_model_weights('./')
    new_weights = model_container.model.layers[1].get_weights()
    assert np.allclose(new_weights[0], weights[0])
    assert np.allclose(new_weights[1], weights[1])
    _clean(weights_file)

def test_check_if_model_exists():
    model_container = KerasModelContainer(model)
    model_file = './model.json'
    _clean(model_file)
    model_container.save_model_json('./')
    assert model_container.check_if_model_exists('./')

    x = Input(shape=(11,), dtype='float32', name='input')
    y = Dense(2)(x)
    new_model = Model(x, y)
    model_container = KerasModelContainer(new_model)
    assert not model_container.check_if_model_exists('./')
    _clean(model_file)
    assert not model_container.check_if_model_exists('./')

def test_train():
    x = Input(shape=(4,), dtype='float32', name='input')
    y = Dense(2)(x)
    new_model = Model(x, y)
    model_container = KerasModelContainer(new_model)
    X_train = np.concatenate((np.zeros((100, 4)), np.ones((100, 4))), axis=0)
    Y_train = np.zeros((200, 2))
    Y_train[:100, 0] = 1
    Y_train[100:, 1] = 1
    X_val = np.zeros((1, 4))
    Y_val = np.zeros((1, 2))
    Y_val[0, 0] = 1
    X_val2 = np.ones((1, 4))
    Y_val2 = np.zeros((1, 2))
    Y_val2[0, 1] = 1
    file_weights = './best_weights.hdf5'
    file_log = './training.log'
    _clean(file_weights)
    _clean(file_log)
    model_container.train((X_train, Y_train), ([X_val, X_val2], [Y_val, Y_val2]), epochs=3, label_list=['1', '2'])
    assert os.path.exists(file_weights)
    assert os.path.exists(file_log)
    _clean(file_weights)
    _clean(file_log)

    results = model_container.evaluate(([X_val, X_val2], [Y_val, Y_val2]), label_list=['1', '2'])
    assert results['classification'].results()['overall']['accuracy'] > 0.25

    # DataGenerator
    class ToyDataGenerator():
        def __init__(self, X_val, Y_val):
            self.X_val = X_val
            self.Y_val = Y_val

        def __len__(self):
            return 3

        def get_data_batch(self, index):
            return X_val, Y_val

        def shuffle_list(self):
            pass

    data_generator = ToyDataGenerator(X_train, Y_train)
    data_generator_val = ToyDataGenerator([X_val, X_val2], [Y_val, Y_val2])
    data_generator = KerasDataGenerator(data_generator)
    x = Input(shape=(4,), dtype='float32', name='input')
    y = Dense(2)(x)
    new_model = Model(x, y)
    model_container = KerasModelContainer(new_model)
    model_container.train(
        data_generator,
        ([X_val, X_val2], [Y_val, Y_val2]),
        epochs=3,
        batch_size=None,
        label_list=['1', '2'])
    assert os.path.exists(file_weights)
    assert os.path.exists(file_log)
    _clean(file_weights)
    _clean(file_log)

    results = model_container.evaluate(([X_val, X_val2], [Y_val, Y_val2]), label_list=['1', '2'])
    assert results['classification'].results()['overall']['accuracy'] > 0.25

    # Other callbacks
    for metric in ["tagging", "sed"]:
        model_container = KerasModelContainer(new_model, metrics=[metric])
        file_weights = './best_weights.hdf5'
        file_log = './training.log'
        _clean(file_weights)
        _clean(file_log)
        model_container.train((X_train, Y_train), ([X_val, X_val2], [Y_val, Y_val2]), epochs=3, label_list=['1', '2'])
        # assert os.path.exists(file_weights)
        assert os.path.exists(file_log)
        _clean(file_weights)
        _clean(file_log)

    # results = model_container.evaluate(([X_val, X_val2], [Y_val, Y_val2]), label_list=['1', '2'])
...
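A caveat with calling `_clean` at both ends of a test body: if an assertion in the middle fails, the trailing cleanup never runs and stray files remain. One hedged alternative, assuming the `_clean`, `model`, and `KerasModelContainer` definitions from the snippet above are in scope, is to move the cleanup into a pytest fixture so it runs even when the test fails:

import pytest

@pytest.fixture
def clean_model_json():
    # Hypothetical fixture reusing the _clean helper from the snippet above.
    _clean('./model.json')
    yield './model.json'
    _clean('./model.json')  # runs even if the test body raises

def test_init_with_fixture(clean_model_json):
    model_container = KerasModelContainer(model)
    model_container.save_model_json('./')
    assert os.path.exists(clean_model_json)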


run_GATK_CNV.smk

Source: run_GATK_CNV.smk (GitHub)


# wildcard_constraints:
#     seqID = config["seqID"]["sequencerun"]

rule collectReadCounts:
    input:
        bam="Results/{sample}_{seqID}/Data/{sample}_{seqID}-dedup.bam",  #lambda wildcards: config["samples"][wildcards.sample],
        bai="Results/{sample}_{seqID}/Data/{sample}_{seqID}-dedup.bam.bai",
        interval=config["CNV"]["interval"],  #"/projects/wp4/nobackup/workspace/arielle_test/CNV/bedFiles/TM_TE-annotated_closest-noduplicates.preprocessed.interval_list" #Better version? Should be same as the other interval list.
    output:
        "CNV/{sample}_{seqID}/{sample}_{seqID}.counts.hdf5",
    params:
        mergingRule="OVERLAPPING_ONLY",
    log:
        "logs/CNV/{sample}_{seqID}.collectReadCounts.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk --java-options '-Xmx4g' CollectReadCounts -I {input.bam} -L {input.interval} "
        "--interval-merging-rule {params.mergingRule} -O {output} ) &> {log}"

rule denoiseReadCounts:
    input:
        hdf5PoN=config["CNV"]["PoN"],
        hdf5Tumor="CNV/{sample}_{seqID}/{sample}_{seqID}.counts.hdf5",
    output:
        stdCopyRatio="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.standardizedCR.tsv",
        denoisedCopyRatio="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.denoisedCR.tsv",
    log:
        "logs/CNV/{sample}_{seqID}-denoise.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk --java-options '-Xmx4g' DenoiseReadCounts -I {input.hdf5Tumor} --count-panel-of-normals {input.hdf5PoN} "
        "--standardized-copy-ratios {output.stdCopyRatio} --denoised-copy-ratios {output.denoisedCopyRatio} ) &> {log}"

rule collectAllelicCounts:
    input:
        intervalList=config["CNV"]["interval"],  #Better version? Should be same as the other interval list. Also should be in config.
        bam="Results/{sample}_{seqID}/Data/{sample}_{seqID}-dedup.bam",  #lambda wildcards: config["samples"][wildcards.sample],
        bai="Results/{sample}_{seqID}/Data/{sample}_{seqID}-dedup.bam.bai",
        ref=config["reference"]["ref"],
    output:
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.allelicCounts.tsv",
    log:
        "logs/CNV/{sample}_{seqID}_allelicCounts.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk --java-options '-Xmx4g' CollectAllelicCounts -L {input.intervalList} -I {input.bam} -R {input.ref} "
        "-O {output} ) &> {log}"

rule modelSegments:
    input:
        denoisedCopyRatio="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.denoisedCR.tsv",
        allelicCounts="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.allelicCounts.tsv",
    output:
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelBegin.seg",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelFinal.seg",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.cr.seg",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelBegin.af.param",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelBegin.cr.param",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelFinal.af.param",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelFinal.cr.param",
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.hets.tsv",
    params:
        outDir="CNV/{sample}_{seqID}/",
        outPrefix="{sample}_{seqID}_clean",
    log:
        "logs/CNV/{sample}_{seqID}_modelSegments.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk --java-options '-Xmx4g' ModelSegments --denoised-copy-ratios {input.denoisedCopyRatio} "
        "--allelic-counts {input.allelicCounts} --output {params.outDir} --output-prefix {params.outPrefix} ) &> {log}"

rule callCopyRatioSegments:
    input:
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.cr.seg",
    output:
        "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.calledCNVs.seg",
    log:
        "logs/CNV/{sample}_{seqID}_calledCRSegments.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk CallCopyRatioSegments --input {input} --output {output} ) &> {log}"

rule plotModeledSegments:
    input:
        denoisedCopyRatio="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.denoisedCR.tsv",
        allelicCounts="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.hets.tsv",
        segments="CNV/{sample}_{seqID}/{sample}_{seqID}_clean.modelFinal.seg",  #What happens if Results/{sample}/Reports/{sample}_clean.calledCNVs.seg is used instead?
        refDict=config["reference"]["ref"][:-5] + "dict",
    output:
        "CNV/{sample}_{seqID}_clean.calledCNVs.modeled.png",
    params:
        outDir="CNV/",
        outPrefix="{sample}_{seqID}_clean.calledCNVs",  #--minimum-contig-length 46709983
        pointSize=2.0,
    log:
        "logs/CNV/{sample}_{seqID}_plotSegments.log",
    container:
        config["singularitys"]["gatk4"]
    shell:
        "(gatk PlotModeledSegments --denoised-copy-ratios {input.denoisedCopyRatio} --allelic-counts {input.allelicCounts} "
        "--segments {input.segments} --sequence-dictionary {input.refDict} --point-size-allele-fraction {params.pointSize} "
        "--point-size-copy-ratio {params.pointSize} --output {params.outDir} --output-prefix {params.outPrefix} ) &> {log} "

#
# rule Filter_cnv:
#     input:
#         gatkSeg = "CNV/{sample}_{seqID}/{sample}_{seqID}_clean.calledCNVs.seg",
#         bedfile = config["CNV"]["bedPoN"],
#         png = "CNV/{sample}_{seqID}_clean.calledCNVs.modeled.png",  #"Results/{sample}_{seqID}/Reports/{sample}_{seqID}_clean.calledCNVs.modeled.png",
#         cytoCoord = config["CNV"]["cyto"]
#     output:
#         relevant_cnvs = "CNV/{sample}_{seqID}_clean.calledCNV-relevant_cnv-GATK4.xlsx"  #"Results/{sample}_{seqID}/Reports/{sample}_{seqID}_clean.calledCNV-relevant_cnv-GATK4.xlsx"
#         # cnv_done = "Tumor/{sample}_cnv_done.txt"
#     params:
#         outdir = "CNV/"  #"Results/{sample}_{seqID}/Reports",
#     container:
#         config["singularitys"]["python"]
#     log:
#         "logs/Tumor/{sample}_{seqID}_relevant_cnvs-gatk4.log"
#     shell:
...


elasticsearch.py

Source: elasticsearch.py (GitHub)


...
    def _generate_semver(cls, v, values) -> str:
        return f"v{values['major']}.{values['minor']}"

    @validator("document")
    def _cleanup_document(cls, v: _document.Document) -> dict:
        def _clean(d: Any) -> Any:
            if not d:
                return
            if isinstance(d, _document.PublicationReference):
                return d.dict(exclude_none=True, exclude_unset=True)
            if isinstance(d, str):
                return " ".join(re.findall(r"\w+", d))
            if isinstance(d, list):
                return [_clean(_d) for _d in d]
            if isinstance(d, _document.AlgorithmVariable):
                return [_clean(d.name), _clean(d.unit)]
            if isinstance(d, _document.DataAccessUrl):
                return [_clean(d.url), _clean(d.description)]
            if isinstance(d, _document.TextLeaf):
                return _clean(d.text)
            # Skipping equation indexing because those
            # are all latex code
            if isinstance(d, _document.EquationNode):
                return
            if isinstance(d, _document.EquationInlineNode):
                return
            if any(
                isinstance(d, i)
                for i in (
                    _document.SectionWrapper,
                    _document.DivWrapperNode,
                    _document.BaseNode,
                )
            ):
                return _clean(d.children)
            raise Exception("Unhandled Node! ", d)

        return {
            field: _clean(getattr(v, field))
            for field in v.__fields__
            if field not in ["version_description"]
        }

class ElasticsearchAtbd(BaseModel):
    """Elasticsearch document representing an ATBD"""

    id: str
    title: str
    alias: Optional[str]
    version: ElasticsearchAtbdVersion

    class Config:
        """Config."""

        title = "ElasticsearchAtbdVersion"
...
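In this validator, `_clean` is a recursive normalizer: strings are reduced to whitespace-separated word tokens, lists are cleaned element by element, wrapper nodes are flattened to their children, and equation nodes are dropped so only searchable text reaches the index. A standalone sketch of the same recursion on plain Python data (dicts stand in for the `_document` wrapper nodes, which are not reproduced here):

import re
from typing import Any

def _clean(d: Any) -> Any:
    # Reduce a nested structure of strings/lists/dicts to normalized word strings.
    if not d:
        return None
    if isinstance(d, str):
        return " ".join(re.findall(r"\w+", d))
    if isinstance(d, list):
        return [_clean(item) for item in d]
    if isinstance(d, dict):
        # Stand-in for SectionWrapper/DivWrapperNode/BaseNode: recurse into children.
        return _clean(d.get("children"))
    raise Exception("Unhandled node!", d)

print(_clean({"children": ["Hello,   world!", {"children": "plain  text only"}]}))
# -> ['Hello world', 'plain text only']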


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run ATX automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

