How to use _clear method in taiko

Best Python code snippet using taiko

fy.py

Source:fy.py Github

copy

Full Screen

1#!/usr/bin/env python2# 3# ___ __ 4# .' _|.--.--.| |--..-----..----..-----.5# | _|| | || _ || _ || _|| _ |6# |__| |___ ||_____||_____||__| |___ |7# |_____| |_____|8#9# THE ULTIMATE SCALAR MAPPING FRAMEWORK FOR TRACKVIS (.TRK) FILES10#11#12#13# (c) 2012 FNNDSC, Children's Hospital Boston 14#15#16import fnmatch17import matplotlib18matplotlib.use( 'Agg' ) # switch to offscreen rendering19import matplotlib.pyplot as plot20from matplotlib.colors import LogNorm21import multiprocessing22import numpy as np23import os24import scipy.io25import shutil26import subprocess27import sys28import time29import fyborg30from FyFilterCortexAction import *31from FyFilterLengthAction import *32from FyLengthAction import *33from FyMapAction import *34from FyRadiusMapAction import *35import colortable36from logger import Logger37from _colors import Colors38from _common import FNNDSCParser39from _common import FNNDSCConsole as c40from _common import FNNDSCFileIO as io41class FyborgLogic:42 def __init( self ):43 '''44 '''45 def intro( self ):46 intro = Colors.CYAN + """47 48 _.--'''--._49 .' `.50 / \\51 .-' '-.52 / \\ .----------------.53 / _.--._ _.--._ \\ __i """ + Colors.RED + """*FYBORG POWER*""" + Colors.CYAN + """ |54 / / `-._ _.-' \ \\ '-.________________:55 : : """ + Colors.RED + """.--._""" + Colors.CYAN + """) (""" + Colors.RED + """_.--.""" + Colors.CYAN + """ : :56 | `. """ + Colors.RED + """/""" + Colors.CYAN + """ / : \ """ + Colors.RED + """\ """ + Colors.CYAN + """ .' |57 : `-.___.-':/ \:`-.___.-' :58 \ _ \:_:/ _ /59 `.' "`-. .-'" '.'60 : `-._ _.-' :61 | |62 : _.--._.--._ :63 \ ^-.__ __.-^ /64 `-. ''' .-'65 \ /66 /;`-._____.-';\\67 __..-'/ \\'-..__68 __.-' _.-' '-._ `-.__69 fsc70 FYBORG71 >> THE ULTIMATE SCALAR MAPPING FRAMEWORK FOR TRACKVIS FILES <<72 73 (c) 2012 FNNDSC / Boston Children's Hospital74 E-Mail us: dev@babyMRI.org75""" + Colors._CLEAR76 print intro77 def outro( self ):78 outro = Colors.CYAN + """79 .---.--. .--. 80 ,( ),.`. .'.--.`. 81 ; \\ / : \\ ;.'.' 
\\ ;82\\ ; """ + Colors.RED + """_""" + Colors.CYAN + """; :""" + Colors.RED + """_""" + Colors.CYAN + """ :""-/ /-. ;:83 \\ ;""" + Colors.RED + """'-""" + Colors.CYAN + """;":""" + Colors.RED + """-'""" + Colors.CYAN + """:"-/ /-._^. ;:84\\ \\ : : ; ; / / / \\ \\ ;:85\\\\ \\ :\\ V / : : : ; ;-';86 \\\\ \\ ; ;._.':,' ; ; : :-' 87\\ \\\\ \\ : : ; : ;o /-._; : : 88 \\ \\\\ \\ _;o; : ; '-'.'.-"`. :-^, 89 \\ \\\\ \\ .-.;:_" _..--"/ / _ ;y ; 90 \\ \\\\ \\ .' / '-,; :: : : (o) ; : 91 \\ \\\\ "-. / : ;: ;; ; ; / : 92bug : \\\\ \\ : ; :: ;; .' ;._..+: ; 93 : \\\\ \\ : _: ;: :: / ; ; ; ;(o): 94"-. \\ \\\\ \\/ Y' '. // ^ \\Y / / : '._.; 95\\ \\ \\ ;"-. ;/ 7"" / \\ :.-'.' .'; / / 96\\\\ \\ \\ : ":_ :"\\ ;..-^'--" .' / / / 97 \\\\ \\ "+.;-"" )._..^-"" / / .' .' 98 ;"+.;_.-" :--=<___) __..__ / :-" .' 99\\ :/_. ; .-" \\ _____.--""__..--"" ;.-" 100 ": '+' __..-\\/\\ ''''T__..___..-": 101 : :\\." \\/; ;: () ; .-" ; 102 "--q/\\ " :; :-" : 103 \\/; ;: ; ..-( 104 " :-\\__/-+""-. .^. 105 ; \\ ( \\; `. 106 /`. `-/\\ ,=. '. `. 107 : \\ \\ :"-:/ .`. \\ \\ 108 ; ; ;;"-;\\/ .'`."-. ; 109 : : ;"-.: \\/ .' 
j "-: 110 ; : :"-.; `: ,' ; \\ 111 : : :"-: "..': ; 112 ; ; ;"-; `=; ; : 113""" + Colors._CLEAR114 print outro115 def run( self, input, output, radius, length, stage, cortex_only, verbose ):116 '''117 '''118 if stage == 0:119 # create output directory120 # but not if we start with a different stage121 os.mkdir( output )122 # activate log file123 self.__debug = verbose124 self.__logger = Logger( os.path.join( output, 'log.txt' ), verbose )125 sys.stdout = self.__logger126 sys.stderr = self.__logger127 # the input data128 _inputs = {'adc':[ '*adc.nii', None],129 'b0':[ '*b0.nii', None],130 'e1':['*e1.nii', None],131 'e2':[ '*e2.nii', None],132 'e3':[ '*e3.nii', None],133 'fa':[ '*fa.nii', None],134 'fibers':['*streamline.trk', '*/final-trackvis/*.trk', None],135 'segmentation': ['*aparc+aseg.mgz', None],136 'T1':['*/mri/brain.mgz', None]137 }138 # the output data139 _outputs = {'T1':os.path.join( output, 'T1.nii' ),140 'segmentation':os.path.join( output, 'aparc+aseg.nii' ),141 'b0_T1_space':os.path.join( output, 'dti_b0_T1_space.nii' ),142 'adc_T1_space':os.path.join( output, 'dti_adc_T1_space.nii' ),143 'fa_T1_space':os.path.join( output, 'dti_fa_T1_space.nii' ),144 'e1_T1_space':os.path.join( output, 'dti_e1_T1_space.nii' ),145 'e2_T1_space':os.path.join( output, 'dti_e2_T1_space.nii' ),146 'e3_T1_space':os.path.join( output, 'dti_e3_T1_space.nii' ),147 'B0toT1matrix':os.path.join( output, 'B0-to-T1.mat' ),148 'fibers':os.path.join( output, 'fybers_T1_space.trk' ),149 'fibers_mapped':os.path.join( output, 'fybers_T1_space_mapped.trk' ),150 'fibers_mapped_length_filtered':os.path.join( output, 'fybers_T1_space_mapped_length_filtered.trk' ),151 'fibers_mapped_length_filtered_cortex_only':os.path.join( output, 'fybers_T1_space_mapped_length_filtered_cortex_only.trk' ),152 'fibers_final':os.path.join( output, 'fybers_final.trk' ),153 'matrix_all': os.path.join( output, 'matrix_all.mat' ),154 'matrix_fibercount': os.path.join( output, 'matrix_fibercount.csv' ),155 
'matrix_length': os.path.join( output, 'matrix_length.csv' ),156 'matrix_adc': os.path.join( output, 'matrix_adc.csv' ),157 'matrix_inv_adc': os.path.join( output, 'matrix_inv_adc.csv' ),158 'matrix_fa': os.path.join( output, 'matrix_fa.csv' ),159 'matrix_e1': os.path.join( output, 'matrix_e1.csv' ),160 'matrix_e2': os.path.join( output, 'matrix_e2.csv' ),161 'matrix_e3': os.path.join( output, 'matrix_e3.csv' ),162 'roi':os.path.join( output, 'roi' )163 }164 self.intro()165 # 4 x beep166 print '\a\a\a\a\a\a\a'167 #time.sleep( 3 )168 # stage 1169 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '1' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' ANALYZING INPUT DATA' + Colors._CLEAR )170 if stage <= 2: # we can never skip stage 1 without skipping stage 2171 _inputs = self.analyze_input_data( input, _inputs )172 else:173 c.info( Colors.PURPLE + ' skipping it..' + Colors._CLEAR )174 # stage 2175 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '2' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' PREPROCESSING' + Colors._CLEAR )176 if stage <= 2:177 self.preprocessing( _inputs, _outputs )178 else:179 c.info( Colors.PURPLE + ' skipping it..' + Colors._CLEAR )180 # stage 3181 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '3' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' MAPPING' + Colors._CLEAR )182 if stage <= 3:183 self.mapping( _inputs, _outputs, radius )184 else:185 c.info( Colors.PURPLE + ' skipping it..' + Colors._CLEAR )186 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '4' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' FILTERING' + Colors._CLEAR )187 if stage <= 4:188 self.filtering( _inputs, _outputs, length, cortex_only )189 else:190 c.info( Colors.PURPLE + ' skipping it..' 
+ Colors._CLEAR )191 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '5' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' CONNECTIVITY MATRICES' + Colors._CLEAR )192 if stage <= 5:193 self.connectivity( _inputs, _outputs, cortex_only )194 else:195 c.info( Colors.PURPLE + ' skipping it..' + Colors._CLEAR )196 c.info( Colors.YELLOW + '>> STAGE [' + Colors.PURPLE + '6' + Colors.YELLOW + ']: ' + Colors.YELLOW + ' ROI EXTRACTION' + Colors._CLEAR )197 if stage <= 6:198 self.roi_extract( _inputs, _outputs )199 else:200 c.info( Colors.PURPLE + ' skipping it..' + Colors._CLEAR )201 self.outro()202 c.info( '' )203 c.info( 'ALL DONE! SAYONARA..' )204 def preprocessing( self, inputs, outputs ):205 '''206 Co-Register the input files using Flirt.207 '''208 # convert the T1.mgz to T1.nii209 cmd = 'ss;'210 cmd += 'chb-fsstable;'211 cmd += 'mri_convert ' + inputs['T1'][-1] + ' ' + outputs['T1']212 c.info( Colors.YELLOW + ' Converting ' + Colors.PURPLE + 'T1.mgz' + Colors.YELLOW + ' to ' + Colors.PURPLE + 'T1.nii' + Colors.YELLOW + '!' + Colors._CLEAR )213 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )214 sp.communicate()215 # convert the aparc+aseg.mgz to aparc+aseg.nii216 cmd = 'ss;'217 cmd += 'chb-fsstable;'218 cmd += 'mri_convert ' + inputs['segmentation'][-1] + ' ' + outputs['segmentation']219 c.info( Colors.YELLOW + ' Converting ' + Colors.PURPLE + 'aparc+aseg.mgz' + Colors.YELLOW + ' to ' + Colors.PURPLE + 'aparc+aseg.nii' + Colors.YELLOW + '!' 
+ Colors._CLEAR )220 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )221 sp.communicate()222 # register B0 to T1223 cmd = 'ss;'224 cmd += 'chb-fsstable;'225 flirtcmd = 'flirt -in ' + inputs['b0'][-1] + ' -ref ' + outputs['T1'] + ' -usesqform -nosearch -dof 6 -cost mutualinfo -out ' + outputs['b0_T1_space'] + '.gz -omat ' + outputs['B0toT1matrix'] + ';'226 cmd += flirtcmd227 cmd += 'gzip -d -f ' + outputs['b0_T1_space'] + '.gz;'228 self.__logger.debug( flirtcmd )229 c.info( Colors.YELLOW + ' Registering ' + Colors.PURPLE + os.path.split( inputs['b0'][-1] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + 'T1.nii' + Colors.YELLOW + ' and storing ' + Colors.PURPLE + os.path.split( outputs['B0toT1matrix'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )230 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )231 sp.communicate()232 # resample all other DTI volumes to T1 space233 for i in inputs:234 if i == 'fibers' or i == 'segmentation' or i == 'T1' or i == 'b0':235 # we do not map these236 continue237 cmd = 'ss;'238 cmd += 'chb-fsstable;'239 flirtcmd = 'flirt -in ' + inputs[i][-1] + ' -ref ' + outputs['T1'] + ' -out ' + outputs[i + '_T1_space'] + '.gz -init ' + outputs['B0toT1matrix'] + ' -applyxfm;'240 cmd += flirtcmd241 cmd += 'gzip -d -f ' + outputs[i + '_T1_space'] + '.gz;'242 self.__logger.debug( flirtcmd )243 c.info( Colors.YELLOW + ' Resampling ' + Colors.PURPLE + os.path.split( inputs[i][-1] )[1] + Colors.YELLOW + ' as ' + Colors.PURPLE + os.path.split( outputs[i + '_T1_space'] )[1] + Colors.YELLOW + ' using ' + Colors.PURPLE + os.path.split( outputs['B0toT1matrix'] )[1] + Colors.YELLOW + '!' 
+ Colors._CLEAR )244 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )245 sp.communicate()246 # resample the fibers to T1 space247 cmd = 'ss;'248 cmd += 'chb-fsstable;'249 transformcmd = 'track_transform ' + inputs['fibers'][-1] + ' ' + outputs['fibers'] + ' -src ' + inputs['b0'][-1] + ' -ref ' + outputs['T1'] + ' -reg ' + outputs['B0toT1matrix'] + ';'250 cmd += transformcmd251 self.__logger.debug( transformcmd )252 c.info( Colors.YELLOW + ' Transforming ' + Colors.PURPLE + os.path.split( inputs['fibers'][-1] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + ' using ' + Colors.PURPLE + os.path.split( outputs['B0toT1matrix'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )253 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )254 sp.communicate()255 def mapping( self, inputs, outputs, radius ):256 '''257 Map all detected scalar volumes to each fiber.258 '''259 # check if we have all required input data260 # we need at least: 261 # - outputs['fibers'] == Track file in T1 space262 # - outputs['segmentation'] == Label Map263 if not os.path.exists( outputs['fibers'] ):264 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers'] + Colors.RED + ' but we really need it to start with stage 3!!' + Colors._CLEAR )265 sys.exit( 2 )266 if not os.path.exists( outputs['segmentation'] ):267 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['segmentation'] + Colors.RED + ' but we really need it to start with stage 3!!' 
+ Colors._CLEAR )268 sys.exit( 2 )269 actions = []270 for i in inputs:271 if i == 'fibers' or i == 'segmentation' or i == 'T1' or i == 'b0':272 # we do not map these273 continue274 if not os.path.exists( outputs[i + '_T1_space'] ):275 # we can't map this since we didn't find the file276 continue277 # for normal scalars: append it to the actions278 actions.append( FyMapAction( i, outputs[i + '_T1_space'] ) )279 c.info( Colors.YELLOW + ' Configuring mapping of ' + Colors.PURPLE + os.path.split( outputs[i + '_T1_space'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )280 # now the segmentation with the lookaround radius281 actions.append( FyRadiusMapAction( 'segmentation', outputs['segmentation'], radius ) )282 c.info( Colors.YELLOW + ' Configuring mapping of ' + Colors.PURPLE + os.path.split( outputs['segmentation'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )283 # and also the fiber length284 actions.append( FyLengthAction() )285 c.info( Colors.YELLOW + ' Configuring mapping of ' + Colors.PURPLE + 'fiber length' + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )286 # run, forest, run!!287 c.info( Colors.YELLOW + ' Performing configured mapping for ' + Colors.PURPLE + os.path.split( outputs['fibers'] )[1] + Colors.YELLOW + ' and storing as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped'] )[1] + Colors.YELLOW + ' (~ 30 minutes)!' 
+ Colors._CLEAR )288 if self.__debug:289 fyborg.fyborg( outputs['fibers'], outputs['fibers_mapped'], actions, 'debug' )290 else:291 fyborg.fyborg( outputs['fibers'], outputs['fibers_mapped'], actions )292 def filtering( self, inputs, outputs, length, cortex_only ):293 '''294 Filter the mapped fibers.295 '''296 # check if we have all required input data297 # we need at least: 298 # - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars299 if not os.path.exists( outputs['fibers_mapped'] ):300 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_mapped'] + Colors.RED + ' but we really need it to start with stage 4!!' + Colors._CLEAR )301 sys.exit( 2 )302 # find the order of the mapped scalars303 header = io.loadTrkHeaderOnly( outputs['fibers_mapped'] )304 scalars = list( header['scalar_name'] )305 # split the length range306 length = length.split( ' ' )307 min_length = int( length[0] )308 max_length = int( length[1] )309 # length filtering310 c.info( Colors.YELLOW + ' Filtering ' + Colors.PURPLE + 'fiber length' + Colors.YELLOW + ' to be ' + Colors.PURPLE + '>' + str( min_length ) + ' and <' + str( max_length ) + Colors.YELLOW + ' for ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped'] )[1] + Colors.YELLOW + ' and store as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + '!' 
+ Colors._CLEAR )311 fyborg.fyborg( outputs['fibers_mapped'], outputs['fibers_mapped_length_filtered'], [FyFilterLengthAction( scalars.index( 'length' ), min_length, max_length )] )312 header = io.loadTrkHeaderOnly( outputs['fibers_mapped_length_filtered'] )313 new_count = header['n_count']314 c.info( Colors.YELLOW + ' Number of tracks after ' + Colors.PURPLE + 'length filtering' + Colors.YELLOW + ': ' + str( new_count ) + Colors.YELLOW + Colors._CLEAR )315 if cortex_only:316 # special cortex filtering317 c.info( Colors.YELLOW + ' Filtering for ' + Colors.PURPLE + 'valid cortex structures' + Colors.YELLOW + ' in ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + ' and store as ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered_cortex_only'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )318 c.info( Colors.PURPLE + ' Conditions for valid fibers:' + Colors._CLEAR )319 c.info( Colors.PURPLE + ' 1.' + Colors.YELLOW + ' The fiber track has to pass through the cerebral white matter. (Label values: ' + Colors.PURPLE + '[2, 41]' + Colors.YELLOW + ')' + Colors._CLEAR )320 c.info( Colors.PURPLE + ' 2.' + Colors.YELLOW + ' The fiber track shall only touch sub-cortical structures not more than ' + Colors.PURPLE + '5 times' + Colors.YELLOW + '. (Label values: ' + Colors.PURPLE + '[10, 49, 16, 28, 60, 4, 43]' + Colors.YELLOW + ')' + Colors._CLEAR )321 c.info( Colors.PURPLE + ' 3.' + Colors.YELLOW + ' The track shall not pass through the corpus callosum (Labels: ' + Colors.PURPLE + '[251, 255]' + Colors.YELLOW + ') and end in the same hemisphere (Labels: ' + Colors.PURPLE + '[1000-1035]' + Colors.YELLOW + ' for left, ' + Colors.PURPLE + '[2000-2035]' + Colors.YELLOW + ' for right).' 
+ Colors._CLEAR )322 fyborg.fyborg( outputs['fibers_mapped_length_filtered'], outputs['fibers_mapped_length_filtered_cortex_only'], [FyFilterCortexAction( scalars.index( 'segmentation' ) )] )323 header = io.loadTrkHeaderOnly( outputs['fibers_mapped_length_filtered_cortex_only'] )324 new_count = header['n_count']325 c.info( Colors.YELLOW + ' Number of tracks after ' + Colors.PURPLE + 'cortex filtering' + Colors.YELLOW + ': ' + str( new_count ) + Colors.YELLOW + Colors._CLEAR )326 c.info( Colors.YELLOW + ' Copied filtered tracks from ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered_cortex_only'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )327 shutil.copyfile( outputs['fibers_mapped_length_filtered_cortex_only'], outputs['fibers_final'] )328 else:329 c.info( Colors.YELLOW + ' Info: ' + Colors.PURPLE + 'Cortical _and_ sub-cortical structures ' + Colors.YELLOW + 'will be included..' + Colors._CLEAR )330 c.info( Colors.YELLOW + ' Copied filtered tracks from ' + Colors.PURPLE + os.path.split( outputs['fibers_mapped_length_filtered'] )[1] + Colors.YELLOW + ' to ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )331 shutil.copyfile( outputs['fibers_mapped_length_filtered'], outputs['fibers_final'] )332 def connectivity( self, inputs, outputs, cortex_only ):333 '''334 Generate connectivity matrices using mapped values.335 '''336 # check if we have all required input data337 # we need at least: 338 # - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars339 if not os.path.exists( outputs['fibers_final'] ):340 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_final'] + Colors.RED + ' but we really need it to start with stage 5!!' 
+ Colors._CLEAR )341 sys.exit( 2 )342 s = io.loadTrk( outputs['fibers_final'] )343 tracks = s[0]344 header = s[1]345 scalarNames = header['scalar_name'].tolist()346 matrix = {}347 indices = {}348 # check if the segmentation is mapped349 try:350 indices['segmentation'] = scalarNames.index( 'segmentation' )351 except:352 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + 'segmentation' + Colors.RED + ' as a mapped scalar but we really need it!' )353 sys.exit( 2 )354 if cortex_only:355 labels = [2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024, 2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025, 2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015, 2001, 2030, 2034, 2035, 1012, 1019, 1032, 1014, 1020, 1018, 1027, 1028, 1003, 1024, 1017, 1026, 1002, 1023, 1010, 1022, 1031, 1029, 1008, 1025, 1005, 1021, 1011, 1013, 1007, 1016, 1006, 1033, 1009, 1015, 1001, 1030, 1034, 1035]356 else:357 labels = [2012, 2019, 2032, 2014, 2020, 2018, 2027, 2028, 2003, 2024, 2017, 2026, 2002, 2023, 2010, 2022, 2031, 2029, 2008, 2025, 2005, 2021, 2011, 2013, 2007, 2016, 2006, 2033, 2009, 2015, 2001, 2030, 2034, 2035, 49, 50, 51, 52, 58, 53, 54, 1012, 1019, 1032, 1014, 1020, 1018, 1027, 1028, 1003, 1024, 1017, 1026, 1002, 1023, 1010, 1022, 1031, 1029, 1008, 1025, 1005, 1021, 1011, 1013, 1007, 1016, 1006, 1033, 1009, 1015, 1001, 1030, 1034, 1035, 10, 11, 12, 13, 26, 17, 18, 16]358 c.info( Colors.YELLOW + ' Getting ready to create connectivity matrices for the following labels: ' + Colors.PURPLE + str( labels ) + Colors._CLEAR )359 c.info( Colors.YELLOW + ' Note: Mapped scalar values along the points will be averaged for each fiber track.' 
+ Colors._CLEAR )360 # create matrices for the attached scalars361 for i, s in enumerate( scalarNames ):362 if i >= header['n_scalars']:363 break364 if not s or s == 'segmentation':365 continue366 # this is a scalar value for which a matrix will be created367 matrix[s] = np.zeros( [len( labels ), len( labels )] )368 indices[s] = scalarNames.index( s )369 c.info( Colors.YELLOW + ' Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + s + Colors.YELLOW + ' values!' + Colors._CLEAR )370 if s == 'adc':371 s = 'inv_adc'372 matrix[s] = np.zeros( [len( labels ), len( labels )] )373 indices[s] = scalarNames.index( 'adc' )374 c.info( Colors.YELLOW + ' Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + s + Colors.YELLOW + ' values!' + Colors._CLEAR )375 # always create one for the fiber counts376 matrix['fibercount'] = np.zeros( [len( labels ), len( labels )] )377 indices['fibercount'] = 0378 c.info( Colors.YELLOW + ' Preparing matrix (' + Colors.PURPLE + '[' + str( len( labels ) ) + 'x' + str( len( labels ) ) + ']' + Colors.YELLOW + ') for ' + Colors.PURPLE + 'fibercount' + Colors.YELLOW + ' values!' + Colors._CLEAR )379 c.info( Colors.YELLOW + ' Analyzing fibers of ' + Colors.PURPLE + os.path.split( outputs['fibers_final'] )[1] + Colors.YELLOW + '..' 
+ Colors._CLEAR )380 for tCounter, t in enumerate( tracks ):381 tCoordinates = t[0]382 tScalars = t[1]383 # find the segmentation labels for the start and end points384 start_label = tScalars[0, indices['segmentation']]385 end_label = tScalars[-1, indices['segmentation']]386 try:387 # now grab the index of the labels in our label list388 start_index = labels.index( start_label )389 end_index = labels.index( end_label )390 except:391 # this label is not monitored, so ignore this track392 continue393 # loop through all different scalars394 for m in matrix:395 # calculate the mean for each track396 value = np.mean( tScalars[:, indices[m]] )397 if m == 'inv_adc':398 # invert the value since it is 1-ADC399 value = 1 / value400 elif m == 'fibercount':401 # in the case of fibercount, add 1402 value = 1403 # store value in the matrix404 matrix[m][start_index, end_index] += value405 if not start_index == end_index:406 matrix[m][end_index, start_index] += value407 # fiber loop is done, all values are stored408 # now normalize the matrices409 np.seterr( all='ignore' ) # avoid div by 0 warnings410 cbar = None411 for m in matrix:412 if not m == 'fibercount':413 # normalize it414 matrix[m][:] /= matrix['fibercount']415 matrix[m] = np.nan_to_num( matrix[m] )416 # store the matrix417 c.info( Colors.YELLOW + ' Storing ' + Colors.PURPLE + m + Colors.YELLOW + ' connectivity matrix as ' + Colors.PURPLE + os.path.split( outputs['matrix_' + m] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )418 np.savetxt( outputs['matrix_' + m], matrix[m], delimiter='\t' )419 # store a picture420 picture_path = os.path.splitext( os.path.split( outputs['matrix_' + m] )[1] )[0] + '.png'421 c.info( Colors.YELLOW + ' Generating ' + Colors.PURPLE + m + ' image' + Colors.YELLOW + ' as ' + Colors.PURPLE + picture_path + Colors.YELLOW + '!' 
+ Colors._CLEAR )422 img = plot.imshow( matrix[m], interpolation='nearest' )423 img.set_cmap( 'jet' )424 img.set_norm( LogNorm() )425 img.axes.get_xaxis().set_visible( False )426 img.axes.get_yaxis().set_visible( False )427 if not cbar:428 cbar = plot.colorbar()429 cbar.set_label( m )430 cbar.set_ticks( [] )431 plot.savefig( os.path.join( os.path.split( outputs['matrix_' + m] )[0], picture_path ) )432 np.seterr( all='warn' ) # reactivate div by 0 warnings433 # now store the matlab version as well434 c.info( Colors.YELLOW + ' Storing ' + Colors.PURPLE + 'matlab data bundle' + Colors.YELLOW + ' containing ' + Colors.PURPLE + 'all matrices' + Colors.YELLOW + ' as ' + Colors.PURPLE + os.path.split( outputs['matrix_all'] )[1] + Colors.YELLOW + '!' + Colors._CLEAR )435 scipy.io.savemat( outputs['matrix_all'], matrix, oned_as='row' )436 def roi_extract( self, inputs, outputs ):437 '''438 '''439 # check if we have all required input data440 # we need at least: 441 # - outputs['fibers_mapped'] == Track file in T1 space with mapped scalars442 if not os.path.exists( outputs['fibers_final'] ):443 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + outputs['fibers_final'] + Colors.RED + ' but we really need it to start with stage 6!!' + Colors._CLEAR )444 sys.exit( 2 )445 s = io.loadTrk( outputs['fibers_final'] )446 tracks = s[0]447 header = s[1]448 scalarNames = header['scalar_name'].tolist()449 labels = {}450 # check if the segmentation is mapped451 try:452 seg_index = scalarNames.index( 'segmentation' )453 except:454 c.error( Colors.RED + 'Could not find ' + Colors.YELLOW + 'segmentation' + Colors.RED + ' as a mapped scalar but we really need it!' 
)455 sys.exit( 2 )456 # create the roi subfolder457 if not os.path.exists( outputs['roi'] ):458 os.mkdir( outputs['roi'] )459 # parse the color table460 lut = colortable.freesurfer.split( '\n' )461 colors = {}462 for color in lut:463 if not color or color[0] == '#':464 continue465 splitted_line = color.split( ' ' )466 splitted_line = filter( None, splitted_line )467 colors[splitted_line[0]] = splitted_line[1]468 # loop through tracks469 for i, t in enumerate( tracks ):470 tCoordinates = t[0]471 tScalars = t[1]472 # grab the scalars for each point473 for scalar in tScalars:474 # but only the label value475 label_value = str( int( scalar[seg_index] ) )476 if not label_value in labels:477 labels[label_value] = []478 if not i in labels[label_value]:479 # store the unique fiber id for this label480 labels[label_value].append( i )481 # now loop through all detected labels482 for l in labels:483 new_tracks = []484 for t_id in labels[l]:485 # grab the fiber + scalars486 current_fiber = tracks[t_id]487 new_tracks.append( current_fiber )488 # now store the trk file489 trk_outputfile = l + '_' + colors[l] + '.trk'490 nii_outputfile = l + '_' + colors[l] + '.nii'491 c.info( Colors.YELLOW + ' Creating fiber ROI ' + Colors.PURPLE + trk_outputfile + Colors.YELLOW + '!' + Colors._CLEAR )492 io.saveTrk( os.path.join( outputs['roi'], trk_outputfile ), new_tracks, header, None, True )493 # also create a roi label volume for this label value494 c.info( Colors.YELLOW + ' Creating NII ROI ' + Colors.PURPLE + nii_outputfile + Colors.YELLOW + '!' 
+ Colors._CLEAR )495 cmd = 'ss;'496 cmd += 'chb-fsstable;'497 cmd += 'mri_binarize --i ' + outputs['segmentation'] + ' --o ' + os.path.join( outputs['roi'], nii_outputfile ) + ' --match ' + l + ' --binval ' + l + ';'498 self.__logger.debug( cmd )499 sp = subprocess.Popen( ["/bin/bash", "-i", "-c", cmd], stdout=sys.stdout )500 sp.communicate()501 def analyze_input_data( self, input_directory, inputs ):502 '''503 Scan an input directory for all kind of inputs. Connectome Pipeline output has 504 higher priority than Tractography Pipeline output since the Connectome pipeline 505 also includes the Tractography output.506 507 Returns a dictionary of found files.508 '''509 for root, dirs, files in os.walk( input_directory ):510 dirs.sort()511 for f in files:512 fullpath = os.path.join( root, f )513 # try to find the files514 for _f in inputs:515 # don't check if we already found this one516 if inputs[_f][-1] != None:517 continue518 for _mask in inputs[_f][:-1]:519 if fnmatch.fnmatch( fullpath, _mask ):520 # this matches our regex521 c.info( Colors.YELLOW + ' Found ' + Colors.PURPLE + f + Colors.YELLOW + '!' + Colors._CLEAR )522 self.__logger.debug( 'Full path: ' + fullpath )523 inputs[_f][-1] = fullpath524 #time.sleep( 1 )525 # don't consider any other option526 break527 return inputs528# ENTRYPOINT529if __name__ == "__main__":530 # 1) scan input directory531 # 2) preform preprocessing 532 # 3) perform mapping533 # 4) perform filtering534 # 5) create connectivity matrices535 parser = FNNDSCParser( description='fyborg - THE ULTIMATE SCALAR MAPPING FRAMEWORK FOR TRACKVIS (.TRK) FILES' )536 parser.add_argument( '-i', '--input', action='store', dest='input', required=True, help='The input folder which gets scanned automatically for usable volume- and track-files.' 
)537 parser.add_argument( '-o', '--output', action='store', dest='output', required=True, help='The output folder which gets created if it does not exit' )538 parser.add_argument( '-r', '--radius', action='store', dest='radius', default=3, type=int, help='The look-a-round radius in voxels. E.g. --radius 10, DEFAULT: 3' )539 parser.add_argument( '-l', '--length', action='store', dest='length', default="20 200", help='The lower and upper borders for length thresholding in mm. E.g. --length "60 100", DEFAULT "20 200" ' )540 parser.add_argument( '-co', '--cortex_only', action='store_true', dest='cortex_only', help='Perform filtering for cortex specific analysis and skip sub-cortical structures.' )541 parser.add_argument( '-s', '--stage', action='store', dest='stage', default=0, type=int, help='Start with a specific stage while skipping the ones before. E.g. --stage 3 directly starts the mapping without preprocessing, --stage 4 starts with the filtering' )542 parser.add_argument( '-overwrite', '--overwrite', action='store_true', dest='overwrite', help='Overwrite any existing output. DANGER!!' )543 parser.add_argument( '-v', '--verbose', action='store_true', dest='verbose', help='Show verbose output' )544 # always show the help if no arguments were specified545 if len( sys.argv ) == 1:546 parser.print_help()547 sys.exit( 1 )548 options = parser.parse_args()549 # validate the inputs here550 if not os.path.isdir( options.input ):551 c.error( Colors.RED + 'Could not find the input directory! Specify a valid directory using -i $PATH.' + Colors._CLEAR )552 sys.exit( 2 )553 if os.path.exists( options.output ) and int( options.stage ) == 0:554 if not options.overwrite:555 c.error( Colors.RED + 'The output directory exists! Add --overwrite to erase previous content!' + Colors._CLEAR )556 c.error( Colors.RED + 'Or use --stage > 2 to start with a specific stage which re-uses the previous content..' 
+ Colors._CLEAR )557 sys.exit( 2 )558 else:559 # silently delete the existing output560 shutil.rmtree( options.output )561 if options.stage > 0 and not os.path.exists( options.output ):562 # we start with a specific stage so we need the output stuff563 c.error( Colors.RED + 'The output directory does not exist! We need it when using -s/--stage to resume the process!' + Colors._CLEAR )564 sys.exit( 2 )565 logic = FyborgLogic()...

Full Screen

Full Screen

test_protectclass.py

Source:test_protectclass.py Github

copy

Full Screen

import unittest


class Test_protectName(unittest.TestCase):
    """Tests for zope.security.protectclass.protectName."""

    def setUp(self):
        # reset the global checker registry so tests are isolated
        from zope.security.checker import _clear
        _clear()

    def tearDown(self):
        from zope.security.checker import _clear
        _clear()

    def _callFUT(self, class_, name, permission):
        from zope.security.protectclass import protectName
        return protectName(class_, name, permission)

    def test_wo_existing_checker_w_zope_Public(self):
        from zope.security.checker import CheckerPublic
        from zope.security.checker import _checkers
        self._callFUT(Foo, 'bar', 'zope.Public')
        # FIX: use assertIs instead of assertTrue(... is ...) so a failure
        # reports both operands instead of just "False is not true"
        self.assertIs(_checkers[Foo].get_permissions['bar'], CheckerPublic)

    def test_w_existing_checker(self):
        from zope.security.checker import Checker
        from zope.security.checker import _checkers
        checker = _checkers[Foo] = Checker({})
        permission = object()
        self._callFUT(Foo, 'bar', permission)
        self.assertIs(_checkers[Foo], checker)
        self.assertIs(checker.get_permissions['bar'], permission)


class Test_protectSetAttribute(unittest.TestCase):
    """Tests for zope.security.protectclass.protectSetAttribute."""

    def setUp(self):
        # reset the global checker registry so tests are isolated
        from zope.security.checker import _clear
        _clear()

    def tearDown(self):
        from zope.security.checker import _clear
        _clear()

    def _callFUT(self, class_, name, permission):
        from zope.security.protectclass import protectSetAttribute
        return protectSetAttribute(class_, name, permission)

    def test_wo_existing_checker_w_zope_Public(self):
        from zope.security.checker import CheckerPublic
        from zope.security.checker import _checkers
        self._callFUT(Foo, 'bar', 'zope.Public')
        self.assertIs(_checkers[Foo].set_permissions['bar'], CheckerPublic)

    def test_w_existing_checker(self):
        from zope.security.checker import Checker
        from zope.security.checker import _checkers
        checker = _checkers[Foo] = Checker({})
        permission = object()
        self._callFUT(Foo, 'bar', permission)
        self.assertIs(_checkers[Foo], checker)
        self.assertIs(checker.set_permissions['bar'], permission)


class Test_protectLikeUnto(unittest.TestCase):
    """Tests for zope.security.protectclass.protectLikeUnto."""

    def setUp(self):
        # reset the global checker registry so tests are isolated
        from zope.security.checker import _clear
        _clear()

    def tearDown(self):
        from zope.security.checker import _clear
        _clear()

    def _callFUT(self, class_, like_unto):
        from zope.security.protectclass import protectLikeUnto
        return protectLikeUnto(class_, like_unto)

    def test_wo_existing_like_unto_checker(self):
        from zope.security.checker import _checkers
        # FIX: assertNotIn gives a clearer failure message than
        # assertFalse(x in y)
        self.assertNotIn(Foo, _checkers)
        self._callFUT(Bar, Foo)
        self.assertNotIn(Foo, _checkers)
        self.assertNotIn(Bar, _checkers)

    # NOTE(review): test_w_existing_like_unto_checker_wo_existing_checker was
    # truncated in the extracted source and is not reproduced here.

Full Screen

Full Screen

test_logger.py

Source:test_logger.py Github

copy

Full Screen

...47 with self.assertRaises(ValueError):48 CoreLog.debug("hey")49 CoreLog("debug")50 CoreLog.info("hey")51 CoreLog()._clear()52 def test_mylogger_verbose(self):53 """verbose setting54 """55 print()56 CoreLog("debug")57 CoreLog.verbose("hey")58 CoreLog()._clear()59 def test_mylogger_notime(self):60 """without time61 """62 print()63 CoreLog("debug", time=False)64 CoreLog.verbose("hey")65 CoreLog()._clear()66 def test_mylogger_noname(self):67 """without name68 """69 print()70 CoreLog("debug", name="")71 CoreLog.verbose("hey")72 CoreLog()._clear()73 CoreLog("debug", name=None)74 CoreLog.verbose("hey")75 CoreLog()._clear()76 def test_mylogger_noname_notime(self):77 """without time and name78 """79 print()80 CoreLog("debug", name="", time=False)81 CoreLog.verbose("hey")82 CoreLog()._clear()83 def test_mylogger_debug(self):84 """debug setting85 """86 print()87 CoreLog("debug")88 CoreLog.debug("hey")89 CoreLog.verbose("hey")90 CoreLog.info("hey")91 CoreLog.warning("hey")92 CoreLog.error("hey")93 CoreLog.critical("hey")94 CoreLog.fatal("hey")95 CoreLog()._clear()96 def test_mylogger_verbose(self):97 """debug setting98 """99 print()100 CoreLog("verbose")101 CoreLog.debug("hey")102 CoreLog.verbose("hey")103 CoreLog.info("hey")104 CoreLog.warning("hey")105 CoreLog.error("hey")106 CoreLog.critical("hey")107 CoreLog()._clear()108 def test_mylogger_info(self):109 """debug setting110 """111 print()112 CoreLog("info")113 CoreLog.debug("hey")114 CoreLog.verbose("hey")115 CoreLog.info("hey")116 CoreLog.warning("hey")117 CoreLog.error("hey")118 CoreLog.critical("hey")119 CoreLog()._clear()120 def test_mylogger_warning(self):121 """debug setting122 """123 print()124 CoreLog("warning")125 CoreLog.debug("hey")126 CoreLog.verbose("hey")127 CoreLog.info("hey")128 CoreLog.warning("hey")129 CoreLog.error("hey")130 CoreLog.critical("hey")131 CoreLog()._clear()132 def test_mylogger_error(self):133 """debug setting134 """135 print()136 CoreLog("error")137 CoreLog.debug("hey")138 
CoreLog.verbose("hey")139 CoreLog.info("hey")140 CoreLog.warning("hey")141 CoreLog.error("hey")142 CoreLog.critical("hey")143 CoreLog()._clear()144 def test_mylogger_critical(self):145 """debug setting146 """147 print()148 CoreLog("critical")149 CoreLog.debug("hey")150 CoreLog.verbose("hey")151 CoreLog.info("hey")152 CoreLog.warning("hey")153 CoreLog.error("hey")154 CoreLog.critical("hey")155 CoreLog()._clear()156 def test_mylogger_quiet(self):157 """debug setting158 """159 print()160 CoreLog("quiet")161 CoreLog.debug("hey")162 CoreLog.verbose("hey")163 CoreLog.info("hey")164 CoreLog.warning("hey")165 CoreLog.error("hey")166 CoreLog.critical("hey")167 CoreLog()._clear()168 def test_mylogger_fatal(self):169 """debug setting170 """171 print()172 CoreLog("fatal")173 CoreLog.debug("hey")174 CoreLog.verbose("hey")175 CoreLog.info("hey")176 CoreLog.warning("hey")177 CoreLog.error("hey")178 CoreLog.critical("hey")179 CoreLog()._clear()180if __name__ == '__main__':...

Full Screen

Full Screen

Credentials.py

Source:Credentials.py Github

copy

Full Screen

#!/usr/bin/python
# coding:utf-8
from Global import default


def _clear(number):
    """Decode an integer into a string, 7 bits per character.

    Each character's code point occupies 7 bits of *number*, most
    significant character first.  This is light obfuscation for the
    stored passwords below -- NOT encryption.
    """
    result = []
    while number:
        result.append(chr(number % 128))
        number >>= 7
    return ''.join(reversed(result))


def Credentials(auth_type='db', city=default['city']):
    """Return the (user, password) pair for *auth_type* at *city*.

    Falls back to the BJ 'linux-simple' credentials when the requested
    city/auth_type combination is unknown.
    """
    credentials = {'BJ': {}, 'WX': {}, 'XM': {}, 'LA': {}, 'DC': {}, 'KL': {}}
    credentials['BJ']['linux-simple'] = ('root', _clear(55599684644640946))
    credentials['BJ']['ple-tools-db'] = ('ple', _clear(675655278160864618085))
    credentials['BJ']['ldap'] = ('uid=root,cn=users,dc=xserver,dc=base-fx,dc=com', _clear(142739024786090804172849))
    credentials['BJ']['ad'] = ('cn=Administrator,cn=users,dc=ad,dc=base-fx,dc=com', _clear(938724368017542699568))
    # NOTE(review): parentheses alone do not make a tuple -- this entry is a
    # plain string, unlike every other (user, password) pair.  Probably meant
    # ('basenet',) or a pair; confirm with callers before changing.
    credentials['BJ']['snmp'] = ('basenet')
    credentials['BJ']['localhost'] = ('root', _clear(55599684644640946))
    credentials['XM']['localhost'] = ('root', _clear(55599684644640946))
    credentials['WX']['localhost'] = ('root', _clear(55599684644640946))
    credentials['BJ']['switches'] = ('basefx', _clear(55599684644640946))
    credentials['qubedb'] = ('qube_admin', _clear(65753674419924920))
    credentials['BJ']['qube.base-fx.com'] = credentials['qubedb']
    credentials['BJ']['qube.bj.base-fx.com'] = credentials['qubedb']
    credentials['WX']['qube.wx.base-fx.com'] = credentials['qubedb']
    credentials['XM']['qube.xm.base-fx.com'] = credentials['qubedb']
    credentials['XM']['qube.kl.base-fx.com'] = credentials['qubedb']
    credentials['XM']['qube.dc.base-fx.com'] = credentials['qubedb']
    # Every site shares the same xen-master credentials.
    for site in ('BJ', 'WX', 'XM', 'LA', 'DC', 'KL'):
        credentials[site]['xen-master.base-fx.com'] = ('itd', _clear(434545571551329))
    credentials['BJ']['xen22.base-fx.com'] = ('itd', _clear(434545571551329))
    credentials['db'] = ('itd', _clear(63840988175379178))
    # db / dbd hosts share the common 'db' credentials at every site that has them.
    for site in ('BJ', 'KL', 'DC', 'WX', 'XM'):
        credentials[site]['db.base-fx.com'] = credentials['db']
        credentials[site]['dbd.base-fx.com'] = credentials['db']
    try:
        return credentials[city.upper()][auth_type]
    except (KeyError, AttributeError):
        # Fix: was a bare ``except:`` that swallowed *every* error.  Keep the
        # documented fallback for unknown city/auth_type (KeyError) and for a
        # None city (AttributeError on .upper()), but let genuine bugs raise.
        return credentials['BJ']['linux-simple']

Full Screen

Full Screen

comment.py

Source:comment.py Github

copy

Full Screen

...25 self.id = '{0}/{1}'.format(m.group('t').lower(), m.group('r'))26class RTNoMatch(Exception):27 """No Match Exception"""28 pass29def _clear(section, lineno=0):30 return section[lineno].lstrip('# ').rstrip('.')31def _pass(section, lineno=0):32 return section[lineno]33def check(section):34 """Parse and Dispatch Errors35 .. seealso:: The :py:mod:`rtkit.errors` module36 .. doctest::37 >>> check(['# Unknown object type: spam'])38 Traceback (most recent call last):39 ...40 RTUnknownTypeError: Unknown object type: spam41 >>> check(["# Invalid object specification: 'spam'"])42 Traceback (most recent call last):43 ......

Full Screen

Full Screen

_uploader.py

Source:_uploader.py Github

copy

Full Screen

1#2# The XBUILD uploader.3#4# (c) 2012 The XTK Developers <dev@goXTK.com>5#6import os7import sys8import subprocess9import config10from _cdash import CDash11from _colors import Colors12#13#14#15class Uploader( object ):16 '''17 '''18 def run( self, options=None ):19 '''20 Performs the action.21 '''22 print 'Uploading results for ' + config.SOFTWARE_SHORT + '...'23 # check which submission type24 submissiontype = 'Experimental'25 if options.continuous:26 submissiontype = 'Continuous'27 elif options.nightly:28 submissiontype = 'Nightly'29 # now we create a dashboard submission file30 cdasher = CDash()31 #32 # build33 #34 print Colors.CYAN + 'Loading Build Report..' + Colors._CLEAR35 buildReport = os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_Build.xml' )36 if os.path.isfile( buildReport ):37 # found a build report38 print Colors.ORANGE + 'Found Build Report!' + Colors._CLEAR39 with open( buildReport, 'r' ) as f:40 cdasher.submit( f.read(), submissiontype )41 print Colors.ORANGE + '..Successfully uploaded as ' + Colors.CYAN + submissiontype + Colors.ORANGE + '.' + Colors._CLEAR42 else:43 # not found44 print Colors.ORANGE + 'Not Found!' + Colors._CLEAR45 buildReport = None46 #47 # test48 #49 print Colors.CYAN + 'Loading Testing Report..' + Colors._CLEAR50 testReport = os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_Test.xml' )51 if os.path.isfile( testReport ):52 # found a build report53 print Colors.ORANGE + 'Found Testing Report!' + Colors._CLEAR54 with open( testReport, 'r' ) as f:55 cdasher.submit( f.read(), submissiontype )56 print Colors.ORANGE + '..Successfully uploaded as ' + Colors.CYAN + submissiontype + Colors.ORANGE + '.' + Colors._CLEAR57 else:58 # not found59 print Colors.ORANGE + 'Not Found!' + Colors._CLEAR60 testReport = None61 #62 # coverage summary63 # 64 print Colors.CYAN + 'Loading Coverage Summary..' 
+ Colors._CLEAR65 coverageReport = os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_Coverage.xml' )66 if os.path.isfile( coverageReport ):67 # found a build report68 print Colors.ORANGE + 'Found Coverage Summary!' + Colors._CLEAR69 with open( coverageReport, 'r' ) as f:70 cdasher.submit( f.read(), submissiontype )71 print Colors.ORANGE + '..Successfully uploaded as ' + Colors.CYAN + submissiontype + Colors.ORANGE + '.' + Colors._CLEAR72 else:73 # not found74 print Colors.ORANGE + 'Not Found!' + Colors._CLEAR75 coverageReport = None76 #77 # coverage log78 # 79 print Colors.CYAN + 'Loading Coverage Log..' + Colors._CLEAR80 coverageLog = os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_CoverageLog.xml' )81 if os.path.isfile( coverageLog ):82 # found a build report83 print Colors.ORANGE + 'Found Coverage Log!' + Colors._CLEAR84 with open( coverageLog, 'r' ) as f:85 cdasher.submit( f.read(), submissiontype )86 print Colors.ORANGE + '..Successfully uploaded as ' + Colors.CYAN + submissiontype + Colors.ORANGE + '.' + Colors._CLEAR87 else:88 # not found89 print Colors.ORANGE + 'Not Found!' + Colors._CLEAR90 coverageLog = None91 # delete old reports92 if buildReport:93 os.unlink( buildReport )94 if testReport:95 os.unlink( testReport )96 if coverageReport:97 os.unlink( coverageReport )98 if coverageLog:...

Full Screen

Full Screen

test.py

Source:test.py Github

copy

Full Screen

1#!/usr/bin/env python2import subprocess, time3import _test_functions as tf4from time import time, gmtime, strftime, sleep5# import the xbuild system6import sys7sys.path.append( './xbuild/' )8from _core import *9TESTS = ['test_mapping', 'test_mapping_2_2_2', 'test_mapping_real', 'test_mapping_neighbors', 'test_mapping_neighbors_real', 'test_mapping_real_outside_vol']10# ENTRYPOINT11if __name__ == "__main__":12 print 'Testing now...'13 print14 sleep( 1 )15 results = []16 for t in TESTS:17 for tool in ['_fyborg.py', '_matlab.py']:18 fullname = t + tool19 command = './' + fullname20 log = ''21 status = 'failed'22 start_time = time()23 process = subprocess.Popen( command, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )24 for line in process.stdout:25 line = line.strip( '\n' )26 if line.find( 'ALL OK' ) != -1:27 # test ran through28 status = 'passed'29 log += line + '\n'30 print line31 end_time = time()32 execution_time = end_time - start_time33 # we need to also strip the bell and backspace chars from matlab errors34 results.append( [fullname, status, tf.Colors.strip( log.replace( '\a', '' ).replace( '\b', '' ) ) , execution_time, None, None] )35 print36 print37 print 'All done!!'38 print39 print tf.Colors.PURPLE + '================================' + tf.Colors._CLEAR40 print tf.Colors.ORANGE + ' R E S U L T S' + tf.Colors._CLEAR41 print tf.Colors.PURPLE + '================================' + tf.Colors._CLEAR42 for r in results:43 if r[1] == 'passed':44 result = tf.Colors.GREEN + 'PASSED'45 else:46 result = tf.Colors.RED + 'FAILED'47 print tf.Colors.CYAN + 'Test: ' + tf.Colors.PURPLE + r[0] + ' ' + result48 print49 # now we create a dashboard submission file50 cdasher = CDash()51 xmlfile = cdasher.run( ['Testing', results] )52 with open( os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_Test.xml' ), 'w' ) as f:53 f.write( xmlfile )54 print tf.Colors.PURPLE + 'Uploading to CDash..' 
+ tf.Colors._CLEAR55 # check which submission type56 submissiontype = 'Experimental'57 if len( sys.argv ) > 1 and sys.argv[1] == '-n':58 submissiontype = 'Nightly'59 print Colors.CYAN + 'Loading Testing Report..' + Colors._CLEAR60 testReport = os.path.join( config.TEMP_PATH, config.SOFTWARE_SHORT + '_Test.xml' )61 if os.path.isfile( testReport ):62 # found a build report63 print Colors.ORANGE + 'Found Testing Report!' + Colors._CLEAR64 with open( testReport, 'r' ) as f:65 cdasher.submit( f.read(), submissiontype )66 print Colors.ORANGE + '..Successfully uploaded as ' + Colors.CYAN + submissiontype + Colors.ORANGE + '.' + Colors._CLEAR67 else:68 # not found69 print Colors.ORANGE + 'Not Found!' + Colors._CLEAR70 testReport = None71 if testReport:...

Full Screen

Full Screen

_depsgenerator.py

Source:_depsgenerator.py Github

copy

Full Screen

1#2# The XBUILD depenceny generator.3#4# (c) 2012 The XTK Developers <dev@goXTK.com>5#6import os7import sys8import subprocess9import config10from _cdash import CDash11from _colors import Colors12from _jsfilefinder import JSFileFinder13#14#15#16class DepsGenerator( object ):17 '''18 '''19 def run( self, options=None ):20 '''21 Performs the action.22 '''23 print 'Generating dependency file for ' + config.SOFTWARE_SHORT + '...'24 # grab all js files25 filefinder = JSFileFinder()26 jsfiles = filefinder.run( ['USE_INCLUDES'] )27 arguments = []28 # closure base path29 basepath = os.path.dirname( os.path.dirname( config.CLOSUREDEPSWRITER_PATH ) )30 # set the output file31 arguments.extend( ['--output_file=' + config.DEPS_OUTPUT_PATH] )32 # add js files33 for j in jsfiles:34 arguments.extend( ['--path_with_depspath=' + j + ' ' + os.path.relpath( j, basepath )] )35 #36 # call the closure depswriter37 #38 command = [config.CLOSUREDEPSWRITER_PATH]39 command.extend( arguments )40 process = subprocess.Popen( command, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )41 # no output is good42 noOutput = True43 # print any output in red since it probably indicates an error44 for line in process.stdout:45 line = line.strip( '\n' )46 print Colors.RED + line + Colors._CLEAR47 noOutput = False48 if noOutput:49 # all good and done50 print Colors.ORANGE + 'Dependency file ' + Colors.PURPLE + config.DEPS_OUTPUT_PATH + Colors.ORANGE + ' generated. ' + Colors._CLEAR51 print Colors.ORANGE + 'Usage:' + Colors._CLEAR52 print Colors.CYAN + ' <script type="text/javascript" src="' + os.path.relpath( config.CLOSUREGOOGBASE_PATH, os.path.join( config.SOFTWARE_PATH, '../' ) ) + '"></script>' + Colors._CLEAR53 print Colors.CYAN + ' <script type="text/javascript" src="' + os.path.relpath( config.DEPS_OUTPUT_PATH, os.path.join( config.SOFTWARE_PATH, '../' ) ) + '"></script>' + Colors._CLEAR54 else:55 # maybe an error...

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, goto, closeBrowser, _clear } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await goto("google.com");6 await _clear("Search");7 } catch (e) {8 console.error(e);9 } finally {10 await closeBrowser();11 }12})();13### _clear(options)14const { openBrowser, goto, closeBrowser, _clear } = require('taiko');15(async () => {16 try {17 await openBrowser();18 await goto("google.com");19 await _clear({id: "lst-ib"});20 } catch (e) {21 console.error(e);22 } finally {23 await closeBrowser();24 }25})();26### _close()27const { openBrowser, goto, closeBrowser, _close } = require('taiko');28(async () => {29 try {30 await openBrowser();31 await goto("google.com");32 await _close();33 } catch (e) {34 console.error(e);35 } finally {36 await closeBrowser();37 }38})();39### _click()40const { openBrowser, goto, closeBrowser, _click } = require('taiko');41(async () => {42 try {43 await openBrowser();44 await goto("google.com");45 await _click("Search");46 } catch (e) {47 console.error(e);48 } finally {49 await closeBrowser();50 }51})();52### _click(options)53const { openBrowser, goto, closeBrowser, _click } = require('taiko');54(async () => {55 try {56 await openBrowser();

Full Screen

Using AI Code Generation

copy

Full Screen

// Example: chaining click()/_clear() on a button element wrapper.
// NOTE(review): taiko's documented public API is click(selector) /
// clear(selector); confirm the element wrapper actually exposes
// .click() and ._clear() before relying on this example.
const { openBrowser, goto, button, closeBrowser } = require('taiko');

(async () => {
  try {
    await openBrowser();
    await goto("google.com");
    await button("I'm Feeling Lucky").click();
    await button("I'm Feeling Lucky")._clear();
  } catch (e) {
    console.error(e);
  } finally {
    await closeBrowser();
  }
})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, goto, _clear, textBox, write, closeBrowser } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await write("Taiko", into(textBox("Search")));6 await _clear(textBox("Search"));7 await write("Taiko", into(textBox("Search")));8 } catch (error) {9 console.error(error);10 } finally {11 await closeBrowser();12 }13})();14_setCookie(cookieData);15const { openBrowser, goto, _setCookie, closeBrowser } = require('taiko');16(async () => {17 try {18 await openBrowser();19 await _setCookie({name: 'cookie_name', value: 'cookie_value', domain: 'google.com', path: '/', expires: (new Date()).getTime() + (1000 * 60 * 60)});20 } catch (error) {21 console.error(error);22 } finally {23 await closeBrowser();24 }25})();26_deleteCookie(name, domain, path);27const { openBrowser, goto, _deleteCookie, closeBrowser } = require('taiko');28(async () => {29 try {30 await openBrowser();31 await _deleteCookie('cookie_name', 'google.com', '/');32 } catch (error) {

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, goto, closeBrowser, _clear } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await _clear("Search");6 } catch (e) {7 console.error(e);8 } finally {9 await closeBrowser();10 }11})();12const { openBrowser, goto, closeBrowser, _click, _text } = require('taiko');13(async () => {14 try {15 await openBrowser();16 await _click(_text("Search"));17 } catch (e) {18 console.error(e);19 } finally {20 await closeBrowser();21 }22})();23const { openBrowser, goto, closeBrowser, _closeTab } = require('taiko');24(async () => {25 try {26 await openBrowser();27 await _closeTab();28 } catch (e) {29 console.error(e);30 } finally {31 await closeBrowser();32 }33})();34const { openBrowser, goto, closeBrowser, _doubleClick, _text } = require('taiko');35(async () => {36 try {37 await openBrowser();38 await _doubleClick(_text("Search"));39 } catch (e) {40 console.error(e);41 } finally {42 await closeBrowser();43 }44})();45const { openBrowser, goto, closeBrowser, _dragAndDrop, _text } = require('taiko');46(async () => {47 try {48 await openBrowser();

Full Screen

Using AI Code Generation

copy

Full Screen

// Fix: closeBrowser() lived inside the try block while finally was empty,
// so any failure in write()/_clear() leaked the browser process.  The
// cleanup now runs unconditionally in finally.
const { openBrowser, goto, write, closeBrowser, _clear } = require('taiko');

(async () => {
  try {
    await openBrowser();
    // NOTE(review): no goto() before write(); confirm the intended target page.
    await write("Hello World");
    await _clear();
  } catch (e) {
    console.error(e);
  } finally {
    await closeBrowser();
  }
})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, goto, _clear, write, closeBrowser, textBox } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await goto("google.com");6 await write("Taiko", into(textBox({"id":"lst-ib"})));7 await _clear(textBox({"id":"lst-ib"}));8 } catch (e) {9 console.error(e);10 } finally {11 await closeBrowser();12 }13})();14- `element` **[ElementWrapper](#elementwrapper)** 15const { openBrowser, goto, _click, link, closeBrowser } = require('taiko');16(async () => {17 try {18 await openBrowser();19 await goto("google.com");20 await _click(link("Gmail"));21 } catch (e) {22 console.error(e);23 } finally {24 await closeBrowser();25 }26})();27const { openBrowser, goto, _closeBrowser } = require('taiko');28(async () => {29 try {30 await openBrowser();31 await goto("google.com");32 await _closeBrowser();33 } catch (e) {34 console.error(e);35 }36})();37const { openBrowser, goto, _count, link, closeBrowser } = require('taiko');38(async () => {39 try {40 await openBrowser();41 await goto("google.com");42 let count = await _count(link());43 console.log(count);44 } catch (e) {45 console.error(e);46 } finally {47 await closeBrowser();48 }49})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, goto, textBox, _clear, closeBrowser } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await goto("google.com");6 await textBox().type("hello world");7 await _clear(textBox());8 await textBox().type("hello world");9 } catch (e) {10 console.error(e);11 } finally {12 await closeBrowser();13 }14})();15const { openBrowser, goto, _getBrowserInfo, closeBrowser } = require('taiko');16(async () => {17 try {18 await openBrowser();19 await goto("google.com");20 let info = await _getBrowserInfo();21 console.log(info);22 } catch (e) {23 console.error(e);24 } finally {25 await closeBrowser();26 }27})();28const { openBrowser, goto, _getElements, closeBrowser } = require('taiko');29(async () => {30 try {31 await openBrowser();32 await goto("google.com");33 let elements = await _getElements("input[type='text']");34 console.log(elements);35 } catch (e) {36 console.error(e);37 } finally {38 await closeBrowser();39 }40})();41const { openBrowser, goto, _getPlugins, closeBrowser } = require('taiko');42(async () => {43 try {44 await openBrowser();45 await goto("google.com");46 let plugins = await _getPlugins();47 console.log(plugins);48 } catch (e) {49 console.error(e);50 } finally {51 await closeBrowser();52 }53})();54const { openBrowser, goto, _getCookies, closeBrowser } = require('taiko');

Full Screen

Using AI Code Generation

copy

Full Screen

// Fix: the original snippet used taiko functions without requiring them,
// so every call would throw a ReferenceError at runtime.
const { openBrowser, goto, write, press, click, closeBrowser, _clear } = require('taiko');

(async () => {
  try {
    await openBrowser({ headless: false });
    await goto("google.com");
    await write("Taiko");
    await press("Enter");
    await click("Taiko - Google Search");
    await _clear();
  } catch (e) {
    console.error(e);
  } finally {
    await closeBrowser();
  }
})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const { openBrowser, closeBrowser, goto, click, button, _clear, write, text, link, image, toRightOf, toLeftOf, below } = require('taiko');2(async () => {3 try {4 await openBrowser();5 await _clear(textBox(toRightOf("Google Search")));6 await write("Taiko", into(textBox(toRightOf("Google Search"))));7 await click("Google Search");8 await click(link("Taiko - The Browser Automation Framework", below(image("Taiko - The Browser Automation Framework"))));9 await click("Docs");10 await click("API");11 await click("clear");12 await click("clear");13 await click("clear");

Full Screen

Using AI Code Generation

copy

Full Screen

// Grab-bag of taiko API invocations; assumes an enclosing async context
// and that each function below has been required from 'taiko'.
await _clear();
await closeBrowser();
await closeTab();
await count({ id: 'myId' });
await currentURL();
await $(link('Click me'));
await $$(link('Click me'));
await dragAndDrop($('[id="drag"]'), { x: 100, y: 100 });
await evaluate(() => {
  return document.title;
});
await focus(textBox('Enter your name'));

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run taiko automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

NotHelpful