How to use the ls_tree method in Nose

Best Python code snippets using nose
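The snippets below each exercise a method called ls_tree, though on different objects: gbp's GitRepository.ls_tree(treeish) returns the set of byte-string paths stored in a git tree (test_import_orig.py), infoshell_pro_lcc.py keeps a nested Dict trait of limit-state sheets under the same name, and qth_ls passes a plain dict of directory listings around (test_ls.py). As a starting point, here is a minimal nose-style sketch against gbp's ls_tree, modelled on the assertion pattern in test_import_orig.py; the scratch path and the add_files/commit_all calls are assumptions about the gbp API rather than something shown in these snippets.

import os

from nose.tools import ok_, eq_
from gbp.git.repository import GitRepository

def test_ls_tree_lists_committed_files():
    # hypothetical scratch location; any empty directory would do
    repo = GitRepository.create('/tmp/hello')
    os.chdir(repo.path)
    open('COPYING', 'w').close()
    repo.add_files(['COPYING'])
    repo.commit_all(msg='add COPYING')
    # ls_tree() returns the set of byte-string paths in the given tree-ish
    ok_(b'COPYING' in repo.ls_tree('HEAD'))
    eq_(repo.ls_tree('HEAD'), {b'COPYING'})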

infoshell_pro_lcc.py

Source: infoshell_pro_lcc.py (GitHub)


'''
Created on Jun 23, 2010
@author: alexander
'''
from enthought.traits.api import \
    HasTraits, Directory, List, Int, Float, Any, Enum, \
    on_trait_change, File, Constant, Instance, Trait, \
    Array, Str, Property, cached_property, WeakRef, \
    Dict, Button, Color, Bool
from enthought.util.home_directory import \
    get_home_directory
from enthought.traits.ui.api import \
    View, Item, DirectoryEditor, TabularEditor, HSplit, Tabbed, VGroup, \
    TableEditor, Group, ListEditor, VSplit, HGroup, Spring, \
    Include
from enthought.mayavi import \
    mlab
from enthought.traits.ui.table_column import \
    ObjectColumn
from enthought.traits.ui.menu import \
    OKButton, CancelButton
from enthought.traits.ui.tabular_adapter \
    import TabularAdapter
from numpy import array, loadtxt, arange, sqrt, zeros, arctan, sin, cos, ones_like, \
    vstack, savetxt, hstack, argsort, fromstring, zeros_like, \
    copy, c_, newaxis, argmax, where, frompyfunc
from math import pi
from string import split
import os
from scipy.io import read_array
from lc_manager import \
    LC, LCManager

DIRLIST = ['x', 'y']
SRLIST = ['M', 'N']

class LSArrayAdapter ( TabularAdapter ):

    columns = Property
    def _get_columns( self ):
#        print 'GETTING COLUMNS', self.object.columns, self.object, self.object.__class__
        columns = self.object.columns
        return [ ( name, idx ) for idx, name in enumerate( columns ) ]

    font = 'Courier 10'
    alignment = 'right'
    format = '%5.2f'  # '%g'
    even_bg_color = Color( 0xE0E0FF )
    width = Float( 80 )

    #@todo: format columns using 'column_id'
#    adapter_column_map = Property(depends_on = 'adapters,columns')

class LS( HasTraits ):
    '''Limit state class
    '''
    # backward link to the info shell to access the
    # input data when calculating
    # the limit-state-specific values
    #
    info_shell = WeakRef

    # parameters of the limit state
    #
    dir = Enum( DIRLIST )
    stress_res = Enum( SRLIST )

    #-------------------------------
    # ls columns
    #-------------------------------
    # defined in the subclasses
    #
    ls_columns = List
    show_ls_columns = Bool( True )

    #-------------------------------
    # sr columns
    #-------------------------------
    # stress resultant columns - for ULS this is defined in the subclasses
    #
    sr_columns = List( ['m', 'n'] )
    show_sr_columns = Bool( True )

    # stress resultant columns - generated from the parameter combination
    # dir and stress_res, e.g. 'mx_M' or 'ny_N'
    #
    m_varname = Property( Str )
    def _get_m_varname( self ):
        # e.g. mx_N
        appendix = self.dir + '_' + self.stress_res
        return 'm' + appendix

    n_varname = Property( Str )
    def _get_n_varname( self ):
        # e.g. nx_N
        appendix = self.dir + '_' + self.stress_res
        return 'n' + appendix

    n = Property( Float )
    def _get_n( self ):
        return getattr( self.info_shell, self.n_varname )

    m = Property( Float )
    def _get_m( self ):
        return getattr( self.info_shell, self.m_varname )

    #-------------------------------
    # geo columns from info shell
    #-------------------------------
    geo_columns = List( [ 'elem_no', 'X', 'Y', 'Z', 'D_elem' ] )
    show_geo_columns = Bool( True )

    elem_no = Property( Float )
    def _get_elem_no( self ):
        return self.info_shell.elem_no

    X = Property( Float )
    def _get_X( self ):
        return self.info_shell.X

    Y = Property( Float )
    def _get_Y( self ):
        return self.info_shell.Y

    Z = Property( Float )
    def _get_Z( self ):
        return self.info_shell.Z

    D_elem = Property( Float )
    def _get_D_elem( self ):
        return self.info_shell.D_elem

    #-------------------------------
    # state columns from info shell
    #-------------------------------
    state_columns = List( ['mx', 'my', 'mxy', 'nx', 'ny', 'nxy', 'combi_key' ] )
    show_state_columns = Bool( True )

    mx = Property( Float )
    def _get_mx( self ):
        return self.info_shell.mx

    my = Property( Float )
    def _get_my( self ):
        return self.info_shell.my

    mxy = Property( Float )
    def _get_mxy( self ):
        return self.info_shell.mxy

    nx = Property( Float )
    def _get_nx( self ):
        return self.info_shell.nx

    ny = Property( Float )
    def _get_ny( self ):
        return self.info_shell.ny

    nxy = Property( Float )
    def _get_nxy( self ):
        return self.info_shell.nxy

    combi_key = Property( Float )
    def _get_combi_key( self ):
        return self.info_shell.combi_key

    #-------------------------------
    # ls table
    #-------------------------------
    # all columns associated with the limit state including the corresponding
    # stress resultants
    #
    columns = Property( List, depends_on = 'show_geo_columns, show_state_columns,\
                                            show_sr_columns, show_ls_columns' )
    @cached_property
    def _get_columns( self ):
        columns = []
        if self.show_geo_columns:
            columns += self.geo_columns
        if self.show_state_columns:
            columns += self.state_columns
        if self.show_sr_columns:
            columns += self.sr_columns
        if self.show_ls_columns:
            columns += self.ls_columns
        return columns

    # select column used for sorting the data in selected sorting order
    #
    sort_column = Enum( values = 'columns' )
    def _sort_column_default( self ):
        return self.columns[-1]

    sort_order = Enum( 'descending', 'ascending', 'unsorted' )

    # get the maximum value of the chosen column
    #
    max_in_column = Enum( values = 'columns' )
    def _max_in_column_default( self ):
        return self.columns[-1]

    max_value = Property( depends_on = 'max_in_column' )
    def _get_max_value( self ):
        col = getattr( self, self.max_in_column )[:, 0]
        return max( col )

    # stack columns together for table used by TabularEditor
    #
    ls_table = Property( Array, depends_on = 'sort_column, sort_order, show_geo_columns, \
                                              show_state_columns, show_sr_columns, show_ls_columns' )
    @cached_property
    def _get_ls_table( self ):
        arr_list = [ getattr( self, col ) for col in self.columns ]
        # get the array currently selected by the sort_column enumeration
        #
        sort_arr = getattr( self, self.sort_column )[:, 0]
        sort_idx = argsort( sort_arr )
        ls_table = hstack( arr_list )
        if self.sort_order == 'descending':
            return ls_table[ sort_idx[::-1] ]
        if self.sort_order == 'ascending':
            return ls_table[ sort_idx ]
        if self.sort_order == 'unsorted':
            return ls_table

    #---------------------------------
    # plot outputs in mlab-window
    #---------------------------------
    plot_column = Enum( values = 'columns' )
    plot = Button
    def _plot_fired( self ):
        X = self.info_shell.X[:, 0]
        Y = self.info_shell.Y[:, 0]
        Z = self.info_shell.Z[:, 0]
        plot_col = getattr( self, self.plot_column )[:, 0]
        mlab.points3d( X, Y, Z, plot_col )
        mlab.show()

    #-------------------------------
    # ls group
    #-------------------------------
    # @todo: the dynamic selection of the columns to be displayed
    #        does not work in connection with the LSArrayAdapter
    ls_group = VGroup(
                    HGroup( Item( 'max_in_column' ),
                            Item( 'max_value', style = 'readonly', format_str = '%6.2f' ),
                            Item( 'max_value_all', style = 'readonly', format_str = '%6.2f' ),
                            Item( 'max_case', style = 'readonly', label = 'found in case: ' ),
                          ),
                    HGroup( Item( 'sort_column' ),
                            Item( 'sort_order' ),
                            Item( 'show_geo_columns', label = 'show geo' ),
                            Item( 'show_state_columns', label = 'show state' ),
                            Item( 'show_sr_columns', label = 'show sr' ),
                            Item( 'plot_column' ),
                            Item( 'plot' ),
                          ),
                     )

class SLS( LS ):
    '''Serviceability limit state
    '''
    # ------------------------------------------------------------
    # SLS: material parameters (Inputs)
    # ------------------------------------------------------------
    # tensile strength [MPa]
    f_ctk = Float( 1.6, input = True )

    # flexural tensile strength [MPa]
    f_m = Float( 10.5, input = True )

    # ------------------------------------------------------------
    # SLS - derived params:
    # ------------------------------------------------------------
    # area
    #
    A = Property( Float, depends_on = 'info_shell.data_file_thickness' )
    def _get_A( self ):
        return self.info_shell.D_elem * 1.

    # section modulus
    #
    W = Property( Float, depends_on = 'info_shell.data_file_thickness' )
    def _get_W( self ):
        return 1. * self.info_shell.D_elem ** 2 / 6.

    # ------------------------------------------------------------
    # SLS: outputs
    # ------------------------------------------------------------
    ls_columns = List( ['sig_n', 'sig_m', 'eta_n', 'eta_m', 'eta_tot', ] )

    ls_values = Property( depends_on = '+input' )
    @cached_property
    def _get_ls_values( self ):
        '''get the outputs for SLS
        '''
        n = self.n
        m = self.m
        A = self.A
        W = self.W
        f_ctk = self.f_ctk
        f_m = self.f_m

        sig_n = n / A / 1000.
        sig_m = abs( m / W ) / 1000.
        eta_n = sig_n / f_ctk
        eta_m = sig_m / f_m
        eta_tot = eta_n + eta_m
        return { 'sig_n':sig_n, 'sig_m':sig_m,
                 'eta_n':eta_n, 'eta_m':eta_m,
                 'eta_tot':eta_tot }

    sig_n = Property
    def _get_sig_n( self ):
        return self.ls_values['sig_n']

    sig_m = Property
    def _get_sig_m( self ):
        return self.ls_values['sig_m']

    eta_n = Property
    def _get_eta_n( self ):
        return self.ls_values['eta_n']

    eta_m = Property
    def _get_eta_m( self ):
        return self.ls_values['eta_m']

    eta_tot = Property
    def _get_eta_tot( self ):
        return self.ls_values['eta_tot']

    #-------------------------------------------------------
    # get the maximum value and the corresponding case of
    # the selected variable 'max_in_column' in all SLS sheets
    #-------------------------------------------------------
    max_value_all = Property( depends_on = 'max_in_column' )
    def _get_max_value_all( self ):
        return self.max_value_and_case['max_value']

    max_case = Property( depends_on = 'max_in_column' )
    def _get_max_case( self ):
        return self.max_value_and_case['max_case']

    max_value_and_case = Property( Dict, depends_on = 'max_in_column' )
    @cached_property
    def _get_max_value_and_case( self ):
        dir_list = DIRLIST
        sr_list = SRLIST
        ls_tree = self.info_shell.ls_tree
        max_value = 0.
        for dir in dir_list:
            for sr in sr_list:
                self.info_shell.ls_tree['SLS'][ sr ][ dir ].max_in_column = self.max_in_column
                max_value_ls = ls_tree['SLS'][ sr ][ dir ].max_value
                if max_value <= max_value_ls:
                    max_value = max_value_ls
                    max_case = 'S-' + sr + dir
        return { 'max_value':max_value,
                 'max_case':max_case }

    #-------------------------------
    # ls view
    #-------------------------------
    # @todo: the dynamic selection of the columns to be displayed
    #        does not work in connection with the LSArrayAdapter
    traits_view = View( VGroup(
                            HGroup( Item( name = 'f_ctk', label = 'Tensile strength concrete [MPa]: f_ctk ' ),
                                    Item( name = 'f_m', label = 'Flexural tensile strength concrete [MPa]: f_m ' )
                                  ),
                            VGroup(
                                Include( 'ls_group' ),
                                # @todo: currently LSArrayAdapter must be called both
                                #        in SLS and ULS separately to configure columns
                                #        arrangement individually
                                #
                                Item( 'ls_table', show_label = False,
                                      editor = TabularEditor( adapter = LSArrayAdapter() ) )
                                  ),
                              ),
                        resizable = True,
                        scrollable = True,
                        height = 1000,
                        width = 1100
                        )

class ULS( LS ):
    '''Ultimate limit state
    '''
    #--------------------------------------------------------
    # ULS: material parameters (Inputs)
    #--------------------------------------------------------
    # gamma-factor
    gamma = Float( 1.5, input = True )

    # long term reduction factor
    beta = Float( 0.7, input = True )

    # INDEX l: longitudinal direction of the textile (MAG-02-02-06a)
    # characteristic tensile strength of the tensile specimen [N/mm2]
    f_tk_l = Float( 537, input = True )

    # design value of the tensile strength of the tensile specimen [N/mm2]
    # containing a gamma-factor of 1.5 and a long term reduction factor of 0.7
    # f_td_l = 251
    f_td_l = Property( Float, depends_on = '+input' )
    def _get_f_td_l( self ):
        return self.beta * self.f_tk_l / self.gamma

    # cross sectional area of the reinforcement [mm2/m]
    a_t_l = Float( 71.65, input = True )

    # INDEX q: orthogonal direction of the textile (MAG-02-02-06a)
    # characteristic tensile strength of the tensile specimen [N/mm2]
    f_tk_q = Float( 511, input = True )

    # design value of the tensile strength of the tensile specimen [N/mm2]
    # f_td_q = 238
    f_td_q = Property( Float, depends_on = '+input' )
    def _get_f_td_q( self ):
        return self.beta * self.f_tk_q / self.gamma

    # cross sectional area of the reinforcement [mm2/m]
    a_t_q = Float( 53.31, input = True )

    # tensile strength of the textile reinforcement [kN/m]
    F_Rtex_l = Property( Float, depends_on = '+input' )
    def _get_F_Rtex_l( self ):
        return self.a_t_l * self.f_td_l / 1000.

    # tensile strength of the textile reinforcement [kN/m]
    F_Rtex_q = Property( Float, depends_on = '+input' )
    def _get_F_Rtex_q( self ):
        return self.a_t_q * self.f_td_q / 1000.

    # ------------------------------------------------------------
    # ULS - derived params:
    # ------------------------------------------------------------
    # Parameters for the cracked state (GdT):
    # assumptions!

    # (resulting static effective depth)
    #
    d = Property( Float, depends_on = 'info_shell.data_file_thickness' )
    def _get_d( self ):
        return 0.75 * self.info_shell.D_elem

    # (distance from the centroidal axis to the resulting reinforcement layer)
    # choose the same amount of reinforcement at the top as at the bottom,
    # i.e. zs = zs1 = zs2
    #
    zs = Property( Float, depends_on = 'info_shell.data_file_thickness' )
    def _get_zs( self ):
        return self.d - self.info_shell.D_elem / 2.

    # (internal lever arm)
    #
    z = Property( Float )
    def _get_z( self ):
        return 0.9 * self.d

    # ------------------------------------------------------------
    # ULS: outputs
    # ------------------------------------------------------------
    ls_columns = List( [ 'e', 'm_Eds', 'f_t', 'beta_l', 'beta_q', 'f_Rtex', 'n_tex' ] )
#    sr_columns = [ 'm', 'n', 'alpha' ]
    sr_columns = [ 'm', 'n', 'alpha', 'd', 'zs', 'z' ]

    alpha_varname = Property()
    def _get_alpha_varname( self ):
        return 'alpha_' + self.stress_res

    alpha = Property
    def _get_alpha( self ):
        return getattr( self.info_shell, self.alpha_varname )

    ls_values = Property( depends_on = '+input' )
    @cached_property
    def _get_ls_values( self ):
        '''get the outputs for ULS
        '''
        n = self.n
        m = self.m
        alpha = self.alpha
        zs = self.zs
        z = self.z
        F_Rtex_l = self.F_Rtex_l
        F_Rtex_q = self.F_Rtex_q

        # (eccentricity)
        e = abs( m / n )
        e[ n == 0 ] = 1E9   # if the normal force is zero set e to a very large value

        # moment at the height of the resulting reinforcement layer:
        m_Eds = abs( m ) - zs * n

        # tensile force in the reinforcement for bending and compression
        f_t = m_Eds / z + n

        # check if the two conditions are true:
        cond1 = n > 0
        cond2 = e < zs
        bool_arr = cond1 * cond2
        # in case of pure tension in the cross section:
        f_t[ bool_arr ] = n[ bool_arr ] * ( zs[ bool_arr ] + e[ bool_arr ] ) / ( zs[ bool_arr ] + zs[ bool_arr ] )

        # angle of deflection of the textile reinforcement for dimensioning in x-direction,
        # distinguished between longitudinal (l) and transversal (q) direction
        # ASSUMPTION: worst case angle used
        # as a first step use the worst case reduction due to deflection possible (at 55 degrees)
        beta_l = 55. * pi / 180. * ones_like( alpha )
        beta_q = ( 90. - 55. ) * pi / 180. * ones_like( alpha )
        # @todo: as a second step use the value for an alternating layup (i.e. deflection angle)
        # @todo: get the correct formula for the demonstrator arrangement,
        #        i.e. the RFEM coordinate system orientation
#        beta_l = pi/2 - abs( alpha )
#        beta_q = abs( alpha )

        # resulting strength of the bi-directional textile considering the
        # deflection of the reinforcement in the loading direction:
        f_Rtex = F_Rtex_l * cos( beta_l ) * ( 1 - beta_l / ( pi / 2 ) ) + \
                 F_Rtex_q * cos( beta_q ) * ( 1 - beta_q / ( pi / 2 ) )
        f_Rtex = 11.65 * ones_like( alpha )
        print 'NOTE: f_Rtex set to 11.65 kN/m !'

        # necessary number of reinforcement layers
        n_tex = f_t / f_Rtex

        return { 'e':e, 'm_Eds':m_Eds, 'f_t':f_t,
                 'beta_l':beta_l, 'beta_q':beta_q, 'f_Rtex':f_Rtex,
                 'n_tex':n_tex }

    e = Property
    def _get_e( self ):
        return self.ls_values['e']

    m_Eds = Property
    def _get_m_Eds( self ):
        return self.ls_values['m_Eds']

    f_t = Property
    def _get_f_t( self ):
        return self.ls_values['f_t']

    beta_l = Property
    def _get_beta_l( self ):
        return self.ls_values['beta_l']

    beta_q = Property
    def _get_beta_q( self ):
        return self.ls_values['beta_q']

    f_Rtex = Property
    def _get_f_Rtex( self ):
        return self.ls_values['f_Rtex']

    n_tex = Property
    def _get_n_tex( self ):
        return self.ls_values['n_tex']

    #-------------------------------------------------------
    # get the maximum value and the corresponding case of
    # the selected variable 'max_in_column' in all ULS sheets
    #-------------------------------------------------------
    max_value_all = Property( depends_on = 'max_in_column' )
    def _get_max_value_all( self ):
        return self.max_value_and_case['max_value']

    max_case = Property( depends_on = 'max_in_column' )
    def _get_max_case( self ):
        return self.max_value_and_case['max_case']

    max_value_and_case = Property( Dict, depends_on = 'max_in_column' )
    @cached_property
    def _get_max_value_and_case( self ):
        dir_list = DIRLIST
        sr_list = SRLIST
        ls_tree = self.info_shell.ls_tree
        max_value = 0.
        for dir in dir_list:
            for sr in sr_list:
                self.info_shell.ls_tree['ULS'][ sr ][ dir ].max_in_column = self.max_in_column
                max_value_ls = ls_tree['ULS'][ sr ][ dir ].max_value
                if max_value <= max_value_ls:
                    max_value = max_value_ls
                    max_case = 'U-' + sr + dir
        return { 'max_value':max_value,
                 'max_case':max_case }

    #-------------------------------
    # ls view
    #-------------------------------
    # @todo: the dynamic selection of the columns to be displayed
    #        does not work in connection with the LSArrayAdapter
    traits_view = View(
                    VGroup(
                        HGroup(
                            VGroup(
                                Item( name = 'gamma', label = 'security factor material [-]: gamma ' ),
                                Item( name = 'beta', label = 'reduction long term durability [-]: beta ' ),
                                label = 'security factors'
                                  ),
                            VGroup(
                                Item( name = 'f_tk_l', label = 'characteristic strength textile [MPa]: f_tk_l ', format_str = "%.1f" ),
                                Item( name = 'f_td_l', label = 'design strength textile [MPa]: f_td_l ', style = 'readonly', format_str = "%.1f" ),
                                Item( name = 'a_t_l', label = 'cross sectional area textile [mm^2]: a_t_l ', style = 'readonly', format_str = "%.1f" ),
                                Item( name = 'F_Rtex_l', label = 'strength textile [kN/m]: F_Rtex_l ', style = 'readonly', format_str = "%.0f" ),
                                label = 'material properties (longitudinal)'
                                  ),
                            VGroup(
                                Item( name = 'f_tk_q', label = 'characteristic strength textile [MPa]: f_tk_q ', format_str = "%.1f" ),
                                Item( name = 'f_td_q', label = 'design strength textile [MPa]: f_td_q ', style = 'readonly', format_str = "%.1f" ),
                                Item( name = 'a_t_q', label = 'cross sectional area textile [mm^2]: a_t_q ', style = 'readonly', format_str = "%.1f" ),
                                Item( name = 'F_Rtex_q', label = 'strength textile [kN/m]: F_Rtex_q ', style = 'readonly', format_str = "%.0f" ),
                                label = 'material properties (transversal)'
                                  ),
                              ),
                        VGroup(
                            Include( 'ls_group' ),
                            Item( 'ls_table', show_label = False,
                                  editor = TabularEditor( adapter = LSArrayAdapter() ) )
                              ),
                          ),
                    resizable = True,
                    scrollable = True,
                    height = 1000,
                    width = 1100
                    )

LSLIST = [ SLS, ULS ]

class InfoShell( HasTraits ):
    '''Assessment tool
    '''
    lc_list = List( Instance( LC ), input = True )
    def _lc_list_default( self ):
        return [ LC( name = 'G', category = 'dead-load', file_name = 'input_data_stress_resultants.csv' ),
                 LC( name = 'G_A', category = 'additional dead-load', file_name = 'input_data_stress_resultants.csv' ),
                 LC( name = 'W (Druck)', category = 'imposed-load', file_name = 'input_data_stress_resultants.csv',
                     exclusive_to = ['W (Sog)'], psi_0 = 1.0, psi_1 = 1.0, psi_2 = 1.0 ),
                 LC( name = 'W (Sog)', category = 'imposed-load', file_name = 'input_data_stress_resultants.csv',
                     exclusive_to = ['W (Druck)'], psi_0 = 1.0, psi_1 = 1.0, psi_2 = 1.0 ),
                 LC( name = 'S', category = 'imposed-load', file_name = 'input_data_stress_resultants.csv',
                     exclusive_to = [], psi_0 = 1.0, psi_1 = 1.0, psi_2 = 1.0 )
               ]

    lcm = Property( Instance( LCManager ) )  #, depends_on = 'lc_list'
    def _get_lcm( self ):
        '''loading case manager'''
        return LCManager( lc_list = self.lc_list )

    lcc_tree = Property( Array )  #, depends_on = 'lc_list'
    def _get_lcc_tree( self ):
        '''loading case combination tree, e.g. lcc_tree['ULS']'''
        return self.lcm.lcc_tree

    current_ls = Enum( 'ULS', 'SLS' )

    #------------------------------------------
    # specify default data input files:
    #------------------------------------------
    # raw input file for thicknesses (and coordinates)
    #
    data_file_thickness = Str
    def _data_file_thickness_default( self ):
        return 'input_data_thickness.csv'

    #@todo: display all file_names and names of the lc's in the view
    # raw input file for element numbers, coordinates, and stress_resultants
    #
    data_file_stress_resultants = Str
    def _data_file_stress_resultants_default( self ):
        return self.lc_list[0].file_name

    #------------------------------------------
    # read the geometry data from file
    # (coords and thickness):
    #------------------------------------------
    def _read_thickness_data( self, file_name ):
        '''to read the stb-thickness save the xls-worksheet
        to a csv-file using ';' as field delimiter and ' ' (blank)
        as text delimiter.
        '''
        print '*** read thickness data ***'
        # get the column headings defined in the second row
        # of the csv thickness input file
        # "Nr.;X;Y;Z;[mm]"
        #
        file = open( file_name, 'r' )
        first_line = file.readline()
        second_line = file.readline()
        column_headings = second_line.split( ';' )
        # remove '\n' from last string element in list
        column_headings[-1] = column_headings[-1][:-1]
        column_headings_arr = array( column_headings )
        elem_no_idx = where( 'Nr.' == column_headings_arr )[0]
        X_idx = where( 'X' == column_headings_arr )[0]
        Y_idx = where( 'Y' == column_headings_arr )[0]
        Z_idx = where( 'Z' == column_headings_arr )[0]
        thickness_idx = where( '[mm]' == column_headings_arr )[0]
        # read the float data:
        #
        input_arr = loadtxt( file_name, delimiter = ';', skiprows = 2 )
        # element number:
        #
        elem_no = input_arr[:, elem_no_idx]
        # coordinates [m]:
        #
        X_ = input_arr[:, X_idx]
        Y_ = input_arr[:, Y_idx]
        Z_ = input_arr[:, Z_idx]
        # element thickness [mm]:
        #
        thickness = input_arr[:, thickness_idx]
        return {'X_':X_, 'Y_':Y_, 'Z_':Z_,
                'thickness':thickness }

    # coordinates and element thickness read from file:
    #
    thickness_data_dict = Property( Dict, depends_on = 'data_file_thickness' )
    @cached_property
    def _get_thickness_data_dict( self ):
        return self._read_thickness_data( self.data_file_thickness )

    X_ = Property( Array )
    def _get_X_( self ):
        return self.thickness_data_dict['X_']

    Y_ = Property( Array )
    def _get_Y_( self ):
        return self.thickness_data_dict['Y_']

    Z_ = Property( Array )
    def _get_Z_( self ):
        return self.thickness_data_dict['Z_']

    D_elem = Property( Array )
    def _get_D_elem( self ):
        '''element thickness (units changed from [mm] to [m])'''
        return self.thickness_data_dict['thickness'] / 1000.

    # ------------------------------------------------------------
    # Get the state data from the LC's 'state_data_dict'
    # ------------------------------------------------------------
    # elem_no and coordinates are taken from the first loading case:
    #
    state_data_dict = Property( Dict )  #, depends_on = 'lc_list, current_ls'
    @cached_property
    def _get_state_data_dict( self ):
        return self.lc_list[0].state_data_dict

    elem_no = Property( Array )
    def _get_elem_no( self ):
        return self.state_data_dict['elem_no']

    X = Property( Array )
    def _get_X( self ):
        return self.state_data_dict['X']

    Y = Property( Array )
    def _get_Y( self ):
        return self.state_data_dict['Y']

    Z = Property( Array )
    def _get_Z( self ):
        return self.state_data_dict['Z']

    # ------------------------------------------------------------
    # the stress resultants are taken from 'lcc_tree' depending
    # on the current limit state 'current_ls'
    # ------------------------------------------------------------
    mx = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_mx( self ):
        return self.lcc_tree[ self.current_ls ]['mx']

    my = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_my( self ):
        return self.lcc_tree[ self.current_ls ]['my']

    mxy = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_mxy( self ):
        return self.lcc_tree[ self.current_ls ]['mxy']

    nx = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_nx( self ):
        return self.lcc_tree[ self.current_ls ]['nx']

    ny = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_ny( self ):
        return self.lcc_tree[ self.current_ls ]['ny']

    nxy = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_nxy( self ):
        return self.lcc_tree[ self.current_ls ]['nxy']

    combi_key = Property( Array, depends_on = 'current_ls' )
    @cached_property
    def _get_combi_key( self ):
        return self.lcc_tree[ self.current_ls ]['combi_key']

    # ------------------------------------------------------------
    # check input files for consistency
    # ------------------------------------------------------------
    @on_trait_change( 'data_file_thickness, data_file_stress_resultants' )
    def _check_input_files_for_consistency( self ):
        '''Check if the coordinate order of the thickness input file is
        identical to the order in the stress_resultant input file.
        Here the first sr-input file is taken. The internal consistency
        of all defined loading cases is checked in the LCManager.
        '''
        if not all( self.X == self.X_ ) or \
           not all( self.Y == self.Y_ ) or \
           not all( self.Z == self.Z_ ):
            raise ValueError, 'coordinates in file %s and file %s are not identical. Check input files for consistency!' \
                    % ( self.data_file_thickness, self.data_file_stress_resultants )
        else:
            print '*** input files checked for consistency (OK) ***'
            return True

    # ------------------------------------------------------------
    # Index M: calculate principal moments with corresponding normal forces
    # ------------------------------------------------------------
    princ_values_M = Property( Dict, depends_on = 'data_file_stress_resultants' )
    @cached_property
    def _get_princ_values_M( self ):
        '''principal values and principal angle of the moments;
        mx_M, my_M, nx_M, ny_M: transform the values into the principal direction
        '''
        # stress_resultants in global coordinates
        #
        mx = self.mx
        my = self.my
        mxy = self.mxy
        nx = self.nx
        ny = self.ny
        nxy = self.nxy
        # principal values
        #
        m1 = 0.5 * ( mx + my ) + 0.5 * sqrt( ( mx - my ) ** 2 + 4 * mxy ** 2 )
        m2 = 0.5 * ( mx + my ) - 0.5 * sqrt( ( mx - my ) ** 2 + 4 * mxy ** 2 )
        alpha_M = pi / 2. * ones_like( m1 )
        bool = m2 != mx
        alpha_M[ bool ] = arctan( mxy[ bool ] / ( m2[ bool ] - mx[ bool ] ) )
        # transform to principal directions
        #
        mx_M = 0.5 * ( my + mx ) - 0.5 * ( my - mx ) * cos( 2 * alpha_M ) - mxy * sin( 2 * alpha_M )
        my_M = 0.5 * ( my + mx ) + 0.5 * ( my - mx ) * cos( 2 * alpha_M ) + mxy * sin( 2 * alpha_M )
        nx_M = 0.5 * ( ny + nx ) - 0.5 * ( ny - nx ) * cos( 2 * alpha_M ) - nxy * sin( 2 * alpha_M )
        ny_M = 0.5 * ( ny + nx ) + 0.5 * ( ny - nx ) * cos( 2 * alpha_M ) + nxy * sin( 2 * alpha_M )
        return { 'm1':m1, 'm2':m2, 'alpha_M':alpha_M,
                 'mx_M':mx_M, 'my_M':my_M,
                 'nx_M':nx_M, 'ny_M':ny_M }

    m1 = Property( Float )
    def _get_m1( self ):
        return self.princ_values_M['m1']

    m2 = Property( Float )
    def _get_m2( self ):
        return self.princ_values_M['m2']

    alpha_M = Property( Float )
    def _get_alpha_M( self ):
        return self.princ_values_M['alpha_M']

    mx_M = Property( Float )
    def _get_mx_M( self ):
        return self.princ_values_M['mx_M']

    my_M = Property( Float )
    def _get_my_M( self ):
        return self.princ_values_M['my_M']

    nx_M = Property( Float )
    def _get_nx_M( self ):
        return self.princ_values_M['nx_M']

    ny_M = Property( Float )
    def _get_ny_M( self ):
        return self.princ_values_M['ny_M']

    # ------------------------------------------------------------
    # Index N: principal normal forces with corresponding moments
    # ------------------------------------------------------------
    princ_values_N = Property( Dict, depends_on = 'data_file_stress_resultants' )
    @cached_property
    def _get_princ_values_N( self ):
        '''principal values and principal angle of the normal forces;
        mx_N, my_N, nx_N, ny_N: transform the values into the principal normal direction
        '''
        # stress_resultants in global coordinates
        #
        mx = self.mx
        my = self.my
        mxy = self.mxy
        nx = self.nx
        ny = self.ny
        nxy = self.nxy
        # principal values
        #
        n1 = 0.5 * ( nx + ny ) + 0.5 * sqrt( ( nx - ny ) ** 2 + 4 * nxy ** 2 )
        n2 = 0.5 * ( nx + ny ) - 0.5 * sqrt( ( nx - ny ) ** 2 + 4 * nxy ** 2 )
        alpha_N = pi / 2. * ones_like( n1 )
        bool = n2 != nx
        alpha_N[ bool ] = arctan( nxy[ bool ] / ( n2[ bool ] - nx[ bool ] ) )
        # transform to principal directions
        mx_N = 0.5 * ( my + mx ) - 0.5 * ( my - mx ) * cos( 2 * alpha_N ) - mxy * sin( 2 * alpha_N )
        my_N = 0.5 * ( my + mx ) + 0.5 * ( my - mx ) * cos( 2 * alpha_N ) + mxy * sin( 2 * alpha_N )
        nx_N = 0.5 * ( ny + nx ) - 0.5 * ( ny - nx ) * cos( 2 * alpha_N ) - nxy * sin( 2 * alpha_N )
        ny_N = 0.5 * ( ny + nx ) + 0.5 * ( ny - nx ) * cos( 2 * alpha_N ) + nxy * sin( 2 * alpha_N )
        return {'n1' : n1, 'n2' : n2, 'alpha_N' : alpha_N,
                'mx_N' : mx_N, 'my_N' : my_N,
                'nx_N' : nx_N, 'ny_N' : ny_N }

    n1 = Property( Float )
    def _get_n1( self ):
        return self.princ_values_N['n1']

    n2 = Property( Float )
    def _get_n2( self ):
        return self.princ_values_N['n2']

    alpha_N = Property( Float )
    def _get_alpha_N( self ):
        return self.princ_values_N['alpha_N']

    mx_N = Property( Float )
    def _get_mx_N( self ):
        return self.princ_values_N['mx_N']

    my_N = Property( Float )
    def _get_my_N( self ):
        return self.princ_values_N['my_N']

    nx_N = Property( Float )
    def _get_nx_N( self ):
        return self.princ_values_N['nx_N']

    ny_N = Property( Float )
    def _get_ny_N( self ):
        return self.princ_values_N['ny_N']

    #------------------------------------------
    # combinations of limit states, stress resultants and directions
    #------------------------------------------
    ls_tree = Dict
    def _ls_tree_default( self ):
        dir_list = DIRLIST
        sr_list = SRLIST
        ls_list = LSLIST
        ls_dict = {}
        for ls_class in ls_list:
            sr_dict = {}
            for sr in sr_list:
                dir_dict = {}
                for dir in dir_list:
                    # set 'current_ls' to the current 'ls_tree' ls-key;
                    # the change of attribute 'current_ls' triggers
                    # a 'depends_on' change and the Properties
                    # of the stress resultants are refreshed
                    #
                    self.current_ls = ls_class.__name__
                    self._reset_stress_resultants()
                    print 'current_ls', self.current_ls
                    dir_dict[ dir ] = ls_class( info_shell = self, dir = dir, stress_res = sr )
                sr_dict[ sr ] = dir_dict
            ls_dict[ ls_class.__name__ ] = sr_dict
        return ls_dict
#        for k, v in d.iteritems():
#            c[k] = v

    #------------------------------------------
    # get arrays for the TabularEditor:
    #------------------------------------------
    U_Mx = Property( Instance( ULS ) )
    def _get_U_Mx( self ):
        return self.ls_tree['ULS']['M']['x']

    U_My = Property( Instance( ULS ) )
    def _get_U_My( self ):
        return self.ls_tree['ULS']['M']['y']

    U_Nx = Property( Instance( ULS ) )
    def _get_U_Nx( self ):
        return self.ls_tree['ULS']['N']['x']

    U_Ny = Property( Instance( ULS ) )
    def _get_U_Ny( self ):
        return self.ls_tree['ULS']['N']['y']

    S_Mx = Property( Instance( SLS ) )
    def _get_S_Mx( self ):
        return self.ls_tree['SLS']['M']['x']

    S_My = Property( Instance( SLS ) )
    def _get_S_My( self ):
        return self.ls_tree['SLS']['M']['y']

    S_Nx = Property( Instance( SLS ) )
    def _get_S_Nx( self ):
        return self.ls_tree['SLS']['N']['x']

    S_Ny = Property( Instance( SLS ) )
    def _get_S_Ny( self ):
        return self.ls_tree['SLS']['N']['y']

    # ------------------------------------------------------------
    # View
    # ------------------------------------------------------------
    traits_view = View( Item( 'data_file_stress_resultants', label = 'Evaluated input file for stress_resultants ',
                              style = 'readonly', emphasized = True ),
                        Item( 'data_file_thickness', label = 'Evaluated input file for thicknesses ',
                              style = 'readonly', emphasized = True ),
                        Item( 'S_Nx@' , label = "S-NX", show_label = False ),
#                        Tabbed(
#                            Item( 'S_Nx@' , label = "S-NX", show_label = False ),
#                            Item( 'S_Ny@' , label = "S-NY", show_label = False ),
#                            Item( 'S_Mx@' , label = "S-MX", show_label = False ),
#                            Item( 'S_My@' , label = "S-MY", show_label = False ),
#                            Item( 'U_Nx@' , label = "U-NX", show_label = False ),
#                            Item( 'U_Ny@' , label = "U-NY", show_label = False ),
#                            Item( 'U_Mx@' , label = "U-MX", show_label = False ),
#                            Item( 'U_My@' , label = "U-MY", show_label = False ),
#                            scrollable = False,
#                         ),
                        resizable = True,
                        scrollable = True,
                        height = 1000,
                        width = 1100
                        )

if __name__ == '__main__':
    ifs = InfoShell()
    ifs.lcc_tree['ULS']['mx']
    ifs.configure_traits()
#    print ifs.columns
#
#    ifs.selected_dir = 'y'
#    print ifs.columns
#
#    ifs.selected_sr = 'N'
#    print ifs.columns
#
#    ifs.selected_ls = ULS
#    print ifs.columns
#
#    print 'n1'
#    print ifs.n1
#    print ifs.my_M
#
#    print 'n_table'
...
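Note that ls_tree in this file has nothing to do with git: it is a nested Dict trait mapping limit state ('ULS'/'SLS') to stress resultant ('M'/'N') to direction ('x'/'y'), with an SLS or ULS sheet instance at each leaf, as the U_Mx through S_Ny convenience properties show. A small access sketch (hypothetical usage; it assumes the default input CSV files referenced by the load cases exist and, like the file itself, it is Python 2):

ifs = InfoShell()
# leaf objects are the limit-state sheets built in _ls_tree_default
uls_mx = ifs.ls_tree['ULS']['M']['x']
print uls_mx.max_in_column, uls_mx.max_value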


test_import_orig.py

Source: test_import_orig.py (GitHub)


...
        self._check_repo_state(repo, 'master', self.def_branches,
                               tags=['upstream/2.6'])

    def _check_component_tarballs(self, repo, files):
        for file in files:
            ok_(to_bin(file) in repo.ls_tree('HEAD'),
                "Could not find component tarball file %s in %s" % (file, repo.ls_tree('HEAD')))
            ok_(to_bin(file) in repo.ls_tree('upstream'),
                "Could not find component tarball file %s in %s" % (file, repo.ls_tree('upstream')))

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_update(self, repo):
        """
        Test that importing a new version works
        """
        orig = self._orig('2.8')
        ok_(import_orig(['arg0',
                         '--postimport=printenv > ../postimport.out',
                         '--postunpack=printenv > ../postunpack.out',
                         '--no-interactive', '--pristine-tar', orig]) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        ok_(os.path.exists('debian/changelog'))
        ok_(os.path.exists('../postimport.out'))
        self.check_hook_vars('../postimport', [("GBP_BRANCH", "master"),
                                               ("GBP_TAG", "upstream/2.8"),
                                               ("GBP_UPSTREAM_VERSION", "2.8"),
                                               ("GBP_DEBIAN_VERSION", "2.8-1")])

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_update_component_tarballs(self, repo):
        """
        Test that importing a new version with additional tarballs works
        """
        # Import 2.8
        orig = self._orig('2.8', dir='dsc-3.0-additional-tarballs')
        ok_(import_orig(['arg0', '--component=foo', '--no-interactive', '--pristine-tar', orig]) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        self._check_component_tarballs(repo, [b'foo/test1', b'foo/test2'])
        ok_(os.path.exists('debian/changelog'))
        dsc = DscFile.parse(_dsc_file(self.pkg, '2.8-1', dir='dsc-3.0-additional-tarballs'))
        # Check if we can rebuild the upstream tarball and additional tarball
        ptars = [('hello-debhelper_2.8.orig.tar.gz', 'pristine-tar', '', dsc.tgz),
                 ('hello-debhelper_2.8.orig-foo.tar.gz', 'pristine-tar^', 'foo', dsc.additional_tarballs['foo'])]
        p = DebianPristineTar(repo)
        outdir = os.path.abspath('.')
        for f, w, s, o in ptars:
            eq_(repo.get_subject(w), 'pristine-tar data for %s' % f)
            old = self.hash_file(o)
            p.checkout('hello-debhelper', '2.8', 'gzip', outdir, component=s)
            out = os.path.join(outdir, f)
            new = self.hash_file(out)
            eq_(old, new, "Checksum %s of regenerated tarball %s does not match original %s" %
                (f, old, new))
            os.unlink(out)
        # Import 2.9
        orig = self._orig('2.9', dir='dsc-3.0-additional-tarballs')
        ok_(import_orig(['arg0', '--component=foo', '--no-interactive', '--pristine-tar', orig]) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8', 'upstream/2.9'])
        self._check_component_tarballs(repo, ['foo/test1', 'foo/test2', 'foo/test3'])
        ok_(os.path.exists('debian/changelog'))
        dsc = DscFile.parse(_dsc_file(self.pkg, '2.9-1', dir='dsc-3.0-additional-tarballs'))
        # Check if we can rebuild the upstream tarball and additional tarball
        ptars = [('hello-debhelper_2.9.orig.tar.gz', 'pristine-tar', '', dsc.tgz),
                 ('hello-debhelper_2.9.orig-foo.tar.gz', 'pristine-tar^', 'foo', dsc.additional_tarballs['foo'])]
        p = DebianPristineTar(repo)
        outdir = os.path.abspath('.')
        for f, w, s, o in ptars:
            eq_(repo.get_subject(w), 'pristine-tar data for %s' % f)
            old = self.hash_file(o)
            p.checkout('hello-debhelper', '2.9', 'gzip', outdir, component=s)
            new = self.hash_file(os.path.join(outdir, f))
            eq_(old, new, "Checksum %s of regenerated tarball %s does not match original %s" %
                (f, old, new))

    def test_tag_exists(self):
        """Test that importing an already imported version fails"""
        repo = GitRepository.create(self.pkg)
        os.chdir(repo.path)
        orig = self._orig('2.6')
        # First import
        ok_(import_orig(['arg0', '--no-interactive', '--pristine-tar', orig]) == 0)
        heads = self.rem_refs(repo, self.def_branches)
        # Second import must fail
        ok_(import_orig(['arg0', '--no-interactive', '--pristine-tar', orig]) == 1)
        self._check_log(0, "gbp:error: Upstream tag 'upstream/2.6' already exists")
        # Check that the second import didn't change any refs
        self.check_refs(repo, heads)

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_update_fail_create_upstream_tag(self, repo):
        """
        Test that we can rollback from a failure to create the upstream
        tag
        """
        heads = self.rem_refs(repo, self.def_branches)
        orig = self._orig('2.8')
        with patch('gbp.git.repository.GitRepository.create_tag',
                   side_effect=GitRepositoryError('this is a create tag error mock')):
            ok_(import_orig(['arg0', '--no-interactive', '--pristine-tar', orig]) == 1)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6'])
        self.check_refs(repo, heads)

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_update_fail_merge(self, repo):
        """
        Test that we can rollback from a failed merge
        """
        heads = self.rem_refs(repo, self.def_branches)
        orig = self._orig('2.8')
        with patch('gbp.scripts.import_orig.debian_branch_merge',
                   side_effect=GitRepositoryError('this is a fail merge error mock')):
            ok_(import_orig(['arg0', '--no-interactive', '--pristine-tar', orig]) == 1)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6'])
        self.check_refs(repo, heads)

    @patch('gbp.git.repository.GitRepository.create_tag',
           side_effect=raise_if_tag_match('upstream/'))
    def test_initial_import_fail_create_upstream_tag(self, RepoMock):
        """
        Test that we can rollback from a failure to create the upstream
        tag on initial import
        """
        repo = GitRepository.create(self.pkg)
        os.chdir(repo.path)
        orig = self._orig('2.6')
        ok_(import_orig(['arg0', '--no-interactive', orig]) == 1)
        self._check_repo_state(repo, None, [], tags=[])

    def test_initial_import_fail_create_debian_branch(self):
        """
        Test that we can rollback from creating the Debian branch on
        initial import
        """
        repo = GitRepository.create(self.pkg)
        os.chdir(self.pkg)
        orig = self._orig('2.6')
        with patch('gbp.git.repository.GitRepository.create_branch',
                   side_effect=GitRepositoryError('this is a create branch error mock')):
            ok_(import_orig(['arg0', '--no-interactive', '--pristine-tar', orig]) == 1)
        self._check_repo_state(repo, None, [], tags=[])

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_with_component_tarballs(self, repo):
        """
        Test that using a filter works with component tarballs (#840602)
        """
        # copy data since we don't want the repacked tarball to end up in DEB_TEST_DATA_DIR
        os.mkdir('../tarballs')
        for f in ['hello-debhelper_2.8.orig-foo.tar.gz', 'hello-debhelper_2.8.orig.tar.gz']:
            src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0-additional-tarballs', f)
            shutil.copy(src, '../tarballs')
        ok_(import_orig(['arg0',
                         '--component=foo',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '../tarballs/hello-debhelper_2.8.orig.tar.gz']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        self._check_component_tarballs(repo, ['foo/test1', 'foo/test2'])
        ok_(b'COPYING' in repo.ls_tree('HEAD'))
        ok_(b'README' not in repo.ls_tree('HEAD'),
            "README not filtered out of %s" % repo.ls_tree('HEAD'))
        tar = '../hello-debhelper_2.8.orig.tar.gz'
        # Check if tar got filtered properly
        ok_(os.path.exists(tar))
        t = tarfile.open(name=tar, mode="r:gz")
        for f in ['hello-2.8/configure']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_with_component_tarballs_and_postunpack_changes(self, repo):
        """
        Test that using a filter works with component tarballs (#840602) and
        that the postunpack hook can be used to do more sophisticated changes
        to the orig (#951534).
        """
        # copy data since we don't want the repacked tarball to end up in DEB_TEST_DATA_DIR
        os.mkdir('../tarballs')
        for f in ['hello-debhelper_2.8.orig-foo.tar.gz', 'hello-debhelper_2.8.orig.tar.gz']:
            src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0-additional-tarballs', f)
            shutil.copy(src, '../tarballs')
        ok_(import_orig(['arg0',
                         '--component=foo',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '--postunpack=printenv > $GBP_SOURCES_DIR/postunpack.out;' +
                         'rm $GBP_SOURCES_DIR/TODO',
                         '../tarballs/hello-debhelper_2.8.orig.tar.gz']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        self._check_component_tarballs(repo, ['foo/test1', 'foo/test2'])
        ok_(b'COPYING' in repo.ls_tree('HEAD'))
        ok_(b'README' not in repo.ls_tree('HEAD'),
            "README not filtered out of %s" % repo.ls_tree('HEAD'))
        ok_(b'TODO' not in repo.ls_tree('HEAD'),
            "TODO not filtered out of %s" % repo.ls_tree('HEAD'))
        tar = '../hello-debhelper_2.8.orig.tar.gz'
        # Check if tar got filtered properly
        ok_(os.path.exists(tar))
        t = tarfile.open(name=tar, mode="r:gz")
        for f in ['hello-2.8/configure', 'hello-2.8/postunpack.out']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README', 'hello-2.8/TODO']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_with_orig_tarball(self, repo):
        """
        Test that using a filter works with an upstream tarball that has
        already the correct name (#558777)
        """
        f = 'hello-debhelper_2.8.orig.tar.gz'
        src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', f)
        shutil.copy(src, '..')
        ok_(import_orig(['arg0',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '../hello-debhelper_2.8.orig.tar.gz']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        filtered = os.path.join('..', f)
        ok_(os.path.exists(filtered))
        eq_(os.readlink(filtered).split('/')[-1],
            'hello-debhelper_2.8.orig.gbp.tar.gz')
        # Check if tar got filtered properly
        t = tarfile.open(name=filtered, mode="r:gz")
        for f in ['hello-2.8/configure']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_with_orig_tarball_and_postunpack_changes(self, repo):
        """
        Test that using a filter works with an upstream tarball that has
        already the correct name (#558777) and that the postunpack hook can
        be used to do more sophisticated changes to the orig (#951534).
        """
        f = 'hello-debhelper_2.8.orig.tar.gz'
        src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', f)
        shutil.copy(src, '..')
        ok_(import_orig(['arg0',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '--postunpack=printenv > $GBP_SOURCES_DIR/postunpack.out;' +
                         'rm $GBP_SOURCES_DIR/TODO',
                         '../hello-debhelper_2.8.orig.tar.gz']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        filtered = os.path.join('..', f)
        ok_(os.path.exists(filtered))
        eq_(os.readlink(filtered).split('/')[-1],
            'hello-debhelper_2.8.orig.gbp.tar.gz')
        # Check if tar got filtered properly
        t = tarfile.open(name=filtered, mode="r:gz")
        for f in ['hello-2.8/configure', 'hello-2.8/postunpack.out']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README', 'hello-2.8/TODO']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_postunpack_changes_with_orig_tarball(self, repo):
        """
        Test that using a postunpack script to apply changes works with an
        upstream tarball that has already the correct name (#951534).
        """
        f = 'hello-debhelper_2.8.orig.tar.gz'
        src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', f)
        shutil.copy(src, '..')
        ok_(import_orig(['arg0',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--postunpack=printenv > $GBP_SOURCES_DIR/postunpack.out;' +
                         'rm $GBP_SOURCES_DIR/TODO; rm $GBP_SOURCES_DIR/README',
                         '../hello-debhelper_2.8.orig.tar.gz']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        filtered = os.path.join('..', f)
        ok_(os.path.exists(filtered))
        eq_(os.readlink(filtered).split('/')[-1],
            'hello-debhelper_2.8.orig.gbp.tar.gz')
        # Check if tar got filtered properly
        t = tarfile.open(name=filtered, mode="r:gz")
        for f in ['hello-2.8/configure', 'hello-2.8/postunpack.out']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README', 'hello-2.8/TODO']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_unpacked_dir(self, repo):
        """
        Test that importing and filtering unpacked upstream source works.
        """
        f = 'hello-debhelper_2.8.orig.tar.gz'
        src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', f)
        # Create an unpacked tarball we can import
        UnpackTarArchive(src, '..')()
        ok_(os.path.exists('../hello-2.8'))
        ok_(import_orig(['arg0',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '../hello-2.8']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        filtered = os.path.join('..', f)
        ok_(os.path.exists(filtered))
        # Check if tar got filtered properly
        t = tarfile.open(name=filtered, mode="r:gz")
        for f in ['hello-2.8/configure']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_filter_unpacked_dir_with_postunpack_changes(self, repo):
        """
        Test that importing and filtering unpacked upstream source works and
        that the postunpack hook can be used to do more sophisticated changes
        to the orig (#951534).
        """
        f = 'hello-debhelper_2.8.orig.tar.gz'
        src = os.path.join(DEB_TEST_DATA_DIR, 'dsc-3.0', f)
        # Create an unpacked tarball we can import
        UnpackTarArchive(src, '..')()
        ok_(os.path.exists('../hello-2.8'))
        ok_(import_orig(['arg0',
                         '--no-interactive',
                         '--pristine-tar',
                         '--filter-pristine-tar',
                         '--filter=README*',
                         '--postunpack=printenv > $GBP_SOURCES_DIR/postunpack.out;' +
                         'rm $GBP_SOURCES_DIR/TODO',
                         '../hello-2.8']) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['debian/2.6-2', 'upstream/2.6', 'upstream/2.8'])
        filtered = os.path.join('..', f)
        ok_(os.path.exists(filtered))
        # Check if tar got filtered properly
        t = tarfile.open(name=filtered, mode="r:gz")
        for f in ['hello-2.8/configure', 'hello-2.8/postunpack.out']:
            i = t.getmember(f)
            eq_(type(i), tarfile.TarInfo)
        for f in ['hello-2.8/README', 'hello-2.8/TODO']:
            with assert_raises(KeyError):
                t.getmember(f)
        t.close()

    @RepoFixtures.quilt30(DEFAULT_DSC, opts=['--pristine-tar'])
    def test_import_in_submodule(self, repo):
        """
        Test that importing works if repo is a git submodule (#674015)
        """
        parent_repo = GitRepository.create('../parent')
        parent_repo.add_submodule(repo.path)
        parent_repo.update_submodules(init=True, recursive=True)
        submodule = GitRepository(os.path.join(parent_repo.path,
                                               'hello-debhelper'))
        ok_(submodule.path.endswith('parent/hello-debhelper'))
        os.chdir(submodule.path)
        orig = self._orig('2.8')
        submodule.create_branch('upstream', 'origin/upstream')
        ok_(import_orig(['arg0', '--no-interactive', orig]) == 0)

    def test_with_signaturefile(self):
        """
        Test that importing a new version with a signature file works
        """
        repo = ComponentTestGitRepository.create(self.pkg)
        os.chdir(self.pkg)
        orig = self._orig('2.8')
        ok_(import_orig(['arg0',
                         '--postimport=printenv > ../postimport.out',
                         '--postunpack=printenv > ../postunpack.out',
                         '--no-interactive', '--pristine-tar',
                         '--upstream-signatures=on', orig]) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['upstream/2.8'])
        ok_(os.path.exists('../postimport.out'))
        eq_(repo.ls_tree('pristine-tar'), {b'hello-debhelper_2.8.orig.tar.gz.id',
                                           b'hello-debhelper_2.8.orig.tar.gz.delta',
                                           b'hello-debhelper_2.8.orig.tar.gz.asc'})
        self.check_hook_vars('../postimport', [("GBP_BRANCH", "master"),
                                               ("GBP_TAG", "upstream/2.8"),
                                               ("GBP_UPSTREAM_VERSION", "2.8"),
                                               ("GBP_DEBIAN_VERSION", "2.8-1")])

    def test_with_auto_signaturefile(self):
        """
        Test that importing a new version with a signature file works
        when using auto mode.
        """
        repo = ComponentTestGitRepository.create(self.pkg)
        os.chdir(self.pkg)
        orig = self._orig('2.8')
        ok_(import_orig(['arg0',
                         '--postimport=printenv > ../postimport.out',
                         '--postunpack=printenv > ../postunpack.out',
                         '--no-interactive', '--pristine-tar',
                         '--upstream-signatures=auto', orig]) == 0)
        self._check_repo_state(repo, 'master', ['master', 'upstream', 'pristine-tar'],
                               tags=['upstream/2.8'])
        ok_(os.path.exists('../postimport.out'))
        eq_(repo.ls_tree('pristine-tar'), {b'hello-debhelper_2.8.orig.tar.gz.id',
                                           b'hello-debhelper_2.8.orig.tar.gz.delta',
                                           b'hello-debhelper_2.8.orig.tar.gz.asc'})
        self.check_hook_vars('../postimport', [("GBP_BRANCH", "master"),
                                               ("GBP_TAG", "upstream/2.8"),
                                               ("GBP_UPSTREAM_VERSION", "2.8"),
                                               ("GBP_DEBIAN_VERSION", "2.8-1")])

    def test_postunpack_env_vars(self):
        """
        Test that the expected environment variables are set during
        postunpack hook.
        """
        ComponentTestGitRepository.create(self.pkg)
        os.chdir(self.pkg)
        orig = self._orig('2.8')
...
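The _check_component_tarballs helper at the top of this file is the essential ls_tree idiom here: a membership test against the byte-string path sets of the HEAD and upstream trees. Below is a standalone restatement under the assumption that ls_tree returns a set of byte paths (which the eq_ against a set of b'...' names in test_with_signaturefile confirms); check_files_in_trees and its inline encoding shim are illustrative names, not gbp API.

from nose.tools import ok_

def check_files_in_trees(repo, files, treeishs=('HEAD', 'upstream')):
    """Assert every path in 'files' occurs in each of the given trees."""
    for treeish in treeishs:
        tree = repo.ls_tree(treeish)   # set of byte-string paths
        for path in files:
            # accept str or bytes, like to_bin() in the tests above
            path = path if isinstance(path, bytes) else path.encode()
            ok_(path in tree, "%s missing from %s" % (path, treeish))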


test_ls.py

Source: test_ls.py (GitHub)


import pytest
import asyncio
from mock import Mock
import qth
from qth_ls import \
    Ls, \
    path_to_subdirectories, \
    listing_has_subdir, \
    get_path_listing


def AsyncMock(*args, **kwargs):
    return Mock(*args,
                side_effect=lambda *_, **__: asyncio.sleep(0),
                **kwargs)


def test_path_to_subdirectories():
    assert list(path_to_subdirectories("")) == [""]
    assert list(path_to_subdirectories("foo")) == [""]
    assert list(path_to_subdirectories("foo/bar")) == ["", "foo/"]
    assert list(path_to_subdirectories("foo/bar/baz")) == \
        ["", "foo/", "foo/bar/"]


def test_listing_has_subdir():
    assert not listing_has_subdir({}, "foo")
    assert not listing_has_subdir(
        {"foo": [{"behaviour": "PROPERTY-N:1"}]}, "foo")
    assert listing_has_subdir({"foo": [{"behaviour": "DIRECTORY"}]}, "foo")
    assert listing_has_subdir({"foo": [
        {"behaviour": "PROPERTY-1:N"},
        {"behaviour": "DIRECTORY"},
    ]}, "foo")


def test_get_path_listing():
    assert get_path_listing({}, "qux") is None
    ls_tree = {
        "": {
            "foo": [{"behaviour": "PROPERTY-N:1"},
                    {"behaviour": "DIRECTORY"}],
            "baz": [{"behaviour": "PROPERTY-1:N"}],
        },
        "foo/": {
            "bar": [{"behaviour": "EVENT-1:N"}],
        },
        "qux/": {
            "quo": [{"behaviour": "EVENT-N:1"}],
        },
    }
    assert get_path_listing(ls_tree, "qux") is None
    assert get_path_listing(ls_tree, "baz") == [{"behaviour": "PROPERTY-1:N"}]
    assert get_path_listing(ls_tree, "foo") == [{"behaviour": "PROPERTY-N:1"},
                                                {"behaviour": "DIRECTORY"}]
    assert get_path_listing(ls_tree, "foo/bar") == [{"behaviour": "EVENT-1:N"}]
    # The 'qux/' directory is not listed in the top level so it shouldn't be
    # returned, despite having a tree listing...
    assert get_path_listing(ls_tree, "qux/quo") is None


@pytest.mark.asyncio
async def test_watch_and_unwatch():
    client = Mock()
    client.watch_property = AsyncMock()
    client.unwatch_property = AsyncMock()
    ls = Ls(client)
    foo_bar_cb = AsyncMock()
    await ls.watch_path("foo/bar", foo_bar_cb)
    # Should call straight away with None
    foo_bar_cb.assert_called_once_with("foo/bar", None)
    # Should have registered the callback and prepared a None 'last value'
    assert ls._callbacks == {"foo/bar": [foo_bar_cb]}
    assert ls._last_path_value == {"foo/bar": None}
    # Should have set up watches for meta/ls/ and meta/ls/foo/
    assert client.watch_property.call_count == 2
    client.watch_property.assert_any_call(
        "meta/ls/", ls._on_ls_tree_property_changed)
    client.watch_property.assert_any_call(
        "meta/ls/foo/", ls._on_ls_tree_property_changed)
    # Watching a different property should result in a minimal set of path
    # watches being added
    foo_bar_baz_cb = AsyncMock()
    await ls.watch_path("foo/bar/baz", foo_bar_baz_cb)
    foo_bar_baz_cb.assert_called_once_with("foo/bar/baz", None)
    assert ls._callbacks == {"foo/bar": [foo_bar_cb],
                             "foo/bar/baz": [foo_bar_baz_cb]}
    assert ls._last_path_value == {"foo/bar": None,
                                   "foo/bar/baz": None}
    # Should have set up only one extra watch, for meta/ls/foo/bar/
    assert client.watch_property.call_count == 3
    client.watch_property.assert_called_with(
        "meta/ls/foo/bar/", ls._on_ls_tree_property_changed)
    # Watching a property a second time should add a callback but trigger no
    # new tree watches.
    foo_bar_baz_cb2 = AsyncMock()
    await ls.watch_path("foo/bar/baz", foo_bar_baz_cb2)
    foo_bar_baz_cb.assert_called_once_with("foo/bar/baz", None)
    foo_bar_baz_cb2.assert_called_once_with("foo/bar/baz", None)
    assert ls._callbacks == {"foo/bar": [foo_bar_cb],
                             "foo/bar/baz": [foo_bar_baz_cb, foo_bar_baz_cb2]}
    assert ls._last_path_value == {"foo/bar": None,
                                   "foo/bar/baz": None}
    assert client.watch_property.call_count == 3
    # Unwatching a doubly-watched property should result in no tree changes
    await ls.unwatch_path("foo/bar/baz", foo_bar_baz_cb)
    assert foo_bar_baz_cb.call_count == 1
    assert ls._callbacks == {"foo/bar": [foo_bar_cb],
                             "foo/bar/baz": [foo_bar_baz_cb2]}
    assert ls._last_path_value == {"foo/bar": None,
                                   "foo/bar/baz": None}
    assert client.watch_property.call_count == 3
    assert client.unwatch_property.call_count == 0
    # Unwatching a property should unwatch only the tree parts it no longer
    # needs
    await ls.unwatch_path("foo/bar/baz", foo_bar_baz_cb2)
    assert foo_bar_baz_cb2.call_count == 1
    assert ls._callbacks == {"foo/bar": [foo_bar_cb]}
    assert ls._last_path_value == {"foo/bar": None}
    assert client.watch_property.call_count == 3
    assert client.unwatch_property.call_count == 1
    client.unwatch_property.assert_any_call(
        "meta/ls/foo/bar/", ls._on_ls_tree_property_changed)
    # Unwatching the remaining property should take us back where we started
    await ls.unwatch_path("foo/bar", foo_bar_cb)
    assert foo_bar_cb.call_count == 1
    assert ls._callbacks == {}
    assert ls._last_path_value == {}
    assert client.watch_property.call_count == 3
    assert client.unwatch_property.call_count == 3
    client.unwatch_property.assert_any_call(
        "meta/ls/", ls._on_ls_tree_property_changed)
    client.unwatch_property.assert_any_call(
        "meta/ls/foo/", ls._on_ls_tree_property_changed)


@pytest.mark.asyncio
async def test_tree_changes():
    client = Mock()
    client.watch_property = AsyncMock()
    client.unwatch_property = AsyncMock()
    ls = Ls(client)
    # Test that values make it through
    foo_bar_cb = AsyncMock()
    await ls.watch_path("foo/bar", foo_bar_cb)
    # Initially None
    foo_bar_cb.assert_called_once_with("foo/bar", None)
    # When a listing arrives, nothing should happen if we don't have the whole
    # tree of paths
    await ls._on_ls_tree_property_changed("meta/ls/foo/", {
        "bar": [{"behaviour": "EVENT-1:N"}],
    })
    assert foo_bar_cb.call_count == 1
    # When the final missing part of the tree arrives, a callback should come
    # through
    await ls._on_ls_tree_property_changed("meta/ls/", {
        "foo": [{"behaviour": "DIRECTORY"}],
    })
    assert foo_bar_cb.call_count == 2
    foo_bar_cb.assert_called_with("foo/bar", [{"behaviour": "EVENT-1:N"}])
    # A second registration should immediately get the value
    foo_bar_cb2 = AsyncMock()
    await ls.watch_path("foo/bar", foo_bar_cb2)
    foo_bar_cb2.assert_called_once_with(
        "foo/bar", [{"behaviour": "EVENT-1:N"}])
    # A tree update which doesn't change the value should not result in a call
    await ls._on_ls_tree_property_changed("meta/ls/", {
        "foo": [{"behaviour": "DIRECTORY"}],
        "irrelevant": [{"behaviour": "EVENT-1:N"}],
    })
    assert foo_bar_cb.call_count == 2
    assert foo_bar_cb2.call_count == 1
    # If any part of the path is removed, should be None again
    await ls._on_ls_tree_property_changed("meta/ls/", {
        "irrelevant": [{"behaviour": "EVENT-1:N"}],
    })
    assert foo_bar_cb.call_count == 3
    foo_bar_cb.assert_called_with("foo/bar", None)
    assert foo_bar_cb2.call_count == 2
    foo_bar_cb2.assert_called_with("foo/bar", None)
    # Put it back again...
    await ls._on_ls_tree_property_changed("meta/ls/", {
        "foo": [{"behaviour": "DIRECTORY"}],
    })
    assert foo_bar_cb.call_count == 4
    foo_bar_cb.assert_called_with("foo/bar", [{"behaviour": "EVENT-1:N"}])
    assert foo_bar_cb2.call_count == 3
    foo_bar_cb2.assert_called_with("foo/bar", [{"behaviour": "EVENT-1:N"}])
    # If the listing property is deleted, everything should also disappear.
    await ls._on_ls_tree_property_changed("meta/ls/foo/", qth.Empty)
    assert foo_bar_cb.call_count == 5
    foo_bar_cb.assert_called_with("foo/bar", None)
    assert foo_bar_cb2.call_count == 4
    ...  # (listing truncated here in the original)


rule.py

Source:rule.py Github


...
    return tree


async def _copy_files_modifications(_cache, tree, paths_multimap):
    modifications = {}
    for source in paths_multimap:
        source_info_dict = await _cache.ls_tree(tree, source)
        if not source_info_dict:
            raise NoMatchingFilesError(
                'Path "{}" does not exist.'.format(source))
        source_info = list(source_info_dict.items())[0][1]
        for dest in paths_multimap[source]:
            # If dest is a directory, put the source inside dest instead of
            # overwriting dest entirely.
            dest_is_dir = False
            dest_info_dict = await _cache.ls_tree(tree, dest)
            if dest_info_dict:
                dest_info = list(dest_info_dict.items())[0][1]
                dest_is_dir = (dest_info.type == cache.TREE_TYPE)
            adjusted_dest = dest
            if dest_is_dir:
                adjusted_dest = str(
                    PurePosixPath(dest) / PurePosixPath(source).name)
            modifications[adjusted_dest] = source_info
    return modifications


async def copy_files(_cache, tree, paths_multimap):
    modifications = await _copy_files_modifications(_cache, tree,
                                                    paths_multimap)
    tree = await _cache.modify_tree(tree, modifications)
    return tree


async def move_files(_cache, tree, paths_multimap):
    # First obtain the copies from the original tree. Moves are not ordered but
    # happen all at once, so if you move a->b and b->c, the contents of c will
    # always end up being b rather than a.
    modifications = await _copy_files_modifications(_cache, tree,
                                                    paths_multimap)
    # Now add in deletions, but be careful not to delete a file that just got
    # moved. Note that if "a" gets moved into "dir", it will end up at "dir/a",
    # even if "dir" is deleted (because modify_tree always modifies parents
    # before descending into children, and deleting a dir is a modification of
    # that dir's parent).
    for source in paths_multimap:
        if source not in modifications:
            modifications[source] = None
    tree = await _cache.modify_tree(tree, modifications)
    return tree


async def _get_glob_entries(_cache, tree, globs_list):
    matches = {}
    for glob_str in globs_list:
        # Do an in-memory match of all the paths in the tree against the
        # glob expression. As an optimization, if the glob is something
        # like 'a/b/**/foo', only list the paths under 'a/b'.
        regex = glob.glob_to_path_regex(glob_str)
        prefix = glob.unglobbed_prefix(glob_str)
        entries = await _cache.ls_tree(tree, prefix, recursive=True)
        found = False
        for path, entry in entries.items():
            if re.match(regex, path):
                matches[path] = entry
                found = True
        if not found:
            raise NoMatchingFilesError(
                '"{}" didn\'t match any files.'.format(glob_str))
    return matches


async def pick_files(_cache, tree, globs_list):
    picks = await _get_glob_entries(_cache, tree, globs_list)
    tree = await _cache.modify_tree(None, picks)
    return tree


async def drop_files(_cache, tree, globs_list):
    drops = await _get_glob_entries(_cache, tree, globs_list)
    for path in drops:
        drops[path] = None
    tree = await _cache.modify_tree(tree, drops)
    return tree


async def make_files_executable(_cache, tree, globs_list):
    entries = await _get_glob_entries(_cache, tree, globs_list)
    exes = {}
    for path, entry in entries.items():
        # Ignore directories.
        if entry.type == cache.BLOB_TYPE:
            exes[path] = entry._replace(mode=cache.EXECUTABLE_FILE_MODE)
    tree = await _cache.modify_tree(tree, exes)
    return tree


async def get_export_tree(_cache, tree, export_path):
    entries = await _cache.ls_tree(tree, export_path)
    if not entries:
        raise NoMatchingFilesError(
            'Export path "{}" doesn\'t exist.'.format(export_path))
    entry = list(entries.values())[0]
    if entry.type != cache.TREE_TYPE:
        raise NoMatchingFilesError(
            'Export path "{}" is not a directory.'.format(export_path))
    return entry.hash


class NoMatchingFilesError(PrintableError):
    ...  # (listing truncated here in the original; imports of re,
         # PurePosixPath, and peru's cache/glob helpers are also elided)
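This snippet leans on a narrow slice of the cache API: ls_tree(tree, path, recursive=False) returns a dict mapping paths to entries, modify_tree(tree, modifications) applies overwrites and deletions (a value of None deletes), and entries behave like namedtuples with type, mode, and hash fields (note the entry._replace(...) call in make_files_executable). The stand-in below is a hedged, in-memory sketch of that surface for experimenting with the helpers above; the constants, the Entry shape, and FakeCache are all illustrative assumptions, not peru's actual cache module.

import asyncio
from collections import namedtuple

# Illustrative constants and entry shape; peru's real `cache` module defines
# its own (these happen to mirror git's tree-entry modes, but that is an
# assumption).
TREE_TYPE = "40000"
BLOB_TYPE = "100644"
EXECUTABLE_FILE_MODE = "100755"

Entry = namedtuple("Entry", ["type", "mode", "hash"])

class FakeCache:
    """Minimal in-memory stand-in for the `_cache` argument used above."""

    def __init__(self, entries):
        self.entries = dict(entries)  # {posix path: Entry}

    async def ls_tree(self, tree, path, recursive=False):
        # Shaped like the calls above expect: returns {path: entry}, or an
        # empty dict when nothing matches.
        prefix = path.rstrip("/") + "/" if path else ""
        return {p: e for p, e in self.entries.items()
                if p == path or (recursive and p.startswith(prefix))}

    async def modify_tree(self, tree, modifications):
        # None means "delete this path"; any other value overwrites it.
        for p, e in modifications.items():
            if e is None:
                self.entries.pop(p, None)
            else:
                self.entries[p] = e
        return self.entries

async def demo():
    fake = FakeCache({"bin/tool": Entry(BLOB_TYPE, "100644", "abc123")})
    listing = await fake.ls_tree(None, "bin/tool")
    assert listing["bin/tool"].hash == "abc123"

asyncio.run(demo())

A stand-in like this is enough to drive copy_files, drop_files, and friends in a REPL and watch how each one boils down to a single modify_tree call built from ls_tree lookups.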


