How to use the _read_data method in yandex-tank

Best Python code snippet using yandex-tank

flac.py

Source:flac.py Github

copy

Full Screen

...49 columns = 250 f = open('mesh.0')51 offset = (frame-1) * columns * self.nnodes * sizeoffloat52 f.seek(offset)53 x, z = self._read_data(f, columns)54 self._reshape_nodal_fields(x, z)55 return x, z56 def read_vel(self, frame):57 columns = 258 f = open('vel.0')59 offset = (frame-1) * columns * self.nnodes * sizeoffloat60 f.seek(offset)61 vx, vz = self._read_data(f, columns)62 self._reshape_nodal_fields(vx, vz)63 return vx, vz64 def read_temperature(self, frame):65 columns = 166 f = open('temperature.0')67 offset = (frame-1) * columns * self.nnodes * sizeoffloat68 f.seek(offset)69 T = self._read_data(f, columns)70 self._reshape_nodal_fields(T)71 return T72 def read_aps(self, frame):73 columns = 174 f = open('aps.0')75 offset = (frame-1) * columns * self.nelements * sizeoffloat76 f.seek(offset)77 aps = self._read_data(f, columns, count=self.nelements)78 self._reshape_elemental_fields(aps)79 return aps80 def read_density(self, frame):81 columns = 182 f = open('density.0')83 offset = (frame-1) * columns * self.nelements * sizeoffloat84 f.seek(offset)85 density = self._read_data(f, columns, count=self.nelements)86 self._reshape_elemental_fields(density)87 return density88 def read_eII(self, frame):89 columns = 190 f = open('eII.0')91 offset = (frame-1) * columns * self.nelements * sizeoffloat92 f.seek(offset)93 eII = self._read_data(f, columns, count=self.nelements)94 self._reshape_elemental_fields(eII)95 return eII96 def read_sII(self, frame):97 columns = 198 f = open('sII.0')99 offset = (frame-1) * columns * self.nelements * sizeoffloat100 f.seek(offset)101 sII = self._read_data(f, columns, count=self.nelements)102 self._reshape_elemental_fields(sII)103 return sII104 def read_sxx(self, frame):105 columns = 1106 f = open('sxx.0')107 offset = (frame-1) * columns * self.nelements * sizeoffloat108 f.seek(offset)109 sxx = self._read_data(f, columns, count=self.nelements)110 self._reshape_elemental_fields(sxx)111 return sxx112 def read_sxz(self, frame):113 columns = 1114 
f = open('sxz.0')115 offset = (frame-1) * columns * self.nelements * sizeoffloat116 f.seek(offset)117 sxz = self._read_data(f, columns, count=self.nelements)118 self._reshape_elemental_fields(sxz)119 return sxz120 def read_szz(self, frame):121 columns = 1122 f = open('szz.0')123 offset = (frame-1) * columns * self.nelements * sizeoffloat124 f.seek(offset)125 szz = self._read_data(f, columns, count=self.nelements)126 self._reshape_elemental_fields(szz)127 return szz128 def read_srII(self, frame):129 columns = 1130 f = open('srII.0')131 offset = (frame-1) * columns * self.nelements * sizeoffloat132 f.seek(offset)133 srII = self._read_data(f, columns, count=self.nelements)134 self._reshape_elemental_fields(srII)135 return srII136 def read_pres(self, frame):137 columns = 1138 f = open('pres.0')139 offset = (frame-1) * columns * self.nelements * sizeoffloat140 f.seek(offset)141 pres = self._read_data(f, columns, count=self.nelements)142 self._reshape_elemental_fields(pres)143 return pres144 def read_diss(self, frame):145 columns = 1146 f = open('diss.0')147 offset = (frame-1) * columns * self.nelements * sizeoffloat148 f.seek(offset)149 diss = self._read_data(f, columns, count=self.nelements)150 self._reshape_elemental_fields(diss)151 return diss152 def read_visc(self, frame):153 columns = 1154 f = open('visc.0')155 offset = (frame-1) * columns * self.nelements * sizeoffloat156 f.seek(offset)157 visc = self._read_data(f, columns, count=self.nelements)158 self._reshape_elemental_fields(visc)159 return visc160 def read_phase(self, frame):161 columns = 1162 f = open('phase.0')163 offset = (frame-1) * columns * self.nelements * sizeofint164 f.seek(offset)165 phase = self._read_data(f, columns, count=self.nelements, dtype=np.int32)166 self._reshape_elemental_fields(phase)167 return phase168 def read_markers(self, frame):169 # read tracer size170 tmp = np.fromfile('_markers.0', sep=' ')171 tmp.shape = (-1, 4)172 n = int(tmp[frame-1,2])173 suffix = '.%06d.0' % frame174 dead = 
self._read_data('markdead' + suffix, count=n, dtype=np.int32)175 tmp = self._read_data('markx' + suffix, count=n)176 x = self._remove_dead_markers(tmp, dead)177 tmp = self._read_data('marky' + suffix, count=n)178 z = self._remove_dead_markers(tmp, dead)179 tmp = self._read_data('markage' + suffix, count=n)180 age = self._remove_dead_markers(tmp, dead)181 tmp = self._read_data('markphase' + suffix, count=n, dtype=np.int32)182 phase = self._remove_dead_markers(tmp, dead)183 tmp = np.arange(n)184 ID = self._remove_dead_markers(tmp, dead)185 return x, z, age, phase, ID186 def read_tracers(self):187 # read tracer size188 tmp = np.fromfile('_tracers.0', sep=' ')189 tmp.shape = (-1, 4)190 ntracerrec = tmp.shape[0]191 ntracers = int(tmp[0,1])192 n = ntracerrec * ntracers193 time = self._read_data('outtracktime.0', count=n)194 x = self._read_data('outtrackxx.0', count=n)195 x.shape = (ntracerrec, ntracers)196 z = self._read_data('outtrackyy.0', count=n)197 z.shape = (ntracerrec, ntracers)198 T = self._read_data('outtracktemp.0', count=n)199 T.shape = (ntracerrec, ntracers)200 p = self._read_data('outtrackpres.0', count=n)201 p.shape = (ntracerrec, ntracers)202 e = self._read_data('outtrackstrain.0', count=n)203 e.shape = (ntracerrec, ntracers)204 phase = self._read_data('outtrackphase.0', count=n)205 phase.shape = (ntracerrec, ntracers)206 return x, z, T, p, e, phase207 def _read_data(self, fileobj, columns=1,208 count=None, dtype=None):209 '''Read data from a file-like object 'fileobj'.210 The 'dtype' specifies the storage type, default to single precision211 float.212 '''213 # number of nodes214 if count is None:215 count = self.nnodes216 # total number of items217 n = columns * count218 if dtype is None:219 dtype = default_dtype220 result = np.fromfile(fileobj, dtype, n)221 if self.swap_endian:...

Full Screen

Full Screen

__init__.py

Source:__init__.py Github

copy

Full Screen

# NOTE(review): a function whose body ends `except IOError as e: raise
# ValueError(str(e))` precedes this span but its definition is truncated
# in the reviewed view and is not reproduced here.

# Matches one data-file line: either pure comment, or data optionally
# followed by a whitespace-separated trailing comment.  `\S#` lets a '#'
# glued to a non-space character stay part of the data.
_parse_re = re.compile(r'^\s*((?P<data>([^#]|\S#)*)(\s+#.*)?|#.*)$')


def _read_data(
        filename, key_conv=str, value_conv=float, key_col=0, value_col=1):
    """Parse a columnar package data file into a {key: value} dict.

    Each non-comment line must have a consistent number of columns;
    `key_col`/`value_col` select which columns feed `key_conv`/`value_conv`.
    Raises ValueError on duplicate keys, ragged rows, or conversion
    failures, annotating the exception with the offending line number.
    """
    result = {}
    cols = None
    for count, line in enumerate(
            pkg.resource_stream(__name__, filename), start=1):
        data = _parse_re.match(line.decode('utf-8')).group('data')
        if data:
            data = data.split()
            try:
                if cols is None:
                    # first data line fixes the expected column count
                    cols = len(data)
                elif len(data) != cols:
                    raise ValueError(
                        'Unexpected number of values (expected %d)' % cols)
                key = key_conv(data[key_col])
                value = value_conv(data[value_col])
                if key in result:
                    raise ValueError(
                        'Duplicate definition for group %s' % key)
                result[key] = value
            except (IndexError, ValueError) as e:
                # annotate, then re-raise with original traceback
                e.args += ('on line %d of %s' % (count, filename),)
                raise
    return result


def _read_smarts(filename):
    """Read a data file whose values are SMARTS patterns, keyed by int."""
    return _read_data(filename, key_conv=int, value_conv=smarts)


def _read_matrix(filename, key_conv=str, value_conv=float, symmetric=True):
    """Parse a matrix-style data file into a nested dict
    {row_key: {col_key: value}}.

    The first data line lists the column keys; each following line is a
    row key plus one value per column.  When `symmetric` is True the row
    and column key sets must match and the matrix must equal its
    transpose.
    """
    result = {}
    col_keys = []
    row_keys = []
    for count, line in enumerate(
            pkg.resource_stream(__name__, filename), start=1):
        data = _parse_re.match(line.decode('utf-8')).group('data')
        if data:
            try:
                if not col_keys:
                    # header line: column keys only
                    col_keys = [key_conv(key) for key in data.split()]
                else:
                    row_key, values = data.split(None, 1)
                    row_key = key_conv(row_key)
                    if row_key in result:
                        # BUG FIX: original formatted with undefined name
                        # `key`; must be `row_key`.
                        raise ValueError(
                            'Duplicate definition for row %s' % row_key)
                    values = [value_conv(value) for value in values.split()]
                    if len(col_keys) != len(values):
                        raise ValueError(
                            'Expected %d values but found %d' % (
                                len(col_keys), len(values)))
                    row_keys.append(row_key)
                    result[row_key] = {
                        col_key: value
                        for col_key, value in zip(col_keys, values)
                    }
            except ValueError as e:
                # BUG FIX: original lacked the trailing comma, so
                # `tuple += str` raised TypeError instead of annotating.
                e.args += ('on line %d of %s' % (count, filename),)
                raise
    if symmetric:
        if sorted(row_keys) != sorted(col_keys):
            raise ValueError('Column and row keys are not identical')
        for row_key in row_keys:
            for col_key in col_keys:
                if result[row_key][col_key] != result[col_key][row_key]:
                    raise ValueError(
                        'Value %f in row %s, column %s does not match '
                        '%f in row %s, column %s' % (
                            result[row_key][col_key],
                            row_key, col_key,
                            result[col_key][row_key],
                            col_key, row_key,
                        ))
    return result


# Group-contribution lookup tables, loaded once at import time from the
# package data files.
STEIN_AND_BROWN_BOILING_POINT = _read_data('joback.data', key_conv=int, value_col=1)
JOBACK_BOILING_POINT = _read_data('joback.data', key_conv=int, value_col=2)
JOBACK_TEMPERATURE = _read_data('joback.data', key_conv=int, value_col=3)
JOBACK_PRESSURE = _read_data('joback.data', key_conv=int, value_col=4)
JOBACK_VOLUME = _read_data('joback.data', key_conv=int, value_col=5)
SCHROEDER_DENSITY = _read_data('schroeder.data', key_conv=int)
LE_BAS_DENSITY = _read_data('le_bas.data', key_conv=int)
NANNOOLAL_BOILING_POINT_PRIMARY = _read_data('nannoolal_primary.data', key_conv=int, value_col=1)
NANNOOLAL_VAPOUR_PRESSURE_PRIMARY = _read_data('nannoolal_primary.data', key_conv=int, value_col=2)
NANNOOLAL_TEMPERATURE_PRIMARY = _read_data('nannoolal_primary.data', key_conv=int, value_col=3)
NANNOOLAL_PRESSURE_PRIMARY = _read_data('nannoolal_primary.data', key_conv=int, value_col=4)
NANNOOLAL_VOLUME_PRIMARY = _read_data('nannoolal_primary.data', key_conv=int, value_col=5)
NANNOOLAL_BOILING_POINT_SECONDARY = _read_data('nannoolal_secondary.data', key_conv=int, value_col=1)
NANNOOLAL_VAPOUR_PRESSURE_SECONDARY = _read_data('nannoolal_secondary.data', key_conv=int, value_col=2)
NANNOOLAL_TEMPERATURE_SECONDARY = _read_data('nannoolal_secondary.data', key_conv=int, value_col=3)
NANNOOLAL_PRESSURE_SECONDARY = _read_data('nannoolal_secondary.data', key_conv=int, value_col=4)
NANNOOLAL_VOLUME_SECONDARY = _read_data('nannoolal_secondary.data', key_conv=int, value_col=5)
EVAPORATION_A = _read_data('evaporation.data', value_col=1)
EVAPORATION_B = _read_data('evaporation.data', value_col=2)
EVAPORATION2_A = _read_data('evaporation2.data', value_col=1)
EVAPORATION2_B = _read_data('evaporation2.data', value_col=2)
SIMPOL_1 = _read_data('SIMPOL.data', value_col=1)
SIMPOL_2 = _read_data('SIMPOL.data', value_col=2)
SIMPOL_3 = _read_data('SIMPOL.data', value_col=3)
SIMPOL_4 = _read_data('SIMPOL.data', value_col=4)
AIOMFAC_SALT_CATION_GROUP = _read_data('aiomfac_salts.data', key_conv=int, value_col=1, value_conv=int)
AIOMFAC_SALT_CATION_STOICH = _read_data('aiomfac_salts.data', key_conv=int, value_col=2)
AIOMFAC_SALT_ANION_GROUP = _read_data('aiomfac_salts.data', key_conv=int, value_col=3, value_conv=int)
AIOMFAC_SALT_ANION_STOICH = _read_data('aiomfac_salts.data', key_conv=int, value_col=4)
AIOMFAC_SALT_DENSITY = _read_data('aiomfac_salts.data', key_conv=int, value_col=5)
AIOMFAC_SALT_MASS = _read_data('aiomfac_salts.data', key_conv=int, value_col=6)
AIOMFAC_SALT_NAME = _read_data('aiomfac_salts.data', key_conv=int, value_col=7, value_conv=str)
AIOMFAC_ION_CHARGE = _read_data('aiomfac_ions.data', key_conv=int)
AIOMFAC_MAIN_GROUP = _read_data('aiomfac_main.data', key_conv=int, value_col=1, value_conv=int)
AIOMFAC_MASS = _read_data('aiomfac_main.data', key_conv=int, value_col=2)
AIOMFAC_RI = _read_data('aiomfac_main.data', key_conv=int, value_col=3)
AIOMFAC_QI = _read_data('aiomfac_main.data', key_conv=int, value_col=4)

# Derived tables: absolute ion charges, and (cation, anion) -> salt group.
AIOMFAC_ION_CHARGE_ABS = {
    group: abs(charge)
    for group, charge in AIOMFAC_ION_CHARGE.items()
    }
AIOMFAC_ION_SALT = {
    (cation, AIOMFAC_SALT_ANION_GROUP[group]): group
    for group, cation in AIOMFAC_SALT_CATION_GROUP.items()
    }

# NOTE: 2012-12-12 - The following extensions are ones suggested by Mark
# Barley, November 2012.  These are to make sure the MCM compounds are
# parsed correctly.  For ANY official AIOMFAC extensions these can be
# removed if required.
AIOMFAC_MAIN_GROUP[281] = AIOMFAC_MAIN_GROUP[19]
AIOMFAC_MAIN_GROUP[282] = AIOMFAC_MAIN_GROUP[23]
AIOMFAC_MAIN_GROUP[283] = AIOMFAC_MAIN_GROUP[26]

Full Screen

Full Screen

Fake.py

Source:Fake.py Github

copy

Full Screen

# --- Methods of the fake serial-port class (class statement and the
# --- surrounding get_settings / set_silent_on_empty methods are truncated
# --- outside this view and not reproduced here).

def set_read_data(self, data):
    """
    Set fake data to be returned by the read() and readline() functions.
    """
    # stored verbatim; read()/readline() consume it elsewhere in the class
    self._read_data = data

def get_data_written(self):
    """
    Return record of data sent via the write command.
    """
    # idiom fix: `return(expr)` parentheses dropped, behavior identical
    return self._data_written

Full Screen

Full Screen

gamedata.py

Source:gamedata.py Github

copy

Full Screen

# NOTE(review): a class holding `names = ['Воин', 'Лучник']` and an emoji
# list precedes this span, but its definition is truncated in the reviewed
# view and is not reproduced here.

class MonsterType:
    """Monster difficulty tiers as sequential integer constants."""
    NORMAL, EXPERT, ELITE, CHAMPION = range(4)


def _read_data(file_name: str) -> list:
    """Load and return the parsed JSON content of `file_name`.

    NOTE(review): the `-> list` annotation is kept for compatibility, but
    json.load may return any JSON type (e.g. a dict for stats.json) --
    confirm and loosen if so.
    """
    # json.load reads the stream directly (original did read() + loads);
    # explicit UTF-8 so behavior does not depend on the platform locale.
    with open(file_name, encoding='utf-8') as file_obj:
        return json.load(file_obj)


class Gamedata:
    """Static holder for all game data tables loaded from data/*.json."""

    # populated by load(); None until then
    _equipment_armor = None
    _equipment_weapon = None
    _exp_lvl = None
    _item_loot = None
    _location = None
    _monster = None
    stats = None
    primary_loot = None

    @staticmethod
    def load():
        """Read every game data file into the class-level attributes."""
        Gamedata._equipment_armor = _read_data('data/equipment_armor.json')
        Gamedata._equipment_weapon = _read_data('data/equipment_weapon.json')
        Gamedata._exp_lvl = _read_data('data/exp_lvl.json')
        Gamedata._item_loot = _read_data('data/item_loot.json')
        Gamedata._location = _read_data('data/location.json')
        Gamedata._monster = _read_data('data/monster.json')
        Gamedata.stats = _read_data('data/stats.json')
        Gamedata.primary_loot = _read_data('data/primary_loot.json')

    @staticmethod
    def validate():
        # NOTE(review): intentionally a no-op in the original
        pass


# module-level side effect preserved from the original: data is loaded
# once at import time
Gamedata.load()

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run yandex-tank automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful