How to use the get_metadata method in stestr

Best Python code snippets using get_metadata. The examples below are drawn from various open-source projects on GitHub, and each shows a different flavour of a get_metadata method: a per-object accessor that returns attached metadata tags (test_re.py), a config lookup that returns a metadata dictionary template (read_aux.py), a stubbed method on a mocked check object (test_check_runner.py), and a data-extraction helper (dhq_paper_plots.py).
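
Before reading the snippets, it helps to see the basic shape most of them share: an object stores optional metadata at construction time and exposes it through a get_metadata accessor. Here is a minimal, hypothetical sketch of that pattern (the Tagged class below is illustrative only; it is not from stestr or from any of the quoted projects):

# Minimal sketch of the common pattern; Tagged is a hypothetical class.
class Tagged:
    def __init__(self, name, metadata=None):
        self.name = name
        # store metadata as a list so tags can be merged later
        self._metadata = [metadata] if metadata is not None else []

    def get_metadata(self):
        """Return the list of metadata tags attached to this object."""
        return self._metadata

a = Tagged('a', metadata='ingress')
assert a.get_metadata() == ['ingress']
assert Tagged('b').get_metadata() == []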

test_re.py

Source: test_re.py (GitHub)

...
    c = re_symbol('c')
    c1 = re_symbol('c', metadata='ingress')
    c2 = re_symbol('c', metadata='egress')
    symbol_list = 'cba'
    assert a.get_metadata() == []
    assert a1 == a
    assert a1 & a == a
    assert (a1 & a).get_metadata() == ['ingress']
    assert c1 & c & c2 == c
    assert (c1 & c2 & c).get_metadata() == ['ingress', 'egress']
    assert (c1 & c2 & c & c2).get_metadata() == ['ingress', 'egress', 'egress']
    assert c1 & c2 & c & c2 == c
    assert a1 | a == a
    assert (a1 | a).get_metadata() == ['ingress']
    assert c1 | c | c2 == c
    assert (c1 | c2 | c).get_metadata() == ['ingress', 'egress']
    assert (c1 | c2 | c | c2).get_metadata() == ['ingress', 'egress', 'egress']
    assert c1 | c2 | c | c2 == c

def test_deriv_metadata():
    """ Test if derivation works well when consuming derivatives """
    a = re_symbol('a')
    a1 = re_symbol('a', metadata='ingress')
    a2 = re_symbol('a', metadata='egress')
    b = re_symbol('b')
    b1 = re_symbol('b', metadata='hook')
    c = re_symbol('c')
    c1 = re_symbol('c', metadata='ingress')
    c2 = re_symbol('c', metadata='egress')
    # tests for sample expressions
    (d, r) = deriv_consumed(a1, a)
    assert d == re_epsilon()
    assert len(r) == 1 and r == a1.get_metadata()
    (d, r) = deriv_consumed(c2, c)
    assert d == re_epsilon()
    assert len(r) == 1 and r == c2.get_metadata()
    (d, r) = deriv_consumed(a1, b)
    assert d == re_empty()
    assert len(r) == 0
    (d, r) = deriv_consumed(a1 ^ b, a)
    assert d == b
    assert len(r) == 1 and r == a1.get_metadata()
    (d, r) = deriv_consumed(c1 | c2, c)
    assert d == re_epsilon()
    assert len(r) == 2
    assert r == c1.get_metadata() + c2.get_metadata()
    (d, r) = deriv_consumed((c1 ^ a) | (c2 ^ b), c)
    assert d == a | b
    assert len(r) == 2
    assert sorted(r) == sorted(c1.get_metadata() + c2.get_metadata())
    (d, r) = deriv_consumed((c1 | a) & (c2 | b), c)
    assert d == re_epsilon()
    assert len(r) == 2
    assert sorted(r) == sorted(c1.get_metadata() + c2.get_metadata())
    (d, r) = deriv_consumed(~(c2 & (+c) & (c1 ^ b)), c)
    assert d == ~(re_epsilon() & (re_epsilon() ^ +c) & b)
    assert len(r) == 2
    assert sorted(r) == sorted(c1.get_metadata() + c2.get_metadata())
    (d, r) = deriv_consumed((a1 ^ +c1) & ~(a2 ^ c1 ^ c2), a)
    assert d == +c & ~(c ^ c)
    assert len(r) == 2
    assert sorted(r) == sorted(a1.get_metadata() + a2.get_metadata())

def test_dfa_metadata():
    """ Check if metadata is stored appropriately mapping to the transitions in
    the resulting DFA from specific regular expressions. """
    a = re_symbol('a')
    a1 = re_symbol('a', metadata='ingress')
    a2 = re_symbol('a', metadata='egress')
    b = re_symbol('b')
    b1 = re_symbol('b', metadata='ingress')
    b2 = re_symbol('b', metadata='egress')
    c = re_symbol('c')
    c1 = re_symbol('c', metadata='ingress')
    c2 = re_symbol('c', metadata='egress')
    c3 = re_symbol('c', metadata='hook')
    d = re_symbol('d')
    symbol_list = 'abcd'
    def list_equals_meta_structural(x, y):
        return reduce(lambda acc, (u,v): acc and u.equals_meta_structural(v),
                      zip(x, y),
                      True)
    # make DFAs, and check all metadata transitions
    e = a1
    tt = makeDFA(e, symbol_list).transition_table
    assert tt.get_metadata(a1, 'a') == a1.get_metadata()
    assert tt.get_metadata(re_empty(), 'a') == []
    e = a1 ^ b
    tt = makeDFA(e, symbol_list).transition_table
    assert tt.get_metadata(a1 ^ b, 'a') == a1.get_metadata()
    assert tt.get_metadata(b, 'b') == []
    assert tt.get_metadata(re_empty(), 'a') == []
    e = c1 | c2
    tt = makeDFA(e, symbol_list).transition_table
    assert (sorted(tt.get_metadata(c1 | c2, 'c')) ==
            sorted(c1.get_metadata() + c2.get_metadata()))
    assert tt.get_metadata(re_empty(), 'c') == []
    e = (c1 ^ a2 ^ c3) | (c2 ^ b1 ^ c)
    tt = makeDFA(e, symbol_list).transition_table
    assert (sorted(tt.get_metadata(e, 'c')) ==
            sorted(c1.get_metadata() + c2.get_metadata()))
    r1 = (a ^ c) | (b ^ c)
    assert tt.get_metadata(r1, 'a') == a2.get_metadata()
    assert tt.get_metadata(r1, 'b') == b1.get_metadata()
    assert tt.get_metadata(c, 'c') == c3.get_metadata()
    assert tt.get_metadata(re_empty(), 'b') == []
    e = (+c1 ^ a1 ^ b ^ a2) | (c2 ^ c3 ^ b1 ^ a1)
    dfa = makeDFA(e, symbol_list)
    tt = dfa.transition_table
    st = dfa.all_states
    assert (sorted(tt.get_metadata(e, 'c')) ==
            sorted(c1.get_metadata() + c2.get_metadata()))
    r1 = (+c ^ a ^ b ^ a) | (c ^ b ^ a)
    assert (sorted(tt.get_metadata(r1, 'c')) ==
            sorted(c1.get_metadata() + c3.get_metadata()))
    assert list_equals_meta_structural(st.get_expressions(b ^ a), [b ^ a2])
    assert tt.get_metadata(b ^ a, 'b') == []
    assert list_equals_meta_structural(st.get_expressions(a), [a2, a1])
    e = (+c1 ^ c3 ^ b ^ a2) | (c2 ^ c3 ^ b1 ^ a1)
    tt = makeDFA(e, symbol_list).transition_table
    assert (sorted(tt.get_metadata(e, 'c')) ==
            sorted(c1.get_metadata() + c2.get_metadata() + c3.get_metadata()))
    e = (b1 ^ c1 ^ b ^ a2) | (c3 ^ b1 ^ b1 ^ a1)
    dfa = makeDFA(e, symbol_list)
    tt = dfa.transition_table
    st = dfa.all_states
    assert tt.get_metadata(e, 'c') == c3.get_metadata()
    assert list_equals_meta_structural(st.get_expressions(b ^ a),
                                       [b ^ a2, b1 ^ a1])
    assert tt.get_metadata(b ^ a, 'b') == b1.get_metadata()
    assert list_equals_meta_structural(st.get_expressions(a), [a2, a1])
    assert (sorted(tt.get_metadata(a, 'a')) ==
            sorted(a1.get_metadata() + a2.get_metadata()))
    e = (+c1 ^ a1 ^ b2 ^ a2) | (c ^ d ^ b1 ^ a1)
    dfa = makeDFA(e, symbol_list)
    st = dfa.all_states
    tt = dfa.transition_table
    assert list_equals_meta_structural(st.get_expressions(b ^ a),
                                       [b2 ^ a2, b1 ^ a1])
    assert (sorted(tt.get_metadata(b ^ a, 'b')) ==
            sorted(b1.get_metadata() + b2.get_metadata()))
    assert list_equals_meta_structural(st.get_expressions(a), [a2, a1])
    assert (sorted(tt.get_metadata(a, 'a')) ==
            sorted(a1.get_metadata() + a2.get_metadata()))

def test_dfa_vector():
    a = re_symbol('a')
    b = re_symbol('b')
    c = re_symbol('c')
    d = re_symbol('d')
    e1 = (a ^ b) | (a ^ c)
    e2 = (a ^ c) | (b ^ c)
    e3 = (+a) | (b ^ c)
    e4 = (+a ^ +b) | (c ^ d)
    symlist = 'abcd'
    # base case: single expression lists.
    dfa1 = makeDFA_vector([e1], symlist)
    dfa2 = makeDFA_vector([e2], symlist)
    dfa3 = makeDFA_vector([e3], symlist)
...
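
Note that this snippet is Python 2 code: the tuple parameter in lambda acc, (u,v): ... was removed in Python 3 (PEP 3113). Two call shapes for get_metadata appear above: symbol.get_metadata() returns the tag list attached to a symbol, while the DFA's transition table takes a state expression and an input symbol and returns the merged tags for that transition. A minimal sketch of the latter shape, assuming a plain dict keyed by (state, symbol) rather than the library's actual data structure:

# Hedged sketch of the two-argument form used above: a transition table
# keyed by (state, symbol) whose get_metadata returns the merged tag list.
class TransitionTable:
    def __init__(self):
        self._meta = {}  # (state, symbol) -> list of metadata tags

    def add_transition(self, state, symbol, metadata):
        # merge tags when the same transition is added more than once
        self._meta.setdefault((state, symbol), []).extend(metadata)

    def get_metadata(self, state, symbol):
        # unknown transitions yield an empty list, matching the asserts above
        return self._meta.get((state, symbol), [])

tt = TransitionTable()
tt.add_transition('q0', 'c', ['ingress'])
tt.add_transition('q0', 'c', ['egress'])
assert sorted(tt.get_metadata('q0', 'c')) == ['egress', 'ingress']
assert tt.get_metadata('q1', 'c') == []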

read_aux.py

Source: read_aux.py (GitHub)

...
    metadatadictionaries : dict
        all of the data and variables in the format specified in the configuration files
    """
    # date and time
    date = get_metadata('date')
    date['data'] = datadict['Date']
    time = get_metadata('time')
    time['data'] = datadict['Start Time']
    # data, sample and diameter variables
    field = {}
    datafield = _DEFAULT_VARIABLES[metadatadict['Units']][metadatadict['Weight']]
    field[datafield] = get_metadata(datafield)
    field['variables'] = [datafield]
    sample = get_metadata('sample')
    data = []
    if 'Sample #' in header:  # diameters in header columns, samples in rows. Should be adjusted for different readers
        field['coordinates'] = ['diameter', 'sample']
        diameter = get_metadata('diameter')
        diameterdata = []
        for item in header:
            try:
                if isinstance(float(item), float):
                    diameterdata.append(float(item))
                    data.append([float(i) for i in datadict[item]])
            except ValueError:
                pass
        diameter['data'] = diameterdata
        field[datafield]['data'] = np.ma.asarray(data)
        sample['data'] = [float(i) for i in datadict['Sample #']]
        if 'Upper Size (nm)' in datadict.keys():
            diameter['valid_max'] = np.max([float(i) for i in datadict['Upper Size (nm)']])
        if 'Lower Size (nm)' in datadict.keys():
            diameter['valid_min'] = np.max([float(i) for i in datadict['Lower Size (nm)']])
    else:
        field['coordinates'] = ['sample', 'diameter']
        # TODO
    variable = 'temperature'
    temperature = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    temperature['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'pressure'
    pressure = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    pressure['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'relative_humidity'
    relative_humidity = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    relative_humidity['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'mean_free_path'
    mean_free_path = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    mean_free_path['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'viscosity'
    viscosity = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    viscosity['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'scan_time'
    scan_time = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    scan_time['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'retrace_time'
    retrace_time = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    retrace_time['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'scan_resolution'
    scan_resolution = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    scan_resolution['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'scans_per_sample'
    scans_per_sample = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    scans_per_sample['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'sheath_flow'
    sheath_flow = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    sheath_flow['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'aerosol_flow'
    aerosol_flow = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    aerosol_flow['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'bypass_flow'
    bypass_flow = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    bypass_flow['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'low_voltage'
    low_voltage = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    low_voltage['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'high_voltage'
    high_voltage = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    high_voltage['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'lower_size'
    lower_size = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    lower_size['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'upper_size'
    upper_size = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    upper_size['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'density'
    density = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        density['data'] = [float(i) for i in datadict[filenaming]]
    except KeyError:
        if fileorg == 'AIM':
            filenaming = _FIELD_MAPPING['AIM_text'][variable]
            density['data'] = [float(i) for i in datadict[filenaming]]
        elif fileorg == 'AIM_text':
            filenaming = _FIELD_MAPPING['AIM'][variable]
            density['data'] = [float(i) for i in datadict[filenaming]]
        else:
            warnings.warn("If reading fails, try a different file organisation")
    variable = 'td+05'
    td05 = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    td05['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'tf'
    tf = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    tf['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'D50'
    D50 = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    D50['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'neutralizer_status'
    neutralizer_status = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        neutralizer_status['data'] = [float(i) for i in datadict[filenaming]]
    except ValueError:
        neutralizer_status['data'] = datadict[filenaming]
    variable = 'median'
    median = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    median['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'mean'
    mean = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    mean['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'geo_mean'
    geo_mean = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    geo_mean['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'mode'
    mode = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    mode['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'geo_std_dev'
    geo_std_dev = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    geo_std_dev['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'total_concentration'
    total_concentration = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        total_concentration['data'] = [float(i) for i in datadict[filenaming]]
    except KeyError:
        if fileorg == 'AIM':
            filenaming = _FIELD_MAPPING['AIM_text'][variable]
            total_concentration['data'] = [float(i) for i in datadict[filenaming]]
        elif fileorg == 'AIM_text':
            filenaming = _FIELD_MAPPING['AIM'][variable]
            total_concentration['data'] = [float(i) for i in datadict[filenaming]]
        else:
            warnings.warn("If reading fails, try a different file organisation")
    variable = 'title'
    title = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    title['data'] = datadict[filenaming]
    variable = 'user_name'
    user_name = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    user_name['data'] = datadict[filenaming]
    variable = 'sample_id'
    sample_id = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        sample_id['data'] = [float(i) for i in datadict[filenaming]]
    except ValueError:
        sample_id['data'] = datadict[filenaming]
    variable = 'instrument_id'
    instrument_id = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        instrument_id['data'] = datadict[filenaming]
    except ValueError:
        instrument_id['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'lab_id'
    lab_id = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        lab_id['data'] = datadict[filenaming]
    except ValueError:
        lab_id['data'] = [float(i) for i in datadict[filenaming]]
    variable = 'leak_test_rate'
    leak_test_rate = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        leak_test_rate['data'] = [float(i) for i in datadict[filenaming]]
    except ValueError:
        leak_test_rate['data'] = datadict[filenaming]
    variable = 'instrument_errors'
    instrument_errors = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    try:
        instrument_errors['data'] = [float(i) for i in datadict[filenaming]]
    except ValueError:
        instrument_errors['data'] = datadict[filenaming]
    variable = 'comment'
    comment = get_metadata(variable)
    filenaming = _FIELD_MAPPING[fileorg][variable]
    comment['data'] = datadict[filenaming]
    return field, diameter, date, time, sample, temperature, pressure, relative_humidity, mean_free_path, viscosity, scan_time, retrace_time, scan_resolution, scans_per_sample, sheath_flow, aerosol_flow, bypass_flow, low_voltage, high_voltage, lower_size, upper_size, density, td05, tf, D50, median, mean, geo_mean, mode, geo_std_dev, total_concentration, title, user_name, sample_id, instrument_id, lab_id, leak_test_rate, instrument_errors, comment

def opc_file_to_config(datadict, metadatadict, header, fileorg='OPC', **kwargs):
    """
    Unfortunately a rather long script that organises data read
    from file into metadata dictionaries from config
    Parameters
    ----------
    datadict : dict
        dictionary with data read from file
    metadatadict : dict
        dictionary with metadata read from file
    header : list
        file header
    fileorg : str
        organisation of the file
    Returns
    -------
    metadatadictionaries : dict
        all of the data and variables in the format specified in the configuration files
    """
    # get conversions dict if exists
    if fileorg in _CONVERSIONS:
        convdict = _CONVERSIONS[fileorg]
        convvars = list(convdict.keys())
    else:
        convvars = []
    outdict = {}
    #variables = ['time','duration','latitude','longitude','fix_time', 'temperature','relative_humidity']
    variables = header
    for variable in variables:
        if 'bin' in variable:
            pass
        else:
            filenaming = _FIELD_MAPPING[fileorg][variable]
            outdict[filenaming] = get_metadata(filenaming)
            try:
                _ = datadict[variable]
                if filenaming == 'time' or filenaming == 'date':
                    # prevent time from being converted into a float
                    outdict[filenaming]['data'] = [i for i in datadict[variable]]
                else:
                    try:
                        outdict[filenaming]['data'] = [float(i) for i in datadict[variable]]
                    except ValueError:
                        outdict[filenaming]['data'] = [i for i in datadict[variable]]
                if filenaming in convvars:
                    outdict[filenaming]['data'] = convert_units(outdict[filenaming]['data'], *convdict[filenaming])
            except KeyError:
                pass
    diameter = get_metadata('diameter')
    diameter['data'] = [0.35, 0.46, 0.66, 1.0, 1.3, 1.7, 2.3, 3.0, 4.0, 5.2, 6.5, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0, 22.0, 25.0, 28.0, 31.0, 34.0, 37.0, 40.0]
    bins = ["bin0", "bin1", "bin2", "bin3", "bin4", "bin5", "bin6", "bin7", "bin8", "bin9", "bin10", "bin11", "bin12", "bin13", "bin14", "bin15", "bin16", "bin17", "bin18", "bin19", "bin20", "bin21", "bin22", "bin23"]
    data = []
    for abin in bins:
        data.append([float(i) for i in datadict[abin]])
    #data = []
    #for abin in bins:
    #    for i in range(0,len(datadict[abin])):
    #        try:
    #            data.append(float(datadict[abin][i]))
    #        except ValueError:
    #            print(i)
    field = {}
    datafield = _DEFAULT_VARIABLES['Raw Counts']['Number']
    field[datafield] = get_metadata(datafield)
    field[datafield]['data'] = np.ma.asarray(data)
    field['coordinates'] = ['diameter', 'sample']
    field['variables'] = [datafield]
    time = outdict.pop('time')
    sample = get_metadata('sample')
    sample['data'] = np.arange(0, len(time['data']))
    return time, sample, field, diameter, outdict

def grimm_file_to_config(datadict, metadatadict, header, fileorg='Grimm', **kwargs):
    """
    Organises data read from file into metadata dictionaries
    from config
    Parameters
    ----------
    datadict : dict
        dictionary with data read from file
    metadatadict : dict
        dictionary with metadata read from file
    header : list
        file header
    fileorg : str
        organisation of the file
    Returns
    -------
    metadatadictionaries : dict
        all of the data and variables in the format specified in the configuration files
    """
    # get conversions dict if exists
    if fileorg in _CONVERSIONS:
        convdict = _CONVERSIONS[fileorg]
        convvars = list(convdict.keys())
    outdict = {}
    diameter = get_metadata('diameter')
    diameter['data'] = [0.0]  # GRIMM does not have a lower limit
    bins = []
    variables = header
    for variable in variables:
        try:
            diameter['data'].append(float(variable))
            bins.append(variable)
        except ValueError:
            try:
                filenaming = _FIELD_MAPPING[fileorg][variable]
            except KeyError:
                filenaming = variable
            outdict[filenaming] = get_metadata(filenaming)
            try:
                _ = datadict[variable]
                try:
                    outdict[filenaming]['data'] = [float(i) for i in datadict[variable]]
                except ValueError:
                    outdict[filenaming]['data'] = [i for i in datadict[variable]]
                if filenaming in convvars:
                    outdict[filenaming]['data'] = convert_units(outdict[filenaming]['data'], *convdict[filenaming])
            except KeyError:
                pass
    data = []
    for abin in bins:
        data.append([float(i) for i in datadict[abin]])
    field = {}
    datafield = _DEFAULT_VARIABLES['Concentration (DW)']['Number']
    field[datafield] = get_metadata(datafield)
    field[datafield]['data'] = np.ma.asarray(data)
    field['units'] = '#/L'
    field['coordinates'] = ['diameter', 'sample']
    try:
        time = outdict.pop('time')
    except KeyError:
        time = outdict.pop('datetime')
    sample = get_metadata('sample')
    sample['data'] = np.arange(0, len(time['data']))
    return time, sample, field, diameter, outdict

def file_to_config(datadict, metadatadict, header, fileorg, **kwargs):
    """
    Organises data read from file into metadata dictionaries from config
    Parameters
    ----------
    datadict : dict
        dictionary with data read from file
    metadatadict : dict
        dictionary with metadata read from file
    header : list
        file header
    fileorg : str
        organisation of the file
    Returns
    -------
    metadatadictionaries : dict
        all of the data and variables in the format specified in the configuration files
    """
    outdict = {}
    variable_list = list(datadict.keys())
    for variable in variable_list:
        if fileorg is not None:
            varname = _FIELD_MAPPING[fileorg][variable]
        else:
            varname = variable
        vardict = get_metadata(varname)
        vardict['data'] = datadict[variable]
        outdict[varname] = vardict
...
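
Here get_metadata(variable) acts as a factory: it looks up the variable's definition in the project's configuration files and returns a fresh metadata dictionary whose 'data' slot the reader then fills. A minimal sketch of that style, assuming a hard-coded _DEFAULT_METADATA table in place of the real configuration (both the table and its entries below are illustrative):

# Sketch of the config-lookup style of get_metadata used above.
# _DEFAULT_METADATA is a stand-in for the project's configuration files.
import copy

_DEFAULT_METADATA = {
    'temperature': {'units': 'K', 'long_name': 'temperature', 'data': None},
    'pressure': {'units': 'hPa', 'long_name': 'air pressure', 'data': None},
}

def get_metadata(variable):
    """Return a fresh metadata dict for `variable`, ready to receive data."""
    # deepcopy so filling one variable's 'data' never mutates the template
    return copy.deepcopy(_DEFAULT_METADATA.get(variable, {'data': None}))

temperature = get_metadata('temperature')
temperature['data'] = [float(i) for i in ['293.1', '293.4']]

Since each of the roughly thirty variables in the long function above repeats the same four lines, iterating over a list of variable names and looking each one up in _FIELD_MAPPING would shorten the function considerably.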

test_check_runner.py

Source: test_check_runner.py (GitHub)

#!/usr/bin/env python3
# /*******************************************************************************
# Copyright Intel Corporation.
# This software and the related documents are Intel copyrighted materials, and your use of them
# is governed by the express license under which they were provided to you (License).
# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute, disclose
# or transmit this software or the related documents without Intel's prior written permission.
# This software and the related documents are provided as is, with no express or implied warranties,
# other than those that are expressly stated in the License.
#
# *******************************************************************************/
# NOTE: workaround to import modules
import os
import sys
from modules.check.check import CheckSummary
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../'))
import json  # noqa: E402
import time  # noqa: E402
import unittest  # noqa: E402
from unittest.mock import MagicMock, patch, call  # noqa: E402
from modules.check.check_runner import run_checks, check_run, _get_dependency_checks_map,\
    create_dependency_order  # noqa: E402


class TestCheckRun(unittest.TestCase):

    def test_check_run_positive(self):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.timeout = 1
        mocked_check.run.return_value = CheckSummary(result=json.dumps({
            "Value": {
                "Check": {
                    "Value": "Check Value",
                    "RetVal": "INFO"
                }
            }
        }))
        expected = CheckSummary(result=json.dumps({
            "Value": {
                "Check": {
                    "Value": "Check Value",
                    "RetVal": "INFO"
                }
            }
        }))
        value = check_run(mocked_check, {})
        self.assertEqual(expected.__dict__, value.__dict__)

    def test_check_run_timeout_positive(self):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.timeout = 1
        mocked_check.run = lambda data: time.sleep(2)
        expected = CheckSummary(result=json.dumps({
            "RetVal": "ERROR",
            "Verbosity": 0,
            "Message": "",
            "Value": {
                "check": {
                    "Value": "Timeout was exceeded.",
                    "Verbosity": 0,
                    "Message": "",
                    "RetVal": "ERROR"
                }
            }
        }))
        value = check_run(mocked_check, {})
        self.assertEqual(expected.__dict__, value.__dict__)

    def test_check_run_check_crush_positive(self):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.timeout = 1
        mocked_check.run.side_effect = Exception()
        expected = CheckSummary(result=json.dumps({
            "RetVal": "ERROR",
            "Verbosity": 0,
            "Message": "",
            "Value": {
                "check": {
                    "Value": "",
                    "Verbosity": 0,
                    "Message": "The check crashed at runtime. No data was received. "
                               "See call stack above.",
                    "RetVal": "ERROR"
                }
            }
        }))
        value = check_run(mocked_check, {})
        self.assertEqual(expected.__dict__, value.__dict__)


class TestGetDependencyChecksMap(unittest.TestCase):

    def test__get_dependency_checks_map_positive(self):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.version = 1
        expected = {"check": mocked_check}
        value = _get_dependency_checks_map([mocked_check], {"check": 1})
        self.assertEqual(expected, value)

    def test__get_dependency_checks_map_no_dep_positive(self):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.version = 1
        expected = {}
        value = _get_dependency_checks_map([mocked_check], {})
        self.assertEqual(expected, value)

    @patch("logging.error")
    def test__get_dependency_checks_map_another_version_negative(self, mocked_log):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.version = 1
        expected = {}
        value = _get_dependency_checks_map([mocked_check], {"check": 3})
        self.assertEqual(expected, value)
        mocked_log.assert_called()

    @patch("logging.error")
    def test__get_dependency_checks_map_not_found_negative(self, mocked_log):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check_2"
        mocked_check.get_metadata.return_value.version = 1
        expected = {}
        value = _get_dependency_checks_map([mocked_check], {"check": 1})
        self.assertEqual(expected, value)
        mocked_log.assert_called_once()


class TestCreateDependencyOrder(unittest.TestCase):

    def test_create_dependency_order_positive(self):
        mocked_check_1 = MagicMock()
        mocked_check_1.get_metadata.return_value = MagicMock()
        mocked_check_1.get_metadata.return_value.name = "check_1"
        mocked_check_1.get_metadata.return_value.version = 1
        mocked_check_1.get_metadata.return_value.dataReq = """{"check_3": 1}"""
        mocked_check_1.get_metadata.return_value.tags = "default"
        mocked_check_2 = MagicMock()
        mocked_check_2.get_metadata.return_value = MagicMock()
        mocked_check_2.get_metadata.return_value.name = "check_2"
        mocked_check_2.get_metadata.return_value.version = 1
        mocked_check_2.get_metadata.return_value.dataReq = """{"check_1": 1}"""
        mocked_check_2.get_metadata.return_value.tags = "default"
        mocked_check_3 = MagicMock()
        mocked_check_3.get_metadata.return_value = MagicMock()
        mocked_check_3.get_metadata.return_value.name = "check_3"
        mocked_check_3.get_metadata.return_value.version = 1
        mocked_check_3.get_metadata.return_value.dataReq = "{}"
        mocked_check_3.get_metadata.return_value.tags = "default"
        expected = (["check_1", "check_2", "check_3"], [mocked_check_3, mocked_check_1, mocked_check_2])
        value = create_dependency_order([mocked_check_1, mocked_check_2, mocked_check_3], {"default"})
        self.assertEqual(expected, value)


class TestRunChecks(unittest.TestCase):

    @patch("builtins.exit")
    @patch("builtins.print")
    def test_run_checks_no_checks_to_run(self, mocked_print, mocked_exit):
        expected_exit_code = 1
        run_checks([])
        mocked_print.assert_called_once()
        mocked_exit.assert_called_once_with(expected_exit_code)

    @patch("logging.error")
    def test_run_checks_does_not_have_dependency(self, mocked_error):
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.dataReq = """{"data": "1"}"""
        mocked_check.get_summary.return_value = None
        run_checks([mocked_check])
        mocked_error.assert_called()

    @patch("modules.check.check_runner.check_run")
    def test_run_checks_run_check(self, mocked_check_run):
        mocked_summary = MagicMock()
        mocked_summary.result = "{}"
        mocked_check = MagicMock()
        mocked_check.get_metadata.return_value = MagicMock()
        mocked_check.get_metadata.return_value.name = "check"
        mocked_check.get_metadata.return_value.timeout = 1
        mocked_check.get_metadata.return_value.dataReq = "{}"
        mocked_check.get_summary.return_value = mocked_summary
        run_checks([mocked_check])
        mocked_check_run.assert_called_once_with(mocked_check, {})

    @patch("modules.check.check_runner.check_run")
    def test_run_checks_run_two_dependencies_checks(self, mocked_check_run):
        mocked_summary_1 = MagicMock()
        mocked_summary_1.result = "{}"
        mocked_check_1 = MagicMock()
        mocked_check_1.get_metadata.return_value = MagicMock()
        mocked_check_1.get_metadata.return_value.name = "check_1"
        mocked_check_1.get_metadata.return_value.timeout = 1
        mocked_check_1.get_metadata.return_value.dataReq = """{"check_2": "1"}"""
        mocked_check_1.get_summary.return_value = mocked_summary_1
        mocked_summary_2 = MagicMock()
        mocked_summary_2.result = json.dumps({
            "Value": {
                "Check 2": {
                    "Value": "Check 2 Value",
                    "RetVal": "INFO"
                }
            }
        })
        mocked_check_2 = MagicMock()
        mocked_check_2.get_metadata.return_value = MagicMock()
        mocked_check_2.get_metadata.return_value.name = "check_2"
        mocked_check_2.get_metadata.return_value.version = "1"
        mocked_check_2.get_metadata.return_value.timeout = 1
        mocked_check_2.get_metadata.return_value.dataReq = "{}"
        mocked_check_2.get_summary.return_value = mocked_summary_2
        run_checks([mocked_check_2, mocked_check_1])
        expected_calls = [
            call(mocked_check_2, {}),
            call(
                mocked_check_1,
                {"check_2": {"Value": {"Check 2": {"Value": "Check 2 Value", "RetVal": "INFO"}}}}
            )
        ]
        mocked_check_run.assert_has_calls(expected_calls)

    @patch("modules.check.check_runner.check_run")
    def test_run_checks_run_two_separate_checks(self, mocked_check_run):
        mocked_summary = MagicMock()
        mocked_summary.result = "{}"
        mocked_check_1 = MagicMock()
        mocked_check_1.get_metadata.return_value = MagicMock()
        mocked_check_1.get_metadata.return_value.name = "check_1"
        mocked_check_1.get_metadata.return_value.timeout = 1
        mocked_check_1.get_metadata.return_value.dataReq = "{}"
        mocked_check_1.get_summary.return_value = mocked_summary
        mocked_check_2 = MagicMock()
        mocked_check_2.get_metadata.return_value = MagicMock()
        mocked_check_2.get_metadata.return_value.name = "check_2"
        mocked_check_2.get_metadata.return_value.timeout = 1
        mocked_check_2.get_metadata.return_value.dataReq = "{}"
        mocked_check_2.get_summary.return_value = mocked_summary
        run_checks([mocked_check_1, mocked_check_2])
        expected_calls = [
            call(mocked_check_2, {}),
            call(mocked_check_1, {})
        ]
        mocked_check_run.assert_has_calls(expected_calls, any_order=True)


if __name__ == '__main__':
...

dhq_paper_plots.py

Source: dhq_paper_plots.py (GitHub)

...
print('document read in successfully!')
print(text.isnull().sum())
print(list(text))

def get_metadata(title, date, members):
    a = text.ix[(text['BILL'].str.contains(title)) & (text['DATE'] == date), ['BILL', 'MEMBER', 'DATE', 'SPEECH_ACT']]
    if a.shape[0] > 0:
        print(a.shape)
        # a.to_csv('/Users/alee35/Dropbox (Brown)/data/hansard/dhq/{}_{}.tsv'.format(title, date), sep='\t', index=False)
        memb = list(a.MEMBER.unique())
        for m in memb:
            members.append(m)
    else:
        b = text.ix[(text['DATE'] == date), ['BILL', 'MEMBER', 'DATE', 'SPEECH_ACT']]
        print('------------------------')
        bills = list(b.BILL.unique())
        for bill in bills:
            print(bill)
        print('------------------------')

# z = 500
members_500 = []
get_metadata('COMMUTATION OF TITHES', '1836-03-25', members_500)
get_metadata('LEASEHOLD S ENFRANCHISEMENT', '1889-05-01', members_500)
get_metadata('SECOND READING', '1890-03-27', members_500)
get_metadata('TITHE RENT-CHARGE RECOVERY', '1889-08-12', members_500)
get_metadata('TITHE RENT-CHARGE RECOVERY', '1889-08-13', members_500)
get_metadata('COMMUTATION OF TITHES, \(ENGLAND.\)', '1835-03-24', members_500)
get_metadata('TITHES \(IRELAND\) MINISTBEIAL PLAN', '1832-07-05', members_500)
get_metadata('TENANTS IN TOWNS IMPROVEMENT \(IRELAND\) BILL', '1900-04-04', members_500)
get_metadata('TITHES \(IRELAND\)', '1834-05-02', members_500)
get_metadata('TITHES \(IRELAND\)', '1834-02-20', members_500)
# get_metadata('COMMITTEE', '1890-06-05')
# get_metadata('SECOND READING ADJOURNED DEBATE', '1890-03-28')
# get_metadata('COMMITTEE', '1887-07-25')
# z = 0
members_0 = []
get_metadata('IRISH LAND COMMISSION', '1897-06-29', members_0)
get_metadata('IRISH LAND COMMISSION', '1897-06-25', members_0)
get_metadata('IRISH LAND COMMISSION FAIR RENTS, CO. WESTMEATH.', '1888-04-30', members_0)
get_metadata('FAIR RENT APPEALS IN COUNTY ANTRIM', '1899-07-27', members_0)
get_metadata('LAND COMMISSION \(KING\'S COUNTY\)', '1897-02-11', members_0)
get_metadata('Fair Rent Cases in County Roscommon', '1904-03-22', members_0)
get_metadata('COUNTY DOWN LAND COMMISSION', '1900-02-16', members_0)
get_metadata('JUDICIAL RENTS \(COUNTY MONAGHAN\)', '1896-08-11', members_0)
get_metadata('North Tipperary Land Court', '1904-03-07', members_0)
get_metadata('Listowel Fair Rent Applications', '1908-07-31', members_0)
get_metadata('FERMANAGH RENT APPEALS', '1901-03-04', members_0)
get_metadata('IRISH LAND COMMISSION WEXFORD', '1888-03-05', members_0)
get_metadata('FAIR RENT APPEALS IN CORK', '1900-07-24', members_0)
get_metadata('CORK LAND COMMISSION', '1900-07-27', members_0)
# get_metadata('NEXT SITTING AT LONGFORD OF APPEAL COURT OF LAND COMMISSION', '1906-11-14')
# get_metadata('MIDLETON FAIR RENT APPLICATIONS', '1907-02-14')
print('-----# of members in z500: {}-----'.format(len(members_500)))
for member in members_500:
    print(member)
print('-----# of members in z500: {}-----'.format(len(members_500)))
print('-----# of members in z0: {}-----'.format(len(members_0)))
for member in members_0:
    print(member)
...
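
This script predates pandas 1.0: DataFrame.ix has since been removed, so on a current pandas the same lookup needs .loc with a boolean mask. A hedged sketch of the equivalent, where the toy DataFrame and its single row are illustrative only:

# Equivalent lookup on modern pandas (.ix was removed in pandas 1.0);
# `text` is assumed to be a DataFrame with BILL/MEMBER/DATE/SPEECH_ACT columns.
import pandas as pd

def get_metadata(text, title, date, members):
    mask = text['BILL'].str.contains(title, na=False) & (text['DATE'] == date)
    a = text.loc[mask, ['BILL', 'MEMBER', 'DATE', 'SPEECH_ACT']]
    if a.shape[0] > 0:
        members.extend(a['MEMBER'].unique())
    else:
        # fall back to listing every bill debated on that date
        for bill in text.loc[text['DATE'] == date, 'BILL'].unique():
            print(bill)

members = []
df = pd.DataFrame({
    'BILL': ['COMMUTATION OF TITHES'], 'MEMBER': ['Mr. Example'],
    'DATE': ['1836-03-25'], 'SPEECH_ACT': ['...'],
})
get_metadata(df, 'COMMUTATION OF TITHES', '1836-03-25', members)
assert members == ['Mr. Example']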

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run stestr automation tests on the LambdaTest cloud grid

Perform automation testing on 3,000+ real desktop and mobile devices online.
