How to use the iterkeys method in autotest

Best Python code snippet using autotest_python
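
Before the snippets, a quick note on what iterkeys actually is: dict.iterkeys() is a Python 2 dictionary method that returns an iterator over the keys. It was removed in Python 3, where dict.keys() already returns a lightweight view and the dictionary itself is iterable. The short sketch below is illustrative only (the props dictionary is made up, not taken from the snippets):

props = {"number": "12345", "apn": "internet"}

# Python 2 only:
#     for key in props.iterkeys():
#         print key

# Python 3 (also valid in Python 2): iterate the dict directly.
for key in props:
    print(key)

# Membership tests work on the dict itself; no iterkeys() call is needed.
if "apn" in props:
    print(props["apn"])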

german_entropy_values.py

Source: german_entropy_values.py (GitHub)

import sys

lex = {}
typelex = {}
tokenlex = {}
'''
#Use for fake lexicons -- edit out other file opening#
import random

with open('real_German_lexfreq.txt') as f:
    for line in f:
        tok = line.split()
        lex[tok[0]] = tok[1]

#Values for German: Fem = 8452 obs ; Masc = 6603 obs ; Neut = 3392 obs
import random
keys = list(lex.keys())
random.shuffle(keys)
#keys = keys[:8452]
#keys = keys[:6603]
keys = keys[:3392]

for key in keys:
    if key not in tokenlex:
        tokenlex[key] = lex[key]
    else:
        tokenlex[key] += lex[key]

typelex = {key: 1 for key in tokenlex}
'''

print("Type Analysis\n")
'''
with open('real_Gerlex_Fem.txt') as f:
    for line in f:
        tok = line.split()
        typelex[tok[0]] = 1
'''
'''
with open('real_Gerlex_Masc.txt') as f:
    for line in f:
        tok = line.split()
        typelex[tok[0]] = 1
'''
with open('real_Gerlex_Neut.txt') as f:
    for line in f:
        tok = line.split()
        typelex[tok[0]] = 1

#entropy code
import math

def eta(data, unit='natural'):
    base = {
        'shannon' : 2.,
        'natural' : math.exp(1),
        'hartley' : 10.
    }

    if len(data) <= 1:
        return 0
    ent = 0
    probs = [float(c) / sum(data.values()) for c in data.values()]
    for p in probs:
        if p > 0.:
            ent -= p * math.log(p, base[unit])

    return ent

unarytypelex = typelex.copy()
bigramtypelex = typelex.copy()
trigramtypelex = typelex.copy()

typelex = {k: float(v) for k, v in typelex.items()}

#unigrams
iterkeys = list(unarytypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:1]
    if newkey not in unarytypelex:
        unarytypelex[newkey] = unarytypelex[oldkey]
    else:
        unarytypelex[newkey] += unarytypelex[oldkey]
    del unarytypelex[oldkey]

#bigrams
iterkeys = list(bigramtypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:2]
    if newkey not in bigramtypelex:
        bigramtypelex[newkey] = bigramtypelex[oldkey]
    else:
        bigramtypelex[newkey] += bigramtypelex[oldkey]
    del bigramtypelex[oldkey]

#trigrams
iterkeys = list(trigramtypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:3]
    if newkey not in trigramtypelex:
        trigramtypelex[newkey] = trigramtypelex[oldkey]
    else:
        trigramtypelex[newkey] += trigramtypelex[oldkey]
    del trigramtypelex[oldkey]

#print("The values for the real fem German lexicon are...")
#print("The values for the real masc German lexicon are...")
print("The values for the real neut German lexicon are...")
#print("The values for the fake fem German lexicon are...")
#print("The values for the fake masc German lexicon are...")
#print("The values for the fake neut German lexicon are...")

print(len(unarytypelex))
print(len(bigramtypelex))
print(len(trigramtypelex))
print(eta(unarytypelex))
print(eta(bigramtypelex))
print(eta(trigramtypelex))

####start analysis of final segments####

unarytypefinal = typelex.copy()
bigramtypefinal = typelex.copy()
trigramtypefinal = typelex.copy()

typelex = {k: int(v) for k, v in typelex.items()}

#unigrams
iterkeys = list(unarytypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-1]
    if newkey not in unarytypefinal:
        unarytypefinal[newkey] = unarytypefinal[oldkey]
    else:
        unarytypefinal[newkey] += unarytypefinal[oldkey]
    del unarytypefinal[oldkey]

#bigrams
iterkeys = list(bigramtypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-2:]
    if newkey not in bigramtypefinal:
        bigramtypefinal[newkey] = bigramtypefinal[oldkey]
    else:
        bigramtypefinal[newkey] += bigramtypefinal[oldkey]
    del bigramtypefinal[oldkey]

#trigrams
iterkeys = list(trigramtypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-3:]
    if newkey not in trigramtypefinal:
        trigramtypefinal[newkey] = trigramtypefinal[oldkey]
    else:
        trigramtypefinal[newkey] += trigramtypefinal[oldkey]
    del trigramtypefinal[oldkey]

#print("The values for the final segments in real fem German lexicon are...")
#print("The values for the final segments in real masc German lexicon are...")
print("The values for the final segments in real neut German lexicon are...")
#print("The values for the final segments in fake fem German lexicon are...")
#print("The values for the final segments in fake masc German lexicon are...")
#print("The values for the final segments infake neut German lexicon are...")

print(len(unarytypefinal))
print(len(bigramtypefinal))
print(len(trigramtypefinal))
print(eta(unarytypefinal))
print(eta(bigramtypefinal))
print(eta(trigramtypefinal))

print("\n")
print("Token analysis\n")
'''
with open('real_Gerlex_Femfreq.txt') as f:
    for line in f:
        tok = line.split()
        tokenlex[tok[1]] = tok[0]
'''
'''
with open('real_Gerlex_Mascfreq.txt') as f:
    for line in f:
        tok = line.split()
        tokenlex[tok[1]] = tok[0]
'''

with open('real_Gerlex_Neutfreq.txt') as f:
    for line in f:
        tok = line.split()
        tokenlex[tok[1]] = tok[0]

tokenlex = {k: float(v) for k, v in tokenlex.items()}

unarytokenlex = tokenlex.copy()
bigramtokenlex = tokenlex.copy()
trigramtokenlex = tokenlex.copy()

#unigrams
iterkeys = list(unarytokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:1]
    if newkey not in unarytokenlex:
        unarytokenlex[newkey] = unarytokenlex[oldkey]
    else:
        unarytokenlex[newkey] += unarytokenlex[oldkey]
    del unarytokenlex[oldkey]

#bigrams
iterkeys = list(bigramtokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:2]
    if newkey not in bigramtokenlex:
        bigramtokenlex[newkey] = bigramtokenlex[oldkey]
    else:
        bigramtokenlex[newkey] += bigramtokenlex[oldkey]
    del bigramtokenlex[oldkey]

#trigrams
iterkeys = list(trigramtokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:3]
    if newkey not in trigramtokenlex:
        trigramtokenlex[newkey] = trigramtokenlex[oldkey]
    else:
        trigramtokenlex[newkey] += trigramtokenlex[oldkey]
    del trigramtokenlex[oldkey]

#print("The values for initial segments in the fem German lexicon are...")
#print("The values for initial segments in the real masc German lexicon are...")
print("The values for initial segments in the real neut German lexicon are...")
#print("The values for initial segments in the fake fem German lexicon are...")
#print("The values for initial segments in the fake masc German lexicon are...")
#print("The values for initial segments in the fake neut German lexicon are...")

print(len(unarytokenlex))
print(len(bigramtokenlex))
print(len(trigramtokenlex))
print(eta(unarytokenlex))
print(eta(bigramtokenlex))
print(eta(trigramtokenlex))

#final segment analysis#
unarytokenfinal = tokenlex.copy()
bigramtokenfinal = tokenlex.copy()
trigramtokenfinal = tokenlex.copy()

#final unary
iterkeys = list(unarytokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-1]
    if newkey not in unarytokenfinal:
        unarytokenfinal[newkey] = unarytokenfinal[oldkey]
    else:
        unarytokenfinal[newkey] += unarytokenfinal[oldkey]
    del unarytokenfinal[oldkey]

#final bigrams
iterkeys = list(bigramtokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-2:]
    if newkey not in bigramtokenfinal:
        bigramtokenfinal[newkey] = bigramtokenfinal[oldkey]
    else:
        bigramtokenfinal[newkey] += bigramtokenfinal[oldkey]
    del bigramtokenfinal[oldkey]

#final trigrams
iterkeys = list(trigramtokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-3:]
    if newkey not in trigramtokenfinal:
        trigramtokenfinal[newkey] = trigramtokenfinal[oldkey]
    else:
        trigramtokenfinal[newkey] += trigramtokenfinal[oldkey]
    del trigramtokenfinal[oldkey]

#print("The values for the final segments in the real fem German lexicon are...")
#print("The values for the final segments in the real masc German lexicon are...")
print("The values for the final segments in the real neut German lexicon are...")
#print("The values for the final segments in the fake fem German lexicon are...")
#print("The values for the final segments in the fake masc German lexicon are...")
#print("The values for the final segments in the fake neut German lexicon are...")

print(len(unarytokenfinal))
print(len(bigramtokenfinal))
print(len(trigramtokenfinal))
print(eta(unarytokenfinal))
print(eta(bigramtokenfinal))
print(eta(trigramtokenfinal))

x = [len(unarytypelex), len(bigramtypelex), len(trigramtypelex), eta(unarytypelex), eta(bigramtypelex), eta(trigramtypelex), len(unarytypefinal), len(bigramtypefinal), len(trigramtypefinal), eta(unarytypefinal), eta(bigramtypefinal), eta(trigramtypefinal), len(unarytokenlex), len(bigramtokenlex), len(trigramtokenlex), eta(unarytokenlex), eta(bigramtokenlex), eta(trigramtokenlex), len(unarytokenfinal), len(bigramtokenfinal), len(trigramtokenfinal), eta(unarytokenfinal), eta(bigramtokenfinal), eta(trigramtokenfinal)]
x = ';'.join([str(x)])
print(x)
'''
with open('german_fake_neut_values.txt', 'a') as file:
    file.write(x + '\n')

#bash code: for n in {1..2000}; do python german_entropy_values.py; done
'''
...
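
In the script above (and in french_entropy_values.py below), iterkeys is just a variable name: it holds a snapshot of the keys taken with list(d.keys()) so that the dictionary can be safely modified inside the loop, which would otherwise raise a RuntimeError ("dictionary changed size during iteration"). A minimal sketch of that snapshot-then-mutate pattern, with made-up counts:

counts = {"haus": 2, "hand": 3, "berg": 1}

iterkeys = list(counts.keys())      # snapshot: safe to mutate counts below
for oldkey in iterkeys:
    newkey = oldkey[:2]             # collapse each word to its initial bigram
    if newkey not in counts:
        counts[newkey] = counts[oldkey]
    else:
        counts[newkey] += counts[oldkey]
    del counts[oldkey]

print(counts)                       # {'ha': 5, 'be': 1}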

french_entropy_values.py

Source: french_entropy_values.py (GitHub)

import random

lex = {}
typelex = {}
tokenlex = {}

'''
with open('real_FR_lexfreq.txt') as f:
    for line in f:
        tok = line.split()
        lex[tok[0]] = tok[1]

#Values for French: Fem = 7572 obs; Masc = 10076 obs
keys = list(lex.keys())
random.shuffle(keys)
#keys = keys[:7572]
keys = keys[:10076]

for key in keys:
    if key not in tokenlex:
        tokenlex[key] = lex[key]
    else:
        tokenlex[key] += lex[key]

typelex = {key: 1 for key in tokenlex}
'''
print("Type Analysis")

'''
with open('real_FRlex_Masc.txt') as f:
    for line in f:
        tok = line.split()
        typelex[tok[0]] = 1
'''
with open('real_FRlex_Fem.txt') as f:
    for line in f:
        tok = line.split()
        typelex[tok[0]] = 1

#entropy code
import math

def eta(data, unit='natural'):
    base = {
        'shannon' : 2.,
        'natural' : math.exp(1),
        'hartley' : 10.
    }

    if len(data) <= 1:
        return 0
    ent = 0
    probs = [float(c) / sum(data.values()) for c in data.values()]
    for p in probs:
        if p > 0.:
            ent -= p * math.log(p, base[unit])

    return ent

unarytypelex = typelex.copy()
bigramtypelex = typelex.copy()
trigramtypelex = typelex.copy()

typelex = {k: float(v) for k, v in typelex.items()}

#unigrams
iterkeys = list(unarytypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:1]
    if newkey not in unarytypelex:
        unarytypelex[newkey] = unarytypelex[oldkey]
    else:
        unarytypelex[newkey] += unarytypelex[oldkey]
    del unarytypelex[oldkey]

#bigrams
iterkeys = list(bigramtypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:2]
    if newkey not in bigramtypelex:
        bigramtypelex[newkey] = bigramtypelex[oldkey]
    else:
        bigramtypelex[newkey] += bigramtypelex[oldkey]
    del bigramtypelex[oldkey]

#trigrams
iterkeys = list(trigramtypelex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:3]
    if newkey not in trigramtypelex:
        trigramtypelex[newkey] = trigramtypelex[oldkey]
    else:
        trigramtypelex[newkey] += trigramtypelex[oldkey]
    del trigramtypelex[oldkey]

#print("The values for the real masc French lexicon are...")
print("The values for the real fem French lexicon are...")
#print("The values for the fake fem French lexicon are...")
#print("The values for the fake masc French lexicon are...")

print(len(unarytypelex))
print(len(bigramtypelex))
print(len(trigramtypelex))
print(eta(unarytypelex))
print(eta(bigramtypelex))
print(eta(trigramtypelex))

####start analysis of final segments####

unarytypefinal = typelex.copy()
bigramtypefinal = typelex.copy()
trigramtypefinal = typelex.copy()

typelex = {k: int(v) for k, v in typelex.items()}

#unigrams
iterkeys = list(unarytypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-1]
    if newkey not in unarytypefinal:
        unarytypefinal[newkey] = unarytypefinal[oldkey]
    else:
        unarytypefinal[newkey] += unarytypefinal[oldkey]
    del unarytypefinal[oldkey]

#bigrams
iterkeys = list(bigramtypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-2:]
    if newkey not in bigramtypefinal:
        bigramtypefinal[newkey] = bigramtypefinal[oldkey]
    else:
        bigramtypefinal[newkey] += bigramtypefinal[oldkey]
    del bigramtypefinal[oldkey]

#trigrams
iterkeys = list(trigramtypefinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-3:]
    if newkey not in trigramtypefinal:
        trigramtypefinal[newkey] = trigramtypefinal[oldkey]
    else:
        trigramtypefinal[newkey] += trigramtypefinal[oldkey]
    del trigramtypefinal[oldkey]

#print("The values for the final segments in real masc French lexicon are...")
print("The values for the final segments in real fem French lexicon are...")
#print("The values for the final segments in fake fem French lexicon are...")
#print("The values for the final segments in fake masc French lexicon are...")

print(len(unarytypefinal))
print(len(bigramtypefinal))
print(len(trigramtypefinal))
print(eta(unarytypefinal))
print(eta(bigramtypefinal))
print(eta(trigramtypefinal))

'''
with open('real_FRlex_Mascfreq.txt') as f:
    for line in f:
        tok = line.split()
        tokenlex[tok[0]] = tok[1]
'''
with open('real_FRlex_Femfreq.txt') as f:
    for line in f:
        tok = line.split()
        tokenlex[tok[0]] = tok[1]

print("Token Analysis")

tokenlex = {k: float(v) for k, v in tokenlex.items()}

unarytokenlex = tokenlex.copy()
bigramtokenlex = tokenlex.copy()
trigramtokenlex = tokenlex.copy()

#unigrams
iterkeys = list(unarytokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:1]
    if newkey not in unarytokenlex:
        unarytokenlex[newkey] = unarytokenlex[oldkey]
    else:
        unarytokenlex[newkey] += unarytokenlex[oldkey]
    del unarytokenlex[oldkey]

#bigrams
iterkeys = list(bigramtokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:2]
    if newkey not in bigramtokenlex:
        bigramtokenlex[newkey] = bigramtokenlex[oldkey]
    else:
        bigramtokenlex[newkey] += bigramtokenlex[oldkey]
    del bigramtokenlex[oldkey]

#trigrams
iterkeys = list(trigramtokenlex.keys())
for oldkey in iterkeys:
    newkey = oldkey[:3]
    if newkey not in trigramtokenlex:
        trigramtokenlex[newkey] = trigramtokenlex[oldkey]
    else:
        trigramtokenlex[newkey] += trigramtokenlex[oldkey]
    del trigramtokenlex[oldkey]

#print("The values for initial segments in the real masc French lexicon are...")
print("The values for initial segments in the real fem French lexicon are...")
#print("The values for initial segments in the fake fem French lexicon are...")
#print("The values forinitial segments in the fake masc French lexicon are...")

print(len(unarytokenlex))
print(len(bigramtokenlex))
print(len(trigramtokenlex))
print(eta(unarytokenlex))
print(eta(bigramtokenlex))
print(eta(trigramtokenlex))

#final segment analysis#
unarytokenfinal = tokenlex.copy()
bigramtokenfinal = tokenlex.copy()
trigramtokenfinal = tokenlex.copy()

#final unary
iterkeys = list(unarytokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-1]
    if newkey not in unarytokenfinal:
        unarytokenfinal[newkey] = unarytokenfinal[oldkey]
    else:
        unarytokenfinal[newkey] += unarytokenfinal[oldkey]
    del unarytokenfinal[oldkey]

#final bigrams
iterkeys = list(bigramtokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-2:]
    if newkey not in bigramtokenfinal:
        bigramtokenfinal[newkey] = bigramtokenfinal[oldkey]
    else:
        bigramtokenfinal[newkey] += bigramtokenfinal[oldkey]
    del bigramtokenfinal[oldkey]

#final trigrams
iterkeys = list(trigramtokenfinal.keys())
for oldkey in iterkeys:
    newkey = oldkey[-3:]
    if newkey not in trigramtokenfinal:
        trigramtokenfinal[newkey] = trigramtokenfinal[oldkey]
    else:
        trigramtokenfinal[newkey] += trigramtokenfinal[oldkey]
    del trigramtokenfinal[oldkey]
#print("The values for the final segments in the real masc French lexicon are...")
print("The values for the final segments in the real fem French lexicon are...")
#print("The values for the final segments in the fake fem French lexicon are...")
#print("The values for the final segments in the fake masc French lexicon are...")

print(len(unarytokenfinal))
print(len(bigramtokenfinal))
print(len(trigramtokenfinal))
print(eta(unarytokenfinal))
print(eta(bigramtokenfinal))
print(eta(trigramtokenfinal))

x = [len(unarytypelex), len(bigramtypelex), len(trigramtypelex), eta(unarytypelex), eta(bigramtypelex), eta(trigramtypelex), len(unarytypefinal), len(bigramtypefinal), len(trigramtypefinal), eta(unarytypefinal), eta(bigramtypefinal), eta(trigramtypefinal), len(unarytokenlex), len(bigramtokenlex), len(trigramtokenlex), eta(unarytokenlex), eta(bigramtokenlex), eta(trigramtokenlex), len(unarytokenfinal), len(bigramtokenfinal), len(trigramtokenfinal), eta(unarytokenfinal), eta(bigramtokenfinal), eta(trigramtokenfinal)]

x = ';'.join([str(x)])

print(x)
'''
with open('french_fake_masc_values.txt', 'a') as file:
    file.write(x + '\n')

#bash code: for n in {1..2000}; do python french_entropy_values.py; done
'''
...

nmsetting_gsm.py

Source: nmsetting_gsm.py (GitHub)

...
        NMSetting.__init__(self)
        self.name = "gsm"

    @property
    def number(self):
        if "number" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["number"])
    @number.setter
    def number(self, new_number):
        self.prop_dict["number"] = TypeConvert.py2_dbus_string(new_number)
    @number.deleter
    def number(self):
        if "number" in self.prop_dict.iterkeys():
            del self.prop_dict["number"]

    @property
    def username(self):
        if "username" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["username"])
    @username.setter
    def username(self, new_user_name):
        self.prop_dict["username"] = TypeConvert.py2_dbus_string(new_user_name)
    @username.deleter
    def username(self):
        if "username" in self.prop_dict.iterkeys():
            del self.prop_dict["username"]

    @property
    def password(self):
        if "password" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["password"])
    @password.setter
    def password(self, new_password):
        self.prop_dict["password"] = TypeConvert.py2_dbus_string(new_password)
    @password.deleter
    def password(self):
        if "password" in self.prop_dict.iterkeys():
            del self.prop_dict["password"]

    @property
    def password_flags(self):
        if "password-flags" in self.prop_dict.iterkeys():
            return self.prop_dict["password-flags"]
    @password_flags.setter
    def password_flags(self, new_password_flags):
        self.prop_dict["password-flags"] = TypeConvert.py2_dbus_uint32(new_password_flags)
    @password_flags.deleter
    def password_flags(self):
        if "password-flags" in self.prop_dict.iterkeys():
            del self.prop_dict["password-flags"]

    @property
    def apn(self):
        if "apn" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["apn"])
    @apn.setter
    def apn(self, new_apn):
        self.prop_dict["apn"] = TypeConvert.py2_dbus_string(new_apn)
    @apn.deleter
    def apn(self):
        if "apn" in self.prop_dict.iterkeys():
            del self.prop_dict["apn"]

    @property
    def network_id(self):
        if "network-id" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["network-id"])
    @network_id.setter
    def network_id(self, new_network_id):
        self.prop_dict["network-id"] = TypeConvert.py2_dbus_string(new_network_id)
    @network_id.deleter
    def network_id(self):
        if "network-id" in self.prop_dict.iterkeys():
            del self.prop_dict["network-id"]

    @property
    def network_type(self):
        if "network-type" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["network-type"])
    @network_type.setter
    def network_type(self, new_network_type):
        self.prop_dict["network-type"] = TypeConvert.py2_dbus_uint32(new_network_type)
    @network_type.deleter
    def network_type(self):
        if "network-type" in self.prop_dict.iterkeys():
            del self.prop_dict["network-type"]

    @property
    def allowed_bands(self):
        if "allowed-bands" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["allowed-bands"])
    @allowed_bands.setter
    def allowed_bands(self, new_allowed_bands):
        self.prop_dict["allowed-bands"] = TypeConvert.py2_dbus_uint32(new_allowed_bands)
    @allowed_bands.deleter
    def allowed_bands(self):
        if "allowed-bands" in self.prop_dict.iterkeys():
            del self.prop_dict["allowed-bands"]

    @property
    def pin(self):
        if "pin" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["pin"])
    @pin.setter
    def pin(self, new_pin):
        self.prop_dict["pin"] = TypeConvert.py2_dbus_string(new_pin)
    @pin.deleter
    def pin(self):
        if "pin" in self.prop_dict.iterkeys():
            del self.prop_dict["pin"]

    @property
    def pin_flags(self):
        if "pin-flags" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["pin-flags"])
    @pin_flags.setter
    def pin_flags(self, new_pin_flags):
        self.prop_dict["pin-flags"] = TypeConvert.py2_dbus_uint32(new_pin_flags)
    @pin_flags.deleter
    def pin_flags(self):
        if "pin-flags" in self.prop_dict.iterkeys():
            del self.prop_dict["pin-flags"]

    @property
    def home_only(self):
        if "home-only" in self.prop_dict.iterkeys():
            return TypeConvert.dbus2py(self.prop_dict["home-only"])
    @home_only.setter
    def home_only(self, new_home_only):
        self.prop_dict["home-only"] = TypeConvert.py2_dbus_boolean(new_home_only)
    @home_only.deleter
    def home_only(self):
        if "home-only" in self.prop_dict.iterkeys():
            del self.prop_dict["home-only"]

if __name__ == "__main__":
...
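
A side note on the pattern above: checks such as if "apn" in self.prop_dict.iterkeys() only run on Python 2. The in operator already tests dictionary keys directly, so the iterkeys() call can simply be dropped, which is also the only form that works on Python 3. A hedged sketch with a plain dict standing in for the D-Bus property dictionary (names are illustrative):

prop_dict = {"apn": "internet", "pin": "0000"}

# Python 2 style, as in the snippet:  if "apn" in prop_dict.iterkeys(): ...
# Portable, idiomatic form: test membership on the dict itself.
if "apn" in prop_dict:
    print(prop_dict["apn"])

if "pin" in prop_dict:
    del prop_dict["pin"]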

trigram.py

Source: trigram.py (GitHub)

...
            elif list[1] == '3-GRAM':
                trigram_count.setdefault(list[4], {})[list[2], list[3]] = int(list[0])
        except:
            pass
    for word in tag_word_count.iterkeys():
        for tag in tag_word_count[word].iterkeys():
            emission_rate.setdefault(word, {})[tag] = tag_word_count[word][tag] / tag_count[tag]
    #emission_rate.setdefault('**',{})['**'] = 1
    #emission_rate.setdefault('STOP',{})['STOP'] = 1
    for trigram in trigram_count.iterkeys():
        for bi in trigram_count[trigram].iterkeys():
            q_rate.setdefault(trigram, {})[bi] = trigram_count[trigram][bi] / bigram_count[bi]
    return emission_rate, q_rate

def sentences(test_file):
    sentence = []
    for line in test_file:
        if line != '\n':
            sentence.append(line[:-1])
        else:
            if len(sentence) > 0:
                yield sentence
                sentence = []
            yield line
    if len(sentence) > 0:
        yield sentence

if __name__ == "__main__":
    try:
        dev_file = file('gene.test',"r")
        count_file = file('gene.counts',"r")
    except IOError:
        sys.stderr.write("ERROR: Cannot read inputfile.\n")
        sys.exit(1)
    emission_rate,q_rate = count_tag_and_word(count_file)
    write_lines = []
    for sentence in sentences(dev_file):
        if sentence == '\n':
            write_lines.append('\n')
            continue
        length = len(sentence)
        pi = {}
        pi.setdefault(0, {})['*','*'] = 1,None
        s = [None for i in range(len(sentence))]
        for v in emission_rate.get(sentence[0], emission_rate['_RARE_']).iterkeys():
            rate = pi[0].get(('*', '*'),(0,0))[0] * q_rate[v].get(('*', '*'), 0) * emission_rate.get(sentence[0], emission_rate['_RARE_'])[v]
            pi.setdefault(1, {})['*',v] = rate,'*'
        for v in emission_rate.get(sentence[1], emission_rate['_RARE_']).iterkeys():
            for u in emission_rate.get(sentence[0], emission_rate['_RARE_']).iterkeys():
                rate = pi[1].get(('*', u),(0,0))[0] * q_rate[v].get(('*', u), 0) * emission_rate.get(sentence[1], emission_rate['_RARE_'])[v]
                pi.setdefault(2, {})[u,v] = rate,'*'
        for i in range(2,length):
            for v in emission_rate.get(sentence[i], emission_rate['_RARE_']).iterkeys():
                for u in emission_rate.get(sentence[i-1], emission_rate['_RARE_']).iterkeys():
                    max_rate = 0
                    for w in emission_rate.get(sentence[i-2], emission_rate['_RARE_']).iterkeys():
                        rate = pi[i].get((w, u),(0,0))[0] * q_rate[v].get((w, u), 0) * emission_rate.get(sentence[i], emission_rate['_RARE_'])[v]
                        if rate >= max_rate:
                            max_rate = rate
                            pi.setdefault(i+1, {})[u,v] = max_rate,w
                    #if max_rate == 0:
                    #    pi[i+1][u,v] = 0.000000001, pi[i+1][u,v][1]
        max_rate = 0
        for v in emission_rate.get(sentence[length-1], emission_rate['_RARE_']).iterkeys():
            for u in emission_rate.get(sentence[length-2], emission_rate['_RARE_']).iterkeys():
                rate = pi[length].get((u,v),(0,0))[0] * q_rate['STOP'].get((u,v),0)
                if rate >= max_rate:
                    max_rate = rate
                    y_n1, y_n = u, v
        s[length-1] = y_n
        s[length-2] = y_n1
        for i in range(length-3, -1, -1):
            s[i] = pi[i+3][y_n1, y_n][1]
            y_n = y_n1
            y_n1 = s[i]
        for i in range(len(s)):
            write_lines.append(sentence[i] + ' ' + s[i] + '\n')
            #write_lines.append('\n')
        write_lines.append('\n')
...
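
The snippet above relies on nested .iterkeys() loops followed by an explicit lookup, which again ties it to Python 2. On Python 3 the same counting logic is usually written with .items(), iterating keys and values together. A small sketch with toy counts (the real script reads its counts from 'gene.counts'):

tag_word_count = {"dog": {"NOUN": 3}, "runs": {"VERB": 2}}
tag_count = {"NOUN": 3, "VERB": 2}

emission_rate = {}
for word, tags in tag_word_count.items():        # replaces .iterkeys() + lookup
    for tag, count in tags.items():
        emission_rate.setdefault(word, {})[tag] = count / tag_count[tag]

print(emission_rate)   # {'dog': {'NOUN': 1.0}, 'runs': {'VERB': 1.0}}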

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud
