How to use def_list method in yandex-tank

Below are Python code snippets that use def_list, collected from open-source projects on GitHub. Note that def_list is not a public yandex-tank API method: in the first snippet it is a pytest fixture that returns the TypedList or ParameterList under test, while in the later snippets it is an ordinary variable holding a list of dictionary definitions.
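In test_parameter.py (shown first below), def_list is not a method but a pytest fixture: each test simply declares a def_list argument and pytest injects the list the fixture builds. The following is a minimal, self-contained sketch of that usage pattern; DummyDefinition and the plain list it populates are hypothetical stand-ins for the Parameter and TypedList objects used in the real suite, not yandex-tank APIs.

from pytest import fixture


class DummyDefinition:
    """Hypothetical stand-in for the Parameter objects used in the real tests."""
    def __init__(self, name):
        self.name = name


@fixture
def def_list():
    # The real fixture returns a TypedList/ParameterList; a plain list is
    # enough to show how tests receive the fixture by argument name.
    return [DummyDefinition('pA'), DummyDefinition('pB')]


def test_getitem_index(def_list):
    # pytest injects the list built by the def_list fixture above
    assert def_list[0].name == 'pA'


def test_len(def_list):
    assert len(def_list) == 2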

test_parameter.py

Source: test_parameter.py (GitHub)


...
        self.pX = Parameter('invalid')
        return self.ps

    @fixture
    def def_list(self, parameters):
        return TypedList(Parameter.is_compatible, parameters)

    def test_getitem_index(self, def_list):
        assert self.ps[0] == def_list[0]

    def test_getitem_name(self, def_list):
        assert self.ps[0] == def_list[self.ps[0].name]

    def test_getitem_missing(self, def_list):
        with raises(IndexError):
            def_list[11]
        with raises(KeyError):
            def_list['missing']

    def test_index(self, def_list):
        assert def_list.index(self.ps[1]) == 1
        assert def_list.index(self.ps[1].name) == 1
        with raises(ValueError):
            def_list.index(self.pX)
        with raises(ValueError):
            def_list.index(self.pX.name)

    def test_append(self, def_list):
        def_list.append(self.pN)
        with raises(TypeError):
            def_list.append(None)
        with raises(TypeError):
            def_list.append([self.pN])

    def test_extend(self, def_list):
        def_list.extend([self.pN, self.pX])
        with raises(TypeError):
            def_list.extend([self.pN, list()])

    def test_setitem(self, def_list):
        with raises(TypeError):
            def_list[0] = None
        def_list[0] = self.pN

    def test_delitem(self, def_list):
        p0 = def_list[0]
        del def_list[0]
        assert p0 not in def_list

    def test_contains(self, def_list):
        assert def_list[0] in def_list

    def test_insert(self, def_list):
        def_len = len(def_list)
        def_list.insert(1, self.pN)
        assert def_list[1] == self.pN
        assert len(def_list) == def_len + 1
        with raises(TypeError):
            def_list.insert(0, None)

    def test_copy(self, def_list):
        copy_list = copy(def_list)
        # assert def_list.is_compatible_item == copy_list.is_compatible_item
        assert def_list == copy_list
        copy_list[1] = self.pN
        assert def_list[1] != copy_list[1]

    def test_add(self, def_list):
        add_list = def_list + [self.pN]
        assert add_list[-1] == self.pN
        assert type(add_list) == type(def_list)
        assert len(add_list) == len(def_list) + 1

    def test_radd(self, def_list):
        radd_list = [self.pN] + def_list
        assert radd_list[0] == self.pN
        assert type(radd_list) == type(def_list)
        assert len(radd_list) == len(def_list) + 1

    def test_str(self, def_list):
        assert str(def_list)

    def test_pretty(self, def_list):
        assert pretty(def_list)


class TestParameterList(TestTypedList):
    @fixture
    def def_list(self, parameters):
        return ParameterList(parameters)

    def test_append_str(self, def_list):
        def_list.append('pS')
        assert isinstance(def_list[-1], Parameter)
        assert def_list[-1].name == 'pS'

    def test_insert_str(self, def_list):
        def_list.insert(0, 'pS')
        assert isinstance(def_list[0], Parameter)
        assert def_list[0].name == 'pS'

    def test_setitem_str(self, def_list):
        def_list[0] = 'pS'
...
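A key design point in the snippet above is that TestParameterList inherits every test from TestTypedList and only overrides the def_list fixture, so the same assertions are re-run against a ParameterList instead of a TypedList. A condensed, self-contained sketch of that override pattern is shown below; BaseListTests, ListA, and ListB are hypothetical toy names used only to illustrate the technique:

from pytest import fixture


class ListA(list):
    """Toy container standing in for TypedList."""


class ListB(list):
    """Toy container standing in for ParameterList."""


class BaseListTests:
    @fixture
    def def_list(self):
        return ListA([1, 2, 3])

    def test_getitem(self, def_list):
        assert def_list[0] == 1

    def test_len(self, def_list):
        assert len(def_list) == 3


class TestListB(BaseListTests):
    # Only the fixture changes; every test defined in BaseListTests now
    # runs against a ListB instance instead of a ListA instance.
    @fixture
    def def_list(self):
        return ListB([1, 2, 3])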


__init__.py

Source: __init__.py (GitHub)


import re
import scrapy

# by Peyman (mohsenikiasari@ce.sharif.edu) in 2019.
words = ['I', 'hope', 'you', 'like', 'this', 'dictionary', 'web', 'crawler']


# scrapy crawl oxford -o oxford.jl
class OxfordCrawler(scrapy.Spider):
    name = "oxford"
    allowed_domains = ["www.lexico.com"]
    start_urls = ["https://www.lexico.com/en/definition/" + word for word in words]

    def parse(self, response):
        word = response.request.url.split("/")[-1]
        definition_dict = {}
        for sections in response.xpath("//section[@class='gramb']"):
            try:
                part_of_speech = sections.xpath(".//span[@class='pos']/text()").extract()[0]
            except:
                part_of_speech = False
            def_list = sections.xpath("./ul/li/div[@class='trg']//span[@class='ind']").extract()
            if not def_list:
                def_list = sections.xpath(".//div[@class='empty_sense']//div[@class='crossReference']").extract()
            def_list = [re.sub(r'<.*?>', "", i).strip() for i in def_list]
            def_list = [i for i in def_list if i]
            if def_list and part_of_speech:
                if part_of_speech in definition_dict:
                    definition_dict[part_of_speech] += def_list
                else:
                    definition_dict[part_of_speech] = def_list
        if definition_dict:
            yield {word: definition_dict}


# scrapy crawl longman -o longman.jl
class LongmanCrawler(scrapy.Spider):
    name = "longman"
    allowed_domains = ["https://www.ldoceonline.com"]
    start_urls = ["https://www.ldoceonline.com/dictionary/" + word for word in words]

    def parse(self, response):
        word = response.request.url.split("/")[-1]
        definition_dict = {}
        for sections in response.xpath("//span[@class='dictentry']"):
            try:
                part_of_speech = (sections.xpath(".//span[@class='POS']/text()").extract()[0]).strip()
            except:
                part_of_speech = False
            def_list = sections.xpath(".//span[@class='Sense']/span[@class='DEF']").extract()
            def_list = [re.sub(r'<.*?>', "", i[18:-7]).strip() for i in def_list]
            def_list = [i for i in def_list if i]
            if def_list and part_of_speech:
                if part_of_speech in definition_dict:
                    definition_dict[part_of_speech] += def_list
                else:
                    definition_dict[part_of_speech] = def_list
        if definition_dict:
            yield {word: definition_dict}


# scrapy crawl cambridge -o cambridge.jl
class CambridgeCrawler(scrapy.Spider):
    name = "cambridge"
    allowed_domains = ["https://dictionary.cambridge.org"]
    start_urls = ["https://dictionary.cambridge.org/dictionary/english/" + word for word in words]

    def parse(self, response):
        word = response.request.url.split("/")[-1]
        definition_dict = {}
        for entry in response.xpath("//div[@class='entry-body__el clrd js-share-holder']"):
            part_of_speeches = entry.xpath("./div[@class='pos-header']//span[@class='pos']/text()").extract()
            def_list = entry.xpath(
                ".//div[@class='sense-body']/div[@class='def-block pad-indent']//b[@class='def']").extract()
            def_list = [re.sub(r'<.*?>|:', "", i[15:-4]).strip() for i in def_list]
            def_list = [i for i in def_list if i]
            # guard on the extracted POS list before distributing def_list across it
            if def_list and part_of_speeches:
                for part_of_speech in part_of_speeches:
                    if part_of_speech in definition_dict:
                        definition_dict[part_of_speech] += def_list
                    else:
                        definition_dict[part_of_speech] = def_list
        if definition_dict:
            yield {word: definition_dict}


# scrapy crawl webster -o webster.jl
class WebsterCrawler(scrapy.Spider):
    name = "webster"
    allowed_domains = ["https://www.merriam-webster.com"]
    start_urls = ["https://www.merriam-webster.com/dictionary/" + word for word in words]

    def parse(self, response):
        word = response.request.url.split("/")[-1]
        definition_dict = {}
        part_of_speeches = [re.sub(r'\(.*\)', "", i).strip() for i in
                            response.xpath("//span[@class='fl']/a/text()|//span[@class='fl']/text()").extract()]
        for sections in response.xpath("//div[contains(@id, 'dictionary-entry')]/div[@class='vg']"):
            part_of_speech = part_of_speeches.pop(0)
            def_list = sections.xpath(
                ".//span[@class='dtText' or @class='unText'][not(ancestor::span[@class='dtText'])]").extract()
            def_list = [re.sub(r'<span.*>.+</span>', "", i[21:-7]) for i in def_list]
            def_list = [re.sub(r'<.*?>|:', "", i).strip() for i in def_list]
            def_list = [i for i in def_list if i]
            if def_list and part_of_speech:
                if part_of_speech in definition_dict:
                    definition_dict[part_of_speech] += def_list
                else:
                    definition_dict[part_of_speech] = def_list
        if definition_dict:
            yield {word: definition_dict}


# scrapy crawl collins -o collins.jl
class CollinsCrawler(scrapy.Spider):
    name = "collins"
    allowed_domains = ["https://www.collinsdictionary.com"]
    start_urls = ["https://www.collinsdictionary.com/dictionary/english/" + word for word in words]

    def parse(self, response):
        word = response.request.url.split("/")[-1]
        definition_dict = {}
        for sections in response.xpath("//div[@class='dictionary Cob_Adv_Brit']"
                                       "//div[@class='content definitions cobuild br']/div[@class='hom']"):
            try:
                part_of_speech = (sections.xpath(".//span[@class='pos']/text()").extract()[0]).strip()
            except:
                part_of_speech = False
            def_list = sections.xpath("./div[@class='sense']/div[@class='def']").extract()
            def_list = [re.sub(r'<.*?>', "", i[17:-6]).strip() for i in def_list]
            def_list = [i for i in def_list if i]
            if def_list and part_of_speech:
                if part_of_speech in definition_dict:
                    definition_dict[part_of_speech] += def_list
                else:
                    definition_dict[part_of_speech] = def_list
        if definition_dict:
...
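This second snippet is not yandex-tank code: here def_list is an ordinary variable holding the definitions each Scrapy spider scrapes, and the comments in the file give the intended command-line usage (e.g. scrapy crawl oxford -o oxford.jl). As a rough sketch of an alternative, the spiders can also be run programmatically with Scrapy's CrawlerProcess; the dictionary_crawlers import path below is a hypothetical placeholder for wherever the classes above actually live:

from scrapy.crawler import CrawlerProcess

# Hypothetical module path; adjust to wherever OxfordCrawler is defined.
from dictionary_crawlers import OxfordCrawler

process = CrawlerProcess(settings={
    # With a recent Scrapy, FEEDS writes each yielded item as one JSON line.
    "FEEDS": {"oxford.jl": {"format": "jsonlines"}},
})
process.crawl(OxfordCrawler)
process.start()  # blocks until the crawl finishes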


clue_maker.py

Source: clue_maker.py (GitHub)


import json
import requests
import random
from layout_solver.word_slot import WordSlot

# trying out the simpler version first:
# maps to 20th synonym,
# or the last one if <20


def generate_clues_with_api(word):
    """
    Finds a clue for the inputted word.
    word: [word] - a String, the word
    Output: [clue] - a String, the clue for the inputted word
    """
    # we pull the list of synonyms for the inputted word
    synonym_url = "https://api.datamuse.com/words?rel_syn=" + word + "&md=d"
    synonym_data = requests.get(synonym_url)
    synonym_json = json.loads(synonym_data.text)
    if len(synonym_json) == 0:
        return "NO CLUE"
    else:
        synonym_list = []
        counter = 0
        # clean up the data and make it into a list of synonyms
        while (counter < len(synonym_json)):
            synonym_list.append(synonym_json[counter]["defs"][0])
            counter += 1
        rand_syn = random.randint(0, counter - 1)
        clue = synonym_list[rand_syn]
        # Separate clue from PoS
        clue = clue.split('\t', 1)[1].strip()
        clue_mult = clue.split(';')
        rand_clue_mult = random.randint(0, len(clue_mult) - 1)
        return clue_mult[rand_clue_mult].strip()


def definition(word):
    dict_json = open('theme_classifier/dictionary.json')
    data = json.load(dict_json)
    word_def = data[word]
    dict_json.close()
    return word_def


def insert_semicolon(word_def):
    semicol_def = ""
    start_ind = 0
    i = 0
    while i < len(word_def):
        if word_def[i:i+2].isdigit() and i != len(word_def) - 2:
            semicol_def += word_def[start_ind:i] + "; "
            start_ind = i + 4
            i += 2
        elif word_def[i:i+1].isdigit() and i != len(word_def) - 1:
            semicol_def += word_def[start_ind:i] + "; "
            start_ind = i + 3
            i += 1
        elif word_def[i:i+2] == "--" and i != len(word_def) - 2:
            semicol_def += word_def[start_ind:i] + "; "
            start_ind = i + 4
            i += 2
        elif i == len(word_def) - 1:
            semicol_def += word_def[start_ind:i+1]
            i += 1
        else:
            i += 1
    return semicol_def


def split_def(def_string):
    def_list = def_string.split("; ")
    return def_list


def clean_list(def_list):
    i = 0
    while i < len(def_list):
        def_word = def_list[i]
        if ". See" in def_word:
            see_ind = def_word.find(". See")
            def_list[i] = def_word[:see_ind]
            i += 1
        elif "See" in def_word:
            see_ind = def_word.find("See")
            def_list[i] = def_word[see_ind+3:]
            i += 1
        elif ". [" in def_word:
            brac_ind = def_word.find(". [")
            def_list[i] = def_word[:brac_ind]
            i += 1
        elif "\n" in def_word:
            n_ind = def_word.find("\n")
            def_list[i] = def_word[:n_ind]
            i += 1
        elif '.' in def_word and def_word[-1] != '.' and len(def_list) != 1:
            del def_list[i]
        elif def_word == '':
            del def_list[i]
        elif def_word[0:2] == '--':
            def_list[i] = def_word[2:]
            i += 1
        else:
            i += 1
    return def_list


def replace_syn(def_string, word):
    if word in def_string:
        synonym_url = "https://api.datamuse.com/words?rel_syn=" + word + "&md"
        synonym_data = requests.get(synonym_url)
        synonym_json = json.loads(synonym_data.text)
        if len(synonym_json) == 0:
            return None
        else:
            synonym_list = []
            counter = 0
            while (counter < len(synonym_json)):
                synonym_list.append(synonym_json[counter]["word"])
                counter += 1
            rand_syn = random.randint(0, counter - 1)
            syn = synonym_list[rand_syn]
            def_string = def_string.replace(word, syn)
    return def_string


def find_clue(word):
    def_list = definition(word)
    def_list = insert_semicolon(def_list)
    def_list = split_def(def_list)
    def_list = clean_list(def_list)
    i = 0
    while i < len(def_list):
        i_word = def_list[i].strip()
        if i_word == '':
            del def_list[i]
        else:
            i += 1
    word_clue = None
    if len(def_list) == 0:
        return generate_clues_with_api(word)
    while word_clue is None:
        temp_clue = def_list[random.randint(0, len(def_list)-1)]
        word_clue = replace_syn(temp_clue, word)
    return word_clue.capitalize()


def generate_clues(word_slots):
    for i in range(len(word_slots)):
        word = word_slots[i].get_best_word()
        clue = find_clue(word)
...
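In clue_maker.py, def_list is again a plain list: a raw dictionary definition is split into candidate clues and then cleaned before one is picked. A rough usage sketch of that pipeline is shown below; the raw_def string is an invented example of the numbered definition format the code expects, not real data from theme_classifier/dictionary.json, and the import assumes the functions above live in a module named clue_maker:

from clue_maker import insert_semicolon, split_def, clean_list

# Invented sample in the numbered, Webster-style format the code expects.
raw_def = "1. A greeting or expression of goodwill. 2. An exclamation used to attract attention."

semicol_def = insert_semicolon(raw_def)  # turn the sense numbering into "; " separators
def_list = split_def(semicol_def)        # one candidate clue per list entry
def_list = clean_list(def_list)          # drop cross-references, brackets, and empty entries
print(def_list)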


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run yandex-tank automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now!

Get 100 automation testing minutes for free!

Next-Gen App & Browser Testing Cloud
