How to use the setup_parser method in Behave

Best Python code snippet using behave

run_the_grid.py

Source:run_the_grid.py Github

copy

Full Screen

#!/usr/bin/env python
"""Node worker for grid-training fasttext vectors.

Picks hyper-parameter combinations from a shared Google Spreadsheet, trains
vectors locally with the fasttext binary, logs results to a JSONLines file and
ticks the spreadsheet row so other workers skip it.
"""
import bz2
import lzma
import gzip
import json
import pathlib
import argparse
import logging
import functools
from datetime import datetime
from urllib.parse import urlparse
import shutil
import multiprocessing
import socket

import executor
import requests
import gspread

logger = logging.getLogger("run_the_grid")


def download_file(url, local_filename):
    """Stream *url* into *local_filename* without loading it all in memory."""
    with requests.get(url, stream=True) as r:
        # Make urllib3 decode gzip/deflate transfer encoding while streaming.
        r.raw.read = functools.partial(r.raw.read, decode_content=True)
        r.raise_for_status()
        with open(local_filename, "wb") as f:
            shutil.copyfileobj(r.raw, f)


def open_compressed(filename):
    """Open *filename* (a pathlib.Path) for text reading if its suffix is a
    known compression format; return None for anything else so the caller can
    treat the file as plain text."""
    if filename.suffix == ".bz2":
        return bz2.open(filename, "rt")
    if filename.suffix == ".gz":
        return gzip.open(filename, "rt")
    if filename.suffix == ".xz":
        return lzma.open(filename, "rt")
    return None


def get_worksheet(api_key_path, spreadshet_id, worksheet_name="Params combined"):
    """Return the task worksheet, or None (after logging) on any failure.

    `spreadshet_id` spelling is kept as-is: it matches the config key and the
    CLI flag, and renaming it would break existing configs.
    """
    try:
        gc = gspread.service_account(filename=api_key_path)
        sh = gc.open_by_key(spreadshet_id)
        logger.info(f"Successfully opened spreadshet {sh.title}")
    except gspread.exceptions.APIError:
        logger.error(
            f"Cannot connect to google spreadsheet, check the validity of your service account api key {api_key_path} or spreadshet id {spreadshet_id}"
        )
        return
    except gspread.exceptions.SpreadsheetNotFound:
        logger.error(f"Cannot find google spreadsheet, check your spreadshet id {spreadshet_id}")
        return
    try:
        return sh.worksheet(worksheet_name)
    except gspread.exceptions.WorksheetNotFound:
        # BUG FIX: the original referenced an undefined name `spreadsheet`
        # here, raising NameError instead of logging; `sh` is the open handle.
        logger.error(f"Cannot open worksheet {worksheet_name} in {sh.title}, please fix the spreadsheet")
        return


def pick_the_task(worksheet, hostname):
    """Claim the first row with an empty Status and return its parsed params.

    Marks the row "Processing" with a timestamp and this worker's hostname.
    Rows with malformed Params are logged and skipped. Returns None when the
    queue is empty.
    """
    tasks = worksheet.get_all_records()
    for row_no, t in enumerate(tasks):
        if t["Status"] == "":
            # +2: spreadsheet rows are 1-based and row 1 is the header.
            logger.info(f"We've found a new task at row {row_no + 2}, it is {t['Description']}")
            try:
                algo, epochs, subwords, wordngram, neg_sampling = t["Params"].strip().split(";")
                subwords_min, subwords_max = subwords.split("-")
            except ValueError:
                logger.error(f"Cannot parse record {t['Params']}, skipping it")
                continue
            worksheet.update_cell(row_no + 2, 3, "Processing")
            worksheet.update_cell(row_no + 2, 4, str(datetime.now()))
            worksheet.update_cell(row_no + 2, 5, hostname)
            return {
                "task": t,
                "params": {
                    "algo": algo,
                    "epochs": epochs,
                    "subwords_min": subwords_min,
                    "subwords_max": subwords_max,
                    "wordngram": wordngram,
                    "neg_sampling": neg_sampling,
                },
                "suffix": f"algo-{algo}.epochs-{epochs}.subwords-{subwords_min}..{subwords_max}.wordngram-{wordngram}.neg_sampling-{neg_sampling}",
                "row_no": row_no + 2,
            }
    return


def tick_the_task(worksheet, task):
    """Mark *task*'s spreadsheet row as Computed with a completion timestamp."""
    worksheet.update_cell(task["row_no"], 3, "Computed")
    worksheet.update_cell(task["row_no"], 4, str(datetime.now()))


def train(args):
    """Consume tasks from the spreadsheet until the queue is empty.

    For every claimed task: run the fasttext binary, drop the redundant .vec
    plaintext vectors, append a JSONLines record to the logfile and tick the
    spreadsheet row.
    """
    logger.info("Running pre-flight checks")
    if not args.config.exists():
        logger.error(f"Config file {args.config} doesn't exist, please run ./run_the_grid.py setup first")
        return
    try:
        with open(args.config, "r") as fp:
            config = json.load(fp)
    except json.decoder.JSONDecodeError:
        logger.error(
            f"Config file {args.config} is corrupt, please run ./run_the_grid.py setup again and do not tamper with file"
        )
        return
    corpus_path = pathlib.Path(config["corpus"])
    if not corpus_path.exists():
        logger.error(f"Corpus file {corpus_path} doesn't exist, please re-run ./run_the_grid.py setup")
        return
    vectors_path = pathlib.Path(config["vectors"])
    if not vectors_path.exists():
        logger.error(f"Vectors directory {vectors_path} doesn't exist, please re-run ./run_the_grid.py setup")
        return
    api_key_path = pathlib.Path(config["api_key"])
    if not api_key_path.exists():
        logger.error(f"Api key file {api_key_path} doesn't exist, please re-run ./run_the_grid.py setup")
        return
    fasttext_path = pathlib.Path(config["fasttext"])
    if not fasttext_path.exists() or not executor.is_executable(fasttext_path):
        logger.error(
            f"Fastext binary {fasttext_path} doesn't exist or it is not executable, please re-run ./run_the_grid.py setup"
        )
        return
    worksheet = get_worksheet(api_key_path, config.get("spreadshet_id"))
    if worksheet is None:
        return
    while True:
        task = pick_the_task(worksheet, config.get("hostname", socket.gethostname()))
        if task is None:
            # FIX: no placeholder in the message, so no f-string needed.
            logger.warning("Woohoo, no more tasks left in the queue")
            return
        vectors_fname = vectors_path / (corpus_path.name + "." + task["suffix"])
        executor.execute(
            fasttext_path,
            task["params"]["algo"],
            "-epoch",
            task["params"]["epochs"],
            "-neg",
            task["params"]["neg_sampling"],
            "-wordNgrams",
            task["params"]["wordngram"],
            "-minn",
            task["params"]["subwords_min"],
            "-maxn",
            task["params"]["subwords_max"],
            "-input",
            corpus_path,
            "-output",
            vectors_fname,
            "-dim",
            "300",
            "-thread",
            str(config["threads"]),
        )

        # Only the .bin model is kept; the plaintext .vec duplicate is large.
        vectors_plaintext = vectors_fname.parent / (vectors_fname.name + ".vec")
        if vectors_plaintext.exists():
            vectors_plaintext.unlink()
        vectors_bin = vectors_fname.parent / (vectors_fname.name + ".bin")
        with open(config["logfile"], "a") as fp_out:
            fp_out.write(
                json.dumps({
                    "vectors": str(vectors_bin),
                    "corpus": str(corpus_path),
                    "params": task["params"],
                    "dt": str(datetime.now()),
                }, sort_keys=True) + "\n"
            )

        tick_the_task(worksheet, task)


def setup(args):
    """One-time worker setup: check deps, fetch/decompress the corpus, clone
    and build fasttext, and write the config file used by `train`."""
    # Checking dependencies
    binary_dependencies = ["git", "make"]
    for dep in binary_dependencies:
        if shutil.which(dep) is None:
            logger.error(f"Cannot find required binary dependencies {dep}, exiting. Please install it and return back")
            return
    # Checking api keys
    if not args.api_key_location.exists():
        logger.error(f"Api key file {args.api_key_location} doesn't exist")
        return
    # Creating folders. The fasttext folder is deliberately NOT created here:
    # `git clone` below requires the target directory to be absent.
    try:
        logger.info(f"Creating corpus folder {args.corpus_location}")
        args.corpus_location.mkdir(exist_ok=True)
        logger.info(f"Creating vectors folder {args.vectors_location}")
        args.vectors_location.mkdir(exist_ok=True)
    except (PermissionError, FileNotFoundError) as e:
        logger.error(f"Cannot create one of the required folders: {e}")
        return
    # Downloading and unpacking corpus
    corpus_frags = urlparse(args.corpus_url)
    corpus_path = args.corpus_location / pathlib.Path(corpus_frags.path).name
    # TODO: verify the case when it's already exists
    decompressed_corpus_path = corpus_path
    if not corpus_path.exists() or args.overwrite_corpus:
        logger.info(f"Downloading vectors from {args.corpus_url} to {corpus_path}")
        download_file(args.corpus_url, corpus_path)
        logger.info(f"Corpus successfully downloaded to {corpus_path}")
        corpus_fh = open_compressed(corpus_path)
        if corpus_fh is not None:
            decompressed_corpus_path = corpus_path.with_suffix("")
            if not decompressed_corpus_path.exists() or args.overwrite_corpus:
                logger.info(f"Decompressing {corpus_path.suffix} corpus")
                with open(decompressed_corpus_path, "w") as fp_out:
                    shutil.copyfileobj(corpus_fh, fp_out)
                logger.info(f"Finished decompressing {corpus_path.suffix} corpus")
            # BUG FIX: close the compressed handle unconditionally; the
            # original only closed it when decompression actually ran.
            corpus_fh.close()
    else:
        logger.info(f"Corpus file {corpus_path} already exists, skipping")
    fasttext_path = args.fasttext_location / "fasttext"
    # building fasttext
    if not fasttext_path.exists() or args.overwrite_fasttext:
        if args.fasttext_location.exists():
            shutil.rmtree(args.fasttext_location)
        logger.info("Cloning fasttext repo")
        if not executor.execute(
            "git", "clone", "https://github.com/facebookresearch/fastText.git", args.fasttext_location, check=False
        ):
            logger.error("Failed to clone fasttext repo")
            return
        logger.info("Building fasttext")
        if not executor.execute("make", directory=args.fasttext_location, check=False):
            logger.error("Failed to build fasttext binaries")
            return
        logger.info(f"Fasttext binaries successfully cloned and built at {fasttext_path}")
    else:
        logger.info(f"Fasttext binary {fasttext_path} already exists, skipping")
    if not args.config.exists() or args.overwrite_config:
        with open(args.config, "w") as fp_out:
            json.dump(
                {
                    "corpus": str(decompressed_corpus_path),
                    "fasttext": str(fasttext_path),
                    "api_key": str(args.api_key_location),
                    "spreadshet_id": args.spreadshet_id,
                    "threads": args.threads,
                    "vectors": str(args.vectors_location),
                    "logfile": str(args.logfile),
                    "hostname": args.hostname,
                },
                fp_out,
                sort_keys=True,
                indent=4,
            )
        logger.info(f"Config stored to {args.config}")
    else:
        logger.warning(f"Config {args.config} already exists, not overwriting")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="""That is the node worker to compute fasttext """
        """vectors using different params, store obtained vectors on gdrive and update google spreadsheet"""
    )
    parser.add_argument("--config", type=pathlib.Path, help="Path to config file", default="config.json")
    parser.add_argument(
        "-d",
        "--debug",
        help="Print lots of debugging statements",
        action="store_const",
        dest="loglevel",
        const=logging.DEBUG,
        default=logging.WARNING,
    )
    parser.add_argument(
        "-v",
        "--verbose",
        help="Be verbose",
        action="store_const",
        dest="loglevel",
        const=logging.INFO,
    )
    subparsers = parser.add_subparsers(help="Available commands")
    # Setup subparser
    setup_parser = subparsers.add_parser(
        "setup", help="Download and build dependencies, download corpus file, create config from template"
    )
    setup_parser.add_argument(
        "--overwrite_config",
        help="Overwrite config if it is already exists",
        action="store_true",
        default=False,
    )
    setup_parser.add_argument(
        "--overwrite_corpus",
        help="Overwrite corpus if it is already exists",
        action="store_true",
        default=False,
    )
    setup_parser.add_argument(
        "--overwrite_fasttext",
        help="Download and rebuild fasttext, if it is already exists",
        action="store_true",
        default=False,
    )
    setup_parser.add_argument(
        "--corpus_location",
        type=pathlib.Path,
        help="Download corpus to specific folder",
        default=pathlib.Path("corpus"),
    )
    setup_parser.add_argument(
        "--corpus_url",
        type=str,
        help="Download corpus to specific folder",
        default="https://lang-uk.nbu.rocks/static/ubertext.fiction_news_wikipedia.filter_rus+short.tokens.txt.bz2",
    )
    setup_parser.add_argument(
        "--fasttext_location",
        type=pathlib.Path,
        help="Download and build fasttext to specific folder",
        default=pathlib.Path("lib"),
    )
    setup_parser.add_argument(
        "--vectors_location",
        type=pathlib.Path,
        help="Store vectors to given folder after the training",
        default=pathlib.Path("vectors"),
    )
    setup_parser.add_argument(
        "--api_key_location",
        type=pathlib.Path,
        help="Location of json file with service account credentials for google drive and spreadsheet",
        default=pathlib.Path("api_keys/fasttext_gridtraining.json"),
    )
    setup_parser.add_argument(
        "--spreadshet_id",
        type=str,
        help="Google Spreadsheet id (the one from the url) with the spreadsheet of tasks and results",
        default="150DjEZKCuJEcsCJWahWmhPkfHzn9pA-N3UIYYx7XM04",
    )
    setup_parser.add_argument(
        "--threads",
        type=int,
        help="Number of threads to use",
        # BUG FIX: cpu_count() - 2 is zero or negative on 1-2 core machines;
        # clamp to at least one thread.
        default=max(1, multiprocessing.cpu_count() - 2),
    )
    setup_parser.add_argument(
        "--logfile", type=pathlib.Path, help="JSONLines file to write training details",
        default=pathlib.Path("log.jsonl")
    )
    setup_parser.add_argument(
        "--hostname", type=str, help="Identifier of the worker, defaulted to the hostname",
        default=socket.gethostname()
    )
    setup_parser.set_defaults(func=setup)
    # Train subparser
    train_parser = subparsers.add_parser("train", help="Run the trainings")
    train_parser.set_defaults(func=train)
    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel)
    # Dispatch to the selected subcommand; subparsers are optional in py3,
    # so show help instead of crashing when no command was given.
    if hasattr(args, "func"):
        args.func(args)
    else:
        parser.print_help()

Full Screen

Full Screen

toxcmd3.py

Source:toxcmd3.py Github

copy

Full Screen

...170 assert isinstance(func, collections.Callable)171 self.name = name172 self.func = func173 self.parser = None174 def setup_parser(self, command_parser):175 setup_parser = getattr(self.func, "setup_parser", None)176 if setup_parser and isinstance(setup_parser, collections.Callable):177 setup_parser(command_parser)178 else:179 command_parser.add_argument("args", nargs="*")180 @property181 def usage(self):182 usage = getattr(self.func, "usage", None)183 return usage184 @property185 def short_description(self):186 short_description = getattr(self.func, "short", "")187 return short_description188 @property189 def description(self):190 return inspect.getdoc(self.func)191 def __call__(self, args):192 return self.func(args)193# -----------------------------------------------------------------------------194# MAIN-COMMAND:195# -----------------------------------------------------------------------------196def toxcmd_main(args=None):197 """Command util with subcommands for tox environments."""198 usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..."199 if args is None:200 args = sys.argv[1:]201 # -- STEP: Build command-line parser.202 parser = argparse.ArgumentParser(description=inspect.getdoc(toxcmd_main),203 formatter_class=FORMATTER_CLASS)204 common_parser = parser.add_argument_group("Common options")205 common_parser.add_argument("--version", action="version", version=VERSION)206 subparsers = parser.add_subparsers(help="commands")207 for command in discover_commands():208 command_parser = subparsers.add_parser(command.name,209 usage=command.usage,210 description=command.description,211 help=command.short_description,212 formatter_class=FORMATTER_CLASS)213 command_parser.set_defaults(func=command)214 command.setup_parser(command_parser)215 command.parser = command_parser216 # -- STEP: Process command-line and run command.217 options = parser.parse_args(args)218 command_function = options.func219 return command_function(options)220# 
-----------------------------------------------------------------------------221# MAIN:222# -----------------------------------------------------------------------------223if __name__ == "__main__":...

Full Screen

Full Screen

toxcmd.py

Source:toxcmd.py Github

copy

Full Screen

...152 assert callable(func)153 self.name = name154 self.func = func155 self.parser = None156 def setup_parser(self, command_parser):157 setup_parser = getattr(self.func, "setup_parser", None)158 if setup_parser and callable(setup_parser):159 setup_parser(command_parser)160 else:161 command_parser.add_argument("args", nargs="*")162 @property163 def usage(self):164 usage = getattr(self.func, "usage", None)165 return usage166 @property167 def short_description(self):168 short_description = getattr(self.func, "short", "")169 return short_description170 @property171 def description(self):172 return inspect.getdoc(self.func)173 def __call__(self, args):174 return self.func(args)175# -----------------------------------------------------------------------------176# MAIN-COMMAND:177# -----------------------------------------------------------------------------178def toxcmd_main(args=None):179 """Command util with subcommands for tox environments."""180 usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..."181 if args is None:182 args = sys.argv[1:]183 # -- STEP: Build command-line parser.184 parser = argparse.ArgumentParser(description=inspect.getdoc(toxcmd_main),185 formatter_class=FORMATTER_CLASS)186 common_parser = parser.add_argument_group("Common options")187 common_parser.add_argument("--version", action="version", version=VERSION)188 subparsers = parser.add_subparsers(help="commands")189 for command in discover_commands():190 command_parser = subparsers.add_parser(command.name,191 usage=command.usage,192 description=command.description,193 help=command.short_description,194 formatter_class=FORMATTER_CLASS)195 command_parser.set_defaults(func=command)196 command.setup_parser(command_parser)197 command.parser = command_parser198 # -- STEP: Process command-line and run command.199 options = parser.parse_args(args)200 command_function = options.func201 return command_function(options)202# -----------------------------------------------------------------------------203# 
MAIN:204# -----------------------------------------------------------------------------205if __name__ == "__main__":...

Full Screen

Full Screen

manage.py

Source:manage.py Github

copy

Full Screen

#!/usr/bin/env python
"""Perform administrative actions on Google Alerts."""
import base64
import contextlib
import json
import os
import pickle
import sys
import time
from argparse import ArgumentParser

import selenium.webdriver as webdriver
from google_alerts import GoogleAlerts

PY2 = False
if sys.version_info[0] < 3:
    PY2 = True

__author__ = "Brandon Dixon"
__copyright__ = "Copyright, Brandon Dixon"
__credits__ = ["Brandon Dixon"]
__license__ = "MIT"
__maintainer__ = "Brandon Dixon"
__email__ = "brandon@9bplus.com"
__status__ = "BETA"

AUTH_COOKIE_NAME = 'SIDCC'
CONFIG_PATH = os.path.expanduser('~/.config/google_alerts')
CONFIG_FILE = os.path.join(CONFIG_PATH, 'config.json')
SESSION_FILE = os.path.join(CONFIG_PATH, 'session')
CONFIG_DEFAULTS = {'email': '', 'password': '', 'py2': PY2}


def obfuscate(p, action):
    """Obfuscate the auth details to avoid easy snatching.

    It's best to use a throw away account for these alerts to avoid having
    your authentication put at risk by storing it locally.

    NOTE(review): this is reversible encoding (base64 / Vigenere-style
    shift), not encryption -- it only deters casual inspection.
    `action` is 'store' to encode, anything else to decode.
    """
    key = "ru7sll3uQrGtDPcIW3okutpFLo6YYtd5bWSpbZJIopYQ0Du0a1WlhvJOaZEH"
    s = list()
    if action == 'store':
        if PY2:
            for i in range(len(p)):
                kc = key[i % len(key)]
                ec = chr((ord(p[i]) + ord(kc)) % 256)
                s.append(ec)
            return base64.urlsafe_b64encode("".join(s))
        else:
            return base64.urlsafe_b64encode(p.encode()).decode()
    else:
        if PY2:
            e = base64.urlsafe_b64decode(p)
            for i in range(len(e)):
                kc = key[i % len(key)]
                dc = chr((256 + ord(e[i]) - ord(kc)) % 256)
                s.append(dc)
            return "".join(s)
        else:
            e = base64.urlsafe_b64decode(p)
            return e.decode()


def main():
    """Run the core."""
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')
    setup_parser = subs.add_parser('setup')
    setup_parser.add_argument('-e', '--email', dest='email', required=True,
                              help='Email of the Google user.', type=str)
    setup_parser.add_argument('-p', '--password', dest='pwd', required=True,
                              help='Password of the Google user.', type=str)
    setup_parser = subs.add_parser('seed')
    setup_parser.add_argument('-d', '--driver', dest='driver',
                              required=True, type=str,
                              help='Location of the Chrome driver. This can be downloaded by visiting http://chromedriver.chromium.org/downloads',)
    setup_parser.add_argument('-t', '--timeout', dest='timeout',
                              required=False, type=int, default=20)
    setup_parser = subs.add_parser('list')
    setup_parser = subs.add_parser('create')
    setup_parser.add_argument('-t', '--term', dest='term', required=True,
                              help='Term to store.', type=str)
    setup_parser.add_argument('--exact', dest='exact', action='store_true',
                              help='Exact matches only for term.')
    setup_parser.add_argument('-d', '--delivery', dest='delivery',
                              required=True, choices=['rss', 'mail'],
                              help='Delivery method of results.')
    setup_parser.add_argument('-f', '--frequency', dest='frequency',
                              default="realtime", choices=['realtime', 'daily', 'weekly'],
                              help='Frequency to send results. RSS only allows for realtime alerting.')
    setup_parser = subs.add_parser('delete')
    setup_parser.add_argument('--id', dest='term_id', required=True,
                              help='ID of the term to find for deletion.',
                              type=str)
    args = parser.parse_args()
    if args.cmd == 'setup':
        if not os.path.exists(CONFIG_PATH):
            os.makedirs(CONFIG_PATH)
        # BUG FIX: the original passed `open(...)` straight into json.dump /
        # json.load and never closed the handles; use context managers.
        if not os.path.exists(CONFIG_FILE):
            with open(CONFIG_FILE, 'w') as fh:
                json.dump(CONFIG_DEFAULTS, fh, indent=4,
                          separators=(',', ': '))
        config = CONFIG_DEFAULTS
        config['email'] = args.email
        config['password'] = str(obfuscate(args.pwd, 'store'))
        with open(CONFIG_FILE, 'w') as fh:
            json.dump(config, fh, indent=4, separators=(',', ': '))
    with open(CONFIG_FILE) as fh:
        config = json.load(fh)
    if config.get('py2', PY2) != PY2:
        raise Exception("Python versions have changed. Please run `setup` again to reconfigure the client.")
    if config['password'] == '':
        raise Exception("Run setup before any other actions!")
    if args.cmd == 'seed':
        config['password'] = obfuscate(str(config['password']), 'fetch')
        ga = GoogleAlerts(config['email'], config['password'])
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_experimental_option("excludeSwitches", ['enable-automation'])
        caps = webdriver.DesiredCapabilities.CHROME.copy()
        caps['acceptInsecureCerts'] = True
        with contextlib.closing(webdriver.Chrome(args.driver, options=chrome_options)) as driver:
            driver.get('https://stackoverflow.com/users/signup?ssrc=head&returnurl=%2fusers%2fstory%2fcurrent%27')
            time.sleep(3)
            driver.find_element_by_xpath('//*[@id="openid-buttons"]/button[1]').click()
            driver.find_element_by_xpath('//input[@type="email"]').send_keys(config['email'])
            driver.find_element_by_xpath('//*[@id="identifierNext"]').click()
            time.sleep(3)
            driver.find_element_by_xpath('//input[@type="password"]').send_keys(config['password'])
            driver.find_element_by_xpath('//*[@id="passwordNext"]').click()
            time.sleep(3)
            driver.get('https://www.google.com/alerts')
            print("[*] Filled in password and submitted.")
            print("[!] Waiting for the authentication cookie or %d seconds" % args.timeout)
            # BUG FIX: `cookies` was undefined after the loop when
            # --timeout 0 was passed; initialize it before polling.
            cookies = []
            for _ in range(0, args.timeout):
                cookies = driver.get_cookies()
                if [x for x in cookies if x['name'] == AUTH_COOKIE_NAME]:
                    break
                time.sleep(1)
            collected = dict()
            for cookie in cookies:
                collected[str(cookie['name'])] = str(cookie['value'])
            with open(SESSION_FILE, 'wb') as f:
                pickle.dump(collected, f, protocol=2)
            print("[$] Session has been seeded, google-alerts is ready for use.")
    if args.cmd == 'list':
        config['password'] = obfuscate(str(config['password']), 'fetch')
        ga = GoogleAlerts(config['email'], config['password'])
        ga.authenticate()
        print(json.dumps(ga.list(), indent=4))
    if args.cmd == 'create':
        config['password'] = obfuscate(str(config['password']), 'fetch')
        ga = GoogleAlerts(config['email'], config['password'])
        ga.authenticate()
        # 'realtime' is default, force it
        alert_frequency = 'as_it_happens'
        if args.frequency == 'daily':
            alert_frequency = 'at_most_once_a_day'
        if args.frequency == 'weekly':
            alert_frequency = 'at_most_once_a_week'
        monitor = ga.create(args.term, {'delivery': args.delivery.upper(),
                                        'alert_frequency': alert_frequency.upper(),
                                        'exact': args.exact})
        print(json.dumps(monitor, indent=4))
    if args.cmd == 'delete':
        config['password'] = obfuscate(str(config['password']), 'fetch')
        ga = GoogleAlerts(config['email'], config['password'])
        ga.authenticate()
        result = ga.delete(args.term_id)
        if result:
            print("%s was deleted" % args.term_id)


if __name__ == '__main__':
    main()

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to running your first automation test, through following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run Behave automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful