How to use the run_abort method in autotest

Best Python code snippets using autotest_python

configure_run.py

Source: configure_run.py (GitHub)


#!/usr/bin/python
from __future__ import print_function
__docformat__ = "restructuredtext en"
import subprocess
import logging
import time
import re
import os
from htsworkflow.pipelines.retrieve_config import \
    CONFIG_SYSTEM, CONFIG_USER, \
    FlowCellNotFound, getCombinedOptions, saveConfigFile, WebError404
from htsworkflow.pipelines.genome_mapper import DuplicateGenome, getAvailableGenomes, constructMapperDict
from htsworkflow.pipelines.run_status import GARunStatus
from pyinotify import WatchManager, ThreadedNotifier
from pyinotify import EventsCodes, ProcessEvent

LOGGER = logging.getLogger(__name__)

class ConfigInfo:
    def __init__(self):
        #run_path = firecrest analysis directory to run analysis from
        self.run_path = None
        self.bustard_path = None
        self.config_filepath = None
        self.status = None
        #top level directory where all analyses are placed
        self.base_analysis_dir = None
        #analysis_dir, top level analysis dir...
        # base_analysis_dir + '/070924_USI-EAS44_0022_FC12150'
        self.analysis_dir = None

    def createStatusObject(self):
        """
        Creates a status object which can be queried for
        status of running the pipeline
        returns True if object created
        returns False if object cannot be created
        """
        if self.config_filepath is None:
            return False
        self.status = GARunStatus(self.config_filepath)
        return True

####################################
# inotify event processor
s_firecrest_finished = re.compile('Firecrest[0-9\._\-A-Za-z]+/finished.txt')
s_bustard_finished = re.compile('Bustard[0-9\._\-A-Za-z]+/finished.txt')
s_gerald_finished = re.compile('GERALD[0-9\._\-A-Za-z]+/finished.txt')
s_gerald_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/Bustard[0-9\._\-A-Za-z]+/GERALD[0-9\._\-A-Za-z]+/')
s_bustard_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/Bustard[0-9\._\-A-Za-z]+/')
s_firecrest_all = re.compile('Firecrest[0-9\._\-A-Za-z]+/')

class RunEvent(ProcessEvent):
    def __init__(self, conf_info):
        self.run_status_dict = {'firecrest': False,
                                'bustard': False,
                                'gerald': False}
        self._ci = conf_info
        ProcessEvent.__init__(self)

    def process_IN_CREATE(self, event):
        fullpath = os.path.join(event.path, event.name)
        if s_finished.search(fullpath):
            LOGGER.info("File Found: %s" % (fullpath))
            if s_firecrest_finished.search(fullpath):
                self.run_status_dict['firecrest'] = True
                self._ci.status.updateFirecrest(event.name)
            elif s_bustard_finished.search(fullpath):
                self.run_status_dict['bustard'] = True
                self._ci.status.updateBustard(event.name)
            elif s_gerald_finished.search(fullpath):
                self.run_status_dict['gerald'] = True
                self._ci.status.updateGerald(event.name)
            #WARNING: The following order is important!!
            # Firecrest regex will catch all gerald, bustard, and firecrest
            # Bustard regex will catch all gerald and bustard
            # Gerald regex will catch all gerald
            # So, order needs to be Gerald, Bustard, Firecrest, or this
            # won't work properly.
            elif s_gerald_all.search(fullpath):
                self._ci.status.updateGerald(event.name)
            elif s_bustard_all.search(fullpath):
                self._ci.status.updateBustard(event.name)
            elif s_firecrest_all.search(fullpath):
                self._ci.status.updateFirecrest(event.name)
        #print "Create: %s" % (os.path.join(event.path, event.name))

    def process_IN_DELETE(self, event):
        #print "Remove %s" % (os.path.join(event.path, event.name))
        pass

#FLAGS
# Config Step Error
RUN_ABORT = 'abort'
# Run Step Error
RUN_FAILED = 'failed'

#####################################
# Configure Step (goat_pipeline.py)
#Info
s_start = re.compile('Starting Genome Analyzer Pipeline')
s_gerald = re.compile("[\S\s]+--GERALD[\S\s]+--make[\S\s]+")
s_generating = re.compile('^Generating journals, Makefiles')
s_seq_folder = re.compile('^Sequence folder: ')
s_seq_folder_sub = re.compile('want to make ')
s_stderr_taskcomplete = re.compile('^Task complete, exiting')
#Errors
s_invalid_cmdline = re.compile('Usage:[\S\s]*goat_pipeline.py')
s_species_dir_err = re.compile('Error: Lane [1-8]:')
s_goat_traceb = re.compile("^Traceback \(most recent call last\):")
s_missing_cycles = re.compile('^Error: Tile s_[1-8]_[0-9]+: Different number of cycles: [0-9]+ instead of [0-9]+')
SUPPRESS_MISSING_CYCLES = False
##Ignore - Example of output above each ignore regex.
#NOTE: Commenting out an ignore will cause it to be
# logged as DEBUG with the logging module.
#CF_STDERR_IGNORE_LIST = []
s_skip = re.compile('s_[0-8]_[0-9]+')

##########################################
# Pipeline Run Step (make -j8 recursive)
##Info
s_finished = re.compile('finished')
##Errors
s_make_error = re.compile('^make[\S\s]+Error')
s_no_gnuplot = re.compile('gnuplot: command not found')
s_no_convert = re.compile('^Can\'t exec "convert"')
s_no_ghostscript = re.compile('gs: command not found')
##Ignore - Example of output above each ignore regex.
#NOTE: Commenting out an ignore will cause it to be
# logged as DEBUG with the logging module.
#
PL_STDERR_IGNORE_LIST = []
# Info: PF 11802
PL_STDERR_IGNORE_LIST.append( re.compile('^Info: PF') )
# About to analyse intensity file s_4_0101_sig2.txt
PL_STDERR_IGNORE_LIST.append( re.compile('^About to analyse intensity file') )
# Will send output to standard output
PL_STDERR_IGNORE_LIST.append( re.compile('^Will send output to standard output') )
# Found 31877 clusters
PL_STDERR_IGNORE_LIST.append( re.compile('^Found [0-9]+ clusters') )
# Will use quality criterion ((CHASTITY>=0.6)
PL_STDERR_IGNORE_LIST.append( re.compile('^Will use quality criterion') )
# Quality criterion translated to (($F[5]>=0.6))
PL_STDERR_IGNORE_LIST.append( re.compile('^Quality criterion translated to') )
# opened /woldlab/trog/data1/king/070924_USI-EAS44_0022_FC12150/Data/C1-36_Firecrest1.9.1_14-11-2007_king.4/Bustard1.9.1_14-11-2007_king/s_4_0101_qhg.txt
# AND
# opened s_4_0103_qhg.txt
PL_STDERR_IGNORE_LIST.append( re.compile('^opened[\S\s]+qhg.txt') )
# 81129 sequences out of 157651 passed filter criteria
PL_STDERR_IGNORE_LIST.append( re.compile('^[0-9]+ sequences out of [0-9]+ passed filter criteria') )

def pl_stderr_ignore(line):
    """
    Searches lines for lines to ignore (i.e. not to log)
    returns True if line should be ignored
    returns False if line should NOT be ignored
    """
    for s in PL_STDERR_IGNORE_LIST:
        if s.search(line):
            return True
    return False

def config_stdout_handler(line, conf_info):
    """
    Processes each line of output from GOAT
    and stores useful information using the logging module
    Loads useful information into conf_info as well, for future
    use outside the function.
    returns True if found condition that signifies success.
    """
    # Skip irrelevant line (without logging)
    if s_skip.search(line):
        pass
    # Detect invalid command-line arguments
    elif s_invalid_cmdline.search(line):
        LOGGER.error("Invalid commandline options!")
    # Detect starting of configuration
    elif s_start.search(line):
        LOGGER.info('START: Configuring pipeline')
    # Detect it made it past invalid arguments
    elif s_gerald.search(line):
        LOGGER.info('Running make now')
    # Detect that make files have been generated (based on output)
    elif s_generating.search(line):
        LOGGER.info('Make files generated')
        return True
    # Capture run directory
    elif s_seq_folder.search(line):
        mo = s_seq_folder_sub.search(line)
        #Output changed when using --tiles=<tiles>
        # at least in pipeline v0.3.0b2
        if mo:
            firecrest_bustard_gerald_makefile = line[mo.end():]
            firecrest_bustard_gerald, junk = \
                os.path.split(firecrest_bustard_gerald_makefile)
            firecrest_bustard, junk = os.path.split(firecrest_bustard_gerald)
            firecrest, junk = os.path.split(firecrest_bustard)
            conf_info.bustard_path = firecrest_bustard
            conf_info.run_path = firecrest
        #Standard output handling
        else:
            print('Sequence line:', line)
            mo = s_seq_folder.search(line)
            conf_info.bustard_path = line[mo.end():]
            conf_info.run_path, temp = os.path.split(conf_info.bustard_path)
    # Log all other output for debugging purposes
    else:
        LOGGER.warning('CONF:?: %s' % (line))
    return False

def config_stderr_handler(line, conf_info):
    """
    Processes each line of output from GOAT
    and stores useful information using the logging module
    Loads useful information into conf_info as well, for future
    use outside the function.
    returns RUN_ABORT upon detecting failure;
            True on success message;
            False if neutral message
              (i.e. doesn't signify failure or success)
    """
    global SUPPRESS_MISSING_CYCLES
    # Detect invalid species directory error
    if s_species_dir_err.search(line):
        LOGGER.error(line)
        return RUN_ABORT
    # Detect goat_pipeline.py traceback
    elif s_goat_traceb.search(line):
        LOGGER.error("Goat config script died, traceback in debug output")
        return RUN_ABORT
    # Detect indication of successful configuration (from stderr; odd, but ok)
    elif s_stderr_taskcomplete.search(line):
        LOGGER.info('Configure step successful (from: stderr)')
        return True
    # Detect missing cycles
    elif s_missing_cycles.search(line):
        # Only display error once
        if not SUPPRESS_MISSING_CYCLES:
            LOGGER.error("Missing cycles detected; Not all cycles copied?")
            LOGGER.debug("CONF:STDERR:MISSING_CYCLES: %s" % (line))
            SUPPRESS_MISSING_CYCLES = True
        return RUN_ABORT
    # Log all other output as debug output
    else:
        LOGGER.debug('CONF:STDERR:?: %s' % (line))
    # Neutral (not failure; nor success)
    return False

#def pipeline_stdout_handler(line, conf_info):
#  """
#  Processes each line of output from running the pipeline
#  and stores useful information using the logging module
#
#  Loads useful information into conf_info as well, for future
#  use outside the function.
#
#  returns True if found condition that signifies success.
#  """
#
#  #f.write(line + '\n')
#
#  return True

def pipeline_stderr_handler(line, conf_info):
    """
    Processes each line of stderr from pipeline run
    and stores useful information using the logging module
    ##FIXME: Future feature (doesn't actually do this yet)
    #Loads useful information into conf_info as well, for future
    #use outside the function.
    returns RUN_FAILED upon detecting failure;
            #True on success message; (no clear success state)
            False if neutral message
              (i.e. doesn't signify failure or success)
    """
    if pl_stderr_ignore(line):
        pass
    elif s_make_error.search(line):
        LOGGER.error("make error detected; run failed")
        return RUN_FAILED
    elif s_no_gnuplot.search(line):
        LOGGER.error("gnuplot not found")
        return RUN_FAILED
    elif s_no_convert.search(line):
        LOGGER.error("imagemagick's convert command not found")
        return RUN_FAILED
    elif s_no_ghostscript.search(line):
        LOGGER.error("ghostscript not found")
        return RUN_FAILED
    else:
        LOGGER.debug('PIPE:STDERR:?: %s' % (line))
    return False

def retrieve_config(conf_info, flowcell, cfg_filepath, genome_dir):
    """
    Gets the config file from server...
    requires config file in:
      /etc/ga_frontend/ga_frontend.conf
    or
      ~/.ga_frontend.conf
    with:
      [config_file_server]
      base_host_url: http://host:port
    return True if successful, False if failure
    """
    options = getCombinedOptions()
    if options.url is None:
        LOGGER.error("%s or %s missing base_host_url option" % \
                     (CONFIG_USER, CONFIG_SYSTEM))
        return False
    try:
        saveConfigFile(flowcell, options.url, cfg_filepath)
        conf_info.config_filepath = cfg_filepath
    except FlowCellNotFound as e:
        LOGGER.error(e)
        return False
    except WebError404 as e:
        LOGGER.error(e)
        return False
    except IOError as e:
        LOGGER.error(e)
        return False
    except Exception as e:
        LOGGER.error(e)
        return False
    f = open(cfg_filepath, 'r')
    data = f.read()
    f.close()
    genome_dict = getAvailableGenomes(genome_dir)
    mapper_dict = constructMapperDict(genome_dict)
    LOGGER.debug(data)
    f = open(cfg_filepath, 'w')
    f.write(data % (mapper_dict))
    f.close()
    return True

def configure(conf_info):
    """
    Attempts to configure the GA pipeline using goat.
    Uses logging module to store information about status.
    returns True if configuration successful, otherwise False.
    """
    #ERROR Test:
    #pipe = subprocess.Popen(['goat_pipeline.py',
    #                         '--GERALD=config32bk.txt',
    #                         '--make .',],
    #                         #'.'],
    #                        stdout=subprocess.PIPE,
    #                        stderr=subprocess.PIPE)
    #ERROR Test (2), causes goat_pipeline.py traceback
    #pipe = subprocess.Popen(['goat_pipeline.py',
    #                         '--GERALD=%s' % (conf_info.config_filepath),
    #                         '--tiles=s_4_100,s_4_101,s_4_102,s_4_103,s_4_104',
    #                         '--make',
    #                         '.'],
    #                        stdout=subprocess.PIPE,
    #                        stderr=subprocess.PIPE)

    ##########################
    # Run configuration step
    # Not a test; actual configure attempt.
    #pipe = subprocess.Popen(['goat_pipeline.py',
    #                         '--GERALD=%s' % (conf_info.config_filepath),
    #                         '--make',
    #                         '.'],
    #                        stdout=subprocess.PIPE,
    #                        stderr=subprocess.PIPE)
    stdout_filepath = os.path.join(conf_info.analysis_dir,
                                   "pipeline_configure_stdout.txt")
    stderr_filepath = os.path.join(conf_info.analysis_dir,
                                   "pipeline_configure_stderr.txt")
    fout = open(stdout_filepath, 'w')
    ferr = open(stderr_filepath, 'w')
    pipe = subprocess.Popen(['goat_pipeline.py',
                             '--GERALD=%s' % (conf_info.config_filepath),
                             '--make',
                             conf_info.analysis_dir],
                            stdout=fout,
                            stderr=ferr)
    print("Configuring pipeline: %s" % (time.ctime()))
    error_code = pipe.wait()

    # Clean up
    fout.close()
    ferr.close()

    ##################
    # Process stdout
    fout = open(stdout_filepath, 'r')
    stdout_line = fout.readline()
    complete = False
    while stdout_line != '':
        # Handle stdout
        if config_stdout_handler(stdout_line, conf_info):
            complete = True
        stdout_line = fout.readline()
    fout.close()

    #error_code = pipe.wait()
    if error_code:
        LOGGER.error('Received error_code: %s' % (error_code))
    else:
        LOGGER.info('We are go for launch!')

    #Process stderr
    ferr = open(stderr_filepath, 'r')
    stderr_line = ferr.readline()
    abort = 'NO!'
    stderr_success = False
    while stderr_line != '':
        stderr_status = config_stderr_handler(stderr_line, conf_info)
        if stderr_status == RUN_ABORT:
            abort = RUN_ABORT
        elif stderr_status is True:
            stderr_success = True
        stderr_line = ferr.readline()
    ferr.close()

    #Success requirements:
    # 1) The stdout completed without error
    # 2) The program exited with status 0
    # 3) No errors found in stdout
    print('#Expect: True, False, True, True')
    print(complete, bool(error_code), abort != RUN_ABORT, stderr_success is True)
    status = complete is True and \
             bool(error_code) is False and \
             abort != RUN_ABORT and \
             stderr_success is True

    # If everything was successful, but for some reason
    # we didn't retrieve the path info, log it.
    if status is True:
        if conf_info.bustard_path is None or conf_info.run_path is None:
            LOGGER.error("Failed to retrieve run_path")
            return False
    return status

def run_pipeline(conf_info):
    """
    Run the pipeline and monitor status.
    """
    # Fail if the run_path doesn't actually exist
    if not os.path.exists(conf_info.run_path):
        LOGGER.error('Run path does not exist: %s' \
                     % (conf_info.run_path))
        return False

    # Change cwd to run_path
    stdout_filepath = os.path.join(conf_info.analysis_dir, 'pipeline_run_stdout.txt')
    stderr_filepath = os.path.join(conf_info.analysis_dir, 'pipeline_run_stderr.txt')

    # Create status object
    conf_info.createStatusObject()

    # Monitor file creation
    wm = WatchManager()
    mask = EventsCodes.IN_DELETE | EventsCodes.IN_CREATE
    event = RunEvent(conf_info)
    notifier = ThreadedNotifier(wm, event)
    notifier.start()
    wdd = wm.add_watch(conf_info.run_path, mask, rec=True)

    # Log pipeline starting
    LOGGER.info('STARTING PIPELINE @ %s' % (time.ctime()))

    # Start the pipeline (and hide!)
    #pipe = subprocess.Popen(['make',
    #                         '-j8',
    #                         'recursive'],
    #                        stdout=subprocess.PIPE,
    #                        stderr=subprocess.PIPE)
    fout = open(stdout_filepath, 'w')
    ferr = open(stderr_filepath, 'w')
    pipe = subprocess.Popen(['make',
                             '--directory=%s' % (conf_info.run_path),
                             '-j8',
                             'recursive'],
                            stdout=fout,
                            stderr=ferr)
                            #shell=True)
    # Wait for run to finish
    retcode = pipe.wait()

    # Clean up
    notifier.stop()
    fout.close()
    ferr.close()

    # Process stderr
    ferr = open(stderr_filepath, 'r')
    run_failed_stderr = False
    for line in ferr:
        err_status = pipeline_stderr_handler(line, conf_info)
        if err_status == RUN_FAILED:
            run_failed_stderr = True
    ferr.close()

    # Finished file check!
    print('RUN SUCCESS CHECK:')
    for key, value in event.run_status_dict.items():
        print(' %s: %s' % (key, value))
    dstatus = event.run_status_dict

    # Success or failure check
    status = (retcode == 0) and \
             run_failed_stderr is False and \
             dstatus['firecrest'] is True and \
             dstatus['bustard'] is True and \
             dstatus['gerald'] is True
    ...
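
In configure_run.py, RUN_ABORT is a string flag ('abort') rather than a method: config_stderr_handler() returns it when goat_pipeline.py's stderr shows a fatal configuration error, and configure() compares each handler result against it to decide whether the run may proceed. The following is a minimal, self-contained sketch of that same sentinel pattern; the regex, sample lines, and function names here are illustrative stand-ins, not part of htsworkflow.

import logging
import re

LOGGER = logging.getLogger(__name__)

# Sentinel values, mirroring the flags in configure_run.py
RUN_ABORT = 'abort'
RUN_FAILED = 'failed'

# Hypothetical fatal-error pattern, used only for this sketch
s_fatal = re.compile(r'^Error:')

def stderr_handler(line):
    """Return RUN_ABORT on a fatal line, True on a success line, False otherwise."""
    if s_fatal.search(line):
        LOGGER.error(line.rstrip())
        return RUN_ABORT
    if line.startswith('Task complete'):
        return True
    return False

def check_configure(stderr_lines):
    """Scan captured stderr; the run only proceeds if nothing returned RUN_ABORT."""
    abort = False
    success = False
    for line in stderr_lines:
        status = stderr_handler(line)
        if status == RUN_ABORT:
            abort = True
        elif status is True:
            success = True
    return (not abort) and success

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    sample = ['Error: Lane 3: species directory missing\n',
              'Task complete, exiting\n']
    # Prints False: the abort sentinel wins even though a success line was seen.
    print('configure ok?', check_configure(sample))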


deploy.py

Source: deploy.py (GitHub)


...
    """
    Aborts deploy if lock file is found
    """

    def run_abort():
        abort("Another deploy in progress")

    echo_subtask("Checking `deploy.lock` file presence")
    with settings(hide('warnings'), warn_only=True):
        if run('test -f %s' % '/'.join([fetch('deploy_to'), 'deploy.lock'])).succeeded:
            if fetch('ask_unlock_if_locked', default_value=False):
                if not confirm('Deploy lockfile exists. Continue?'):
                    run_abort()
            else:
                run_abort()

def lock():
    """
    Locks deploy
    Parallel deploys are not allowed
    """
    ensure('build_to')
    echo_subtask("Creating `deploy.lock` file")
    create_entity(
        '/'.join([fetch('deploy_to'), 'deploy.lock']),
        entity_type='file',
        protected=False
    )

def create_build_path():
    """...
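
Here run_abort() is a small nested helper that wraps Fabric's abort() with a fixed message, so the lock-file check can bail out of the deploy from two different branches. Below is a minimal, local-only sketch of the same guard using just the standard library; the DEPLOY_TO path and the check_lock() name are hypothetical, and the real task uses Fabric's run()/fetch() rather than os.path.

import os
import sys

# Illustrative paths only; the original derives this from fetch('deploy_to')
DEPLOY_TO = '/var/www/myapp'
LOCKFILE = os.path.join(DEPLOY_TO, 'deploy.lock')

def run_abort(message="Another deploy in progress"):
    """Stop the deploy immediately, mirroring the nested run_abort() above."""
    sys.stderr.write('ABORT: %s\n' % message)
    sys.exit(1)

def check_lock(ask_if_locked=False):
    """Abort if a previous deploy left deploy.lock behind."""
    if os.path.exists(LOCKFILE):
        if ask_if_locked:
            answer = input('Deploy lockfile exists. Continue? [y/N] ')
            if answer.strip().lower() != 'y':
                run_abort()
        else:
            run_abort()

if __name__ == '__main__':
    check_lock(ask_if_locked=False)
    print('No lock found; safe to deploy.')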


cli.py

Source: cli.py (GitHub)


...
    systemd_template = "\n".join(systemd_template.split("\n")[1:-1])
    out.write(systemd_template)
    out.close()
    click.secho(f"Wrote systemd service file to {out.name}", fg="green", bold=True)
    run_abort("systemctl daemon-reload")
    service = os.path.basename(out.name)
    run_abort(f"systemctl start {service}")
    click.secho(f"Started {service}", fg="green", bold=True)
    # run_abort(f"systemctl unmask {service}")
    run_abort(f"systemctl enable {service}")
    click.secho(f"Enabled {service}", fg="green", bold=True)

def run_abort(cmd):
    try:
        subprocess.run(cmd.split(), check=True)
    except subprocess.CalledProcessError as ex:
        ...
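
The except block above is cut off by the snippet viewer. A reasonable completion, assuming the intent is simply to report the failed command and exit with its return code, is sketched below; the error reporting here is an assumption, not the original code, which uses click for its output.

import subprocess
import sys

def run_abort(cmd):
    """Run a shell command and abort the CLI if it exits non-zero."""
    try:
        subprocess.run(cmd.split(), check=True)
    except subprocess.CalledProcessError as ex:
        # Assumed completion of the truncated handler: report and bail out.
        sys.stderr.write("Command failed (%s): %s\n" % (ex.returncode, cmd))
        sys.exit(ex.returncode)

if __name__ == '__main__':
    # Example: any failing command triggers the abort path.
    run_abort("ls /nonexistent-path")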


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, covering everything from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run autotest automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

