How to use the scavenge method in prospector

Best Python code snippet using prospector_python

scr_scavenge.py

Source: scr_scavenge.py (GitHub)


...
from scrjob.scr_environment import SCR_Env
from scrjob.resmgrs import AutoResourceManager
from scrjob.launchers import AutoJobLauncher
# (the elided lines above presumably also import os, argparse, time, and
# scrjob's scr_const and SCR_Param, all of which are used below)
# check for pdsh / (clustershell) errors in case any nodes should be retried
def scr_scavenge(nodeset_job=None,
                 nodeset_up='',
                 nodeset_down='',
                 dataset_id=None,
                 cntldir=None,
                 prefixdir=None,
                 verbose=False,
                 scr_env=None,
                 log=None):
    """This script is invoked to perform a scavenge operation
    scr_scavenge.py is a wrapper to gather values and arrange parameters needed for
    a scavenge operation.
    When ready, the scavenge parameters are passed to the Joblauncher class to perform
    the scavenge operation.
    The output of the scavenge operation is both written to file and printed to screen.
    This script returns 1 if a needed value or parameter could not be determined,
    and returns 0 otherwise
    """
    # check that we have a nodeset for the job and directories to read from / write to
    if nodeset_job is None or dataset_id is None or cntldir is None or prefixdir is None:
        return 1
    bindir = scr_const.X_BINDIR
    # TODO: need to be able to set these defaults via config settings somehow
    # for now just hardcode the values
    if scr_env is None:
        scr_env = SCR_Env(prefix=prefixdir)
    if scr_env.param is None:
        scr_env.param = SCR_Param()
    if scr_env.resmgr is None:
        scr_env.resmgr = AutoResourceManager()
    if scr_env.launcher is None:
        scr_env.launcher = AutoJobLauncher()
    # lookup buffer size and crc flag via scr_param
    param = scr_env.param
    buf_size = os.environ.get('SCR_FILE_BUF_SIZE')
    if buf_size is None:
        buf_size = str(1024 * 1024)
    crc_flag = os.environ.get('SCR_CRC_ON_FLUSH')
    if crc_flag is None:
        crc_flag = '--crc'
    elif crc_flag == '0':
        crc_flag = ''
    start_time = int(time())
    # tag output files with jobid
    jobid = scr_env.resmgr.getjobid()
    if jobid is None:
        print('scr_scavenge: ERROR: Could not determine jobid.')
        return 1
    # build the output filenames
    dset_dir = scr_env.dir_dset(dataset_id)
    output = os.path.join(dset_dir, 'scr_scavenge.pdsh.o' + jobid)
    error = os.path.join(dset_dir, 'scr_scavenge.pdsh.e' + jobid)
    if verbose:
        print('scr_scavenge: nodeset_up = ' + nodeset_up)
        print('scr_scavenge: nodeset_down = ' + nodeset_down)
    # format up and down nodesets for the scavenge command
    nodeset_up, nodeset_down = scr_env.resmgr.get_scavenge_nodelists(
        upnodes=nodeset_up, downnodes=nodeset_down)
    if verbose:
        print('scr_scavenge: upnodes = ' + nodeset_up)
        print('scr_scavenge: downnodes_spaced = ' + nodeset_down)
    # log the start of the scavenge operation
    if log:
        log.event('SCAVENGE_START', dset=dataset_id)
    print('scr_scavenge: ' + str(int(time())))
    # have the launcher class gather files via pdsh or clustershell
    consoleout = scr_env.launcher.scavenge_files(prog=bindir + '/scr_copy',
                                                 upnodes=nodeset_up,
                                                 downnodes_spaced=nodeset_down,
                                                 cntldir=cntldir,
                                                 dataset_id=dataset_id,
                                                 prefixdir=prefixdir,
                                                 buf_size=buf_size,
                                                 crc_flag=crc_flag)
    # print outputs to screen
    try:
        os.makedirs('/'.join(output.split('/')[:-1]), exist_ok=True)
        with open(output, 'w') as outfile:
            outfile.write(consoleout[0])
        if verbose:
            print('scr_scavenge: stdout: cat ' + output)
            print(consoleout[0])
    except Exception as e:
        print(str(e))
        print('scr_scavenge: ERROR: Unable to write stdout to \"' + output + '\"')
    try:
        with open(error, 'w') as outfile:
            outfile.write(consoleout[1])
        if verbose:
            print('scr_scavenge: stderr: cat ' + error)
            print(consoleout[1])
    except Exception as e:
        print(str(e))
        print('scr_scavenge: ERROR: Unable to write stderr to \"' + error + '\"')
    # TODO: if we knew the total bytes, we could register a transfer here in addition to an event
    # get a timestamp for logging timing values
    end_time = int(time())
    diff_time = end_time - start_time
    if log:
        log.event('SCAVENGE_END', dset=dataset_id, secs=diff_time)
    return 0
if __name__ == '__main__':
    parser = argparse.ArgumentParser(add_help=False,
                                     argument_default=argparse.SUPPRESS,
                                     prog='scr_scavenge')
    parser.add_argument('-h',
                        '--help',
                        action='store_true',
                        help='Show this help message and exit.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        default=False,
                        help='Verbose output.')
    parser.add_argument('-j',
                        '--jobset',
                        metavar='<nodeset>',
                        type=str,
                        default=None,
                        help='Specify the nodeset.')
    parser.add_argument('-u',
                        '--up',
                        metavar='<nodeset>',
                        type=str,
                        default=None,
                        help='Specify up nodes.')
    parser.add_argument('-d',
                        '--down',
                        metavar='<nodeset>',
                        type=str,
                        default=None,
                        help='Specify down nodes.')
    parser.add_argument('-i',
                        '--id',
                        metavar='<id>',
                        type=str,
                        default=None,
                        help='Specify the dataset id.')
    parser.add_argument('-f',
                        '--from',
                        metavar='<dir>',
                        type=str,
                        default=None,
                        help='The control directory.')
    parser.add_argument('-t',
                        '--to',
                        metavar='<dir>',
                        type=str,
                        default=None,
                        help='The prefix directory.')
    args = vars(parser.parse_args())
    if 'help' in args:
        parser.print_help()
    elif args['jobset'] is None or args['id'] is None or args[
            'from'] is None or args['to'] is None:
        parser.print_help()
        print('Required arguments: --jobset --id --from --to')
    else:
        ret = scr_scavenge(nodeset_job=args['jobset'],
                           nodeset_up=args['up'],
                           nodeset_down=args['down'],
                           dataset_id=args['id'],
                           cntldir=args['from'],
                           prefixdir=args['to'],
                           verbose=args['verbose'],
                           scr_env=None)
...
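For orientation, here is a minimal sketch of driving the wrapper directly from Python, assuming scr_scavenge.py is on your PYTHONPATH. The node sets, dataset id, and directories below are invented placeholders for a real cluster:

from scr_scavenge import scr_scavenge

# All argument values here are hypothetical placeholders.
rc = scr_scavenge(nodeset_job='node[1-4]',        # nodeset the job ran on
                  nodeset_up='node[1-3]',         # nodes still reachable
                  nodeset_down='node4',           # node that went down
                  dataset_id='6',                 # dataset to scavenge
                  cntldir='/dev/shm/scr.1234',    # node-local control directory
                  prefixdir='/lustre/user/run1',  # parallel file system prefix
                  verbose=True)
print('scavenge ' + ('failed' if rc == 1 else 'succeeded'))

The __main__ block above exposes the same values as command-line flags (--jobset, --up, --down, --id, --from, --to), so the equivalent invocation can also be done from a shell.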


optimization_run.py

Source: optimization_run.py (GitHub)


#!/usr/bin/env python3
import os
import sys
import random
import time
from random import seed, randint
import argparse
import platform
from datetime import datetime
# import imp
import glob
from time import sleep
import fileinput
import numpy as np
import pandas as pd
import subprocess
from small_script.myFunctions import *
from collections import defaultdict
parser = argparse.ArgumentParser(description="Compute phis under the optimization folder")
# parser.add_argument("DatabaseFolder", help="your database folder")
# parser.add_argument("OptimizationFolder", help="your optimization folder")
parser.add_argument("-m", "--mode", type=int, default=0)
parser.add_argument("-l", "--label", type=str, default="label")
parser.add_argument("-s", "--subMode", type=int, default=0)
parser.add_argument("-n", "--n_decoys", type=int, default=10)
args = parser.parse_args()
# if args.test:
#     do = print
# else:
#     do = os.system
with open('log_optimization_run.txt', 'a') as f:
    f.write(' '.join(sys.argv))
    f.write('\n')
from pyCodeLib import *
import warnings
warnings.filterwarnings('ignore')
scavenge_slurm = '''\
#!/bin/bash
#SBATCH --job-name=CTBP_WL
#SBATCH --account=commons
#SBATCH --partition=scavenge
#SBATCH --ntasks=1
#SBATCH --threads-per-core=1
#SBATCH --cpus-per-task=1
#SBATCH --mem-per-cpu=1G
#SBATCH --time=04:00:00
#SBATCH --mail-user=luwei0917@gmail.com
#SBATCH --mail-type=FAIL
#SBATCH -o outs/slurm-%j.out
echo "My job ran on:"
echo $SLURM_NODELIST
srun {}\n'''
base_slurm = '''\
#!/bin/bash
#SBATCH --job-name=CTBP_WL
#SBATCH --account=ctbp-common
#SBATCH --partition=ctbp-common
#SBATCH --ntasks=1
#SBATCH --threads-per-core=1
#SBATCH --mem-per-cpu=1G
#SBATCH --time=1-00:00:00
#SBATCH --mail-user=luwei0917@gmail.com
#SBATCH --mail-type=FAIL
#SBATCH -o outs/slurm-%j.out
echo "My job ran on:"
echo $SLURM_NODELIST
srun {}\n'''
n_decoys = args.n_decoys
separateDecoysNum = -1
# template = base_slurm
template = scavenge_slurm
def do(cmd, get=False, show=True):
    if get:
        out = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True).communicate()[0].decode()
        if show:
            print(out, end="")
        return out
    else:
        return subprocess.Popen(cmd, shell=True).wait()
cd = os.chdir
def slurmRun(slurmFileName, cmd, template=scavenge_slurm, memory=1):
    with open(slurmFileName, "w") as out:
        out.write(template.format(cmd))
        # out.write(scavenge_slurm.format(f"python3 ~/opt/compute_phis.py -m 1 proteins_name_list/proteins_name_list_{name}.txt"))
    replace(slurmFileName, "#SBATCH --mem-per-cpu=1G", f"#SBATCH --mem-per-cpu={memory}G")
    a = getFromTerminal(f"sbatch {slurmFileName}")
    jobId = a.split(" ")[-1].strip()
    return jobId
def waitForJobs(jobIdList, sleepInterval=30):
    from datetime import datetime as dt
    if len(jobIdList) == 0:
        return
    previousJobNotFinished = True
    while previousJobNotFinished:
        print(f"Waiting for previous jobs {jobIdList}", dt.now())
        time.sleep(sleepInterval)
        previousJobNotFinished = False
        a = getFromTerminal("squeue -u wl45")
        for jobId in jobIdList:
            if jobId in a:
                previousJobNotFinished = True
    print("Continue Next Script")
if args.mode == 1:
    # time.sleep(36000)
    with open("protein_list") as f:
        content = f.readlines()
    pos = 0
    i = 0
    n = len(content)
    # n = 100 # for testing
    while pos < n:
        with open(f"proteins_name_list/proteins_name_list_{i}.txt", "w") as out:
            for ii in range(1):
                if pos < n:
                    out.write(content[pos])
                    pos += 1
        i += 1
    print(i)
    n = i
    i = 0
    jobIdList = []
    for i in range(n):
        proteins = f"proteins_name_list/proteins_name_list_{i}.txt"
        # generate_decoy_sequences(proteins, methods=['shuffle'], num_decoys=[n_decoys], databaseLocation="../../../")
        jobId = slurmRun(f"slurms/run_{i}.slurm", f"python3 ~/opt/optimization_run.py -m 222 -l {proteins} -s {args.subMode}", template=template)
        # jobId = slurmRun(f"slurms/run_{i}.slurm", f"python3 ~/opt/compute_phis.py -m 0 proteins_name_list/proteins_name_list_{i}.txt")
        jobIdList.append(jobId)
        do(f"cat {proteins} >> iter0_complete.txt")
    # exit()
    waitForJobs(jobIdList, sleepInterval=300)
    with open(f"slurms/run_on_scavenge.slurm", "w") as out:
        out.write(base_slurm.format(f"python3 ~/opt/optimization_run.py -m 4 -s {args.subMode}"))
    replace(f"slurms/run_on_scavenge.slurm", "#SBATCH --mem-per-cpu=1G", "#SBATCH --mem-per-cpu=60G")
    do(f"sbatch slurms/run_on_scavenge.slurm")
if args.mode == 22:
    protein = args.label
    do(f"python3 ~/opt/compute_phis.py -m 0 {protein}")
    # do("python3 ~/opt/compute_phis.py -m 0 test_protein")
if args.mode == 222:
    proteins = args.label
    if args.subMode == 0:
        evaluate_phis_over_training_set_for_native_structures_Wei(proteins, "phi_list.txt",
            decoy_method='shuffle', max_decoys=1e+10, tm_only=False, num_processors=1, separateDecoysNum=separateDecoysNum)
    if args.subMode == 1:
        evaluate_phis_over_training_set_for_decoy_structures_Wei(proteins, "phi_list.txt",
            decoy_method='rosetta', max_decoys=1e+10, tm_only=False, num_processors=1, pickle=True, withBiased=False, mode=0)
if args.mode == 3:
    with open(f"slurms/run_on_scavenge.slurm", "w") as out:
        out.write(scavenge_slurm.format(f"python3 ~/opt/optimization_run.py -m 4"))
    replace(f"slurms/run_on_scavenge.slurm", "#SBATCH --mem-per-cpu=1G", "#SBATCH --mem-per-cpu=60G")
    do(f"sbatch slurms/run_on_scavenge.slurm")
if args.mode == 4:
    if args.subMode == 0:
        decoy_method = "shuffle"
    if args.subMode == 1:
        decoy_method = "rosetta"
    # complete_proteins = "iter0.txt"
    complete_proteins = "protein_list"
    # A, B, gamma, filtered_B, filtered_gamma, filtered_lamb, P, lamb = calculate_A_B_and_gamma_parallel(complete_proteins, "phi_list.txt", decoy_method='shuffle',
    #     num_decoys=1000, noise_filtering=True, jackhmmer=False, subset=None, read=2)
    A, B, gamma, filtered_B, filtered_gamma, filtered_lamb, P, lamb = calculate_A_B_and_gamma_wl45(complete_proteins, "phi_list.txt", decoy_method=decoy_method,
        num_decoys=n_decoys, noise_filtering=True, jackhmmer=False, read=False, mode=0, multiSeq=False)
# if args.mode == 44:
#     with open(f"slurms/run_on_scavenge.slurm", "w") as out:
#         out.write(base_slurm.format(f"python3 ~/opt/gg_server.py -d jun10 -m 4"))
#         replace(f"slurms/run_on_scavenge.slurm", "#SBATCH --mem-per-cpu=1G", "#SBATCH --mem-per-cpu=60G")
...
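The pattern this script leans on throughout is: fill a SLURM batch template, submit it with sbatch, then poll squeue until the submitted job ids disappear. Here is a self-contained sketch of that pattern that avoids the helpers imported from small_script.myFunctions (replace, getFromTerminal); the partition, user name, file names, and work command are invented placeholders:

import subprocess
import time

# A cut-down stand-in for the scavenge_slurm template above.
slurm_template = '''\
#!/bin/bash
#SBATCH --partition=scavenge
#SBATCH --ntasks=1
#SBATCH --time=04:00:00
srun {}
'''

def submit(slurm_file, cmd):
    # Write the filled-in template, submit it, and return the job id.
    # sbatch prints "Submitted batch job <id>", so the id is the last token.
    with open(slurm_file, 'w') as out:
        out.write(slurm_template.format(cmd))
    result = subprocess.run(['sbatch', slurm_file], capture_output=True, text=True)
    return result.stdout.split()[-1]

def wait_for(job_ids, user='myuser', interval=30):
    # Poll squeue until none of the submitted ids appear in the listing.
    while True:
        queue = subprocess.run(['squeue', '-u', user],
                               capture_output=True, text=True).stdout
        if not any(job_id in queue for job_id in job_ids):
            return
        time.sleep(interval)

job = submit('run_0.slurm', 'python3 work.py')  # hypothetical work script
wait_for([job])

Note that matching job ids by substring in the squeue output mirrors the original waitForJobs and can in principle false-match an id prefix; asking squeue for bare ids (squeue -h -o %i) would make the comparison exact.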


__init__.py

Source: __init__.py (GitHub)


from ew.static import cfg as ewcfg
from . import juviecmds

cmd_map = {

    # Crush a poudrin to get some slime.
    ewcfg.cmd_crush: juviecmds.crush,
    ewcfg.cmd_crush_alt1: juviecmds.crush,

    # move from juvenile to one of the armies (rowdys or killers)
    ewcfg.cmd_enlist: juviecmds.enlist,
    ewcfg.cmd_renounce: juviecmds.renounce,

    # gives slime to the miner (message.author)
    ewcfg.cmd_mine: juviecmds.mine,

    # flags a vein as dangerous
    ewcfg.cmd_flag: juviecmds.flag,

    # scavenging
    ewcfg.cmd_scavenge: juviecmds.scavenge,
    ewcfg.cmd_scavenge_alt1: juviecmds.scavenge,
    ewcfg.cmd_scavenge_alt2: juviecmds.scavenge,

    # LOL
    ewcfg.cmd_juviemode: juviecmds.juviemode,

    # gender??? OMG???
    ewcfg.cmd_identify: juviecmds.identify,
...
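This cmd_map is the lookup-table half of a command dispatcher: the bot resolves an incoming command token to a handler function. A minimal sketch of the other half, with invented tokens and a stub handler standing in for the juviecmds functions:

# Invented stand-ins for the ewcfg constants and juviecmds handlers above.
def scavenge(message):
    print(message, 'goes scavenging')

cmd_map = {
    '!scavenge': scavenge,
    '!scav': scavenge,  # alias, like cmd_scavenge_alt1
}

def dispatch(token, message):
    # Look up the handler for the command token; ignore unknown commands.
    handler = cmd_map.get(token.lower())
    if handler is not None:
        handler(message)

dispatch('!scavenge', 'player_1')  # prints: player_1 goes scavenging

Mapping several aliases to the same function, as the original does for cmd_scavenge, cmd_scavenge_alt1, and cmd_scavenge_alt2, keeps the dispatch loop a single dictionary lookup.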


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with test automation frameworks such as Selenium, Cypress, and TestNG.


You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run prospector automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

