How to use _run_task method in lisa

Best Python code snippets using the LISA Python framework

test_sdtimeaverage.py

Source: test_sdtimeaverage.py (GitHub)

copy

Full Screen

...145 os.system('rm -rf ' + "TEST-*.ms")146##############147# Run Task148##############149 def _run_task(self, auxArgs=None):150 print("_run_task::starts")151 if auxArgs is not None:152 for k in auxArgs:153 self.args[k] = auxArgs[k]154 # Execution.155 # if success, returns True156 # if any error, returns False.157 try:158 return sdtimeaverage(**self.args)159 except Exception:160 return False161#################162# Check Result163#################164 def _checkZero(self, data):165 '''166 check all the results must be zero.167 See _generate_data() to see Test Data.168 '''169 print("-- checking Zero --")170 check = numpy.all(numpy.abs(data) < errLimit)171 self.assertTrue(check, msg='## Zero check Failed ##\n{}'.format(data))172 def _checkNonZero(self, data):173 '''174 check sum of each averaged result175 check non Zero.176 '''177 print("-- checking Non Zero --")178 check = numpy.all(numpy.abs(data) >= errLimit)179 self.assertTrue(180 check,181 msg='## Non Zero check Failed (ref={}) ##\n{}'.format(182 errLimit,183 data))184 def _checkZeroSum(self, data1, data2):185 '''186 check sum of each averaged result187 check Zero.188 '''189 print("-- checking ZeroSum of data1 and data2.")190 zSumData = numpy.abs(numpy.array(data1) + numpy.array(data2))191 check = numpy.all(zSumData < errLimit)192 self.assertTrue(193 check,194 msg='## Zero Sum check Failed (ref={})\n{}'.format(195 errLimit,196 zSumData))197######################198# check time199######################200 def _checkTime(self, msName, row, refTime):201 '''202 check time of specified row,203 compare value with the reference.204 '''205 print("-- checking Time --")206 # get time and inspection.207 self. 
_get_main(msName)208 # one output209 time = self.tm[row]210 # check Time211 check = (time == refTime)212 self.assertTrue(213 check,214 msg='## Time is Invalid.##\n val={} ref={}'.format(215 time,216 refTime))217######################218# check Output219######################220 def _checkOutputRec(self, msName, refNRow):221 '''222 check calculated result record count,223 compare value with the expected count.224 '''225 print("-- checking Output Record Count --")226 # get time227 self. _get_main(msName)228 # count output rows229 nrow = len(self.tm)230 # check231 check = (nrow == refNRow)232 self.assertTrue(233 check,234 msg='## Row Count in Output is Invalid.##\n val={} ref={}'.format(235 nrow,236 refNRow))237######################238# check scan239######################240 def _check_scan(self, out_msname, refValue):241 '''242 check 'scan'243 number of output must 1,244 compare value with expected value.245 '''246 print("-- checking scan selection --")247 # get table248 self. _get_main(out_msname)249 # get one value from row=0250 scan = self.sc[0]251 # check scan ID252 self.assertTrue(len(self.sc) == 1,253 msg='## unexpected number of output. ##\n {}'.format(len(self.sc)))254 self.assertTrue(scan == refValue,255 msg='## unexpected scan no. in output. 
##\n {}'.format(scan))256########################257# check Weight/Spectra258########################259 # Check Wait and Sigma260 def _checkWeightSigma(self, msName, row, weight_ref):261 '''262 check Sigma and Weight263 compare 'weight' with expected value.264 Sigma is mathematically inspected.265 '''266 print("-- checking Weight and Sigma --")267 self._get_spectra(msName, row)268 print("Weight Ref :{0}".format(weight_ref))269 print("Weight :{0}".format(self.wgt))270 print("Sigma :{0}".format(self.sgm))271 # Check (based on Formula about Sigma and Weight) #272 check1 = (self.wgt[0] == weight_ref)273 check2 = (self.wgt[1] == weight_ref)274 check3 = ((1.0 / self.wgt[0]) -275 (self.sgm[0] * self.sgm[0]) < errLimit2)276 check4 = ((1.0 / self.wgt[1]) -277 (self.sgm[1] * self.sgm[1]) < errLimit2)278 # Assert279 self.assertTrue(280 check1, msg='## Weight[0] is unexpected. ##\n {}/{}'.format(self.wgt[0], weight_ref))281 self.assertTrue(282 check2, msg='## Weight[1] is unexpected. ##\n {}/{}'.format(self.wgt[1], weight_ref))283 self.assertTrue(284 check3,285 msg='## Sigma [0] is unexpected. ##\n sigma={}, weight={}'.format(286 self.sgm[0], self.wgt[0]))287 self.assertTrue(288 check4,289 msg='## Sigma [1] is unexpected. 
##\n sigma={}, weight={}'.format(290 self.sgm[1], self.wgt[1]))291##################################292# Read Data from Specified MS293##################################294 # MAIN #295 def _get_main(self, msname):296 # get MAIN table data297 with tbmanager(msname) as tb:298 # Key data299 self.tm = tb.getcol('TIME')300 self.a1 = tb.getcol('ANTENNA1')301 self.a2 = tb.getcol('ANTENNA2')302 self.sc = tb.getcol('SCAN_NUMBER')303 self.fd = tb.getcol('FIELD_ID')304 # DATA (spectra) #305 def _get_spectra(self, msname, row):306 with tbmanager(msname) as tb:307 # Spectra Data308 self.data = tb.getcell('FLOAT_DATA', row)309 self.wgt = tb.getcell('WEIGHT', row)310 self.sgm = tb.getcell('SIGMA', row)311 return self.data312#####################################313# Generate Data on FLOAT_DATA column314#####################################315 def _generate_data(self, msName, stateOption=False):316 print("----- Generating MS.")317 # Column Name318 dataColumnName = 'FLOAT_DATA'319 # if non-zero value is set on 'offset', an Intentional Fail is raised.320 offset = 0.0321 # Value parameters322 slope = 0.5 # (tunable, but error threshold subject to be changed.)323 baseTime = 0.0 # gives start time in JD.324 # Table Access (with numPy array operation)325 with tbmanager(msName, nomodify=False) as tb:326 # reduce MS row size if reduce size is specified.327 if (nReduce != 0):328 print("----- reducing rows count in Test-MS.")329 rows = list(range(nReduce))330 tb.removerows(rows)331 # show nRow in MS.332 NN = tb.nrows() # NN MUST BE same as nRow333 print("Nrow = {}".format(NN))334 # initialize STATE_ID (option)335 # (ex) state =0,1,...numOfState-1 ,0,1,.....336 if stateOption:337 print("------ stateOption Active, putting three STATE_IDs on the MS. 
")338 arrayState = numpy.mod(numpy.arange(0, NN), numOfState)339 tb.putcol("STATE_ID", arrayState)340 # get array shape of Spectra Data, by getcolshapestring(),341 # returned string is like:"[2,1024]", via list.342 tk = re.split(r",|\[|\]", # specify delimiter as; [ , ]343 tb.getcolshapestring(344 dataColumnName,345 nrow=1)[0])346 # separating to :: <zero> [<1st> ,<2nd*> ]<3rd>347 nChan = int(tk[2])348 # separating to :: <zero> [<1st*> ,<2nd> ]<3rd>349 # nPol = int(tk[1]) (not used, reserved)350 # create array (time, interval)351 arrayTime = testInterval * \352 numpy.arange(0, NN, dtype=numpy.float64) + baseTime353 arrayInterval = numpy.full(NN, testInterval, dtype=numpy.float64)354 # put to column (from numpy array)355 print("------ Putting Time,INTERVAL.")356 tb.putcol("TIME", arrayTime)357 tb.putcol("INTERVAL", arrayInterval)358 # create Test-Data359 print("------ Calculating Curve.")360 NN1 = (NN - 1) / 2361 L = numpy.linspace(-NN1, NN1, NN) * slope + offset362 VAL = numpy.tile(L, [nChan, 1])363 arrayData3 = numpy.array([VAL, VAL])364 # write to the column at once365 print("------ Putting Curve.")366 tb.putcol(dataColumnName, arrayData3)367 print("------ Done.")368 # set telescope name on MS.369 def _set_telescopename(self, msName, telName):370 print("------ changing Telscope Name. ")371 msObservation = msName + '/OBSERVATION'372 with tbmanager(msObservation, nomodify=False) as tb:373 tb.putcell('TELESCOPE_NAME', 0, telName)374 # tb.resync()375##################################376# sub function for TEST FIXTURE377##################################378 def _check_averaged_result_N1(self, outMsName):379 '''380 This function inspects the Averaged result-MS.381 All the spectral data will be averaged. 
One averaged result remains, which must be Zero.382 '''383 # get the result and inspect #384 fData = self._get_spectra(outMsName, 0) # use row=0 from RESULT385 self._checkZero(fData)386 # Ref time387 refTime = (nRow - 1) / 2 * testInterval388 self._checkTime(outMsName, 0, refTime)389 # Weight, Sigma390 self._checkWeightSigma(outMsName, 0, nRow)391 def _check_averaged_result_N3(self, outMsName):392 '''393 This function inspects the Averaged result-MS by 3 averaged results.394 Three sections are Averaged. 1st.result and 3rd.result are different sign and sum =0395 The 2nd section makes Zero Sum.396 Note: In Test-MS, the test data is designed by 'Odd functional' curve.397 '''398 #399 self._get_main(outMsName)400 check = (len(self.tm) == 3)401 self.assertTrue(402 check, msg='## Unexpected Result Count ##\n count={}'.format(len(self.tm)))403 # get the result #404 fData0 = self._get_spectra(outMsName, 0) # result on row=0405 fData1 = self._get_spectra(outMsName, 1) # row=1406 fData2 = self._get_spectra(outMsName, 2) # row=2407 # Inspection #408 # Following two sections must be different sign.409 self._checkZeroSum(fData0, fData2)410 self._checkZero(fData1) # must be zero411 # Ref Time in 3 sections (becomes centre of the section)412 Tref0 = testInterval * (nRow / 3 - 1.0) / 2413 Tref1 = Tref0 + testInterval * (nRow / 3)414 Tref2 = Tref1 + testInterval * (nRow / 3)415 # check Time416 self._checkTime(outMsName, 0, Tref0)417 self._checkTime(outMsName, 1, Tref1)418 self._checkTime(outMsName, 2, Tref2)419 # check Weight, Sigma420 self._checkWeightSigma(outMsName, 0, (nRow / 3))421 self._checkWeightSigma(outMsName, 1, (nRow / 3))422 self._checkWeightSigma(outMsName, 2, (nRow / 3))423 def _check_averaged_result_N3TimeSpan(self, outMsName):424 '''425 This is for TimeSpan (when number of state=3)426 '''427 # check result record count. 
(must be same as state count)428 self._get_main(outMsName)429 check = (len(self.tm) == 3)430 self.assertTrue(431 check, msg='## Unexpected Result Count. ##\n count={}'.format(len(self.tm)))432 # get the result #433 fData0 = self._get_spectra(outMsName, 0) # result on row=0434 fData1 = self._get_spectra(outMsName, 1) # row=1435 fData2 = self._get_spectra(outMsName, 2) # row=2436 # Inspection437 # The sum of three section data must be zero (particular in TimeSpan test)438 self._checkZero(fData0 + fData1 + fData2)439 # check Weight, Sigma440 self._checkWeightSigma(outMsName, 0, (nRow / 3))441 self._checkWeightSigma(outMsName, 1, (nRow / 3))442 self._checkWeightSigma(outMsName, 2, (nRow / 3))443 def _check_averaged_result_N61(self, outMsName):444 '''445 This is for TiimeSpan (when scan=state, 61 results are inspected.)446 see numOfScan447 '''448 print("outfile ={} specified.".format(outMsName))449 # check Zero Sum450 for n in range(numOfScan):451 # symmetricaly get the data. These sum must be Zero #452 fData_1 = self._get_spectra(outMsName, n)453 fData_2 = self._get_spectra(outMsName, (numOfScan - 1) - n)454 self._checkZeroSum(fData_1, fData_2)455##############456# MISC457##############458 def _set_outfile_timebin(self, testNo, numRec):459 strTimeBin = '{}s'.format(numRec * testInterval)460 outFile = defPrivateMsForm.format(testNo, numRec)461 return outFile, strTimeBin462############################463# TEST FIXTURE464############################465#466# TIME RANGE467#468 def test_param00(self):469 '''sdtimeagerage::00:: timerange = 00:00:00~01:04:03 NORMAL (3843s same as in MS)'''470 # set timebin string and private outputMS name.471 privateOutfile, dmy = self._set_outfile_timebin(0, nRow)472 # Time string (justify to interval and row counts)473 td = datetime.timedelta(seconds=testInterval * nRow)474 timerangeString = '00:00:00~' + str(td) # '01:04:03' =3848 #475 # Run Task476 prm = {'timerange': timerangeString,477 'timebin': '',478 'infile': defWorkMsBasic,479 
'outfile': privateOutfile} # Specify Full-Range #480 self._run_task(prm)481 # Check Result (zerosum check)482 self._get_spectra(privateOutfile, 0) # row=0483 self._checkZero(self.data)484 self._checkOutputRec(privateOutfile, 1)485 def test_param01E(self):486 '''sdtimeagerage::01E:: timerange = 00:00:00~01:00:00 ERROR case(3600s INSUFFICIENT)'''487 # set timebin string and private outputMS name.488 privateOutfile, dmy = self._set_outfile_timebin(1, nRow)489 # Time string (justify to interval and row counts)490 td = datetime.timedelta(491 seconds=nRow *492 testInterval /493 10) # relatively very short494 timerangeString = '00:00:00~' + str(td)495 # Run Task496 prm = {'timerange': timerangeString, # orig set up = '00:00:00~00:4:00',497 'timebin': '',498 'infile': defWorkMsBasic,499 'outfile': privateOutfile} # Specify Full-Range #500 self._run_task(prm)501 # Check Result (zerosum check)502 self._get_spectra(privateOutfile, 0) # row=0503 self._checkNonZero(self.data)504 self._checkOutputRec(privateOutfile, 1)505 def test_param02(self):506 '''sdtimeagerage::02:: timerange = "" (dafault) '''507 # set timebin string and private outputMS name.508 privateOutfile, dmy = self._set_outfile_timebin(2, nRow + 1)509 # Run Task510 prm = {'timerange': '',511 'timebin': '',512 'infile': defWorkMsBasic,513 'outfile': privateOutfile} # Specify Full-Range #514 self._run_task(prm)515 # Check Result (zerosum check)516 self._get_spectra(privateOutfile, 0) # row=0517 self._checkZero(self.data)518 self._checkOutputRec(privateOutfile, 1)519#520# SCAN521#522 def test_para10(self):523 '''sdtimeagerage::10:: scan=2 (Within the range)'''524 # Run Task525 scan_no = 2 # SCAN = 2 #526 prm = {'timebin': '',527 'scan': str(scan_no)}528 self._run_task(prm)529 # check scan530 self._check_scan(defOutputMs, scan_no)531 self._checkOutputRec(defOutputMs, 1)532 def test_param11(self):533 '''sdtimeagerage::11:: scan=61 (Within the range)'''534 # Run Task535 scan_no = 61 # SCAN = 61 #536 prm = {'timebin': 
'',537 'scan': str(scan_no)} # Normal. In range. #538 self.assertTrue(self._run_task(prm))539 # check scan540 self._check_scan(defOutputMs, scan_no)541 self._checkOutputRec(defOutputMs, 1)542 def test_param12E(self):543 '''sdtimeagerage::12E:: scan=62 (Error Out of range) '''544 # Run Task545 prm = {'timebin': '',546 'scan': '62'} # ERROR : out of range in MS #547 self.assertFalse(self._run_task(prm))548 def test_param13(self):549 '''sdtimeagerage::13:: scan='' (no number) Default action. '''550 # set timebin string and private outputMS name.551 privateOutfile, dmy = self._set_outfile_timebin(13, nRow)552 prm = {'timebin': '',553 'scan': '',554 'infile': defWorkMsBasic,555 'outfile': privateOutfile}556 # Run Task557 self._run_task(prm)558 # Check Result (zerosum check)559 self._check_averaged_result_N1(privateOutfile)560 self._checkOutputRec(privateOutfile, 1)561#562# FIELD563#564 def test_param20(self):565 '''sdtimeaverage::20:: field = 'FLS3a*' (Exact NAME)'''566 prm = {'field': 'FLS3a*'}567 # Run Task and check568 self.assertTrue(self._run_task(prm))569 self._checkOutputRec(defOutputMs, 1)570 def test_param21E(self):571 '''sdtimeaverage::21E:: field = 'hoge*' (Error :Bad NAME)'''572 prm = {'field': 'hoge'}573 # Run Task and check574 self.assertFalse(self._run_task(prm))575 def test_param22(self):576 '''sdtimeaverage::22:: field = '*' (OK : wildcard)'''577 prm = {'field': '*'}578 # Run Task and check579 self.assertTrue(self._run_task(prm))580 self._checkOutputRec(defOutputMs, 1)581 def test_param23(self):582 '''sdtimeaverage::23:: field = '' (OK : default)'''583 prm = {'field': ''}584 # Run Task and check585 self.assertTrue(self._run_task(prm))586 self._checkOutputRec(defOutputMs, 1)587#588# SPW589#590 def test_param30(self):591 '''sdtimeaverage::30:: spw = '0' (exist)'''592 prm = {'spw': '0'}593 # Run Task and check594 self.assertTrue(self._run_task(prm))595 self._checkOutputRec(defOutputMs, 1)596 def test_param31E(self):597 '''sdtimeaverage::31E:: spw = '9' 
(Error: Not Exist)'''598 prm = {'spw': '9'}599 # Run Task and check600 self.assertFalse(self._run_task(prm))601 def test_param32(self):602 '''sdtimeaverage::32:: spw = '' (default)'''603 prm = {'spw': ''}604 # Run Task and check605 self.assertTrue(self._run_task(prm))606 self._checkOutputRec(defOutputMs, 1)607 def test_param33(self):608 '''sdtimeaverage::33:: spw = '*' (OK: Wildcard)'''609 prm = {'spw': ''}610 # Run Task and check611 self.assertTrue(self._run_task(prm))612 self._checkOutputRec(defOutputMs, 1)613#614# ANTENNA615#616 def test_param40(self):617 '''sdtimeaverage::40:: antenna = 'GBT' (Exact name without &&&)'''618 prm = {'antenna': 'GBT'}619 # Run Task and check620 self.assertTrue(self._run_task(prm))621 self._checkOutputRec(defOutputMs, 1)622 def test_param41(self):623 '''sdtimeaverage::41:: antenna = 'GBT&&&' (Fully given)'''624 prm = {'antenna': 'GBT&&&'}625 # Run Task and check626 self.assertTrue(self._run_task(prm))627 self._checkOutputRec(defOutputMs, 1)628 def test_param42E(self):629 '''sdtimeaverage::42E antenna = 'gBT' (Error: Bad name) '''630 prm = {'antenna': 'gBT'}631 # Run Task and check632 self.assertFalse(self._run_task(prm))633 def test_param43E(self):634 '''sdtimeaverage::42E antenna = 'gBT&&&' (Error: Bad name with &&&) '''635 prm = {'antenna': 'gBT&&&'}636 # Run Task and check637 self.assertFalse(self._run_task(prm))638#639# TIMEBIN(1) (generating Average)640#641 def test_param100(self):642 '''sdtimeaverage::100:: timebin=1282(N=3) '''643 # set timebin string and private outputMS name.644 privateOutfile, timebin_str = self._set_outfile_timebin(645 100, nRow / 3 + 0.5) # if reduced cond. 
0.0 is needed #646 prm = {'timebin': timebin_str,647 'infile': defWorkMsBasic,648 'outfile': privateOutfile}649 # Run Task and check650 self.assertTrue(self._run_task(prm))651 # Check Result (zerosum check)652 self._check_averaged_result_N3(privateOutfile)653 self._checkOutputRec(privateOutfile, 3)654 def test_param101(self):655 '''sdtimeaverage::101: timebin=3846(N=1), timebin='' '''656 # set timebin string and private outputMS name.657 privateOutfile, timebin_str = self._set_outfile_timebin(101, nRow + 3)658 prm = {'timebin': timebin_str, # Immediate Value ,659 'infile': defWorkMsBasic,660 'outfile': privateOutfile}661 # Run Task and check662 self.assertTrue(self._run_task(prm))663 # Check Result (zerosum check)664 self._check_averaged_result_N1(privateOutfile)665 self._checkOutputRec(privateOutfile, 1)666 def test_param103(self):667 '''sdtimeaverage::103: timebin=3846(N=1), timebin='all' '''668 # set timebin string and private outputMS name.669 privateOutfile, dmy = self._set_outfile_timebin(103, nRow)670 prm = {'timebin': 'all', # default = all is applied.671 'infile': defWorkMsBasic,672 'outfile': privateOutfile}673 # Run Task and check674 self.assertTrue(self._run_task(prm))675 # Check Result (zerosum check)676 self._check_averaged_result_N1(privateOutfile)677 self._checkOutputRec(privateOutfile, 1)678#679# TIMEBIN(2) (arguments handling)680#681 def test_param110(self):682 '''sdtimeagerage::110:: timebin='all' '''683 prm = {'timebin': 'all'}684 # Run Task and check685 self.assertTrue(self._run_task(prm))686 self._checkOutputRec(defOutputMs, 1)687 def test_param111(self):688 '''sdtimeagerage::111:: timebin='ALL' '''689 # Run Task690 prm = {'timebin': 'ALL'}691 # Run Task and check692 self.assertTrue(self._run_task(prm))693 self._checkOutputRec(defOutputMs, 1)694 def test_param112(self):695 '''sdtimeagerage::112:: timebin='' (default) '''696 # Run Task697 prm = {'timebin': ''}698 # Run Task and check699 self.assertTrue(self._run_task(prm))700 
self._checkOutputRec(defOutputMs, 1)701 def test_param113E(self):702 '''sdtimeagerage::113E:: timebin='Alles' (ERROR: Bad keyword) '''703 # Run Task704 prm = {'timebin': 'Alles'}705 # Run Task and check706 self.assertFalse(self._run_task(prm))707 def test_param114(self):708 '''sdtimeagerage::114:: timebin='aLL' (OK: Upper/Lower case mixed) '''709 # Run Task710 prm = {'timebin': 'aLL'}711 # Run Task and check712 self.assertTrue(self._run_task(prm))713 self._checkOutputRec(defOutputMs, 1)714 def test_param115(self):715 '''sdtimeagerage::115:: timebin='0' (No averaging, not an Error) '''716 # Run Task717 prm = {'timebin': '0'}718 # Run Task and check719 self.assertTrue(self._run_task(prm))720 # No averaging, original rows remain.721 self._checkOutputRec(defOutputMs, nRowOrg)722 def test_param116(self):723 '''sdtimeagerage::115:: timebin='-1' (Error. Not acceptable) '''724 # Run Task725 prm = {'timebin': '-1'}726 # Run Task and check727 self.assertFalse(self._run_task(prm))728#729# DATACOLUMN (alternative column selection )730#731 def test_param50(self):732 '''sdtimeaverage::50:: MS= 'float_data' arg = 'float_data' (NORMAL) '''733 prm = {'infile': defWorkMs3NRO,734 'outfile': "TEST-50.ms",735 'datacolumn': 'float_data'}736 # Run Task and check737 self.assertTrue(self._run_task(prm))738 def test_param51(self):739 '''sdtimeaverage::51:: MS= 'float_data' arg = 'data' (Column Switch) '''740 prm = {'infile': defWorkMs3NRO,741 'outfile': "TEST-51.ms",742 'datacolumn': 'data'}743 # Run Task and check744 self.assertTrue(self._run_task(prm))745 def test_param52(self):746 '''sdtimeaverage::52:: MS= 'data' arg = 'float_data' (Column Switch) '''747 prm = {'infile': defWorkMs3ALMA,748 'outfile': "TEST-52.ms",749 'datacolumn': 'float_data'}750 # Run Task and check751 self.assertTrue(self._run_task(prm))752 def test_param53(self):753 '''sdtimeaverage::53:: MS= 'data' arg = 'data' (NORMAL) '''754 prm = {'infile': defWorkMs3ALMA,755 'outfile': "TEST-53.ms",756 'datacolumn': 'data'}757 
# Run Task and check758 self.assertTrue(self._run_task(prm))759#760# ALMA Specific Behavior in mstransform. (depending on telescope name)761#762 def test_param60Nobeyama(self):763 '''sdtimeaverage::60 Nobeyama:: 'scan' will be applied in mstransform '''764 privateOutfile = "TEST-60-Nobeyama.ms"765 prm = {'infile': defWorkMs3NRO,766 'outfile': privateOutfile,767 'timebin': 'all',768 'timespan': 'scan'}769 # Run Task and check770 self.assertTrue(self._run_task(prm))771 # see directly about infile for detail.772 self._checkOutputRec(privateOutfile, 121)773 def test_param60ALMA(self):774 '''sdtimeaverage::61:: ALMA:: 'scan, state' will be applied in mstransform. '''775 privateOutfile = "TEST-61-ALMA.ms"776 prm = {'infile': defWorkMs3ALMA,777 'outfile': privateOutfile,778 'timebin': 'all',779 'timespan': 'scan'}780 # Run Task and check781 self.assertTrue(self._run_task(prm))782 # 'scan, state' is applied. number of result = 1783 self._checkOutputRec(privateOutfile, 1)784#785# TIMESPAN786#787 def test_param70(self):788 '''sdtimeaverage::70:: timespan="scan" '''789 privateOutfile, dmy = self._set_outfile_timebin(70, nRow)790 prm = {'infile': defWorkMsTimeSpan,791 'timespan': 'scan',792 'outfile': privateOutfile}793 # Run Task and check794 self.assertTrue(self._run_task(prm))795 self._check_averaged_result_N3TimeSpan(privateOutfile)796 # Averaged by each State={0,1,2}. 
In detail, see _generate_data()797 self._checkOutputRec(privateOutfile, numOfState)798 def test_param71(self):799 '''sdtimeaverage::71:: timespan="state" '''800 privateOutfile, dmy = self._set_outfile_timebin(71, nRow)801 prm = {'infile': defWorkMsTimeSpan,802 'timespan': 'state',803 'outfile': privateOutfile}804 # Run Task and check805 self.assertTrue(self._run_task(prm))806 # Check Result (zerosum check)807 self._check_averaged_result_N61(privateOutfile)808 self._checkOutputRec(privateOutfile, numOfScan)809 def test_param72(self):810 '''sdtimeaverage::72:: timespan="scan,state" (WARN) in NRO '''811 privateOutfile, dmy = self._set_outfile_timebin(72, nRow)812 prm = {'infile': defWorkMsTimeSpan,813 'timespan': 'scan,state', # scan and state are specified. #814 'outfile': privateOutfile}815 # Run Task and check816 self.assertTrue(self._run_task(prm))817 self._check_averaged_result_N1(privateOutfile)818 self._checkOutputRec(privateOutfile, 1)819 def test_param73(self):820 '''sdtimeaverage::73:: timespan="state,scan" (WARN) in NRO '''821 privateOutfile, dmy = self._set_outfile_timebin(73, nRow)822 prm = {'infile': defWorkMsTimeSpan,823 'timespan': 'state,scan', # opposite keywords location #824 'outfile': privateOutfile}825 # Run Task and check826 self.assertTrue(self._run_task(prm))827 self._check_averaged_result_N1(privateOutfile)828 self._checkOutputRec(privateOutfile, 1)829 def test_param74E(self):830 '''sdtimeaverage::74E:: timespan="hoge" '''831 privateOutfile, dmy = self._set_outfile_timebin(79, nRow)832 prm = {'infile': defWorkMsTimeSpan,833 'timespan': 'hoge',834 'outfile': privateOutfile}835 # Run Task and check836 self.assertTrue(self._run_task(prm))837 def test_param75(self):838 '''sdtimeaverage::75:: timespan="" '''839 privateOutfile, dmy = self._set_outfile_timebin(75, nRow)840 prm = {'infile': defWorkMsTimeSpan,841 'timespan': '',842 'outfile': privateOutfile}843 # Run Task and check844 self.assertTrue(self._run_task(prm))845 # Averaged results846 
print("numOfScan ={}, numOfState={}".format(numOfScan, numOfState))847 expected_count = numOfScan * numOfState848 self._checkOutputRec(privateOutfile, expected_count)849def suite():850 return [test_sdtimeaverage]851if is_CASA6:852 if __name__ == '__main__':...

Full Screen

Full Screen

oval_office_2.py

Source:oval_office_2.py Github

copy

Full Screen

# --- oval_office_2.py: reconstructed interior (scraped text cleaned up) ---
# NOTE(review): the head of _connect_to_system (before "config.cluster, ...")
# and the final @click.option decorator of the last command run past the
# visible chunk; they are omitted here rather than guessed.

def _run_task(task):
    """Loops through the job stages in order.

    :param task: Class specifying the operations to be performed.
    :type task: tasks.Task
    """
    for stage in tasks.stages:
        getattr(task, stage)()


@click.group()
@pass_config
def cli(config):
    """Top-level CLI group; initializes the shared config object."""
    config.initialize()


@cli.command()
@pass_config
def compile_specfem3d_globe(config):
    """Compiles the solver on the remote machine.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['CompileSpecfem3dGlobe'](system, config)
    _run_task(task)


@cli.command()
@pass_config
def setup_specfem_directories(config):
    """Sets up the directories needed for a specfem run.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['SetupSpecfemDirectories'](system, config)
    _run_task(task)


@cli.command()
@pass_config
def generate_cmt_solutions(config):
    """Generates the CMTSOLUTION file in specfem3d_globe format.

    Reads in the CMT solution template, and populates it with event-specific
    parameters. Then, copies the formatted files to the correct directories
    on the remote machine.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['GenerateCmtSolutions'](system, config)
    _run_task(task)


@cli.command()
@click.option("--regenerate-data-cache", is_flag=True)
@pass_config
def generate_stations_files(config, regenerate_data_cache):
    """Generates the STATIONS file in specfem3d_globe format.

    Generating the proper stations file requires a decent amount of work. This
    is because each station needs to be checked against each event, so that
    only stations that are online for a given event are simulated. Because
    this check is relatively, but not too, expensive, we do it locally. So,
    the LASIF StationXML directory is downloaded and cached.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['GenerateStationsFiles'](system, config, regenerate_data_cache)
    _run_task(task)


@cli.command()
@pass_config
def copy_binaries(config):
    """Copies compiled binaries to relevant scratch directories.

    For each SPECFEM3D_GLOBE run, compiled solver binaries, along with
    information regarding topography, etc., are required in the run directory.
    This function copies all relevant files from the specfem source directory.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['CopyBinariesToRunDirectory'](system, config)
    _run_task(task)


@cli.command()
@pass_config
def copy_raw_data(config):
    """Copies raw data to the remote LASIF project."""
    system = _connect_to_system(config)
    task = tasks.task_map['CopyRawData'](system, config)
    _run_task(task)


@cli.command()
@pass_config
def copy_mseeds(config):
    """Copies mseed files to local directory to enable the CompareWaveforms function"""
    system = _connect_to_system(config)
    task = tasks.task_map['CopyMseeds'](system, config)
    _run_task(task)


@cli.command()
@click.option("--nodes", default=1, type=int, help="Total number of nodes.")
@click.option("--ntasks", default=1, type=int, help="Total number of cores.")
@click.option("--time", required=True, type=str, help="Wall time.")
@click.option("--ntasks-per-node", default=1, help="Cores per node.")
@click.option("--cpus-per-task", default=8, help="Threads per core.")
@click.option("--account", default="ch1", help="Account name.")
@click.option("--job-name", default="create_adjoint_sources", help="Name of slurm job.")
@click.option("--output", default="create_adjoint_sources.stdout", help="Capture stdout.")
@click.option("--error", default="create_adjoint_sources.stderr", help="Capture stderr.")
@pass_config
def create_adjoint_sources(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,
                           account, job_name, output, error):
    """Runs the LASIF provided create_adjoint_sources script on preprocessed and synthetic data."""
    # Collect all CLI options into an sbatch parameter dict via the frame's
    # argument values; 'config' is not an sbatch parameter, so drop it.
    _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())
    sbatch_dict.pop("config")
    sbatch_dict["execute"] = 'srun create_adjoint_sources.py'
    system = _connect_to_system(config)
    task = tasks.task_map['createAdjointSources'](system, config, sbatch_dict)
    _run_task(task)


@cli.command()
@click.option("--nodes", default=1, type=int, help="Total number of nodes.")
@click.option("--ntasks", default=1, type=int, help="Total number of cores.")
@click.option("--time", required=True, type=str, help="Wall time.")
@click.option("--ntasks-per-node", default=1, help="Cores per node.")
@click.option("--cpus-per-task", default=8, help="Threads per core.")
@click.option("--account", default="ch1", help="Account name.")
@click.option("--job-name", default="write_adjoint_sources", help="Name of slurm job.")
@click.option("--output", default="write_adjoint_sources.stdout", help="Capture stdout.")
@click.option("--error", default="write_adjoint_sources.stderr", help="Capture stderr.")
@pass_config
def write_adjoint_sources(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,
                          account, job_name, output, error):
    """Runs the LASIF provided write_adjoint_sources script on preprocessed and synthetic data."""
    _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())
    sbatch_dict.pop("config")
    sbatch_dict["execute"] = 'srun write_adjoint_sources.py'
    system = _connect_to_system(config)
    task = tasks.task_map['writeAdjointSources'](system, config, sbatch_dict)
    _run_task(task)


@cli.command()
@click.option("--get-ui", is_flag=True, help="work in progress to get a UI")
@pass_config
def compare_waveforms(config, get_ui):
    """Compares synthetic and preprocessed waveforms and shows selected time windows.

    Use oo_2 copy_mseeds first to run this function"""
    if get_ui:
        system = _connect_to_system(config)
        from tasks.CompareWaveforms import Example
        app = QtGui.QApplication(sys.argv)
        ex = Example(system, config)
        sys.exit(app.exec_())
    else:
        system = _connect_to_system(config)
        task = tasks.task_map['CompareWaveforms'](system, config)
        _run_task(task)


@cli.command()
@click.option("--get-noise-stations-file", is_flag=True, help="If you are using noise_data this gets you a csv file "
                                                              "containing the networks and stations used.")
@click.option("--stations_file", type=click.File(),
              help="Formatted file containing station information.", default=None)
@pass_config
def download_stations(config, stations_file, get_noise_stations_file):
    """Downloads station XML files into local dir"""
    # Fixed: 'is None' / truthiness instead of '== None' / '== False', and
    # print() call form (valid on both Python 2 and 3) instead of the
    # Python-2-only print statement.
    if stations_file is None and not get_noise_stations_file:
        print("I need a stations_file or generate one with get-noise-stations-file")
    else:
        system = _connect_to_system(config)
        task = tasks.task_map['DownloadStations'](system, config, stations_file, get_noise_stations_file)
        _run_task(task)


@cli.command()
@click.option("--stations_file", type=click.File(),
              help="Formatted file containing station information.",
              required=True)
@click.option("--recording_time", help="Recording time (in minutes)",
              default=90)
@pass_config
def download_data(config, stations_file, recording_time):
    """Downloads data from IRIS.

    Given a stations file in the proper format, this script will
    download the appropriate data for a set of Earthquakes queried
    from the LASIF project. By default, the data will be downloaded
    from 5 minutes before the event time, and finish `recording time`
    minutes after the event time.
    """
    system = _connect_to_system(config)
    task = tasks.task_map["DataDownloader"](system, config, stations_file,
                                            recording_time)
    _run_task(task)


@cli.command()
@pass_config
def link_mesh(config):
    """Symlinks the mesh DATABASES_MPI directory to all event directories.

    Each individual event simulation uses the same mesh as is created in the
    MESH subdirectory. This function just places symbolic links in the
    DATABASES_MPI directory of each event simulation directory.
    """
    system = _connect_to_system(config)
    task = tasks.task_map['LinkMesh'](system, config)
    _run_task(task)


@cli.command()
@pass_config
def save_synthetics(config):
    """Saves the consolidated synthetics.mseed files to the LASIF project."""
    system = _connect_to_system(config)
    task = tasks.task_map["SaveSynthetics"](system, config)
    _run_task(task)


@cli.command()
@pass_config
def save_preprocessed_data(config):
    """Saves the consolidated preprocessed_data.mseed files to the LASIF project."""
    system = _connect_to_system(config)
    task = tasks.task_map['SavePreprocessedData'](system, config)
    _run_task(task)


@cli.command()
@click.option("--nodes", default=1, help="Total number of nodes.")
@click.option("--ntasks", default=1, help="Total number of cores.")
@click.option("--time", default="02:00:00", help="Wall time.")
@click.option("--ntasks-per-node", default=1, help="Cores per node.")
@click.option("--cpus-per-task", default=8, help="Threads per core.")
@click.option("--account", default="ch1", help="Account name.")
@click.option("--job-name", default="process_synthetics", help="Name of slurm job.")
@click.option("--output", default="process_synthetics.stdout", help="Capture stdout.")
@click.option("--error", default="process_synthetics.stderr", help="Capture stderr.")
@pass_config
def run_process_synthetics(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,
                           account, job_name, output, error):
    """Process synthetic data from a recent SPECFEM3D_GLOBE forward solve.

    This command submits the scripts/process_synthetics.py file to run on the
    remote machine.
    """
    _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())
    sbatch_dict.pop("config")
    sbatch_dict["execute"] = "srun process_synthetics.py"
    system = _connect_to_system(config)
    task = tasks.task_map["ProcessSynthetics"](system, config, sbatch_dict)
    _run_task(task)


@cli.command()
@click.option("--nodes", required=True, type=int, help="Total number of nodes.")
@click.option("--ntasks", required=True, type=int, help="Total number of cores.")
@click.option("--time", required=True, type=str, help="Wall time.")
@click.option("--ntasks-per-node", default=8, help="Cores per node.")
@click.option("--cpus-per-task", default=1, help="Threads per core.")
@click.option("--account", default="ch1", help="Account name.")
@click.option("--job-name", default="mesher", help="Name of slurm job.")
@click.option("--output", default="mesher.stdout", help="Capture stdout.")
@click.option("--error", default="mesher.stderr", help="Capture stderr.")
@click.option("--model-type", default="CEM", help="CEM or CEM_GLL.")
@pass_config
def run_mesher(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,
               account, job_name, output, error, model_type):
    """Writes and submits the sbatch script for running the SPECFEM3D_GLOBE
    internal mesher.
    """
    _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())
    sbatch_dict.pop("config")
    sbatch_dict["execute"] = "srun ./bin/xmeshfem3D"
    system = _connect_to_system(config)
    task = tasks.task_map['RunMesher'](system, config, sbatch_dict, model_type)
    _run_task(task)

# (truncated in the visible chunk: one more @cli.command() with
#  @click.option("--nodes", required=True, type=int, help="Total ...)
number of nodes.")263@click.option("--ntasks", required=True, type=int, help="Total number of cores.")264@click.option("--time", required=True, type=str, help="Wall time.")265@click.option("--ntasks-per-node", default=8, help="Cores per node.")266@click.option("--cpus-per-task", default=1, help="Threads per core.")267@click.option("--account", default="ch1", help="Account name.")268@click.option("--job-name", default="solver", help="Name of slurm job.")269@click.option("--output", default="solver.stdout", help="Capture stdout.")270@click.option("--error", default="solver.stderr", help="Capture stderr.")271@click.option("--sim-type", default="forward", help="Set type of simulation.")272@pass_config273def run_solver(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,274 account, job_name, output, error, sim_type):275 """Writes and submits the sbatch script for running the SPECFEM3D_GLOBE276 solver.277 """278 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())279 sbatch_dict.pop("config")280 sbatch_dict["execute"] = "srun ./bin/xspecfem3D"281 system = _connect_to_system(config)282 task = tasks.task_map["RunSolver"](system, config, sbatch_dict, sim_type)283 _run_task(task)284@cli.command()285@click.option("--nodes", required=True, type=int, help="Total number of nodes.")286@click.option("--ntasks", required=True, type=int, help="Total number of cores.")287@click.option("--time", required=True, type=str, help="Wall time.")288@click.option("--ntasks-per-node", default=8, help="Cores per node.")289@click.option("--cpus-per-task", default=1, help="Threads per core.")290@click.option("--account", default="ch1", help="Account name.")291@click.option("--job-name", default="select_windows", help="Name of slurm job.")292@click.option("--output", default="select_windows.stdout", help="Capture stdout.")293@click.option("--error", default="select_windows.stderr", help="Capture stderr.")294@pass_config295def run_select_windows(config, nodes, ntasks, time, 
ntasks_per_node, cpus_per_task,296 account, job_name, output, error):297 """Run LASIF's window selection algorithm on synthetic data."""298 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())299 sbatch_dict.pop("config")300 sbatch_dict["execute"] = "./select_windows.py"301 system = _connect_to_system(config)302 task = tasks.task_map["SelectWindows"](system, config, sbatch_dict)303 _run_task(task)304@cli.command()305@click.option("--nodes", default=1, type=int, help="Total number of nodes.")306@click.option("--ntasks", default=1, type=int, help="Total number of cores.")307@click.option("--time", required=True, type=str, help="Wall time.")308@click.option("--ntasks-per-node", default=1, help="Cores per node.")309@click.option("--cpus-per-task", default=8, help="Threads per core.")310@click.option("--account", default="ch1", help="Account name.")311@click.option("--job-name", default="preprocess_data", help="Name of slurm job.")312@click.option("--output", default="preprocess.stdout", help="Capture stdout.")313@click.option("--error", default="preprocess.stderr", help="Capture stderr.")314@pass_config315def preprocess_data(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,316 account, job_name, output, error):317 """Runs the LASIF provided preprocessing scripts on raw data."""318 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())319 sbatch_dict.pop("config")320 sbatch_dict["execute"] = 'srun preprocess_data.py'321 system = _connect_to_system(config)322 task = tasks.task_map['PreprocessData'](system, config, sbatch_dict)323 _run_task(task)324@cli.command()325@click.option("--nodes", default=18, type=int, help="Total number of nodes.")326@click.option("--ntasks", default=144, type=int, help="Total number of cores.")327@click.option("--time", default='00:10:00', type=str, help="Wall time.")328@click.option("--ntasks-per-node", default=8, help="Cores per node.")329@click.option("--cpus-per-task", default=1, help="Threads per 
core.")330@click.option("--account", default="ch1", help="Account name.")331@click.option("--job-name", default="sum_kernels", help="Name of slurm job.")332@click.option("--output", default="sum_kernels.stdout", help="Capture stdout.")333@click.option("--error", default="sum_kernels.stderr", help="Capture stderr.")334@pass_config335def sum_kernels(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,336 account, job_name, output, error):337 """ Sums the kernels """338 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())339 sbatch_dict.pop('config')340 sbatch_dict['execute'] = 'srun ./bin/xsum_preconditioned_kernels'341 system = _connect_to_system(config)342 task = tasks.task_map['SumGradients'](system, config, sbatch_dict)343 _run_task(task)344@cli.command()345@click.option("--nodes", default=18, type=int, help="Total number of nodes.")346@click.option("--ntasks", default=144, type=int, help="Total number of cores.")347@click.option("--time", default='12:00:00', type=str, help="Wall time.")348@click.option("--ntasks-per-node", default=8, help="Cores per node.")349@click.option("--cpus-per-task", default=1, help="Threads per core.")350@click.option("--account", default="ch1", help="Account name.")351@click.option("--job-name", default="smooth_kernels", help="Name of slurm job.")352@click.option("--output", default="smooth_kernels.stdout", help="Capture stdout.")353@click.option("--error", default="smooth_kernels.stderr", help="Capture stderr.")354@click.option("--sigma-v", default=5, help="vertical smoothing parameter.")355@click.option("--sigma-h", default=250, help="horizontal smoothing parameter.")356@pass_config357def smooth_kernels(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,358 account, job_name, output, error, sigma_v, sigma_h):359 """ Smoothes the kernels """360 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())361 sbatch_dict.pop('config')362 system = _connect_to_system(config)363 task = 
tasks.task_map['SmoothKernels'](system, config, sbatch_dict, sigma_v, sigma_h)364 _run_task(task)365@cli.command()366@click.option("--nodes", default=3, type=int, help="Total number of nodes.")367@click.option("--ntasks", default=24, type=int, help="Total number of cores.")368@click.option("--time", default='00:10:00', type=str, help="Wall time.")369@click.option("--ntasks-per-node", default=8, help="Cores per node.")370@click.option("--cpus-per-task", default=1, help="Threads per core.")371@click.option("--account", default="ch1", help="Account name.")372@click.option("--job-name", default="make_vtk", help="Name of slurm job.")373@click.option("--output", default="make_vtk.stdout", help="Capture stdout.")374@click.option("--error", default="make_vtk.stderr", help="Capture stderr.")375@click.option("--nslices", required=True, type=int, help="Number of slices.")376@click.option("--vtk-type", default="model", help="Type: smoothed_kernel, raw_kernel, model. Default: model")377@pass_config378def make_vtk(config, nodes, ntasks, time, ntasks_per_node, cpus_per_task,379 account, job_name, output, error, nslices, vtk_type):380 """ Creates vtk files from a kernel or model """381 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())382 sbatch_dict.pop('config')383 system = _connect_to_system(config)384 task = tasks.task_map['MakeVTK'](system, config, sbatch_dict, nslices, vtk_type)385 _run_task(task)386@cli.command()387@pass_config388def copy_kernels_to_safety(config):389 """ Copies kernel to remote LASIF directory"""390 src_dir = os.path.join(config.optimization_dir, 'PROCESSED_KERNELS', '*')391 dst_dir = os.path.join(config.lasif_project_path, 'KERNELS', config.base_iteration)392 remote_system = _connect_to_system(config)393 remote_system.makedir(dst_dir)394 remote_system.execute_command('rsync {} {}'.format(src_dir, dst_dir))395@cli.command()396@click.option('--new-iteration-name', required=True)397@pass_config398@click.pass_context399def 
create_new_iteration(ctx, config, new_iteration_name):400 """ Creates mew Iteration, based on files of current iteration on scratch and LASIF"""401 old_solver_dir = config.solver_dir402 old_opt_dir = config.optimization_dir403 old_iter = config.base_iteration404 config.base_iteration = new_iteration_name405 remote_system = _connect_to_system(config)406 remote_system.execute_command('lasif create_successive_iteration {} {}'407 .format(old_iter, config.base_iteration), workdir=config.lasif_project_path)408 ctx.invoke(setup_specfem_directories)409 ctx.invoke(copy_binaries)410 ctx.invoke(generate_cmt_solutions)411 ctx.invoke(generate_stations_files, regenerate_data_cache=True)412 print "Copying mesh from iteration: " + old_iter + " to: " + new_iteration_name413 print "This might take a while..."414 remote_system.execute_command('rsync -a {} {}'.format(os.path.join(old_solver_dir, 'MESH'),415 os.path.join(config.solver_dir)))416 print "Copying optimization diectory..."417 remote_system.execute_command('rsync -av {} {}'.format(os.path.join(old_opt_dir), os.path.join(config.work_dir)))418 print "Finished setting up new iteration: " + new_iteration_name419@cli.command()420@click.option('--iteration-name', type=str, required=True)421@pass_config422def switch_iteration(config, iteration_name):423 """ Switches iteration in config.json file """424 new_config = config.__dict__425 new_config['base_iteration'] = iteration_name426 new_config.pop("specfem_dict", None)427 new_config.pop("iteration_name", None)428 with io.open(CONFIG_FILE, "wb") as fh:429 json.dump(new_config, fh, sort_keys=True, indent=4, separators=(",", ": "))430 print 'Switched to {}'.format(iteration_name)431@cli.command()432@click.option('--perturbation-percent', type=float, default=0.01)433@click.option("--nodes", default=18, type=int, help="Total number of nodes.")434@click.option("--ntasks", default=144, type=int, help="Total number of cores.")435@click.option("--time", default='00:30:00', type=str, 
help="Wall time.")436@click.option("--ntasks-per-node", default=8, help="Cores per node.")437@click.option("--cpus-per-task", default=1, help="Threads per core.")438@click.option("--account", default="ch1", help="Account name.")439@click.option("--job-name", default="add_smoothed_gradient", help="Name of slurm job.")440@click.option("--output", default="add_smoothed_gradient.stdout", help="Capture stdout.")441@click.option("--error", default="add_smoothed_gradient.stderr", help="Capture stderr.")442@pass_config443def add_smoothed_gradient(config,nodes, ntasks, time, ntasks_per_node, cpus_per_task,444 account, job_name, output, error, perturbation_percent):445 """ Adds smoothed gradient to the model, writes new files to GLL directory """446 _, _, _, sbatch_dict = inspect.getargvalues(inspect.currentframe())447 sbatch_dict.pop('config')448 system = _connect_to_system(config)449 task = tasks.task_map['AddSmoothedGradient'](system, config, sbatch_dict,perturbation_percent)450 _run_task(task)451@cli.command()452@click.option("--correlations-dir", default="./NOISE_CORRELATIONS", type=str, help="Path to the directory containing the cross-correlations.")453@pass_config454def write_noise_events(config, correlations_dir):455 """Writes events for noise correlation in LASIF directory."""456 system = _connect_to_system(config)457 task = tasks.task_map['WriteNoiseEvents'](system, config, correlations_dir)458 _run_task(task)459@cli.command()460@click.option("--correlations-dir", default="./NOISE_CORRELATIONS", type=str, help="Path to the directory containing the cross-correlations.")461@pass_config462def sort_cross_correlations(config, correlations_dir):463 """Sorts noise correlations in the correct format464 from the "noise_correlations" directory into 'events'"""465 system = _connect_to_system(config)466 task = tasks.task_map['SortCrossCorrelations'](system, config, correlations_dir)467 _run_task(task)468@cli.command()469@pass_config470def weight_adjoint_sources(config):471 
"""Weights the adjoint sources to decrease the effect of clusters on the kernel472 You still need to manually put the files in the right directory"""473 system = _connect_to_system(config)474 task = tasks.task_map['WeightAdjointSources'](system, config)475 _run_task(task)476if __name__ == "__main__":...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, LambdaTest Learning Hubs compile a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run lisa automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful