How to use the test_ons method in keyboard

Best Python code snippets using keyboard

lightning_task.py

Source: lightning_task.py (GitHub)

import torch
import numpy as np
from torch.utils.data import Dataset, DataLoader
import matplotlib.pyplot as plt
import pytorch_lightning as pl
import os

AVAIL_GPUS = min(1, torch.cuda.device_count())
BATCH_SIZE = 256 if AVAIL_GPUS else 64
NUM_WORKERS = int(os.cpu_count() / 2)

class DMTS_Dataset(torch.utils.data.Dataset):
    'Characterizes a delay-match to sample task'
    def __init__(self, inps, out_des, list_IDs, labels, test_ons, distracted_bools):
        'Initialization'
        self.labels = labels
        self.list_IDs = list_IDs
        self.inps = inps
        self.out_des = out_des
        self.test_ons = test_ons
        self.distracted_bools = distracted_bools

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.list_IDs)

    def __getitem__(self, index):
        'Generates one sample of data'
        # Select sample
        ID = self.list_IDs[index]
        # Load data and get label
        inp = self.inps[ID]
        y = self.labels[ID].long()
        out = self.out_des[ID]
        test_on = self.test_ons[ID]
        distracted_bool = self.distracted_bools[ID]
        return inp, out, y, test_on, distracted_bool

def generate_one_DMTS_IO(sample_mat, samp, noise_level, dt, alpha, time_limits=[-1, 5.5],
                         possible_delays=[1., 1.41, 2., 2.83, 4.], use_distractor=1):
    num_samples = len(sample_mat)
    distractor1 = np.random.choice(np.delete(np.arange(num_samples), samp))

    # by convention the 8th and 9th samples are possible mid-delay distractors
    mid_delay_distractor_ind = np.random.choice([7, 8])
    delay_length = 1000 * np.random.choice(possible_delays)

    tvec = np.arange(time_limits[0], time_limits[1], 1 / 1000)
    TIME_STEPS = len(tvec)

    inp = torch.zeros((TIME_STEPS, num_samples + 1))
    out_des = torch.zeros((TIME_STEPS, num_samples + 1))
    samp_on = np.argmin(np.abs(tvec - 0))
    samp_off = samp_on + 500

    dis_on = int(delay_length / 2) + samp_off
    dis_off = dis_on + 250
    test_on = samp_off + int(delay_length)
    test_off = test_on + 500

    # present sample
    inp[samp_on:samp_off, :-1] = sample_mat[samp]

    if np.heaviside(np.random.rand() - 0.5, 0):
        # present distractor on 50% of trials
        inp[dis_on:dis_off, :-1] = sample_mat[mid_delay_distractor_ind]
        distracted_bool = 1
    else:
        distracted_bool = 0

    # present test and sample
    inp[test_on:test_off, :-1] = sample_mat[samp] + sample_mat[distractor1]

    # fixation signal, answer when it goes off
    inp[0:test_on, -1] = 1

    # desired output
    # out_des[int(3000/dt):int(3500/dt), samp] = 1
    out_des[test_on:, samp] = 1
    out_des[0:test_on, -1] = 0

    inp += np.sqrt(2 / alpha) * noise_level * torch.randn(inp.shape)

    return inp[::dt], out_des[::dt], int(test_on / dt), distracted_bool

def generate_DMTS(dt=100, tau=100, time_limits=[-1, 5.5], num_samples=8 + 2,
                  variable_delay=True, mid_delay_distractor=True):
    """
    Generates one delayed-match to sample dataset.

    ARGS:
    - dt: timestep to use
    - time_limits: beginning and end of trial. Sample on = 0 s. Units of s.
    - num_samples: size of sample pool

    RETURNS:
    - inps: inputs into the RNN, size batch by time by num_samples + 1
    - out_des: desired outputs from the RNN, size batch by time by num_samples + 1
    - partition: training and testing IDs
    - labels: sample label for each element in dataset
    """
    # use binary encoding of sample images
    sample_mat = torch.eye(num_samples)

    TIME_STEPS = len(np.arange(time_limits[0], time_limits[1], dt / 1000))

    noise_level = 0.01

    num_train = int(2**14)  # int(0.6*0.5*(2**13))
    num_test = int(2**12)   # int(0.4*0.5*(2**13))
    num_val = int(2**12)
    num_examples = num_train + num_test + num_val  # int(0.5*(2**10))
    inps = torch.empty((num_examples, TIME_STEPS, num_samples + 1), requires_grad=False)
    out_des = torch.empty((num_examples, TIME_STEPS, num_samples + 1), requires_grad=False)
    test_ons = torch.empty((num_examples), requires_grad=False)
    distracted_bools = torch.empty((num_examples), requires_grad=False)

    labels = torch.empty(num_examples)
    list_IDs = []
    for i in range(num_examples):
        samp = np.random.choice(np.arange(num_samples - 2))
        inps[i], out_des[i], test_ons[i], distracted_bools[i] = generate_one_DMTS_IO(
            sample_mat=sample_mat, samp=samp, noise_level=noise_level,
            dt=dt, alpha=dt / tau, time_limits=time_limits)
        labels[i] = samp
        list_IDs.append(i)
    partition = {'train': list_IDs[:num_train],
                 'test': list_IDs[num_train:num_train + num_test],
                 'val': list_IDs[num_train + num_test:num_train + num_test + num_val]}

    return inps, out_des, partition, labels, test_ons, distracted_bools

def get_DMTS_training_test_val_sets(dt_ann, train_batch=256, test_batch=256, val_batch=2**12):
    inps, out_des, partition, labels, test_ons, distracted_bools = generate_DMTS(dt=dt_ann)
    training_set = DMTS_Dataset(inps, out_des, partition['train'], labels, test_ons, distracted_bools)
    test_set = DMTS_Dataset(inps, out_des, partition['test'], labels, test_ons, distracted_bools)
    validation_set = DMTS_Dataset(inps, out_des, partition['val'], labels, test_ons, distracted_bools)
    return training_set, test_set, validation_set

class dDMTSDataModule(pl.LightningDataModule):
    def __init__(self, dt_ann: int = 15, batch_size: int = 256):
        super().__init__()
        self.dt_ann = dt_ann
        self.batch_size = batch_size

    def setup(self, stage=None):
        self.training_data, self.test_data, self.validation_data = get_DMTS_training_test_val_sets(self.dt_ann)

    def train_dataloader(self):
        return DataLoader(self.training_data, batch_size=BATCH_SIZE, shuffle=True,
                          num_workers=NUM_WORKERS, drop_last=True, pin_memory=False)

    def val_dataloader(self):
        return DataLoader(self.test_data, batch_size=2**11, num_workers=NUM_WORKERS,
                          drop_last=True, pin_memory=False)

    def test_dataloader(self):
        ...
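If you want to see the data module in action, the sketch below pairs it with a PyTorch Lightning Trainer. This is a hypothetical minimal setup, not part of the source file: SimpleDMTSModel is an invented placeholder classifier, the import assumes the snippet is saved as lightning_task.py, and the Trainer arguments assume a reasonably recent pytorch_lightning version.

# Minimal usage sketch (assumptions: snippet saved as lightning_task.py;
# SimpleDMTSModel is a hypothetical stand-in, not part of the original file).
import torch
import torch.nn.functional as F
import pytorch_lightning as pl
from lightning_task import dDMTSDataModule

class SimpleDMTSModel(pl.LightningModule):
    'Toy baseline: flatten the input sequence and classify the sample identity'
    def __init__(self, n_classes=8):  # samples are drawn from indices 0..7 above
        super().__init__()
        self.net = torch.nn.LazyLinear(n_classes)  # infers input size on first batch

    def training_step(self, batch, batch_idx):
        # batch layout matches DMTS_Dataset.__getitem__
        inp, out_des, y, test_on, distracted_bool = batch
        loss = F.cross_entropy(self.net(inp.flatten(1)), y)
        self.log('train_loss', loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)

if __name__ == '__main__':
    dm = dDMTSDataModule(dt_ann=15)  # setup() is called by the Trainer
    trainer = pl.Trainer(max_epochs=1, limit_train_batches=10,
                         accelerator='auto', devices=1)
    trainer.fit(SimpleDMTSModel(), datamodule=dm)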

main.py

Source: main.py (GitHub)

def read_line(line):
    on = line.split()[0] == "on"  # True if on, False if off
    xyz = []
    for val in line.split()[1].split(","):
        xyz.append([int(i) for i in val.split("=")[1].split("..")])
    return on, xyz

def read_input(filename):
    ons = []
    xyzs = []
    with open(filename, "r") as fid:
        for line in fid.readlines():
            on, xyz = read_line(line.strip())
            ons.append(on)
            xyzs.append(xyz)
    return ons, xyzs

def on_cubes(ons, xyzs):
    output = set()
    for i in range(len(ons)):
        on = ons[i]
        xyz = xyzs[i]
        for x in range(max(xyz[0][0], -50), min(xyz[0][1], 50) + 1):
            for y in range(max(xyz[1][0], -50), min(xyz[1][1], 50) + 1):
                for z in range(max(xyz[2][0], -50), min(xyz[2][1], 50) + 1):
                    if on:
                        output.add((x, y, z))
                    else:
                        output.discard((x, y, z))
    return len(output)

def volume(xyz):
    return (
        (xyz[0][1] - xyz[0][0] + 1)
        * (xyz[1][1] - xyz[1][0] + 1)
        * (xyz[2][1] - xyz[2][0] + 1)
    )

def overlap(xyza, xyzb):
    xyzc = []
    for dim in range(3):
        if xyza[dim][0] <= xyzb[dim][0] <= xyza[dim][1]:
            xyzc.append([xyzb[dim][0], min(xyza[dim][1], xyzb[dim][1])])
        elif xyza[dim][0] <= xyzb[dim][1] <= xyza[dim][1]:
            xyzc.append([xyza[dim][0], min(xyza[dim][1], xyzb[dim][1])])
        elif xyzb[dim][0] < xyza[dim][0] and xyzb[dim][1] > xyza[dim][1]:
            xyzc.append(xyza[dim])
        else:
            return []
    return xyzc

def split(xyz, overlap):
    new_xyzs = []
    new_xlims = [xyz[0][0], xyz[0][1]]
    if xyz[0][0] < overlap[0][0]:
        new_xyzs.append([[xyz[0][0], overlap[0][0] - 1], xyz[1], xyz[2]])
        new_xlims[0] = overlap[0][0]
    if overlap[0][1] < xyz[0][1]:
        new_xyzs.append([[overlap[0][1] + 1, xyz[0][1]], xyz[1], xyz[2]])
        new_xlims[1] = overlap[0][1]
    new_ylims = [xyz[1][0], xyz[1][1]]
    if xyz[1][0] < overlap[1][0]:
        new_xyzs.append([new_xlims, [xyz[1][0], overlap[1][0] - 1], xyz[2]])
        new_ylims[0] = overlap[1][0]
    if overlap[1][1] < xyz[1][1]:
        new_xyzs.append([new_xlims, [overlap[1][1] + 1, xyz[1][1]], xyz[2]])
        new_ylims[1] = overlap[1][1]
    if xyz[2][0] < overlap[2][0]:
        new_xyzs.append([new_xlims, new_ylims, [xyz[2][0], overlap[2][0] - 1]])
    if overlap[2][1] < xyz[2][1]:
        new_xyzs.append([new_xlims, new_ylims, [overlap[2][1] + 1, xyz[2][1]]])
    return new_xyzs

def maybe_better_on_cubes(ons, xyzs):
    on_cubes = []
    for i in range(len(ons)):
        on = ons[i]
        xyz = xyzs[i]
        if on:
            on_cubes.append(xyz)
        else:
            add = []
            remove = []
            for cube in on_cubes:
                cube_overlap = overlap(xyz, cube)
                if cube_overlap:
                    remove.append(cube)
                    add += split(cube, cube_overlap)
            for cube in remove:
                on_cubes.remove(cube)
            on_cubes += add
    final_cubes = []
    while on_cubes:
        xyz = on_cubes[0]
        on_cubes = on_cubes[1:]
        for fc in final_cubes:
            cube_overlap = overlap(xyz, fc)
            if cube_overlap:
                on_cubes += split(xyz, fc)
                break
        else:
            final_cubes.append(xyz)
    return sum([volume(cube) for cube in final_cubes])

if __name__ == "__main__":
    test_ons, test_xyzs = read_input("test_input.txt")
    assert on_cubes(test_ons, test_xyzs) == 590784
    ons, xyzs = read_input("input.txt")
    print(on_cubes(ons, xyzs))
    assert maybe_better_on_cubes(test_ons, test_xyzs) == 590784
    test_ons, test_xyzs = read_input("test_input_2.txt")
    assert maybe_better_on_cubes(test_ons, test_xyzs) == 2758514936282235
    ...
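The core of maybe_better_on_cubes is the overlap/split pair: splitting a cuboid around its overlap with another should tile it into disjoint pieces whose volumes, together with the overlap, add back up to the original. A small sanity check along those lines (the coordinates are invented for illustration, and the import assumes the snippet above is saved as main.py; its __main__ guard keeps the puzzle-input asserts from running on import):

# Hypothetical sanity check for the cuboid-splitting logic above.
from main import overlap, split, volume

a = [[0, 9], [0, 9], [0, 9]]     # 10 x 10 x 10 cuboid
b = [[5, 14], [5, 14], [5, 14]]  # overlapping 10 x 10 x 10 cuboid

ab = overlap(a, b)               # -> [[5, 9], [5, 9], [5, 9]]
pieces = split(a, ab)            # parts of a lying outside the overlap

# The overlap plus the split-off pieces should exactly tile cuboid a.
assert volume(a) == volume(ab) + sum(volume(p) for p in pieces)
print(len(pieces), [volume(p) for p in pieces], volume(ab))  # 3 [500, 250, 125] 125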

test_ons.py

Source: test_ons.py (GitHub)

import tigercontrol
from tigercontrol.utils.optimizers.ons import ONS
from tigercontrol.utils.optimizers.losses import mse
import matplotlib.pyplot as plt

def test_ons(show=False):
    environment = tigercontrol.environment('LDS')
    x = environment.reset(p=2, q=0)
    controller = tigercontrol.controllers('LSTM')
    controller.initialize(n=1, m=1, l=5, h=10, optimizer=ONS)  # initialize with class
    controller.predict(1.0)  # call controllers to verify it works
    controller.update(1.0)
    optimizer = ONS(learning_rate=0.001)
    controller = tigercontrol.controllers('LSTM')
    controller.initialize(n=1, m=1, l=3, h=10, optimizer=optimizer)  # reinitialize with instance
    loss = []
    for t in range(1000):
        y_pred = controller.predict(x)
        y_true = environment.step()
        loss.append(mse(y_pred, y_true))
        controller.update(y_true)
        x = y_true
    if show:
        plt.plot(loss)
        plt.show(block=False)
        plt.pause(3)
        plt.close()
    print("test_ons passed")

if __name__ == "__main__":
    ...
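The __main__ guard is truncated above; presumably it just invokes the test. An assumed minimal completion that runs the 1000-step rollout and shows the loss curve (requires tigercontrol and matplotlib to be installed):

# Assumed completion of the truncated __main__ guard above.
if __name__ == "__main__":
    test_ons(show=True)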

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run keyboard automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

