How to use the set_simulator method in robotframework-ioslibrary

Best Python code snippets using set_simulator
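
The code examples below are collected from public GitHub projects; each one defines or calls its own set_simulator hook, and none of them is part of robotframework-ioslibrary itself. They share one idea: set_simulator installs a pluggable object (or callable) that generates data, so the same dataset, estimator, or test can be driven by different simulators. As a minimal sketch of that shared pattern (every name here is hypothetical, invented purely for illustration):

import random

class ReadSimulator:
    # Hypothetical simulator that samples from a named distribution.
    def __init__(self, distribution='uniform'):
        self.distribution = distribution
        self.rng = random.Random()

    def set_seed(self, seed):
        self.rng.seed(seed)

    def sample(self):
        if self.distribution == 'uniform':
            return self.rng.uniform(0.0, 1.0)
        return self.rng.lognormvariate(0.0, 1.0)

class SyntheticDataset:
    # Hypothetical dataset that delegates sample generation to its simulator.
    def __init__(self):
        self.simulator = None

    def set_simulator(self, distribution):
        # Swapping the simulator changes how every future sample is drawn.
        self.simulator = ReadSimulator(distribution)

dataset = SyntheticDataset()
dataset.set_simulator('lognormal')
dataset.simulator.set_seed(42)
print(dataset.simulator.sample())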

dataloaders.py

Source: dataloaders.py (GitHub)


...
            filter_by_level=filter_by_level,
            num_to_keep=num_to_keep,
            genome_cache_size=genome_cache_size)
        if self.valid_loader:
            self.dataset.set_simulator(validation_distribution)
        else:
            self.dataset.set_simulator(training_distribution)
        super(RefSeqProkaryotaDataLoader,
              self).__init__(self.dataset, batch_size, shuffle,
                             validation_split, num_workers, drop_last)

    def enable_multithreading_if_possible(self):
        if self.dataset.genome_cache_is_full():
            try:
                if self.num_workers != self._num_workers:
                    self.num_workers = self._num_workers
                    self.logger.info(f'Enabling {self.num_workers} '
                                     'workers for data loading...')
            except AttributeError:
                pass
        else:
            self._num_workers = self.num_workers
            self.num_workers = 0

    def step(self, epoch):
        super().step(epoch)
        self.enable_multithreading_if_possible()
        if not self.fixed_dataset:
            self.dataset.idx_offset = epoch * len(self.dataset)
        seed = epoch
        seed = seed * get_world_size() + get_global_rank()
        if self.valid_loader:
            seed = 2**32 - seed
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)
        self.dataset.simulator.set_seed(seed)

    def init_validation(self, other):
        super().init_validation(other)
        self.fixed_dataset = other.fixed_dataset
        self.valid_loader = True
        self.training_distribution = other.training_distribution
        self.validation_distribution = other.validation_distribution


class RefSeqProkaryotaBagsDataLoader(BaseDataLoader):
    """
    RefSeq DataLoader
    Any encoding or transformation should be done here
    and passed to the Dataset
    """

    def __init__(self, target_format, genome_dir, taxonomy_dir, total_bags,
                 bag_size, batch_size, fixed_dataset=False, drop_last=False,
                 training_distribution='lognormal',
                 validation_distribution='lognormal', reseed_every_n_bags=1,
                 accessions_file=None, taxids_list=None, error_model=None,
                 rmin=None, rmax=None, download=True, shuffle=True,
                 validation_split=0.0, num_workers=1, noise=None,
                 filter_by_level=None, num_to_keep=1, genome_cache_size=1000,
                 single_read_target_vectors=False):
        self.logger = logging.getLogger(self.__class__.__name__)
        ncbi_email = 'your-email@domain.com'
        ncbi_api = None
        g2read = mytransforms.GenomeToNoisyRead(error_model, rmin, rmax,
                                                p=noise)
        self.rmin = rmin
        self.rmax = rmax
        self.noise = noise
        self.error_model = error_model
        self.fixed_dataset = fixed_dataset
        self.training_distribution = training_distribution
        self.validation_distribution = validation_distribution
        if self.error_model:
            self.rmin = g2read.rmin
            self.rmax = g2read.rmax
        trsfm_x = mytransforms.Compose([
            g2read,
            mytransforms.ToTensorWithView(dtype=torch.long, view=[1, -1])
        ])
        trsfm_y = mytransforms.Compose([mytransforms.ToTensorWithView()])
        self.valid_loader = False
        self.dataset = datasets.RefSeqProkaryotaBags(
            bag_size=bag_size, total_bags=total_bags,
            target_format=target_format, genome_dir=genome_dir,
            taxonomy_dir=taxonomy_dir, accessions_file=accessions_file,
            taxids_list=taxids_list, download=download,
            ncbi_email=ncbi_email, ncbi_api=ncbi_api,
            transform_x=trsfm_x, transform_y=trsfm_y,
            filter_by_level=filter_by_level, num_to_keep=num_to_keep,
            reseed_every_n_bags=reseed_every_n_bags,
            genome_cache_size=genome_cache_size, num_workers=num_workers,
            single_read_target_vectors=single_read_target_vectors)
        if self.valid_loader:
            self.dataset.set_simulator(validation_distribution)
        else:
            self.dataset.set_simulator(training_distribution)
        super(RefSeqProkaryotaBagsDataLoader,
              self).__init__(self.dataset, batch_size, shuffle,
                             validation_split, 0, drop_last)

    def step(self, epoch):
        super().step(epoch)
        if not self.fixed_dataset:
            self.dataset.idx_offset = epoch * len(self.dataset)
        if self.valid_loader:
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)

    def init_validation(self, other):
        super().init_validation(other)
        self.fixed_dataset = other.fixed_dataset
        self.valid_loader = True
        self.training_distribution = other.training_distribution
        self.validation_distribution = other.validation_distribution


class RefSeqProkaryotaLargeBagsDataLoader(BaseDataLoader):
    """
    RefSeq DataLoader
    Any encoding or transformation should be done here
    and passed to the Dataset
    """

    def __init__(self,
                 # target_format,
                 genome_dir, taxonomy_dir, total_bags, bag_size,
                 mini_bag_size, batch_size, fixed_dataset=False,
                 drop_last=False, training_distribution='lognormal',
                 validation_distribution='lognormal', reseed_every_n_bags=1,
                 accessions_file=None, taxids_list=None, error_model=None,
                 rmin=None, rmax=None, download=True, shuffle=True,
                 validation_split=0.0, num_workers=1, noise=None,
                 filter_by_level=None, num_to_keep=1, genome_cache_size=1000,
                 single_read_target_vectors=False):
        self.logger = logging.getLogger(self.__class__.__name__)
        ncbi_email = 'your-email@domain.com'
        ncbi_api = None
        g2read = mytransforms.GenomeToNoisyRead(error_model, rmin, rmax,
                                                p=noise)
        self.rmin = rmin
        self.rmax = rmax
        self.noise = noise
        self.error_model = error_model
        self.fixed_dataset = fixed_dataset
        self.training_distribution = training_distribution
        self.validation_distribution = validation_distribution
        if self.error_model:
            self.rmin = g2read.rmin
            self.rmax = g2read.rmax
        trsfm_x = mytransforms.Compose([
            g2read,
            mytransforms.ToTensorWithView(dtype=torch.long, view=[1, -1])
        ])
        trsfm_y = mytransforms.Compose([mytransforms.ToTensorWithView()])
        self.valid_loader = False
        self.dataset = datasets.RefSeqProkaryotaBags2(
            bag_size=bag_size, mini_bag_size=mini_bag_size,
            total_bags=total_bags, genome_dir=genome_dir,
            taxonomy_dir=taxonomy_dir, accessions_file=accessions_file,
            taxids_list=taxids_list, download=download,
            ncbi_email=ncbi_email, ncbi_api=ncbi_api,
            transform_x=trsfm_x, transform_y=trsfm_y,
            filter_by_level=filter_by_level, num_to_keep=num_to_keep,
            reseed_every_n_bags=reseed_every_n_bags,
            genome_cache_size=genome_cache_size, num_workers=num_workers)
        if self.valid_loader:
            self.dataset.set_simulator(validation_distribution)
        else:
            self.dataset.set_simulator(training_distribution)
        super().__init__(self.dataset, batch_size, shuffle, validation_split,
                         0, drop_last, collate_fn=nothing_collate_fn)

    def step(self, epoch):
        super().step(epoch)
        if not self.fixed_dataset:
            self.dataset.idx_offset = epoch * len(self.dataset)
        if self.valid_loader:
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)

    def init_validation(self, other):
        super().init_validation(other)
        self.fixed_dataset = other.fixed_dataset
        self.valid_loader = True
        self.training_distribution = other.training_distribution
        self.validation_distribution = other.validation_distribution


class RefSeqProkaryotaKmerDataLoader(BaseDataLoader):
    """
    RefSeq DataLoader
    Any encoding or transformation should be done here
    and passed to the Dataset
    """

    def __init__(self, genome_dir, taxonomy_dir, total_samples, batch_size,
                 kmer_vocab_file, fixed_dataset=False, drop_last=False,
                 training_distribution='uniform',
                 validation_distribution='lognormal', accessions_file=None,
                 taxids_list=None, error_model=None, rmin=None, rmax=None,
                 kmer_processing_method='hash', hash_bits=16, lsh=True,
                 lsh_k=11, alternative_lsh=False, download=True,
                 shuffle=True, validation_split=0.0, num_workers=1,
                 noise=None, forward_reads_only=False, filter_by_level=None,
                 num_to_keep=1, genome_cache_size=1000):
        # if kmer_vocab_file is provided:
        #     kmers are mapped to numbers according to the vocabulary
        # else:
        #     if kmer_processing_method is 'hash':
        #         use a normal hash or LSH according to hash_bits, lsh,
        #         lsh_k and alternative_lsh
        #     if kmer_processing_method is 'count':
        #         use a kmer counter to count subkmers of size lsh_k
        assert kmer_processing_method in ['hash', 'count']
        self.logger = logging.getLogger(self.__class__.__name__)
        ncbi_email = 'your-email@domain.com'
        ncbi_api = None
        g2read = mytransforms.GenomeToNoisyKmerRead(
            kmer_vocab_file, error_model, rmin, rmax, p=noise,
            forward_reads_only=forward_reads_only, hash_bits=hash_bits,
            lsh=lsh, lsh_k=lsh_k, alternative_lsh=alternative_lsh,
            kmer_processing_method=kmer_processing_method)
        self.lsh_k = lsh_k
        if kmer_processing_method == 'hash':
            totensor = mytransforms.ToTensorWithView(dtype=torch.long,
                                                     view=[-1])
        else:
            totensor = mytransforms.ToTensorWithView(dtype=torch.float)
        self.rmin = rmin
        self.rmax = rmax
        self.noise = noise
        self.error_model = error_model
        self.fixed_dataset = fixed_dataset
        self.training_distribution = training_distribution
        self.validation_distribution = validation_distribution
        try:
            self.vocab_size = g2read.vocab_size
        except AttributeError:
            self.vocab_size = None
        if self.error_model:
            self.rmin = g2read.rmin
            self.rmax = g2read.rmax
        trsfm_x = mytransforms.Compose([g2read, totensor])
        trsfm_y = mytransforms.Compose([mytransforms.ToTensorWithView()])
        self.valid_loader = False
        self.dataset = datasets.RefSeqProkaryota(
            genome_dir, taxonomy_dir, total_samples, accessions_file,
            taxids_list, download=download, ncbi_email=ncbi_email,
            ncbi_api=ncbi_api, transform_x=trsfm_x, transform_y=trsfm_y,
            filter_by_level=filter_by_level, num_to_keep=num_to_keep,
            genome_cache_size=genome_cache_size)
        if self.valid_loader:
            self.dataset.set_simulator(validation_distribution)
        else:
            self.dataset.set_simulator(training_distribution)
        super().__init__(self.dataset, batch_size, shuffle, validation_split,
                         num_workers, drop_last)

    def enable_multithreading_if_possible(self):
        if self.dataset.genome_cache_is_full():
            try:
                if self.num_workers != self._num_workers:
                    self.num_workers = self._num_workers
                    self.logger.info(f'Enabling {self.num_workers} '
                                     'workers for data loading...')
            except AttributeError:
                pass
        else:
            self._num_workers = self.num_workers
            self.num_workers = 0

    def step(self, epoch):
        super().step(epoch)
        self.enable_multithreading_if_possible()
        if not self.fixed_dataset:
            self.dataset.idx_offset = epoch * len(self.dataset)
        seed = epoch
        seed = seed * get_world_size() + get_global_rank()
        if self.valid_loader:
            seed = 2**32 - seed
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)
        self.dataset.simulator.set_seed(seed)

    def init_validation(self, other):
        super().init_validation(other)
        self.fixed_dataset = other.fixed_dataset
        self.valid_loader = True
        self.training_distribution = other.training_distribution
        self.validation_distribution = other.validation_distribution


class RefSeqProkaryotaKmerBagsDataLoader(BaseDataLoader):
    """
    RefSeq DataLoader
    Any encoding or transformation should be done here
    and passed to the Dataset
    """

    def __init__(self, target_format, genome_dir, taxonomy_dir, total_bags,
                 bag_size, batch_size, kmer_vocab_file, fixed_dataset=False,
                 drop_last=False, training_distribution='lognormal',
                 validation_distribution='lognormal', reseed_every_n_bags=1,
                 accessions_file=None, taxids_list=None, error_model=None,
                 rmin=None, rmax=None, kmer_processing_method='hash',
                 hash_bits=16, lsh=True, lsh_k=11, alternative_lsh=False,
                 download=True, shuffle=True, validation_split=0.0,
                 num_workers=1, noise=None, forward_reads_only=False,
                 filter_by_level=None, num_to_keep=1,
                 genome_cache_size=1000, single_read_target_vectors=False):
        self.logger = logging.getLogger(self.__class__.__name__)
        ncbi_email = 'your-email@domain.com'
        ncbi_api = None
        g2read = mytransforms.GenomeToNoisyKmerRead(
            kmer_vocab_file, error_model, rmin, rmax, p=noise,
            forward_reads_only=forward_reads_only, hash_bits=hash_bits,
            lsh=lsh, lsh_k=lsh_k, alternative_lsh=alternative_lsh,
            kmer_processing_method=kmer_processing_method)
        self.lsh_k = lsh_k
        if kmer_processing_method == 'hash':
            totensor = mytransforms.ToTensorWithView(dtype=torch.long,
                                                     view=[-1])
        else:
            totensor = mytransforms.ToTensorWithView(dtype=torch.float)
        self.rmin = rmin
        self.rmax = rmax
        self.noise = noise
        self.error_model = error_model
        self.fixed_dataset = fixed_dataset
        self.training_distribution = training_distribution
        self.validation_distribution = validation_distribution
        try:
            self.vocab_size = g2read.vocab_size
        except AttributeError:
            self.vocab_size = None
        if self.error_model:
            self.rmin = g2read.rmin
            self.rmax = g2read.rmax
        trsfm_x = mytransforms.Compose([g2read, totensor])
        trsfm_y = mytransforms.Compose([mytransforms.ToTensorWithView()])
        self.valid_loader = False
        self.dataset = datasets.RefSeqProkaryotaBags(
            bag_size=bag_size, total_bags=total_bags,
            target_format=target_format, genome_dir=genome_dir,
            taxonomy_dir=taxonomy_dir, accessions_file=accessions_file,
            taxids_list=taxids_list, download=download,
            ncbi_email=ncbi_email, ncbi_api=ncbi_api,
            transform_x=trsfm_x, transform_y=trsfm_y,
            filter_by_level=filter_by_level, num_to_keep=num_to_keep,
            reseed_every_n_bags=reseed_every_n_bags,
            genome_cache_size=genome_cache_size, num_workers=num_workers,
            single_read_target_vectors=single_read_target_vectors)
        if self.valid_loader:
            self.dataset.set_simulator(validation_distribution)
        else:
            self.dataset.set_simulator(training_distribution)
        super().__init__(self.dataset, batch_size, shuffle, validation_split,
                         0, drop_last)

    def step(self, epoch):
        super().step(epoch)
        if not self.fixed_dataset:
            self.dataset.idx_offset = epoch * len(self.dataset)
        if self.valid_loader:
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)

    def init_validation(self, other):
        super().init_validation(other)
        self.fixed_dataset = other.fixed_dataset
        self.valid_loader = True
        self.training_distribution = other.training_distribution
        self.validation_distribution = other.validation_distribution


if __name__ == '__main__':
    # Testing
    # rspdl = RefSeqProkaryotaDataLoader(
    #     ("/home/ageorgiou/eth/spring2019/"
    #      "thesis/data/refseq_prokaryota/genomes"),
    #     ("/home/ageorgiou/eth/spring2019/"
    #      "thesis/data/refseq_prokaryota/taxonomy"),
    #     100, ...
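
All of these loaders use set_simulator the same way: once at construction and again at each epoch step, they install either the training or the validation distribution on the underlying dataset, and the seeded variants then reseed the dataset's simulator. The sketch below condenses that control flow; Dataset, Simulator, and Loader are hypothetical stand-ins, and only the set_simulator/set_seed call pattern mirrors the snippet above:

class Simulator:
    # Hypothetical stand-in for the dataset's read simulator.
    def __init__(self, distribution):
        self.distribution = distribution
        self.seed = None

    def set_seed(self, seed):
        self.seed = seed

class Dataset:
    # Hypothetical stand-in exposing the same set_simulator hook.
    def __init__(self):
        self.simulator = None
        self.idx_offset = 0

    def __len__(self):
        return 1000

    def set_simulator(self, distribution):
        self.simulator = Simulator(distribution)

class Loader:
    def __init__(self, dataset, valid_loader=False):
        self.dataset = dataset
        self.valid_loader = valid_loader
        self.training_distribution = 'uniform'
        self.validation_distribution = 'lognormal'

    def step(self, epoch):
        # Re-point the dataset at fresh samples for this epoch...
        self.dataset.idx_offset = epoch * len(self.dataset)
        # ...then install the distribution matching this loader's role,
        # mirroring the if/else around set_simulator in the snippet.
        if self.valid_loader:
            self.dataset.set_simulator(self.validation_distribution)
        else:
            self.dataset.set_simulator(self.training_distribution)
        self.dataset.simulator.set_seed(epoch)

loader = Loader(Dataset())
loader.step(epoch=3)
print(loader.dataset.simulator.distribution, loader.dataset.simulator.seed)

Keeping the distribution choice inside step() means a validation loader created via init_validation automatically switches to the validation simulator on every epoch.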


test_sre.py

Source: test_sre.py (GitHub)


...
plt.show()

from sklearn.pipeline import Pipeline
clf = Pipeline([('normalize', StandardScaler()),
                ('classifier', MLPClassifier(alpha=1, max_iter=1000))])
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(clf, mins, maxs, Nsamples=10000,
                               test_size=0.1)
lfi.set_obs(y_obs)

from skorch import NeuralNetClassifier
import torch
from torch import nn
from torch.nn import functional as F

class ClassifierModule(nn.Module):
    def __init__(self, num_units=50, nonlin=F.relu, dropout=0.5):
        super(ClassifierModule, self).__init__()
        self.num_units = num_units
        self.nonlin = nonlin
        self.dropout = dropout
        self.dense0 = nn.Linear(y_obs.shape[1] + 2, num_units)
        self.nonlin = nonlin
        self.dropout = nn.Dropout(dropout)
        self.dense1 = nn.Linear(num_units, 20)
        self.output = nn.Linear(20, 2)

    def forward(self, X, **kwargs):
        X = self.nonlin(self.dense0(X))
        X = self.dropout(X)
        X = F.relu(self.dense1(X))
        X = F.softmax(self.output(X), dim=-1)
        return X

model = NeuralNetClassifier(
    ClassifierModule,
    max_epochs=1000,
    lr=0.1,
    # optimizer=torch.optim.SGD,
    # criterion=nn.BCELoss,
    iterator_train__shuffle=True,
)
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(model, mins, maxs, Nsamples=10000,
                               test_size=0.1)
lfi.set_obs(y_obs)

from sklearn.neighbors import KNeighborsClassifier, RadiusNeighborsClassifier
clf = KNeighborsClassifier(n_neighbors=50)
# clf = Pipeline([('normalize', StandardScaler()),
#                 ('classifier', KNeighborsClassifier(n_neighbors=30))])
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(clf, mins, maxs, Nsamples=10000,
                               test_size=0.1)
lfi.set_obs(y_obs)

def logL(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return lp
    return lp + lfi.logL_estimator(theta)

pos = mins + (maxs - mins) * np.random.uniform(0, 1, size=(64, len(mins)))
nwalkers, ndim = pos.shape
n_samples = 1000
sampler = emcee.EnsembleSampler(nwalkers, ndim, logL)
sampler.run_mcmc(pos, n_samples, progress=True)
flat_samples = sampler.get_chain(discard=0, flat=True)

from sklearn import svm
clf = svm.SVC(kernel='linear', probability=True)
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(clf, mins, maxs, Nsamples=10000,
                               test_size=0.1)
lfi.set_obs(y_obs)

from sklearn.ensemble import AdaBoostClassifier
clf = AdaBoostClassifier(n_estimators=100, random_state=0)
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(clf, mins, maxs, Nsamples=10000,
                               test_size=0.1)
lfi.set_obs(y_obs)

c = ChainConsumer()
labels = [ke for ke in bounds.keys()]
theta_true = [line.true_slope, line.true_intercept]
c.add_chain(flat_samples, parameters=labels)
fig = c.plotter.plot(figsize="column", truth=theta_true)
fig.set_size_inches(3 + fig.get_size_inches())  # Resize fig for doco. You don't need this.
plt.show()

ni = 50
mesh = np.array([[i, j] for i in np.linspace(mins[0], maxs[0], ni)
                 for j in np.linspace(mins[1], maxs[1], ni)])
logLmesh = np.array([logL(m) for m in tqdm(mesh)])
plt.pcolor(mesh[:, 0].reshape(ni, ni), mesh[:, 1].reshape(ni, ni),
           logLmesh.reshape(ni, ni))

# ############ https://elfi.readthedocs.io/en/latest/quickstart.html
import elfi
import scipy.stats as ss

mu = elfi.Prior('uniform', -2, 4)
sigma = elfi.Prior('uniform', 1, 4)

def simulator(theta, batch_size=1, random_state=None):
    if type(theta) == list:
        theta = np.array(theta)
    if theta.ndim > 1:
        out = [simulator(tt, batch_size=batch_size,
                         random_state=random_state) for tt in theta]
        return np.array(out).squeeze()
    mu, sigma = theta
    mu, sigma = np.atleast_1d(mu, sigma)
    return ss.norm.rvs(mu[:, None], sigma[:, None], size=(batch_size, 30),
                       random_state=random_state)

def mean(y):
    return np.mean(y, axis=1)

def var(y):
    return np.var(y, axis=1)

# Set the generating parameters that we will try to infer
mean0 = 1
std0 = 3

# Generate some data (using a fixed seed here)
np.random.seed(20170525)
y_obs = simulator([mean0, std0])
print(y_obs)

def log_prior(theta):
    if -2 <= theta[0] <= 4 and 1 <= theta[1] <= 4:
        return 0
    return -np.inf

def logL(theta):
    lp = log_prior(theta)
    if not np.isfinite(lp):
        return lp
    return lp + lfi.logL_estimator(theta)

mins = np.array([-2, 1])
maxs = np.array([4, 4])

from sklearn.ensemble import RandomForestClassifier
clf = RandomForestClassifier(n_estimators=100, random_state=0, verbose=1,
                             n_jobs=4)
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(clf, mins, maxs, Nsamples=50000,
                               test_size=0.1)
lfi.set_obs(y_obs)

ni = 50
mesh = np.array([[i, j] for i in np.linspace(mins[0], maxs[0], ni)
                 for j in np.linspace(mins[1], maxs[1], ni)])
logLmesh = np.array([logL(m) for m in tqdm(mesh)])
plt.pcolor(mesh[:, 0].reshape(ni, ni), mesh[:, 1].reshape(ni, ni),
           logLmesh.reshape(ni, ni))

from skorch import NeuralNetClassifier
import torch
from torch import nn
from torch.nn import functional as F

class ClassifierModule(nn.Module):
    def __init__(self, num_units=50, nonlin=F.relu, dropout=0.5):
        super(ClassifierModule, self).__init__()
        self.num_units = num_units
        self.nonlin = nonlin
        self.dropout = dropout
        self.dense0 = nn.Linear(y_obs.shape[1] + 2, num_units)
        self.nonlin = nonlin
        self.dropout = nn.Dropout(dropout)
        self.dense1 = nn.Linear(num_units, 20)
        self.output = nn.Linear(20, 2)

    def forward(self, X, **kwargs):
        X = self.nonlin(self.dense0(X))
        X = self.dropout(X)
        X = F.relu(self.dense1(X))
        X = F.sigmoid(self.output(X))  # F.softmax(self.output(X), dim=-1)
        return X

model = NeuralNetClassifier(
    ClassifierModule,
    max_epochs=1000,
    lr=0.01,
    # optimizer=torch.optim.SGD,
    criterion=nn.CrossEntropyLoss,  # criterion=nn.NLLLoss, # criterion=nn.BCELoss,
    # iterator_train__shuffle=True,
)
model.fit(X, y)
lfi = emceeSRE()
lfi.set_simulator(simulator)
lfi.learn_logL_with_classifier(model, mins, maxs, Nsamples=10000,
                               test_size=0.1)
...
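
In this snippet, set_simulator simply registers a simulator callable (theta -> simulated data) on the likelihood-free inference object before learn_logL_with_classifier trains a classifier on simulated samples. A minimal sketch of that registration pattern, where TinySRE is a hypothetical stand-in for the snippet's emceeSRE and only the set_simulator/set_obs call shape is taken from the code above:

import numpy as np

class TinySRE:
    # Hypothetical, minimal stand-in for the snippet's emceeSRE wrapper.
    def set_simulator(self, simulator):
        # The simulator is just a callable: theta -> simulated data.
        self.simulator = simulator

    def set_obs(self, y_obs):
        self.obs = y_obs

def simulator(theta, batch_size=1, random_state=None):
    # Gaussian toy simulator, matching the mean/std example above.
    rng = np.random.default_rng(random_state)
    mu, sigma = theta
    return rng.normal(mu, sigma, size=(batch_size, 30))

lfi = TinySRE()
lfi.set_simulator(simulator)          # same call pattern as the snippet
lfi.set_obs(simulator([1.0, 3.0]))
print(lfi.obs.shape)

Presumably the real emceeSRE draws its Nsamples parameter/data pairs through this registered callable when training the classifier; that internal behavior is not shown in the excerpt.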


test_simulators.py

Source: test_simulators.py (GitHub)


...
    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_dc_simulation(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        simulator.isGravity = True
        actuatorVoltages = [-1500]
        simulator.actuator_dc_drive([actuatorVoltages], saving=False)
        self.simulator_testing(simulator)

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_ac_simulation(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        drivenFrequency = 1.0
        actuatorLength = 10.0
        arg = [m, drivenFrequency, actuatorLength]
        simulator.drive(arg, saving=False)
        self.simulator_testing(simulator)

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_data_recorder(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        io_buf = io.BytesIO()
        simulator.outfilename = io_buf
        simulator.reset()
        drivenFrequency = 1.0
        actuatorLength = 10.0
        arg = [m, drivenFrequency, actuatorLength]
        simulator.drive(arg, saving=True)
        io_buf.seek(0)
        file = io_buf.read()
        recorder = RobotShapeRecorderFromData(data_filename=io.BytesIO(file),
                                              record_intervals_in_steps=1,
                                              saving=False)
        recorder.reset()
        recorder.run()


class TestInchwormRobotSimulator(BaseSimulatorTest):
    simulator_class_set = [Inchworm, InchwormBackward,
                           FullFrictionalInchworm, InchwormCrawl,
                           FrictionalFrogMotionV2]
    mSet = [1, 3]

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_simulation(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        period = 1.0
        simulator.drive([period], saving=False)
        self.simulator_testing(simulator)

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_data_recorder(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        io_buf = io.BytesIO()
        simulator.outfilename = io_buf
        simulator.reset()
        period = 1.0
        simulator.drive([period], saving=True)
        io_buf.seek(0)
        file = io_buf.read()
        recorder = RobotShapeRecorderFromData(data_filename=io.BytesIO(file),
                                              record_intervals_in_steps=1,
                                              saving=False)
        recorder.reset()
        recorder.run()
        self.recorder_testing(recorder)

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_save_and_load_state(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        simulator.reset()
        period = 1.0
        simulator.drive([period], saving=False, closing=False)
        state_file = simulator.save_simulator_state()
        state_file_name = state_file.name
        simulator.close()
        simulator.reset()
        simulator.load_simulator_state(state_file_name)
        os.remove(state_file_name)
        simulator.drive([period], saving=False, closing=True)

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_class", simulator_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_simulator_copy(self, simulator_class, m):
        simulator = self.set_simulator(simulator_class, m)
        simulator.reset()
        state_file = simulator.save_simulator_state()
        state_file_name = state_file.name
        simulator.copy(state_file_name)


class TestInchwormRobotScanner(BaseSimulatorTest):
    scanner_class_set = [InchwormPeriodScan,
                         InchwormCrawlPeriodScan,
                         InchwormReversedPeriodScan,
                         InchwormBackwardPeriodScan,
                         FullInchwormPeriodScan,
                         FullFrictionalInchwormPeriodScan,
                         FrictionalFrogMotionV2PeriodScan,
                         FullFrictionalInchwormPeriodDampingFrictionScan,
                         SingleActuatorCantileverDCResponseVoltageScan,
                         SingleActuatorCantileverACResponse,
                         InchwormDCResponseVoltageScan]
    mSet = [1, 3]

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("scanner_class", scanner_class_set)
    @pytest.mark.parametrize("m", mSet)
    def test_scaning(self, scanner_class, m):
        scanner = self.set_simulator(scanner_class, m)
        scanner.scan(saving=False)


class TestRobotShapeRecorder(BaseSimulatorTest):
    simulator_name_set = ['forward',
                          'backward',
                          'crawl',
                          'full frictional inchworm']

    @pytest.mark.timeout(300)
    @pytest.mark.parametrize("simulator_name", simulator_name_set)
    def test_recording(self, simulator_name):
        recorder = self.set_recorder(simulator_name)
        recorder.run()
...
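
Here set_simulator is a factory helper on the base test class (BaseSimulatorTest, whose body is not shown in the excerpt) that instantiates the simulator under test from a parametrized class and module count m. A runnable pytest sketch of the same structure, where FakeSimulator and the helper body are hypothetical stand-ins:

import pytest

class FakeSimulator:
    # Hypothetical simulator used only to exercise the test structure.
    def __init__(self, m):
        self.m = m
        self.driven = False

    def drive(self, args, saving=False):
        self.driven = True

class BaseSimulatorTest:
    # Assumed shape of the helper: build a simulator from the
    # parametrized class; the real implementation is not shown above.
    def set_simulator(self, simulator_class, m):
        return simulator_class(m)

class TestFakeSimulator(BaseSimulatorTest):
    @pytest.mark.parametrize("m", [1, 3])
    def test_drive(self, m):
        simulator = self.set_simulator(FakeSimulator, m)
        simulator.drive([1.0], saving=False)
        assert simulator.driven

Centralizing construction in set_simulator keeps each parametrized test focused on driving the simulator and asserting on the result.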


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run robotframework-ioslibrary automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 minutes of automation testing FREE!

Next-Gen App & Browser Testing Cloud
