search_indexes.py
Source: search_indexes.py
from haystack import indexes
from anarapp.models import Yacimiento, Piedra


##################################################
# Piedra Index
##################################################
class PiedraIndex(indexes.SearchIndex, indexes.Indexable):
    # General full-text search field
    text = indexes.CharField(document=True, use_template=True)

    # Piedra fields
    codigo  = indexes.CharField(model_attr='codigo')
    nombre  = indexes.CharField(model_attr='nombre')
    figuras = indexes.CharField(model_attr='nombreFiguras')

    def get_model(self):
        return Piedra

    def index_queryset(self, using=None):
        return self.get_model().objects.all()


##################################################
# Yacimiento Index
##################################################
class YacimientoIndex(indexes.SearchIndex, indexes.Indexable):
    # General full-text search field
    text = indexes.CharField(document=True, use_template=True)

    # Yacimiento fields
    codigo    = indexes.CharField(model_attr='codigo')
    pais      = indexes.CharField(model_attr='pais')
    municipio = indexes.CharField(model_attr='municipio')
    estado    = indexes.CharField(model_attr='estado')
    nombre    = indexes.CharField(model_attr='nombre')
    localidad  = indexes.CharField()
    fotografia = indexes.CharField()

    tipo          = indexes.MultiValueField()
    exposicion    = indexes.MultiValueField()
    manifestacion = indexes.MultiValueField()
    ubicacion     = indexes.MultiValueField()
    material      = indexes.MultiValueField()
    conservacion  = indexes.MultiValueField()

    manifasociadas       = indexes.MultiValueField()
    carasurcopetrotipo   = indexes.MultiValueField()
    carasurcopetroancho  = indexes.MultiValueField()
    carasurcopetroprofun = indexes.MultiValueField()

    def get_model(self):
        return Yacimiento

    def index_queryset(self, using=None):
        return self.get_model().objects.all()

    def prepare(self, obj):
        self.prepare_data = super(YacimientoIndex, self).prepare(obj)

        # Locality of the Yacimiento
        try:
            localidad = obj.LocalidadYacimiento
            self.prepare_data['localidad'] = localidad.nombrePoblado + ' ' + localidad.nombreNoPoblado
        except:
            pass

        # Photographs
        fotografias = obj.FotografiaYac.all()
        self.prepare_data['fotografia'] = 'true' if fotografias.count() > 0 else 'false'

        # Yacimiento type
        try:
            tipo = obj.TipoYacimiento
            self.prepare_data['tipo'] = []

            if tipo.esParedRocosa:
                self.prepare_data['tipo'].append(1)
            if tipo.esRoca:
                self.prepare_data['tipo'].append(2)
            if tipo.esDolmen:
                self.prepare_data['tipo'].append(3)
            if tipo.esAbrigo:
                self.prepare_data['tipo'].append(4)
            if tipo.esCueva:
                self.prepare_data['tipo'].append(5)
            if tipo.esCuevadeRec:
                self.prepare_data['tipo'].append(6)
            if tipo.esTerrenoSup:
                self.prepare_data['tipo'].append(7)
            if tipo.esTerrenoPro:
                self.prepare_data['tipo'].append(8)
        except:
            pass

        # Exposure
        try:
            exposicion = obj.TipoExposicionYac
            self.prepare_data['exposicion'] = []

            if exposicion.expuesto:
                self.prepare_data['exposicion'].append(1)
            #if exposicion.noExpuesto:
            #    self.prepare_data['exposicion'].append(2)
            if exposicion.expuestoperiodicamente:
                self.prepare_data['exposicion'].append(3)
        except:
            pass

        # Manifestations
        manifestaciones = obj.ManifestacionYacimiento.all()
        self.prepare_data['manifestacion'] = []

        for m in manifestaciones:
            self.prepare_data['manifestacion'].append(m.tipoManifestacion)

        # Location of the manifestation
        ubicaciones = obj.UbicacionYacimiento.all()
        self.prepare_data['ubicacion'] = []

        for u in ubicaciones:
            self.prepare_data['ubicacion'].append(u.ubicacionManifestacion)

        # Material
        try:
            material = obj.MaterialYacimiento
            self.prepare_data['material'] = []

            if material.esRoca and material.esIgnea:
                self.prepare_data['material'].append(1)
            if material.esRoca and material.esMetamor:
                self.prepare_data['material'].append(2)
            if material.esRoca and material.esSedimentaria:
                self.prepare_data['material'].append(3)
            if material.esTierra:
                self.prepare_data['material'].append(4)
            if material.esHueso:
                self.prepare_data['material'].append(5)
            if material.esCorteza:
                self.prepare_data['material'].append(6)
            if material.esPiel:
                self.prepare_data['material'].append(7)
        except:
            pass

        # Conservation state
        try:
            conservacion = obj.EstadoConserYac
            self.prepare_data['conservacion'] = []

            if conservacion.enBuenEstado:
                self.prepare_data['conservacion'].append(1)
            if conservacion.estadoModificado:
                self.prepare_data['conservacion'].append(2)
            if conservacion.porErosion and conservacion.porErosionParModerada:
                self.prepare_data['conservacion'].append(3)
            if conservacion.porErosion and conservacion.porErosionParSevere:
                self.prepare_data['conservacion'].append(4)
            if conservacion.porErosion and conservacion.porErosionExtModerada:
                self.prepare_data['conservacion'].append(5)
            if conservacion.porErosion and conservacion.porErosionExtSevere:
                self.prepare_data['conservacion'].append(6)
        except:
            pass

        # Associated manifestations
        try:
            asociada = obj.ManifestacionesAsociadas
            self.prepare_data['manifasociadas'] = []

            if asociada.esLitica:
                self.prepare_data['manifasociadas'].append(1)
            if asociada.esCeramica:
                self.prepare_data['manifasociadas'].append(2)
            if asociada.esOseo:
                self.prepare_data['manifasociadas'].append(3)
            if asociada.esConcha:
                self.prepare_data['manifasociadas'].append(4)
            if asociada.esCarbon:
                self.prepare_data['manifasociadas'].append(5)
            if asociada.esMito:
                self.prepare_data['manifasociadas'].append(6)
            if asociada.esCementerio:
                self.prepare_data['manifasociadas'].append(7)
            if asociada.esMonticulo:
                self.prepare_data['manifasociadas'].append(8)
        except:
            pass

        # Petroglyph groove characteristics
        try:
            caracpetro = obj.CaracSurcoPetroglifo
            self.prepare_data['carasurcopetroancho'] = caracpetro.anchoDe + ' ' + caracpetro.anchoA
            self.prepare_data['carasurcopetroprofun'] = caracpetro.produndidadDe + ' ' + caracpetro.profundidadA
            self.prepare_data['carasurcopetrotipo'] = []

            if caracpetro.esBase:
                self.prepare_data['carasurcopetrotipo'].append(1)
            if caracpetro.esBaseRedonda:
                self.prepare_data['carasurcopetrotipo'].append(2)
            if caracpetro.esBaseAguda:
                self.prepare_data['carasurcopetrotipo'].append(3)
            if caracpetro.esBajoRelieve:
                self.prepare_data['carasurcopetrotipo'].append(4)
            if caracpetro.esBajoRelieveLineal:
                self.prepare_data['carasurcopetrotipo'].append(5)
            if caracpetro.esBajoRelievePlanar:
                self.prepare_data['carasurcopetrotipo'].append(6)
            if caracpetro.esAltoRelieve:
                self.prepare_data['carasurcopetrotipo'].append(7)
            if caracpetro.esAltoRelieveLineal:
                self.prepare_data['carasurcopetrotipo'].append(8)
            if caracpetro.esAltoRelievePlanar:
                self.prepare_data['carasurcopetrotipo'].append(9)
            if caracpetro.esAreaInterlineal:
                self.prepare_data['carasurcopetrotipo'].append(10)
            if caracpetro.esAreaInterlinealPulida:
                self.prepare_data['carasurcopetrotipo'].append(11)
            if caracpetro.esAreaInterlinealRebajada:
                self.prepare_data['carasurcopetrotipo'].append(12)
            if caracpetro.esGrabadoSuperpuesto:
                self.prepare_data['carasurcopetrotipo'].append(13)
            if caracpetro.esGrabadoRebajado:
                self.prepare_data['carasurcopetrotipo'].append(14)
        except:
            pass

        return self.prepare_data


# The alternative, dynamically generated index below is kept disabled inside a
# module-level string literal; the listing is truncated before the closing quotes.
"""
class BaseIndex(indexes.SearchIndex, indexes.Indexable):
    # General full-text search field
    text = indexes.CharField(document=True, use_template=True)

    def get_model(self):
        return Yacimiento

    def index_queryset(self, using=None):
        return self.get_model().objects.all()

    def prepare(self, instance):
        self.prepare_data = super(BaseIndex, self).prepare(instance)

        # List all the piedras only once
        piedras = None
        try:
            piedras = instance.Piedra.all()
        except:
            pass

        # Walk over every model in anarapp
        for mname, model in dynamic.get_models(anarapp.models):
            if mname == 'Yacimiento':
                continue

            foreign = None
            elem    = None
            elems   = None

            # Related to Piedra and at least one piedra exists
            if dynamic.has_attr(model, 'piedra') and piedras != None:
                try:
                    elems = [getattr(piedra, name) for piedra in piedras]
                except:
                    continue

                for fname, ftype, name in dynamic.get_attrs(model):
                    if ftype == 'OneToOneField' or ftype == 'ForeignKey':
                        continue

                    self.prepare_data[fname] = [getattr(e, name) for e in elems]
                continue

            # Related to Yacimiento
            if dynamic.has_attr(model, 'yacimiento'):
                foreign = dynamic.get_type(model, 'yacimiento')
                try:
                    elem = getattr(instance, mname)
                except:
                    continue

            # One-to-one relations: one field per model
            if foreign == 'OneToOneField':
                for fname, ftype, name in dynamic.get_attrs(model):
                    if ftype == 'OneToOneField' or ftype == 'ForeignKey':
                        continue

                    value = getattr(elem, name)

                    # Troll attribute
                    if fname.endswith('cantidad'):
                        value = unicode(value)

                    # Troll type
                    if ftype == 'BooleanField':
                        value = 'true' if getattr(elem, name) else 'false'

                    self.prepare_data[fname] = value

            # Many-to-many relations: every field becomes multi-value
            elif foreign == 'ForeignKey':
                elems = elem.all()

                for fname, ftype, name in dynamic.get_attrs(model):
                    if ftype == 'OneToOneField' or ftype == 'ForeignKey':
                        continue

                    values = []

                    # Handling the troll type
                    if ftype == 'BooleanField':
                        for e in elems:
                            values.append('true' if getattr(e, name) else 'false')
                    else:
                        values = [getattr(e, name) for e in elems]

                    self.prepare_data[fname] = values

        #print self.prepare_data
        return self.prepare_data


def crear_yacimiento_index():
    attrs = {}

    # Walk over every model in anarapp
    for mname, model in dynamic.get_models(anarapp.models):
        foreign = None

        # Related to Yacimiento
        if dynamic.has_attr(model, 'yacimiento'):
            foreign = dynamic.get_type(model, 'yacimiento')

        # Related to Piedra
        elif dynamic.has_attr(model, 'piedra'):
            foreign = dynamic.get_type(model, 'piedra')

        # It is the main class
        elif mname == 'Yacimiento':
            for fname, ftype, name in dynamic.get_attrs(model):
                attrs[fname] = indexes.CharField(model_attr=name)
            continue

        # One-to-one relation: one field per model
        if foreign == 'OneToOneField':
            for fname, ftype, name in dynamic.get_attrs(model):
                if ftype == 'CharField':
                    attrs[fname] = indexes.CharField()
                elif ftype == 'IntegerField':
                    attrs[fname] = indexes.IntegerField(null=True)
                elif ftype == 'BooleanField':
                    attrs[fname] = indexes.CharField()
                elif ftype == 'DateField':
                    attrs[fname] = indexes.DateField()

        # Many-to-many relation: every field becomes multi-value
        elif foreign == 'ForeignKey':
            for fname, ftype, name in dynamic.get_attrs(model):
                if ftype == 'OneToOneField' or ftype == 'ForeignKey':
                    continue
                attrs[fname] = indexes.MultiValueField()

    return type("YacimientoIndex", (BaseIndex, indexes.Indexable), attrs)


########################
# Creating the index
########################
YacimientoIndex = crear_yacimiento_index()
...
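Both indexes declare text = indexes.CharField(document=True, use_template=True), so Haystack expects data templates named search/indexes/anarapp/piedra_text.txt and search/indexes/anarapp/yacimiento_text.txt, and the indexes are populated with the usual rebuild_index management command. The sketch below shows one way the prepared fields could be queried with Haystack's SearchQuerySet; the field names come from YacimientoIndex above, but the filter values ('petroglifo', tipo=2, fotografia='true') are purely illustrative and not taken from the project.

# A minimal query sketch, assuming the indexes above are registered and built.
from haystack.query import SearchQuerySet
from anarapp.models import Yacimiento

# Free-text search over the `text` document field, restricted to Yacimiento results.
sitios = SearchQuerySet().models(Yacimiento).filter(content='petroglifo')

# Filtering on the prepared fields: sites whose `tipo` list contains 2 (esRoca)
# and that have at least one photograph.
rocas_con_fotos = (
    SearchQuerySet()
    .models(Yacimiento)
    .filter(tipo=2)
    .filter(fotografia='true')
)

for result in rocas_con_fotos:
    # Stored index fields are available as attributes on each SearchResult.
    print(result.codigo, result.nombre)

Because tipo, material, conservacion and the other multi-value fields store numeric codes rather than labels, any front end consuming these results needs the same code-to-label mapping that prepare() applies.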
main.py
Source: main.py
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, WeightedRandomSampler
import numpy as np
import pickle
import gensim
import fasttext
import math

import utils.prepare_data as prepare_data
from model import Encoder, Transformer
from config.config import *
from train import train
from get_predictions import get_predictions

torch.manual_seed(0)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(device)

# load features and labels
print('Loading data...')

# Lahjoita Puhetta data
#features_train = prepare_data.load_features_combined('data/gender_modeling/features/train.npy', max_len)
#trn_train, topic_train = prepare_data.load_transcripts('data/gender_modeling/transcripts_gender/train.txt')
#
#features_dev = prepare_data.load_features_combined('data/gender_modeling/features/dev.npy', max_len)
#trn_dev, topic_dev = prepare_data.load_transcripts('data/gender_modeling/transcripts_gender/dev.txt')

# test Lahjoita Puhetta
features_train = prepare_data.load_features_combined('data/gender_modeling/features/test.npy', max_len)
trn_train, topic_train = prepare_data.load_transcripts('data/gender_modeling/transcripts_gender/test.txt')

features_train = features_train
trn_train = trn_train
topic_train = topic_train

features_dev = features_train
trn_dev = trn_train
topic_dev = topic_train
print('Done...')

print('Loading embeddings...')
embeddings = fasttext.load_model('weights/embeddings/cc.fi.300.bin')
print('Done...')

# generate index dictionaries
#char2idx, idx2char = prepare_data.encode_data(target_train)
#topic2idx = {}
#for i in topic_train:
#    i = i[0]
#    if i not in topic2idx.keys():
#        topic2idx[i] = len(topic2idx) + 1
#
#idx2topic = {v: k for k, v in topic2idx.items()}

# generate index dictionaries
#with open('weights/topic2idx.pkl', 'wb') as f:
#    pickle.dump(topic2idx, f, protocol=pickle.HIGHEST_PROTOCOL)
#
#with open('weights/idx2topic.pkl', 'wb') as f:
#    pickle.dump(idx2topic, f, protocol=pickle.HIGHEST_PROTOCOL)

# For topic detection
#with open('weights/topic2idx.pkl', 'rb') as f:
#    topic2idx = pickle.load(f)
#with open('weights/idx2topic.pkl', 'rb') as f:
#    idx2topic = pickle.load(f)

# for gender detection
topic2idx = {'Mies': 0, 'Nainen': 1}
idx2topic = {0: 'Mies', 1: 'Nainen'}

# for age detection
#topic2idx = {'1-10': 1, '11-20': 2, '21-30': 3, '31-40': 4, '41-50': 5, '51-60': 6, '61-70': 7, '71-80': 8, '81-90': 9, '91-100': 10, '101+': 11}
#topic2idx = {'1-20': 0, '21-60': 1, '61-101+': 2}
#idx2topic = {v: k for k, v in topic2idx.items()}

with open('weights/char2idx.pkl', 'rb') as f:
    char2idx = pickle.load(f)
with open('weights/idx2char.pkl', 'rb') as f:
    idx2char = pickle.load(f)

# convert topics to indices
indexed_topic_train = prepare_data.topic_to_idx(topic_train, topic2idx)
indexed_topic_dev = prepare_data.topic_to_idx(topic_dev, topic2idx)

# convert words to vectors
indexed_word_train = prepare_data.word_to_idx(trn_train, embeddings)
indexed_word_dev = prepare_data.word_to_idx(trn_dev, embeddings)

# combine features and topics in a tuple
train_data = prepare_data.combine_data(features_train, indexed_topic_train, indexed_word_train)
dev_data = prepare_data.combine_data(features_dev, indexed_topic_dev, indexed_word_dev)

# remove extra data that doesn't fit in batch
train_data = prepare_data.remove_extra(train_data, batch_size)
dev_data = prepare_data.remove_extra(dev_data, batch_size)

# count class frequencies for the weighted sampler
class_1 = 0
class_2 = 0
class_3 = 0

indexed_topic_train = indexed_topic_train[:3520]
for sample in indexed_topic_train:
    if sample.item() == 0:
        class_1 += 1
    if sample.item() == 1:
        class_2 += 1
    if sample.item() == 2:
        class_3 += 1

class_sample_counts = [class_1, class_2, class_3]
weights = 1. / torch.tensor(class_sample_counts, dtype=torch.float)
sample_weights = np.array([weights[t.item() - 1] for t in indexed_topic_train])
sample_weights = torch.from_numpy(sample_weights)
# Note: the sampler is built here but not passed to the DataLoaders below,
# which shuffle instead.
sampler = WeightedRandomSampler(sample_weights, len(sample_weights), replacement=True)

pairs_batch_train = DataLoader(dataset=train_data,
                               batch_size=batch_size,
                               shuffle=True,
                               collate_fn=prepare_data.collate,
                               pin_memory=True)

pairs_batch_dev = DataLoader(dataset=dev_data,
                             batch_size=batch_size,
                             shuffle=True,
                             collate_fn=prepare_data.collate,
                             pin_memory=True)

transformer = Transformer(features_train[0].size(1), len(topic2idx), n_head_encoder, d_model, n_layers_encoder, max_len).to(device)

for p in transformer.parameters():
    if p.dim() > 1:
        nn.init.xavier_uniform_(p)

total_trainable_params = sum(p.numel() for p in transformer.parameters() if p.requires_grad)
print('The number of trainable parameters is: %d' % (total_trainable_params))

# train
if skip_training == False:
    print('Training...')

    #weight_1 = len(indexed_topic_train) / (3 * class_1)
    #weight_2 = len(indexed_topic_train) / (3 * class_2)
    #weight_3 = len(indexed_topic_train) / (3 * class_3)

    #weight_1 = 1 / class_1
    #weight_2 = 1 / class_2
    #weight_3 = 1 / class_3
    #weight = torch.Tensor([weight_1, weight_2, weight_3]).to(device)

    criterion = nn.CrossEntropyLoss(reduction='mean')
    optimizer = torch.optim.AdamW(transformer.parameters(), lr=lr)

    #checkpoint = torch.load('weights/gender_new/state_dict_26.pt', map_location=torch.device('cpu'))
    #transformer.load_state_dict(checkpoint['transformer'])
    #optimizer.load_state_dict(checkpoint['optimizer'])

    train(pairs_batch_train,
          pairs_batch_dev,
          transformer,
          criterion,
          optimizer,
          num_epochs,
          batch_size,
          len(features_train),
          len(features_dev),
          device)
else:
    checkpoint = torch.load('weights/gender/state_dict_34.pt', map_location=torch.device('cpu'))
    transformer.load_state_dict(checkpoint['transformer'])

# switch to single-sample batches for prediction on the dev split
batch_size = 1
pairs_batch_train = DataLoader(dataset=dev_data,
                               batch_size=batch_size,
                               shuffle=False,
                               collate_fn=prepare_data.collate,
                               pin_memory=True)
...
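main.py pulls its hyperparameters in with the star import from config.config, so names such as max_len, batch_size, lr, num_epochs, skip_training, d_model, n_head_encoder and n_layers_encoder must be defined there. Below is a minimal sketch of such a config module; the names are taken from the references in main.py, but the values are placeholders rather than the author's actual settings.

# config/config.py -- sketch only; values are assumptions, not the original settings
max_len = 1000            # assumed maximum input length (feature frames)
batch_size = 32           # assumed batch size
lr = 1e-4                 # assumed learning rate for AdamW
num_epochs = 50           # assumed number of training epochs
skip_training = False     # True skips training and loads a saved checkpoint
d_model = 256             # assumed Transformer model dimension
n_head_encoder = 4        # assumed number of attention heads in the encoder
n_layers_encoder = 4      # assumed number of encoder layers

With skip_training = True the script takes the else branch and loads weights/gender/state_dict_34.pt instead of training, then rebuilds the DataLoader with batch_size = 1 for prediction.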
