How to use the test_z2 method in hypothesis

Best Python code snippets using hypothesis
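In the snippets on this page, test_z2 is an ordinary variable name (held-out accuracy columns, feature arrays, or latent vectors), not an API exposed by the hypothesis library itself. If what you actually want is a property-based test named test_z2 written with hypothesis, a minimal sketch might look like this; the function under test and the property are illustrative assumptions, not taken from the snippets below.

# Hedged sketch: a property-based test named test_z2 using hypothesis + pytest.
# The property (sorting is idempotent) is only an illustration.
from hypothesis import given, strategies as st

@given(st.lists(st.integers()))
def test_z2(zs):
    # Sorting an already-sorted list must not change it.
    assert sorted(sorted(zs)) == sorted(zs)

Run it with pytest; hypothesis generates many input lists automatically and shrinks any failing example to a minimal counterexample.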

10_10cv.py

Source: 10_10cv.py (GitHub)


#knn
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import StratifiedKFold
import csv
import numpy as np

with open("dataset_dt.txt", "r") as f:
    reader = csv.reader(f)
    line = [row for row in reader]
with open("dataset_rf.txt", "r") as f:
    reader = csv.reader(f)
    line3 = [row for row in reader]
line2 = []
with open("t_test_dt_rf.txt", "r") as f:
    reader = csv.reader(f)
    for row in reader:
        if len(row) != 0:
            line2.append(row)
'''
with open("train_x_0_0.txt","r") as f:
    reader = csv.reader(f)
    line4 = [row for row in reader]
for i in range(len(line)):
    for j in range(len(line[0])):
        if line[i][j] == "":
            print(i)
            print(line[i])
'''
'''
with open("dataset_raw.txt","r") as f:
    reader = csv.reader(f)
    line5 = [row for row in reader]
print(line5[28612])
'''
n = len(line)
m = len(line[0])
#print(n,m)
#print(len(line2))
#X attributes; Y class labels; Z accuracies
X = [[None for i in range(m-1)] for j in range(n)]
#Y = [None for i in range(n)]
Z = [None for i in range(n)]
Z2 = [None for i in range(n)]
Y = []
for row in line2:
    Y.append(int(row[0]))

for i in range(n):
    for j in range(m-1):
        X[i][j] = line[i][j]
    Z[i] = line[i][m-1]
    Z2[i] = line3[i][m-1]
#print(len(X),len(Y),len(Z))
#dividing on the basis of class
label_wise = [[] for i in range(m-1)]
for i in range(n):
    index = Y[i]
    label_wise[index].append(i)
print(len(label_wise[0]), len(label_wise[1]), len(label_wise[2]))
accuracy = [[] for i in range(15)]
for k in range(2, 3):
    print(k)
    #start of 10 x 10 cv
    for n0 in range(10):
        label_wise_all = []
        for i in label_wise:
            np.random.shuffle(i)
            for j in i:
                label_wise_all.append(j)
        one, two, three, four, five, six, seven, eight, nine, ten = [[] for i in range(10)]
        folds = [one, two, three, four, five, six, seven, eight, nine, ten]
        for j in range(len(label_wise_all)):
            temp = j % 10
            temp = folds[temp]
            temp.append(label_wise_all[j])
        for test_fold in range(10):
            train_x = []
            train_y = []
            train_z = []
            train_z2 = []
            test_x = []
            test_y = []
            test_z = []
            test_z2 = []
            for fold in range(10):
                temp = folds[fold]
                if fold == test_fold:
                    for i in temp:
                        test_x.append(X[i])
                        test_y.append(Y[i])
                        test_z.append(Z[i])
                        test_z2.append(Z2[i])
                else:
                    for i in temp:
                        train_x.append(X[i])
                        train_y.append(Y[i])
                        train_z.append(Z[i])
                        train_z2.append(Z2[i])
            train_y = np.array(train_y).reshape(-1, 1)
            test_y = np.array(test_y).reshape(-1, 1)
            train_z = np.array(train_z).reshape(-1, 1)
            train_z2 = np.array(train_z2).reshape(-1, 1)
            test_z = np.array(test_z).reshape(-1, 1)
            test_z2 = np.array(test_z2).reshape(-1, 1)
            file_name = "train_x_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(train_x)
            file_name = "train_y_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(train_y)
            file_name = "train_dt_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(train_z)
            file_name = "train_rf_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(train_z2)
            file_name = "test_x_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(test_x)
            file_name = "test_y_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(test_y)
            file_name = "test_dt_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(test_z)
            file_name = "test_rf_" + str(n0) + '_' + str(test_fold) + ".txt"
            with open(file_name, "w") as f:
                writer = csv.writer(f)
                writer.writerows(test_z2)
...
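The script above writes one file per repetition (n0) and fold (test_fold) for the decision-tree and random-forest accuracy columns Z and Z2 (saved as test_dt_*/test_rf_*, with test_z2 holding the rf column). If the goal is to compare the two models over the 10x10 folds, a paired t-test over the per-fold accuracies is one common follow-up. The sketch below is an assumption about that workflow: it presumes each test_dt_*/test_rf_* file holds one numeric value per row, which the snippet's data layout does not guarantee.

# Hedged sketch: paired t-test of dt vs rf accuracies read back from the
# per-fold files written above. Assumes one numeric value per CSV row.
import csv
import numpy as np
from scipy import stats

dt_scores, rf_scores = [], []
for n0 in range(10):
    for test_fold in range(10):
        for scores, tag in ((dt_scores, "dt"), (rf_scores, "rf")):
            with open("test_%s_%d_%d.txt" % (tag, n0, test_fold)) as f:
                vals = [float(row[0]) for row in csv.reader(f) if row]
            scores.append(np.mean(vals))

t_stat, p_value = stats.ttest_rel(dt_scores, rf_scores)
print("paired t-test over 100 folds: t=%.3f, p=%.4f" % (t_stat, p_value))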


classifier.py

Source: classifier.py (GitHub)


import tensorflow as tf
import numpy as np
from sklearn.metrics import roc_auc_score
import matplotlib.pyplot as plt
from sklearn.svm import SVC
plt.switch_backend("agg")
inp_dim = 100
num_classes = 2
niter = 2000
reg = .05
def get_indices(raw_labels):
#    i2 = (raw_labels==2).nonzero()[0]
    i1 = (raw_labels==1).nonzero()[0]
    i0 = (raw_labels==0).nonzero()[0]
    a = np.amin([i1.shape[0], i0.shape[0]])
#    np.random.shuffle(i2)
    np.random.shuffle(i1)
    np.random.shuffle(i0)
    r = np.concatenate((i1[0:a], i0[0:a]))
    np.random.shuffle(r)
    return r
#def SVM_C():
#    train_z2 = np.load("/opt/data/saket/gene_data/data/train_z2.npy")
#    test_z2 = np.load("/opt/data/saket/gene_data/data/test_z2.npy")
#    labels = np.load("/opt/data/saket/gene_data/data/data_label.npy")
#    i2 = (labels==2).nonzero()[0]
#    i1 = (labels==1).nonzero()[0]
#    i0 = (labels==0).nonzero()[0]
#    labels[i1] = 1
#    labels[i2] = 0
#    labels[i0] = 0
#    train_labels = labels[0:160]
#    test_labels = labels[160:]
#    clf = SVC(probability=True)
#    train_z2 = np.mean(train_z2, axis=0)
#    clf.fit(train_z2, train_labels)
#    test_z2 = np.mean(test_z2, axis=0)
#    metrics = clf.predict(test_z2)
#    Accuracy = 0
#    n = metrics.shape[0]
#    for i in range(n):
#        if metrics[i] == test_labels[i]:
#            Accuracy += 1
#    print("Accuracy:", Accuracy)
def normalize(x):
    m, v = tf.nn.moments(x, [0])
    y = (x - m) / tf.sqrt(v)
    return y
def main():
    x = tf.placeholder(tf.float32, [None, inp_dim])
    y1 = normalize(x)
    W = tf.Variable(tf.ones([inp_dim, num_classes]))
    b = tf.Variable(tf.zeros([num_classes]))
    y = tf.matmul(y1, W) + b
    ys = tf.nn.softmax(y)
    y_ = tf.placeholder(tf.int64, [None])
    cross_entropy = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_, logits=y))
    train_step = tf.train.GradientDescentOptimizer(0.005).minimize(cross_entropy + reg*tf.nn.l2_loss(W))

    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())

    train_z2 = np.load("/opt/data/saket/gene_data/data/train_z2.npy")
    test_z2 = np.load("/opt/data/saket/gene_data/data/test_z2.npy")
    labels = np.load("/opt/data/saket/gene_data/data/data_label.npy")
    #train_z2 = train_z2*(10**5)
    #test_z2 = test_z2*(10**5)
    print("Class 0 Vs all")
    i2 = (labels==2).nonzero()[0]
    i1 = (labels==1).nonzero()[0]
    i0 = (labels==0).nonzero()[0]
    labels[i1] = 0
    labels[i2] = 1
    labels[i0] = 0
    train_labels = labels[0:160]
    test_labels = labels[160:]

    np.random.shuffle(train_z2)
    closs = []
    correct_prediction = tf.equal(tf.argmax(y, 1), y_)
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    for i in range(niter):
        z2 = train_z2[i % train_z2.shape[0]]
        #print("z2:", z2)
        indices = get_indices(train_labels)
#        print("normalized data:", sess.run(y1, feed_dict={x:z2[indices], y_:train_labels[indices]}))
        l, train_a, _ = sess.run([cross_entropy, accuracy, train_step], feed_dict={x:z2[indices], y_:train_labels[indices]})
        closs.append((l, train_a))
        # print("Step:%d accuracy:%f loss:%f" % (i, train_a, l))
    # Test the trained model
    testylist = []
    for i in range(test_z2.shape[0]):
        y = sess.run(ys, feed_dict={x:test_z2[i], y_:test_labels})
        testylist.append(y)
        #print(sess.run(y1, feed_dict={x:test_z2[i], y_:test_labels}))
    testy = np.array(testylist)
    testy = np.mean(testy, axis=0)
    pred_labels = np.argmax(testy, axis=1)
    print("pred_labels:", pred_labels)
#    indices = np.concatenate((np.expand_dims(np.arange(0, pred_labels.shape[0]), axis=1), np.expand_dims(pred_labels, axis=1)), axis=1)
    score = testy[np.arange(0, pred_labels.shape[0]), pred_labels]
    print("indices:", indices.shape)
    print("score shape:", score.shape)
    print("test_labels_shape:", test_labels.shape)
    print("ROC Score:", roc_auc_score(test_labels, score))
    n = pred_labels.shape[0]
    TP = 0
    FP = 0
    TN = 0
    FN = 0
    for i in range(n):
        if test_labels[i] == 0:
            if pred_labels[i] == 0:
                TN += 1
            else:
                FP += 1
        else:
            if pred_labels[i] == 0:
                FN += 1
            else:
                TP += 1
    test_accuracy = pred_labels == test_labels
    test_accuracy = np.sum(test_accuracy)
    print("Test classification accuracy:", test_accuracy)
    closs = np.array(closs)
    plt.plot(closs[:, 0])
    plt.savefig("closs.png")
    plt.close()
    print("Test FP:%d TP:%d FN:%d TN:%d" % (FP, TP, FN, TN))
...
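The manual TP/FP/TN/FN loop at the end of main() can be written more compactly with scikit-learn. A minimal equivalent sketch, assuming binary 0/1 labels as produced by the script's relabeling; note that accuracy_score returns a fraction, whereas the script prints a raw count of correct predictions.

# Hedged sketch: scikit-learn equivalent of the TP/FP/TN/FN counting above.
from sklearn.metrics import accuracy_score, confusion_matrix

def summarize(test_labels, pred_labels):
    # For labels=[0, 1], confusion_matrix returns [[TN, FP], [FN, TP]].
    tn, fp, fn, tp = confusion_matrix(test_labels, pred_labels, labels=[0, 1]).ravel()
    print("Test FP:%d TP:%d FN:%d TN:%d" % (fp, tp, fn, tn))
    print("Test classification accuracy (fraction):", accuracy_score(test_labels, pred_labels))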


gan_tester.py

Source: gan_tester.py (GitHub)


import random as rand
import time
# numpy and tensorflow are required by the code below; AIBlocks, Math, Log,
# and SendImageData are provided by the hosting framework.
import numpy as np
import tensorflow as tf
#description Generative Adversarial Networks trainer
#icon fa fa-retweet
#MAIN=Run
#param object
_input = None
#param object
Generator = None
#param object
Discriminator = None
#param int
latent_size = 10
#param folder
save_path = ""
#param list: Image,Sound,Text,3D Model,Raw
preview = "Image"
#editor param float
#preview_state = 0.5
def Run(self):
    self.X = tf.placeholder(tf.float32, shape=[None, self._input.input_size], name="x_input")
    self.Z = tf.placeholder(tf.float32, shape=[None, self.latent_size], name="z_input")
    fakeX = self.Generator.Run(self.Z)

    #initialize everything
    instance = AIBlocks.InitModel(load_path=self.save_path)
    Log("Model initialized!")

    acc_log = []
    resetRand = 0
    test_Z = np.random.uniform(-1., 1., size=[self.latent_size])
    test_Z1 = np.random.uniform(-1., 1., size=[self.latent_size])
    test_Z2 = np.random.uniform(-1., 1., size=[self.latent_size])
    targetRand = np.random.uniform(-1., 1., size=[self.latent_size])
    targetRand1 = np.random.uniform(-1., 1., size=[self.latent_size])
    targetRand2 = np.random.uniform(-1., 1., size=[self.latent_size])

    while 1:
        Math.LerpVec(test_Z, targetRand, 0.1)
        Math.LerpVec(test_Z1, targetRand1, 0.1)
        Math.LerpVec(test_Z2, targetRand2, 0.1)
        if resetRand > 10:
            targetRand = np.random.uniform(-1., 1., size=[self.latent_size])
            targetRand1 = np.random.uniform(-1., 1., size=[self.latent_size])
            targetRand2 = np.random.uniform(-1., 1., size=[self.latent_size])
            resetRand = 0
        resetRand += 1

        imagined = instance.Run(fakeX, feed_dict={self.Z: [test_Z]})[0]
        imagined1 = instance.Run(fakeX, feed_dict={self.Z: [test_Z1]})[0]
        imagined2 = instance.Run(fakeX, feed_dict={self.Z: [test_Z2]})[0]
        if self.preview == "Image":
            SendImageData(self.id, imagined, self._input.image_size[0], self._input.image_size[1], "1")
            SendImageData(self.id, imagined1, self._input.image_size[0], self._input.image_size[1], "2")
            SendImageData(self.id, imagined2, self._input.image_size[0], self._input.image_size[1], "3")
        elif self.preview == "Sound":
            pass
        else:
            pass
        time.sleep(0.01)
    AIBlocks.SaveModel(instance)
...
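Math.LerpVec, AIBlocks, Log, and SendImageData come from the hosting framework and are not shown here. Assuming LerpVec nudges a latent vector in place a fraction of the way toward a target (which is how the loop above appears to use test_Z, test_Z1, and test_Z2), a NumPy stand-in might look like this:

# Hedged sketch: a NumPy stand-in for Math.LerpVec, assuming in-place
# linear interpolation of a latent vector toward a target vector.
import numpy as np

def lerp_vec(vec, target, t):
    # vec <- vec + t * (target - vec); returns vec for convenience.
    vec += t * (target - vec)
    return vec

latent_size = 10
test_Z2 = np.random.uniform(-1., 1., size=[latent_size])
targetRand2 = np.random.uniform(-1., 1., size=[latent_size])
lerp_vec(test_Z2, targetRand2, 0.1)  # test_Z2 drifts smoothly toward targetRand2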


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run hypothesis automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
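A minimal sketch of what a Hypothesis-driven check against a remote LambdaTest browser session might look like; the hub URL format, capability names, environment variables, and the round-trip property are assumptions to adapt to your own account and Selenium version, not verified details of the LambdaTest API.

# Hedged sketch: a hypothesis property test that round-trips generated data
# through a remote browser session. Hub URL and capabilities are assumptions.
import os
from hypothesis import given, settings, strategies as st
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

HUB_URL = "https://%s:%s@hub.lambdatest.com/wd/hub" % (
    os.environ["LT_USERNAME"], os.environ["LT_ACCESS_KEY"])  # assumed URL format

@settings(max_examples=5, deadline=None)  # remote sessions are slow; keep examples few
@given(st.text(alphabet=st.characters(min_codepoint=32, max_codepoint=126), max_size=50))
def test_z2(payload):
    options = Options()
    options.set_capability("browserName", "Chrome")
    driver = webdriver.Remote(command_executor=HUB_URL, options=options)
    try:
        driver.get("https://example.com")
        # Property: a printable string survives a round trip through the browser's JS engine.
        assert driver.execute_script("return arguments[0];", payload) == payload
    finally:
        driver.quit()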

Try LambdaTest Now!!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud
