How to use the c_test_1 method in Lemoncheesecake

Best Python code snippets using lemoncheesecake. Of the snippets below, B.py is the one that actually defines c_test_1 as a lemoncheesecake test; in the other two, c_test_1 appears only as a local variable (NaiveBayesKde.py) or as part of a file name (nankomah_Project2.py).

NaiveBayesKde.py

Source: NaiveBayesKde.py (GitHub)



import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import KernelDensity
from sklearn.model_selection import StratifiedKFold

class NaiveBayesKde:
    def __init__(self):
        self.best_bw = 0
        self.best_valid_err = 1
        self._trained = False
        self._errors = []

    def _kde_estimate(self, fit_x, train_x, valid_x, bw):
        kde = KernelDensity(bandwidth=bw).fit(fit_x)
        return kde.score_samples(train_x), kde.score_samples(valid_x)

    def _kde(self, fit_x, test_x, bw):
        kde = KernelDensity(bandwidth=bw).fit(fit_x)
        return kde.score_samples(test_x)

    def _kde_calc_fold(self, Xs, Ys, tr_ix, va_ix, bw):
        x_c0 = Xs[tr_ix[Ys[tr_ix] == 0]]
        x_c1 = Xs[tr_ix[Ys[tr_ix] == 1]]
        # probability of the classes
        c_train_0 = c_valid_0 = np.log(len(tr_ix[Ys[tr_ix] == 0]) / len(tr_ix))
        c_train_1 = c_valid_1 = np.log(len(tr_ix[Ys[tr_ix] == 1]) / len(tr_ix))
        for feat in range(0, Xs.shape[1]):
            # calculate logarithmic density for class 0, i.e log(P(x_i | c_0))
            t_dens, v_dens = self._kde_estimate(x_c0[:,[feat]], Xs[tr_ix][:,[feat]], Xs[va_ix][:,[feat]], bw)
            c_train_0 += t_dens
            c_valid_0 += v_dens
            # calculate logarithmic density for class 1, i.e log(P(x_i | c_1))
            t_dens, v_dens = self._kde_estimate(x_c1[:,[feat]], Xs[tr_ix][:,[feat]], Xs[va_ix][:,[feat]], bw)
            c_train_1 += t_dens
            c_valid_1 += v_dens
        # calculate predictions
        predict_train = np.argmax([c_train_0, c_train_1], axis=0)
        predict_valid = np.argmax([c_valid_0, c_valid_1], axis=0)
        # compare the predictions with our data sets
        cmp_train = Ys[tr_ix] == predict_train
        cmp_valid = Ys[va_ix] == predict_valid
        # obtain training error and validation error
        train_err = (len(tr_ix) - np.sum(cmp_train)) / len(tr_ix)
        valid_err = (len(va_ix) - np.sum(cmp_valid)) / len(va_ix)
        return train_err, valid_err

    def optimize_bandwidth(self, X_r, Y_r, folds, start_bw, end_bw, step_bw):
        kfold = StratifiedKFold(n_splits=folds)
        best_bw = 0
        best_valid_err = 1
        for bw in np.arange(start_bw, end_bw + step_bw, step_bw):
            train_err = valid_err = 0
            for tr_ix, va_ix in kfold.split(Y_r, Y_r):
                tr_err, va_err = self._kde_calc_fold(X_r, Y_r, tr_ix, va_ix, bw)
                train_err += tr_err / folds
                valid_err += va_err / folds
            self._errors.append([bw, train_err, valid_err])
            if (valid_err < best_valid_err):
                best_bw = bw
                best_valid_err = valid_err
        self.best_bw = best_bw
        self.best_valid_err = best_valid_err
        return self.best_bw, self.best_valid_err

    def fit(self, X_r, Y_r):
        if self.best_bw == 0:
            raise Exception('The bandwidth should be optimized first')
        self._p_c0 = np.log(np.sum(Y_r == 0) / len(Y_r))
        self._p_c1 = np.log(np.sum(Y_r == 1) / len(Y_r))
        self._x_c0 = X_r[Y_r == 0]
        self._x_c1 = X_r[Y_r == 1]
        self._trained = True

    def predict(self, X_t, bw):
        if self._trained == False:
            raise Exception('The classifier is not trained')
        c_test_0 = self._p_c0
        c_test_1 = self._p_c1
        for feat in range(0, X_t.shape[1]):
            test_dens = self._kde(self._x_c0[:,[feat]], X_t[:,[feat]], bw)
            c_test_0 += test_dens
            test_dens = self._kde(self._x_c1[:,[feat]], X_t[:,[feat]], bw)
            c_test_1 += test_dens
        predictions = np.argmax([c_test_0, c_test_1], axis=0)
        return predictions

    def plot_errors(self, save_fig=True, fig_name='NB.png'):
        errors = np.array(self._errors)
        plt.figure()
        plt.title('Training vs Cross Validation Errors')
        plt.plot(errors[:,0], errors[:,1], 'b', label='Training Error')
        plt.plot(errors[:,0], errors[:,2], 'r', label='Cross-Validation Error')
        plt.legend()

        if save_fig:
            plt.savefig(fig_name)
        plt.show()
...
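In this class, c_test_0 and c_test_1 are the running per-class log-posteriors inside predict(). Below is a minimal usage sketch on synthetic two-class data; the data, bandwidth grid, fold count, and seed are illustrative assumptions, not taken from the original project, and the code assumes the NaiveBayesKde class above is in scope.

import numpy as np

# Minimal usage sketch with synthetic data (assumed, not from the original project)
rng = np.random.default_rng(0)
X0 = rng.normal(loc=0.0, scale=1.0, size=(200, 2))   # class 0 samples
X1 = rng.normal(loc=2.0, scale=1.0, size=(200, 2))   # class 1 samples
X = np.vstack([X0, X1])
Y = np.concatenate([np.zeros(200), np.ones(200)])

nb = NaiveBayesKde()
# grid-search the KDE bandwidth with 5-fold stratified cross-validation
best_bw, best_err = nb.optimize_bandwidth(X, Y, folds=5, start_bw=0.02, end_bw=0.6, step_bw=0.02)
nb.fit(X, Y)
predictions = nb.predict(X, best_bw)   # predict() expects the bandwidth explicitly
print(best_bw, np.mean(predictions == Y))
nb.plot_errors(save_fig=False)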


nankomah_Project2.py

Source: nankomah_Project2.py (GitHub)



#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# import os
# os.chdir('./Project2/')
import numpy as np
from cvxopt import matrix
from cvxopt.solvers import qp
import matplotlib.pyplot as plt

def generate_vars(n=100, m=30):
    """[returns B - nxn, A - mxn, c - mx1]
    Args:
        n (int, optional): [number of columns]. Defaults to 100.
        m (int, optional): [number of rows]. Defaults to 30.
    Returns:
        [np arrays]: [Generated Matrix and Vector]
    """
    sigma = 1 / (4*n)
    I = np.eye(n)
    K = np.random.normal(scale=sigma, size=(n,n))

    A = np.random.uniform(size=(m,n))
    B = I + K + K.T
    c = np.random.uniform(size=(m))
    return A, B, c

def cvxpot_qp(A, c, B):
    """[return x_star]
    Args:
        A ([np.array]): [A - mxn]
        c ([np.array]): [c - mx1]
        B ([np.array]): [B - nxn]
    Returns:
        [type]: [x_star]
    """
    m = A.shape[0]
    n = A.shape[1]
    q = matrix(np.zeros((n,1)))
    P = matrix(B)
    A = matrix(A)
    b = matrix(c)
    cvx_results = qp(P=P, q=q, A=A, b=b)
    return np.asarray(cvx_results['x']).flatten()

def lagrangian_x(A, c, B, c_k, lam):
    """[returns x_k based on d_x alm solve for x]
    Args:
        A ([np.array]): [A - mxn]
        c ([np.array]): [c - mx1]
        B ([np.array]): [B - nxn]
        c_k ([float]): [variable with the following condition:
            must be > 0, -> infty, c_{i} <= c_{i+1}]
        lam ([np.array]): [lam - mx1]
    Returns:
        [x]: [mx1 np.array]
    """
    leftside = 2*B + (c_k*A.T@A)
    rightside = (c_k*A.T@c) - (A.T@lam)
    x = np.linalg.pinv(leftside)@rightside
    return x.flatten()

def constraint(A, x, c):
    """[returns output of constraint]
    Args:
        A ([np.array]): [A - mxn]
        x ([np.array]): [x - mx1]
        c ([np.array]): [c - mx1]
    Returns:
        [type]: [mx1]
    """
    return (A@x) - c

def alm(B, A, c, epsilon=10**(-10)):
    """[uses Augmented Lagrangian method to find an x_k similar to x_star]
    Args:
        B ([np.array]): [B - nxn]
        A ([np.array]): [A - mxn]
        c ([np.array]): [c - mx1]
        epsilon ([float], optional): [threshold value]. Defaults to 10**(-10).
    Returns:
        [list]: [list of relative error]
    """
    x_star = np.asarray(cvxpot_qp(A, c, B*2)).flatten()
    error_i = []
    lambda_k = np.zeros(c.shape)
    # c_k must be > 0, -> infinity, c_k_{i} <= c_k_{i+1}
    c_k = .1
    i = 0
    while True:
        x_k = lagrangian_x(A, c, B, c_k, lambda_k)
        error = np.linalg.norm(x_k - x_star) / np.linalg.norm(x_star)
        error_i.append(error)
        stopping = np.linalg.norm(A@x_k - c)
        if stopping <= epsilon:
            return x_k, i, error_i

        lambda_k += c_k*constraint(A, x_k, c)
        c_k = 2.0**i
        i += 1

def main():
    """[main function]
    """
    # a)
    A = np.loadtxt('A_test_1.txt')
    c = np.loadtxt('c_test_1.txt')
    B = np.loadtxt('B_test_1.txt')
    # Q, A, b = generate_vars()
    # b)
    x_star = cvxpot_qp(A, c, B*2)
    print('cvx output')
    print(x_star)
    # c)
    x_k, i, error_i = alm(B, A, c)
    print()
    print('Augmented Lagrangian method output')
    print(x_k)
    print()
    print('Final Error')
    print(error_i[-1])
    print()
    print('Final Error is below 10^(-6)')
    print(error_i[-1] <= 10**(-6))
    print()

    fig = plt.figure()
    plt.title('Relative Error')
    plt.plot(np.arange(len(error_i)), error_i, label='Relative Error vs k')
    plt.legend()
    plt.grid(True)
    fig.tight_layout()
    # plt.savefig('pix/Given_sample.png', dpi=fig.dpi)
    plt.show()

if __name__ == "__main__":
    ...
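Here c_test_1 is just the name of a data file ('c_test_1.txt') loaded by main(), and that file is not included with the snippet. A minimal sketch of exercising alm() without the text files, using the snippet's own generate_vars() helper in the same module; the problem size and seed are assumptions:

import numpy as np

# Sketch: run the augmented Lagrangian solver on randomly generated data
# instead of the A_test_1.txt / c_test_1.txt / B_test_1.txt files.
np.random.seed(0)                      # assumed seed, for reproducibility only
A, B, c = generate_vars(n=100, m=30)   # A is m x n, B is n x n, c has length m
x_star = cvxpot_qp(A, c, B * 2)        # reference solution from cvxopt's QP solver
x_k, iterations, errors = alm(B, A, c)
print('iterations:', iterations)
print('relative error vs cvxopt solution:', errors[-1])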


B.py

Source: B.py (GitHub)



...
}

@lcc.suite("BB1")
class BB1:
    @lcc.test("Test of BB1")
    def c_test_1(self):
        import time
        time.sleep(0.21)
        require_that("value", 1, is_integer(2))

@lcc.suite("BB2")
class BB2:
    @lcc.test("Test of BB2")
    def d_test_1(self):
        ...
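The B.py snippet is truncated and omits its imports. Below is a self-contained sketch of the same c_test_1 test with the imports a lemoncheesecake suite module normally needs; the module name and the surrounding project layout are assumptions, not part of the original source.

# b.py -- a suite module inside a lemoncheesecake project (layout assumed)
import time

import lemoncheesecake.api as lcc
from lemoncheesecake.matching import require_that, is_integer

@lcc.suite("BB1")
class BB1:
    @lcc.test("Test of BB1")
    def c_test_1(self):
        time.sleep(0.21)
        # is_integer(2) checks that the value is an integer equal to 2, so
        # checking the value 1 makes this test fail at the require_that call
        require_that("value", 1, is_integer(2))

With lemoncheesecake installed, such a suite is typically executed from the project directory with the lcc command line tool, e.g. lcc run; a path filter such as lcc run BB1.c_test_1 should select just this test (check lcc run --help for the exact filter syntax).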


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Lemoncheesecake automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation testing minutes FREE!!

Next-Gen App & Browser Testing Cloud
