How to use the copy_tree method in Airtest

Best Python code snippets using Airtest
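The name copy_tree shows up in two different ways in the snippets below. In the first two files it is simply a local variable holding a decision-tree node inside a pruning routine, while the third file imports the standard-library helper copy_tree from distutils.dir_util, which copies an entire directory tree from a source path to a destination and creates any missing intermediate directories. A minimal sketch of that standard-library call follows; the two paths are placeholders for illustration only:

from distutils.dir_util import copy_tree

# Placeholder paths; point these at your own source and destination directories.
# copy_tree returns the list of files it copied, which is useful for logging.
copied_files = copy_tree("./test_assets", "./build/test_assets")
print(copied_files)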

syn_2.py

Source: syn_2.py (GitHub)


from _csv import reader
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from numpy import where
from numpy import meshgrid
from numpy import arange
from numpy import hstack


gini_in = {}
info_gain = {}
b_scoreG = []
b_valueG = []
depth_c = 3
count_acc = 0


def split_lr(index, value, dataset):
    left, right = list(), list()
    for row in dataset:
        if row[index] < value:
            left.append(row)
        else:
            right.append(row)
    return left, right


# Gini_index calculating
def gini_impurity(groups, classes):
    n_instances = float(sum([len(group) for group in groups]))
    gini = 0.0
    for group in groups:
        size = float(len(group))
        if size == 0:
            continue
        score = 0.0
        for class_val in classes:  # calculating the GINI Impurity
            p = [row[-1] for row in group].count(class_val) / size
            score += p * p
        gini += (1.0 - score) * (size / n_instances)
    if len(gini_in) != 0:  # calculating the information gain
        gini = gini_in[b_valueG[len(b_valueG) - 1]] - gini

    return gini, score


# Spliting the dataset
def spliting(dataset):
    class_count_left = 0
    class_count_right = 0
    class_values = list(set(row[-1] for row in dataset))
    row_index, row_value, gini_score, split_groups, score_t = 999, 999, 999, None, None
    for index in range(len(dataset[0]) - 1):
        for row in dataset:
            groups = split_lr(index, row[index], dataset)
            gini, score = gini_impurity(groups, class_values)
            if len(gini_in) == 0:
                if gini < gini_score:  # for the root node
                    row_index, row_value, gini_score, split_groups, score_t = index, row[index], gini, groups, score
            else:
                gini_score = 0  # getting the values of information gain for each split and selecting the once with the higher value
                if gini > gini_score:
                    row_index, row_value, gini_score, split_groups, score_t = index, row[index], gini, groups, score

    gini_in[row_value] = gini_score
    b_scoreG.append(score_t)
    b_valueG.append(row_value)
    # if len(gini_in)==0:
    #     info_gain[]
    # print("gini", gini_in[b_scoreG[0]])
    left, right = split_groups

    # outcomes = [row[-1] for row in left]
    # class_count_left = outcomes.count()
    # outcomes = [row[-1] for row in right]
    # class_count_right = outcomes.count()

    return {'index': row_index, 'value': row_value, 'groups': split_groups, 'count_left': 0, 'count_right': 0}


# terminal node
def to_terminal(group):
    outcomes = [row[-1] for row in group]
    count = 0
    for i in outcomes:
        if i == max(set(outcomes), key=outcomes.count):
            count += 1
    return max(set(outcomes), key=outcomes.count), count


# Create child splits for a node or make terminal, pruning based on the max depth and minimum size is done in this function
def split(node, max_depth, min_size, depth):
    left, right = node['groups']
    del (node['groups'])
    # check for a no split
    if not left or not right:
        val_left, counter = to_terminal(left + right)
        node['left'], node['count_left'] = node['right'], node['count_right'] = val_left, counter
        return
    # check for max depth
    if depth >= max_depth:
        val_left, counter_left = to_terminal(right)
        val_right, counter_right = to_terminal(right)
        node['left'], node['count_left'], node['right'], node['count_right'] = val_left, counter_left, val_right, counter_right
        return
    # process left child
    if len(left) <= min_size:
        val_left, counter_left = to_terminal(right)
        node['left'], node['count_left'] = val_left, counter_left
    else:
        node['left'] = spliting(left)
        split(node['left'], max_depth, min_size, depth + 1)
    # process right child
    if len(right) <= min_size:
        val_right, counter_right = to_terminal(right)
        node['right'], node['counter_right'] = val_right, counter_right
    else:
        node['right'] = spliting(right)
        split(node['right'], max_depth, min_size, depth + 1)


# Build a decision tree
def build_tree(train, max_depth, min_size):
    root = spliting(train)
    split(root, max_depth, min_size, 1)
    return root


def predict(node, row):
    test = float(row[node['index']])
    if float(row[node['index']]) < float(node['value']):
        if isinstance(node['left'], dict):
            return predict(node['left'], row)
        else:
            return node['left']
    else:
        if isinstance(node['right'], dict):
            return predict(node['right'], row)
        else:
            return node['right']


# Print a decision tree
def print_tree(node, depth=0):
    if isinstance(node, dict):
        print('%s[X%s < %.3f]' % ((depth * ' ', (node['index'] + 1), float(node['value']))))
        print_tree(node['left'], depth + 1)
        print_tree(node['right'], depth + 1)
    else:
        print('%s[%s]' % ((depth * ' ', node)))


def prun(copy_tree, depth, acc, dataset, class_val, copy_tree1, dir, acc_count):

    if (copy_tree['right'] == class_val[0] or copy_tree['right'] == class_val[1]) and (copy_tree['left'] == class_val[0] or copy_tree['left'] == class_val[1]):
        # copy_tree['right']='None'
        # print('----------------------------------opt')
        # print_tree(copy_tree)
        if (int(copy_tree['count_left']) > int(copy_tree['count_right'])) or int(copy_tree['count_left']) == int(copy_tree['count_right']):
            if dir == 2 or dir == 0:
                copy_tree1['right'] = copy_tree1['right']['left']
            else:
                copy_tree1['left'] = copy_tree1['left']['left']

        else:
            if dir == 2 or dir == 0:
                copy_tree1['right'] = copy_tree1['right']['right']
            else:
                copy_tree1['left'] = copy_tree1['left']['right']
    elif (copy_tree['right'] == class_val[0] or copy_tree['right'] == class_val[1]) and isinstance(copy_tree, dict):
        # print("new left------------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['left'], depth - 1, acc, dataset, class_val, copy_tree1, 1, acc_count)
    elif (copy_tree['left'] == class_val[0] or copy_tree['left'] == class_val[1]) and isinstance(copy_tree, dict):
        # print("new- right-----------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['right'], depth - 1, acc, dataset, class_val, copy_tree1, 2, acc_count)
    elif isinstance(copy_tree, dict):
        # print("new------------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['right'], depth - 1, acc, dataset, class_val, copy_tree1, 0, acc_count)

    accura_prun = accuracy(dataset, copy_tree)
    while accura_prun >= acc:
        if accura_prun == acc:
            acc_count += 1

        # print("new ittr")
        # print("new acc", accura_prun)
        if acc_count == 2:
            break
        else:
            prun(copy_tree, depth_c, acc, dataset, class_val, copy_tree1, 0, acc_count)
    return copy_tree1, accura_prun


def accuracy(dataset, tree):
    correct_preditct = 0
    wrong_preditct = 0
    accuracy = 0
    for row in dataset:
        prediction = predict(tree, row)
        if prediction == row[-1]:
            correct_preditct += 1
        else:
            wrong_preditct += 1
        # print('Expected=%s, Got=%s' % (row[-1], prediction))
    accuracy = correct_preditct / (correct_preditct + wrong_preditct)
    # print("accuracy", accuracy * 100)
    return accuracy * 100


filename = 'dt_data.csv'
dataset = list()
with open(filename, 'r') as file:
    csv_reader = reader(file)
    for row in csv_reader:
        if not row:
            continue
        dataset.append(row)
dataset.pop(0)

class_val = list(set(row[-1] for row in dataset))
tree = build_tree(dataset, depth_c, 1)
print_tree(tree)
accur = accuracy(dataset, tree)
print("accuracy of a full tree", accur)
Copy_tree = tree
# print(tree)
p_tree, acc_prun = prun(tree, depth_c, accur, dataset, class_val, Copy_tree, 0, 0)

print("----------------------------------------------prun_tree")
print_tree(p_tree)
print("accuracy of pruned tree", acc_prun)

# print(p_tree)
X = pd.DataFrame(dataset).iloc[:, :-1]
y = pd.DataFrame(dataset).iloc[:, -1]


# print("class_val", class_val)
for row in dataset:
    if (class_val[0] == row[2]):
        plt.scatter(row[0], row[1], color="red")
    else:
        plt.scatter(row[0], row[1], color="blue")
plt.show()

print("best split")
print(gini_in)

#
# min1, max1 = float(X.iloc[:, 0].min())-1, float(X.iloc[:, 0].max())+1
# min2, max2 = float(X.iloc[:, 1].min())-1, float(X.iloc[:, 1].max())+1
# print("min1", min1)
# print("max1", max1)
# print("min2", min2)
# print("max2", max2)
# x1grid = arange(min1, max1, 1)
# print(x1grid.shape)
# x2grid = arange(min2, max2, 0.5)
# print(x2grid.shape)
# xx, yy = meshgrid(x1grid, x2grid)
# r1, r2 = xx.flatten(), yy.flatten()
# r1, r2 = r1.reshape((len(r1), 1)), r2.reshape((len(r2), 1))
# grid = hstack((r1, r2))
# pred = []
# for row in dataset:
#     pred.append(predict(tree, row))
# zz = np.array(pred).reshape(-1, 1)
# # reshape(xx.shape)
# plt.contourf(xx, yy, zz, cmap='Paired')
# for class_value in range(1):
#     row_ix = where(y == class_value)
#     # create scatter of these samples
#     plt.scatter(X[row_ix, 0], X[row_ix, 1], cmap='Paired')
...


Iris.py

Source: Iris.py (GitHub)


from _csv import reader
import matplotlib.pyplot as plt
import pandas as pd


gini_in = {}
info_gain = {}
b_scoreG = []
b_valueG = []
depth_c = 3
count_acc = 0


def split_lr(index, value, dataset):
    left, right = list(), list()
    for row in dataset:
        if row[index] < value:
            left.append(row)
        else:
            right.append(row)
    return left, right


# Gini_index calculating
def gini_impurity(groups, classes):
    n_instances = float(sum([len(group) for group in groups]))
    gini = 0.0
    for group in groups:
        size = float(len(group))
        if size == 0:
            continue
        score = 0.0
        for class_val in classes:  # calculating the GINI Impurity
            p = [row[-1] for row in group].count(class_val) / size
            score += p * p
        gini += (1.0 - score) * (size / n_instances)
    if len(gini_in) != 0:  # calculating the information gain
        gini = gini_in[b_valueG[len(b_valueG) - 1]] - gini

    return gini, score


# Spliting the dataset
def spliting(dataset):
    class_count_left = 0
    class_count_right = 0
    class_values = list(set(row[-1] for row in dataset))
    row_index, row_value, gini_score, split_groups, score_t = 999, 999, 999, None, None
    for index in range(len(dataset[0]) - 1):
        for row in dataset:
            groups = split_lr(index, row[index], dataset)
            gini, score = gini_impurity(groups, class_values)
            if len(gini_in) == 0:
                if gini < gini_score:  # for the root node
                    row_index, row_value, gini_score, split_groups, score_t = index, row[index], gini, groups, score
            else:
                gini_score = 0  # getting the values of information gain for each split and selecting the once with the higher value
                if gini > gini_score:
                    row_index, row_value, gini_score, split_groups, score_t = index, row[index], gini, groups, score

    gini_in[row_value] = gini_score
    b_scoreG.append(score_t)
    b_valueG.append(row_value)
    # if len(gini_in)==0:
    #     info_gain[]
    # print("gini", gini_in[b_scoreG[0]])
    left, right = split_groups

    # outcomes = [row[-1] for row in left]
    # class_count_left = outcomes.count()
    # outcomes = [row[-1] for row in right]
    # class_count_right = outcomes.count()

    return {'index': row_index, 'value': row_value, 'groups': split_groups, 'count_left': 0, 'count_right': 0}


# terminal node
def to_terminal(group):
    outcomes = [row[-1] for row in group]
    count = 0
    for i in outcomes:
        if i == max(set(outcomes), key=outcomes.count):
            count += 1
    return max(set(outcomes), key=outcomes.count), count


# Create child splits for a node or make terminal, pruning based on the max depth and minimum size is done in this function
def split(node, max_depth, min_size, depth):
    left, right = node['groups']
    del (node['groups'])
    # check for a no split
    if not left or not right:
        val_left, counter = to_terminal(left + right)
        node['left'], node['count_left'] = node['right'], node['count_right'] = val_left, counter
        return
    # check for max depth
    if depth >= max_depth:
        val_left, counter_left = to_terminal(right)
        val_right, counter_right = to_terminal(right)
        node['left'], node['count_left'], node['right'], node['count_right'] = val_left, counter_left, val_right, counter_right
        return
    # process left child
    if len(left) <= min_size:
        val_left, counter_left = to_terminal(right)
        node['left'], node['count_left'] = val_left, counter_left
    else:
        node['left'] = spliting(left)
        split(node['left'], max_depth, min_size, depth + 1)
    # process right child
    if len(right) <= min_size:
        val_right, counter_right = to_terminal(right)
        node['right'], node['counter_right'] = val_right, counter_right
    else:
        node['right'] = spliting(right)
        split(node['right'], max_depth, min_size, depth + 1)


# Build a decision tree
def build_tree(train, max_depth, min_size):
    root = spliting(train)
    split(root, max_depth, min_size, 1)
    return root


def predict(node, row):
    test = float(row[node['index']])
    if float(row[node['index']]) < float(node['value']):
        if isinstance(node['left'], dict):
            return predict(node['left'], row)
        else:
            return node['left']
    else:
        if isinstance(node['right'], dict):
            return predict(node['right'], row)
        else:
            return node['right']


# Print a decision tree
def print_tree(node, depth=0):
    if isinstance(node, dict):
        print('%s[X%s < %.3f]' % ((depth * ' ', (node['index'] + 1), float(node['value']))))
        print_tree(node['left'], depth + 1)
        print_tree(node['right'], depth + 1)
    else:
        print('%s[%s]' % ((depth * ' ', node)))


def prun(copy_tree, depth, acc, dataset, class_val, copy_tree1, dir, acc_count):

    if (copy_tree['right'] == class_val[0] or copy_tree['right'] == class_val[1] or copy_tree['right'] == class_val[2]) and (copy_tree['left'] == class_val[0] or copy_tree['left'] == class_val[1] or copy_tree['left'] == class_val[2]):
        # copy_tree['right']='None'
        # print('----------------------------------opt')
        # print_tree(copy_tree)
        if (int(copy_tree['count_left']) > int(copy_tree['count_right'])) or int(copy_tree['count_left']) == int(copy_tree['count_right']):
            if dir == 2 or dir == 0:
                copy_tree1['right'] = copy_tree1['right']['left']
            else:
                copy_tree1['left'] = copy_tree1['left']['left']

        else:
            if dir == 2 or dir == 0:
                copy_tree1['right'] = copy_tree1['right']['right']
            else:
                copy_tree1['left'] = copy_tree1['left']['right']
    elif (copy_tree['right'] == class_val[0] or copy_tree['right'] == class_val[1] or copy_tree['right'] == class_val[2]) and isinstance(copy_tree, dict):
        # print("new left------------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['left'], depth - 1, acc, dataset, class_val, copy_tree1, 1, acc_count)
    elif (copy_tree['left'] == class_val[0] or copy_tree['left'] == class_val[1] or copy_tree['left'] == class_val[2]) and isinstance(copy_tree, dict):
        # print("new- right-----------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['right'], depth - 1, acc, dataset, class_val, copy_tree1, 2, acc_count)
    elif isinstance(copy_tree, dict):
        # print("new------------------------------------------------------------------")
        # print_tree(copy_tree)
        copy_tree1 = copy_tree
        prun(copy_tree['right'], depth - 1, acc, dataset, class_val, copy_tree1, 0, acc_count)

    accura_prun = accuracy(dataset, copy_tree)
    while accura_prun >= acc:
        if accura_prun == acc:
            acc_count += 1

        # print("new ittr")
        # print("new acc", accura_prun)
        if acc_count == 2:
            break
        else:
            prun(copy_tree, depth_c, acc, dataset, class_val, copy_tree1, 0, acc_count)
    return copy_tree1, accura_prun


def accuracy(dataset, tree):
    correct_preditct = 0
    wrong_preditct = 0
    accuracy = 0
    for row in dataset:
        prediction = predict(tree, row)
        if prediction == row[-1]:
            correct_preditct += 1
        else:
            wrong_preditct += 1
        # print('Expected=%s, Got=%s' % (row[-1], prediction))
    accuracy = correct_preditct / (correct_preditct + wrong_preditct)
    # print("accuracy", accuracy * 100)
    return accuracy * 100


filename = 'Iris.csv'
dataset = list()
with open(filename, 'r') as file:
    csv_reader = reader(file)
    for row in csv_reader:
        if not row:
            continue
        dataset.append(row)
dataset.pop(0)

class_val = list(set(row[-1] for row in dataset))
tree = build_tree(dataset, depth_c, 1)
print_tree(tree)
accur = accuracy(dataset, tree)
print("accuracy of a full tree", accur)
Copy_tree = tree
# print(tree)
p_tree, acc_prun = prun(tree, depth_c, accur, dataset, class_val, Copy_tree, 0, 0)

print("----------------------------------------------prun_tree")
print_tree(p_tree)
print("accuracy of pruned tree", acc_prun)

# print(p_tree)
X = pd.DataFrame(dataset).iloc[:, :-1]
y = pd.DataFrame(dataset).iloc[:, -1]

# print("class_val", class_val)
plt.figure(figsize=(12, 12))
for row in dataset:
    if (class_val[0] == row[-1]):
        plt.scatter(row[0], row[1], color="red")
    elif (class_val[1] == row[-1]):
        plt.scatter(row[0], row[1], color="blue")
    else:
        plt.scatter(row[0], row[1], color="black")
plt.show()

print("best_split")
...


CreateEffekseerFiles_v4.py

Source: CreateEffekseerFiles_v4.py (GitHub)


import shutil
import os
from distutils.dir_util import copy_tree

copy_tree("../Effekseer/Dev/Cpp/Effekseer", "./Players/Cocos2d-x_v4/Effekseer")
copy_tree("../Effekseer/Dev/Cpp/EffekseerMaterialCompiler", "./Players/Cocos2d-x_v4/EffekseerMaterialCompiler")
copy_tree("../Effekseer/Dev/Cpp/EffekseerRendererCommon", "./Players/Cocos2d-x_v4/EffekseerRendererCommon")
copy_tree("../Effekseer/Dev/Cpp/EffekseerRendererGL", "./Players/Cocos2d-x_v4/EffekseerRendererGL")
copy_tree("../Effekseer/Dev/Cpp/EffekseerRendererLLGI", "./Players/Cocos2d-x_v4/EffekseerRendererLLGI")
copy_tree("../Effekseer/Dev/Cpp/EffekseerRendererMetal", "./Players/Cocos2d-x_v4/EffekseerRendererMetal")
os.makedirs("./Players/Cocos2d-x_v4/LLGI/", exist_ok=True)
shutil.copyfile("../Effekseer/Dev/Cpp/3rdParty/LLGI/CMakeLists.txt", "./Players/Cocos2d-x_v4/3rdParty/LLGI/CMakeLists.txt")
copy_tree("../Effekseer/Dev/Cpp/3rdParty/LLGI/src", "./Players/Cocos2d-x_v4/3rdParty/LLGI/src")
copy_tree("../Effekseer/Dev/Cpp/3rdParty/LLGI/cmake", "./Players/Cocos2d-x_v4/3rdParty/LLGI/cmake")
...
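A caveat on the script above: distutils is deprecated and was removed in Python 3.12, so on newer interpreters the same directory copies can be done with shutil.copytree, which accepts dirs_exist_ok=True (Python 3.8+) to merge into an existing destination the way copy_tree does. A minimal sketch, reusing the first pair of paths from the script above:

import shutil

# dirs_exist_ok=True lets copytree write into a destination that already exists
# instead of raising FileExistsError, matching copy_tree's default behaviour.
shutil.copytree(
    "../Effekseer/Dev/Cpp/Effekseer",
    "./Players/Cocos2d-x_v4/Effekseer",
    dirs_exist_ok=True,
)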


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites for your first automation test to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Airtest automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

