How to use rude method in hypothesis

Best Python code snippet using hypothesis

prune_by_feature_map.py

Source:prune_by_feature_map.py Github

copy

Full Screen

# NOTE(review): this excerpt begins mid-way through `_prune` (feature-map-based
# pruning). Its truncated tail — visible only as a fragment in the original —
# deleted input kernels with np.delete(weight, del_kernels, axis=1), printed
# the new weight shape, and returned (weight, bias, del_filters, origin_channels).

class Prune:
    # Class name is grounded by `pruner = Prune(net)` in the driver script below;
    # __init__, _prune and init_layer are defined in a part of the file not shown here.
    # Each self.conv_data[name] entry is (weight, bias, del_filters, origin_channels).

    def _prune_rude(self, name, conv_param, del_kernels=None, del_filters=None):
        """Brute-force prune of one conv layer.

        conv_param: the (weight, bias) parameter blobs for layer `name`.
        del_kernels: indices of input channels (axis 1) to drop, or None.
        del_filters: indices of output filters (axis 0) to drop, or None.
        Returns (weight, bias, del_filters, origin_channels).
        """
        weight, bias = conv_param
        weight = weight.data
        bias = bias.data
        origin_channels = weight.shape[0]
        if name in [self.base_layer, ]:  # prune relative to the shortcut/base layer
            # NOTE(review): np.delete() with a float index array is rejected by
            # modern NumPy — these indices should probably be loaded as integers.
            del_filters = np.loadtxt('del_filter.txt', dtype=np.float32)
            # per-filter L1 norm; computed but currently unused (selection by
            # magnitude was disabled in favour of the file above)
            kernel_sum = np.sum(np.abs(weight), axis=(1, 2, 3))
        if del_filters is not None:
            # drop output filters: the layer's output channel count shrinks
            weight = np.delete(weight, del_filters, axis=0)
            bias = np.delete(bias, del_filters, axis=0)
        if del_kernels is not None:
            # drop input kernels: the layer's input channel count shrinks
            weight = np.delete(weight, del_kernels, axis=1)
        print("{}层裁剪后的输出维度是{}".format(name, weight.shape))
        return weight, bias, del_filters, origin_channels

    # brute-force pruning driver
    def prune_conv_rude(self, name, bottom=None, not_del_filters=False):
        """Prune conv layer `name` using the base layer's deleted-filter indices.

        bottom: only its None-ness is consulted — the actual indices always come
        from self.conv_data[self.base_layer] (NOTE(review): possibly intentional,
        possibly an oversight; confirm against the feature-map variant below).
        not_del_filters: True → keep output filters, prune input kernels;
        False → prune output filters, keep input kernels.
        """
        if bottom is None:
            self.conv_data[name] = self._prune_rude(name, self._net.params[name])
        else:
            if not_del_filters is True:
                # filters (outputs) kept, kernels (inputs) pruned
                self.conv_data[name] = self._prune_rude(
                    name, self._net.params[name],
                    del_kernels=self.conv_data[self.base_layer][2],
                    del_filters=None)
            else:
                # filters (outputs) pruned, kernels (inputs) kept
                self.conv_data[name] = self._prune_rude(
                    name, self._net.params[name],
                    del_kernels=None,
                    del_filters=self.conv_data[self.base_layer][2],)

    def fc_prune(self, conv_param, del_kernels):
        """Prune the inputs of a fully-connected layer.

        The weight matrix is rebuilt through fc2conv (defined elsewhere in the
        project); only the bias is taken from `conv_param` directly.
        """
        bias = conv_param[1]
        bias = bias.data
        f2c = fc2conv(self._net)
        weight = f2c.del_inputs(del_kernels)
        return weight, bias

    def prune_conv(self, name, bottom=None):
        """Feature-map-based prune of layer `name` (delegates to `_prune`,
        which is defined outside this excerpt)."""
        if bottom is None:
            self.conv_data[name] = self._prune(name, self._net.params[name])
        else:
            # inputs shrink by whatever filters were removed from `bottom`
            self.conv_data[name] = self._prune(
                name, self._net.params[name],
                del_kernels=self.conv_data[bottom][2])

    def prune_concat(self, name, bottoms=None):
        """Prune a layer whose input is the channel-wise concat of `bottoms`.

        Each bottom's deleted-filter indices are shifted by the cumulative
        channel offset of the bottoms before it, then merged into one index
        array of input kernels to drop.
        """
        if bottoms is not None:
            # prefix sums of each bottom's original channel count
            offsets = [0] + [self.conv_data[b][3] for b in bottoms]
            for i in range(1, len(offsets)):
                offsets[i] += offsets[i - 1]
            del_filters = [self.conv_data[b][2] + offsets[i]
                           for i, b in enumerate(bottoms)]
            del_filters_new = np.concatenate(del_filters)
        else:
            del_filters_new = []
        if name[0:2] == 'fc':
            self.conv_data[name] = self.fc_prune(self._net.params[name], del_filters_new)
        else:
            self.conv_data[name] = self._prune_rude(
                name, self._net.params[name],
                del_kernels=del_filters_new, del_filters=None)

    def prune_sum(self, name, bottoms):
        """Prune the bottoms of an element-wise sum layer consistently.

        The union of both bottoms' deleted filters is removed from every
        non-'res' bottom so the summed shapes still match; the sum layer itself
        stores empty weight/bias with the merged bookkeeping entry.
        """
        del_filters = [self.conv_data[b][2] for b in bottoms]
        del_filter = np.union1d(del_filters[0], del_filters[1])
        print(del_filter)
        weight = []
        bias = []
        origin_channels = self.conv_data[bottoms[0]][3] - len(del_filter)
        for b in bottoms:
            if b[0:3] != 'res':
                self.conv_data[b] = self._prune(b, self._net.params[b],
                                                del_filters=del_filter)
        self.conv_data[name] = weight, bias, del_filter, origin_channels
        print("\n {} preserve num : {}".format(name, origin_channels))

    def save(self, new_model, output_weights):
        """Write the pruned weights into a net built from `new_model`.

        Pruned layers take their data from self.conv_data; untouched layers are
        copied verbatim from the original net.
        """
        net2 = caffe.Net(new_model, caffe.TEST)
        for key in net2.params.keys():
            if key in self.conv_data:
                net2.params[key][0].data[...] = self.conv_data[key][0]
                net2.params[key][1].data[...] = self.conv_data[key][1]
            else:
                net2.params[key][0].data[...] = self._net.params[key][0].data
                net2.params[key][1].data[...] = self._net.params[key][1].data
        net2.save(output_weights)


# ---------------------------------------------------------------------------
# Driver script
# ---------------------------------------------------------------------------
root = "../my_model/"
prototxt = root + "TestModel_prune.prototxt"
caffemodel = root + "TestModel_prune.caffemodel"
net = caffe.Net(prototxt, caffemodel, caffe.TEST)
pruner = Prune(net)

# (commented-out pruning schedules for blocks 1-2 removed from the original)

# block3 pruning sequence
pruner.init_layer('conv3_1_1')
pruner.init_layer('conv3_2_1')
pruner.init_layer('conv3_3_1')
pruner.init_layer('conv3_4_1')
pruner.init_layer('conv3_5_1')
pruner.init_layer('conv3_6_1')

pruner.prune_conv_rude('conv3_1_1b')
pruner.prune_conv_rude("conv3_1_2", "conv3_1_1")
pruner.prune_conv_rude("conv3_2_1", "conv3_1_2", not_del_filters=True)
pruner.prune_conv_rude("conv3_2_2", "conv3_2_1")
pruner.prune_conv_rude("conv3_3_1", "conv3_2_2", not_del_filters=True)
pruner.prune_conv_rude("conv3_3_2", "conv3_3_1")
pruner.prune_conv_rude("conv3_4_1", "conv3_3_2", not_del_filters=True)
pruner.prune_conv_rude("conv3_4_2", "conv3_4_1")
pruner.prune_conv_rude("conv3_5_1", "conv3_4_2", not_del_filters=True)
pruner.prune_conv_rude("conv3_5_2", "conv3_5_1")
pruner.prune_conv_rude("conv3_6_1", "conv3_5_2", not_del_filters=True)
pruner.prune_conv_rude("conv3_6_2", "conv3_6_1")

pruner.prune_concat("conv4_1_1", ("conv3_2_2", "conv3_4_2", "conv3_6_2"))
pruner.prune_concat("conv4_1_1b", ("conv3_2_2", "conv3_4_2", "conv3_6_2"))

# (commented-out schedules for blocks 4-5 removed; the excerpt truncates below)
pro_new = root + "TestModel_prune_1.prototxt"

Full Screen

Full Screen

NLP3.py

Source:NLP3.py Github

copy

Full Screen

import requests
import re
import requests.packages.urllib3.util.ssl_
import os
import sys
from collections import Counter

requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL'

# NOTE(review): "rude" in names below appears to transliterate 路 ("route/line")
# and "ts" abbreviates 换乘站 ("transfer station") — inferred from the original
# Chinese comments; confirm before renaming anything.
global dic_rude, dic_rude_ts, dic_ts, dic_ts_link, all_station, dic_rude_cycle  # no-op at module scope; kept from original
dic_rude = {}        # line -> ordered stations, e.g. {'1号线': ['苹果园', '公主坟', ...]}
dic_rude_ts = {}     # line -> transfer stations on that line
dic_ts_rude = {}     # transfer station -> lines that meet there
dic_ts = {}          # transfer station -> stations reachable without transferring
dic_ts_link = {}     # transfer station -> other transfer stations reachable directly
dic_rude_cycle = ['1号线', '10号线']  # loop lines: stop distance can wrap around
all_station = []


def get_alldata():
    """Scrape bjsubway.com and return {line name: [stations in order]}."""
    url = r"https://www.bjsubway.com/e/action/ListInfo/?classid=39&ph=1"
    print('begin get data')
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.1) Gecko/20100101 Firefox/40.1', }
    text = requests.get(url, headers=header, timeout=6, verify=False).text
    # keep only station table cells and line headers ("...线...首")
    text = re.findall('> \w+</th>|>\w+</th>|\w+线.*首', text)
    firstflage = True
    dic = {}
    for i in text:
        # skip timetable cells (times, direction, first/last train, terminus)
        if re.findall('时间|往|首车|末|全程|终点', i):
            continue
        if re.findall('\w+线.*首', i):  # a new line header starts here
            if firstflage:
                x = re.findall('\w+线.*首', i)[0][:-1]  # strip the trailing 首
                y = []
                firstflage = not firstflage
            else:
                dic[x] = y  # flush the previous line's stations
                x = re.findall('\w+线.*首', i)[0][:-1]
                y = []
            continue
        if re.findall('> \w+</th>|>\w+</th>', i):
            temp = re.findall('> \w+<|>\w+<', i)[0][1:-1]
            y.append(temp.strip())
    # BUG FIX: the original never flushed the final line into the dict,
    # silently dropping the last subway line scraped.
    if not firstflage:
        dic[x] = y

    # de-duplicate stations while preserving their original order
    for i in dic:
        mailto = dic[i]
        addr_to = list(set(mailto))
        addr_to.sort(key=mailto.index)
        dic[i] = addr_to
    return dic


def get_subway_data():
    """Return the line->stations dict, scraping once and caching it to disk."""
    if "beijingsubway.txt" not in os.listdir():
        dic = get_alldata()
        with open("beijingsubway.txt", 'w+') as fw:
            fw.write(str(dic))  # persist the dict as its repr
    else:
        with open("beijingsubway.txt", 'r+') as fr:
            # WARNING: eval() on file contents — acceptable only because this
            # cache is self-written; ast.literal_eval would be the safe choice.
            dic = eval(fr.read())
    return dic


def get_global_data(dic_rude):
    """Populate the module-level lookup tables from the line->stations dict."""
    # dic_ts_rude added to the global list (the original omitted it and only
    # worked because the dict is mutated in place, never rebound)
    global dic_rude_ts, dic_ts, dic_ts_rude, dic_ts_link, all_station, dic_rude_cycle
    all_station1 = []
    for i in dic_rude:
        all_station1 += dic_rude[i]
    all_station = list(set(all_station1))
    # a station that appears on more than one line is a transfer station
    all_station1 = Counter(all_station1).most_common()
    all_ts = [i for i, j in all_station1 if j > 1]
    # dic_rude_ts: transfer stations per line
    for i in dic_rude:
        temp = []
        for j in all_ts:
            if j in dic_rude[i]:
                temp.append(j)
        dic_rude_ts[i] = temp
    # dic_ts_rude / dic_ts / dic_ts_link per transfer station
    for i in all_ts:
        temptsrude = []
        ts = []
        link = []
        for j in dic_rude:
            if i in dic_rude[j]:
                temptsrude.append(j)
                ts += dic_rude[j]
                link += dic_rude_ts[j]
        dic_ts_rude[i] = temptsrude
        temp = list(set(ts))
        temp.remove(i)
        dic_ts[i] = temp
        temp = list(set(link))
        temp.remove(i)
        dic_ts_link[i] = temp


def count_station(start, des):
    """Stops between two stations that share a line.

    Returns [start, stops, des, line]; stops == -1 when no single line
    contains both stations (no direct ride).
    """
    result = [start, -1, des, '']
    for i in dic_rude:
        if start in dic_rude[i] and des in dic_rude[i]:
            tempcount = abs(dic_rude[i].index(start) - dic_rude[i].index(des))
            if i in dic_rude_cycle:
                # on a loop line, riding the other way around may be shorter
                tempcount = len(dic_rude[i]) - tempcount if len(dic_rude[i]) - tempcount < tempcount else tempcount
            if result[1] < 0 or tempcount < result[1]:
                result[1] = tempcount
                result[-1] = i
    return result


def count_all_rude_station(rude):
    """Total stop count of a route given as a list of waypoint stations."""
    if len(rude) == 1:
        return 0
    if len(rude) < 1:
        return -1
    return count_station(rude[0], rude[1])[1] + count_all_rude_station(rude[1:])


def say_all_rude_station(rude):
    """Render a waypoint route as human-readable riding directions."""
    if len(rude) == 1:
        return ''
    if len(rude) < 1:
        return '-1'
    res = count_station(rude[0], rude[1])
    return "从 {} 出发坐 {} 经过 {} 站到 {} 下车\n".format(res[0], res[-1], res[1], res[2]) + say_all_rude_station(rude[1:])


def searchpath(start, des, stragegy):
    """Find and print a route from `start` to `des`.

    stragegy (sic, kept for callers): 'shortts' prefers fewest transfers;
    anything else prefers fewest total stops.  Returns an error string for
    unknown stations, the count_station result for a direct ride, else 0.
    """
    if start not in all_station:
        return '初始站点不存在'
    if des not in all_station:
        return '终点不存在'
    # direct ride on a single line?
    result = count_station(start, des)
    if result[1] > 0:
        return result
    path = []
    pathfinish = []
    besearch = {}  # transfer station -> cheapest stop count it was reached at
    # seed the frontier: from `start` to each transfer station on its line(s)
    if start not in dic_ts:
        for i in dic_rude:
            if start in dic_rude[i]:
                temp = [[start, j] for j in dic_rude_ts[i]]
                path.append(temp)
    else:
        path = [[[start]]]
    # layered search over the transfer-station graph
    while path[0]:
        Temp = []
        temppath = path.pop()
        while temppath:
            temppathone = temppath.pop()
            laststation = temppathone[-1]
            if laststation in besearch and count_all_rude_station(temppathone) > besearch[laststation]:
                continue  # already reached this transfer station more cheaply
            if des in dic_ts[laststation]:
                pathfinish.append(temppathone + [des])
                continue
            else:
                besearch[laststation] = count_all_rude_station(temppathone)
                for i in dic_ts_link[laststation]:
                    Temp.append(temppathone + [i])
        path.append(Temp)
    if not pathfinish:
        # guard added: the original indexed pathfinish[0] unconditionally and
        # raised IndexError when no route was found
        return 0
    if stragegy == 'shortts':
        # keep only candidates with as few transfers as the first one found
        pathfinish = [p for p in pathfinish if len(p) == len(pathfinish[0])]
    # BUG FIX: the original called sorted(pathfinish, ...) and discarded the
    # returned list, so an arbitrary candidate was printed; sort in place.
    pathfinish.sort(key=count_all_rude_station)
    print(say_all_rude_station(pathfinish[0]))
    return 0


if __name__ == "__main__":
    flagep = False  # flip on to dump the lookup tables for debugging
    dic_rude = get_subway_data()
    get_global_data(dic_rude)

    if flagep: print(dic_rude)
    if flagep: print(dic_rude_ts)
    if flagep: print(dic_ts)
    if flagep: print(dic_ts_rude)
    if flagep: print(dic_ts_link)
    if flagep: print(all_station)

Full Screen

Full Screen

D.py

Source:D.py Github

copy

Full Screen

#!/usr/bin/env pypy3
"""Greedy placement: n cells, a minimum gap of d between 'loud' items (> m),
maximize total value. Try every count of quiet items taken greedily from the
largest, fill the rest with the minimum required number of loud items."""
import math

n, d, m = (int(tok) for tok in input().split())
values = list(map(int, input().split()))

# split by loudness threshold m, largest-first within each group
loud = sorted((v for v in values if v > m), reverse=True)
quiet = sorted((v for v in values if v <= m), reverse=True)


def padded_prefix(xs):
    """Prefix sums of xs, padded with n repeats of the final total so any
    index up to len(xs) + n is safe to read."""
    pref = [0]
    for v in xs:
        pref.append(pref[-1] + v)
    pref.extend([pref[-1]] * n)
    return pref


loud_pref = padded_prefix(loud)
quiet_pref = padded_prefix(quiet)

best = float("-inf")
for quiet_used in range(len(quiet) + 1):
    remaining = n - quiet_used
    # at least one loud item per (d+1)-cell stretch of the remaining cells
    loud_needed = math.ceil(remaining / (d + 1))
    best = max(best, quiet_pref[quiet_used] + loud_pref[loud_needed])

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, through following best practices, to diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run hypothesis automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful