How to use close_all method in robotframework-appiumlibrary

Best Python code snippet using robotframework-appiumlibrary_python

Seperate.py

Source:Seperate.py Github

copy

Full Screen

# Reconstructed from a scraped single-line paste of "Seperate.py"; the logic is
# preserved except for the concrete fixes noted inline (discarded np.hstack
# result in wave(), shadowed builtins, and the four copy-pasted per-year
# blocks of calculate_return factored into helpers).
from pandas_datareader import data
import yfinance as yf
from statsmodels.tsa.holtwinters import ExponentialSmoothing, Holt
from scipy.interpolate import UnivariateSpline
from pandas_wrapper import l1tf, hp, l1ctf, l1tccf  # project-local trend filters
import csv
from numpy import linspace, exp
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt  # duplicate alias kept from the original
import statsmodels.api as sm
import math
from scipy import stats
# from Holt import additive,linear,multiplicative,newadditive
from mpl_toolkits import axisartist
import random
import gc
import pywt


def wave(arr):
    """Denoise a price series with a Haar wavelet transform.

    The first differences of ``arr`` are decomposed with a single-level DWT,
    both coefficient bands are soft-thresholded at 0.3 * their own std, the
    transform is inverted, and the result is cumulatively summed back into
    price space starting from the first observed price.
    """
    returns = np.diff(arr)
    method = 'haar'
    mode_ = "soft"
    ca, cd = pywt.dwt(returns, method)
    cat = pywt.threshold(ca, 0.3 * np.std(ca), mode=mode_)
    cdt = pywt.threshold(cd, 0.3 * np.std(cd), mode=mode_)
    tx = pywt.idwt(cat, cdt, method, "smooth")
    # BUGFIX: the original computed np.hstack((temp, tx)) and discarded the
    # result, so the starting price was never prepended before the cumsum.
    tx = np.hstack((np.array([arr[0]]), tx))
    return np.cumsum(tx)


def judge(a, b):
    """Sign indicator used by the Mann-Kendall-style trend count.

    Returns 1 if a > b, -1 if a < b, 0 if equal.  (Falls through to None for
    unordered operands such as NaN -- behaviour kept from the original.)
    """
    if a > b:
        return 1
    if a < b:
        return -1
    if a == b:
        return 0


def calculate_revenue(list1, list2, holding_period):
    """Backtest a signal list against actual prices.

    list1          -- per-day signals: 1 = go long, -1 = go short, 0 = hold
    list2          -- actual close prices aligned with ``list1``
    holding_period -- number of days each position is held
    Returns the final account value, starting from 100 000.
    """
    list1 = np.array(list1)
    list2 = np.array(list2)
    days_to_long = 0
    days_to_short = 0
    money = 100000
    money_trend = [money]  # kept for optional equity-curve plotting
    for index, value in enumerate(list1):
        # NOTE(review): at index 0, list2[index - 1] wraps around to the last
        # price; a signal on day 0 is rare but this looks unintended -- verify.
        if value == 1:
            try:
                days_to_long += 1
                number_to_buy = int(money / list2[index - 1])
                money += number_to_buy * (list2[index - 1 + holding_period] - list2[index - 1])
            except IndexError:
                # position would close past the end of the data -- skip it
                continue
        if value == -1:
            try:
                days_to_short += 1
                number_to_sell = int(money / list2[index - 1])
                money += number_to_sell * (list2[index - 1] - list2[index - 1 + holding_period])
            except IndexError:
                continue
        money_trend.append(money)
    return money


def best_period(New_close, average_filtered_derivative, confidence):
    """Grid-search holding periods 1..29 for strategy() and return the best one."""
    best_revenue = 0  # renamed from 'sum', which shadowed the builtin
    best = 0
    for period in range(1, 30):
        action_list = strategy(average_filtered_derivative, 0, period, confidence)
        final_revenue = calculate_revenue(action_list, New_close, period)
        if final_revenue > best_revenue:
            best_revenue = final_revenue
            best = period
    # BUGFIX: added the missing space so the two figures no longer run together
    print("Best holding period:" + str(best) + " return:" + str(best_revenue))
    return best


def strategy(average_filtered_derivative, start_date, period, confidence):
    """Emit a confidence-gated signal every ``period`` days (method 1)."""
    size = len(confidence)
    buy_or_sell_list = [0] * size
    for i in range(start_date, size - period, period):
        buy_or_sell_list[i] = buy_or_sell(average_filtered_derivative[i], confidence[i])
    return buy_or_sell_list


def buy_or_sell(value, confidence):
    """Map (trend derivative, confidence) to a trade signal: 1, -1 or 0."""
    if value > 0 and confidence != 0:
        return 1
    if value < 0 and confidence != 0:
        return -1
    return 0


def strategy3(average_filtered_derivative, start_date, period):
    """Emit a signal from the derivative sign alone, no confidence gate (method 2)."""
    size = len(average_filtered_derivative)
    buy_or_sell_list = [0] * size
    for i in range(start_date, size - period, period):
        if average_filtered_derivative[i] > 0:
            buy_or_sell_list[i] = 1
        elif average_filtered_derivative[i] < 0:
            buy_or_sell_list[i] = -1
    return buy_or_sell_list


def best_period3(New_close, average_filtered_derivative):
    """Grid-search holding periods 1..29 for strategy3() and return the best one."""
    best_revenue = 0  # renamed from 'sum', which shadowed the builtin
    best = 0
    for period in range(1, 30):
        action_list = strategy3(average_filtered_derivative, 0, period)
        final_revenue = calculate_revenue(action_list, New_close, period)
        if final_revenue > best_revenue:
            best_revenue = final_revenue
            best = period
    print("Best holding period:" + str(best) + " return:" + str(best_revenue))
    return best


def cross_validation(df, lower_bound, higher_bound, step_length, leng_of_training, leng_of_test, type):
    """Rolling-window cross-validation of the filter's lambda.

    Evaluates ``step_length`` lambdas between ``lower_bound`` and
    ``higher_bound`` on 18 windows of ``df`` (train ``leng_of_training``,
    test ``leng_of_test``), plots the error curve, and returns the lambda
    with the smallest total squared error.  ``type`` selects the filter
    ("L1T", "L1C", anything else -> HP); the parameter name shadows the
    builtin but is kept for caller compatibility.
    """
    Interval = 50
    lambdas = np.linspace(lower_bound, higher_bound, step_length)
    error = []
    for delta in lambdas:
        oneerror = 0
        for count in range(1, 19):
            lo = Interval * (count - 1)
            x1 = df[lo:lo + leng_of_training]
            if type == "L1T":
                filtered = l1tf(x1, delta)
            elif type == "L1C":
                filtered = l1ctf(x1, delta)
            else:
                filtered = hp(x1, delta)
            # last leng_of_test filtered points act as the forecast
            estimate = filtered[leng_of_training - leng_of_test:leng_of_training]
            y1 = list(df[lo + leng_of_training:lo + leng_of_training + leng_of_test])
            oneerror += sum(pow(y1[i] - estimate[i], 2) for i in range(len(y1)))
        error.append(oneerror)
    plt.plot(lambdas, error)
    plt.legend([type], fontsize=20)
    plt.xlabel("lambda")
    plt.ylabel('Error')
    plt.title('CrossValidation')
    plt.show()
    return lambdas[error.index(min(error))]


def _apply_filter(series, TYPE, best_lambda1, best_lambda2):
    """Dispatch to the trend filter selected by TYPE.

    Factored out of the four copy-pasted per-year blocks of the original
    calculate_return.
    """
    if TYPE == "L1T":
        return l1tf(series, best_lambda1)
    if TYPE == "L1C":
        return l1ctf(series, best_lambda1)
    if TYPE == "L1TC":
        return l1tccf(series, best_lambda1, best_lambda2)
    if TYPE == "Wave":
        return wave(series)
    return hp(series, best_lambda1)


def _segment_signals(start, end, TYPE, best_lambda1, best_lambda2, inter=10, n=60):
    """Walk-forward trend signals for one train/test year pair.

    start, end -- log-price series for the training and test year
    inter      -- smoothing window for the filtered derivative
    n          -- window for the Mann-Kendall-style trend statistic
    Returns (average filtered derivative over the test year,
             confidence over the test year,
             the last updating filter output).
    """
    close_all = np.hstack((start, end))
    filtered = _apply_filter(start, TYPE, best_lambda1, best_lambda2)
    filter_derivative = [filtered[i] - filtered[i - 1] for i in range(1, len(filtered))]
    filter_derivative.insert(0, filter_derivative[0])
    average_filtered_derivative = [np.mean(filter_derivative[(i - inter):i])
                                   for i in range(inter, len(filter_derivative))]
    average_filtered_derivative = np.hstack((np.array([0] * inter), average_filtered_derivative))
    size_start = len(start)
    size_end = len(end)
    average_filtered_derivative = np.hstack((average_filtered_derivative, np.array([0] * size_end)))
    filtered_updating = filtered  # guards the return when size_end == 0
    # Walk forward one day at a time, re-filtering the full history so the
    # derivative at day size_start + i uses no future information.
    for i in range(size_end):
        filtered_updating = _apply_filter(close_all[0:size_start + i + 1],
                                          TYPE, best_lambda1, best_lambda2)
        derivative = [filtered_updating[j] - filtered_updating[j - 1]
                      for j in range(1, len(filtered_updating))]
        target = np.mean(derivative[-inter - 1:-1])
        try:
            average_filtered_derivative[size_start + i] = target
        except IndexError:  # narrowed from a bare except; Wave lengths can differ
            continue
    # Mann-Kendall-style trend statistic over a rolling n-day window.
    size = len(close_all)
    st = [0] * size
    for date in range(n, size):
        total = 0
        for i in range(0, n - 1):
            for j in range(i + 1, n):
                total += judge(close_all[date - i], close_all[date - j])
        st[date] = total
    std = math.sqrt((n * (n - 1) * (2 * n + 5)) / 18)
    zt = [s / std for s in st]
    confidence = [0] * size
    for i in range(len(zt)):
        if zt[i] > 1.96:
            confidence[i] = 1
        elif zt[i] < -1.96:
            confidence[i] = -1
    return average_filtered_derivative[-size_end:], confidence[-size_end:], filtered_updating


def calculate_return(df, TYPE, best_lambda1, time, best_lambda2=0):
    """Walk-forward backtest of the TYPE trend filter over 2016-2019.

    time -- [log closes 2015, actual closes 2016, 2017, 2018, 2019].
    2016-2018 tune the holding periods; 2019 is the out-of-sample test year.
    Returns (method-1 revenue, method-2 revenue, buy-and-hold benchmark,
    last updating filter output).  ``df`` is unused but kept for the callers.
    """
    close_2015 = time[0]
    close_2016_Actual = time[1]
    close_2016 = np.log(close_2016_Actual)
    close_2017_Actual = time[2]
    close_2017 = np.log(close_2017_Actual)
    close_2018_Actual = time[3]
    close_2018 = np.log(close_2018_Actual)
    close_2019_Actual = time[4]
    close_2019 = np.log(close_2019_Actual)
    # Tune the holding period on three successive train(year)/test(year+1) pairs.
    tuning_pairs = [
        (close_2015, close_2016, close_2016_Actual),
        (close_2016, close_2017, close_2017_Actual),
        (close_2017, close_2018, close_2018_Actual),
    ]
    bp1_all = []
    bp3_all = []
    for start, end, close_end_Actual in tuning_pairs:
        afd_end, confidence_end, _ = _segment_signals(start, end, TYPE,
                                                      best_lambda1, best_lambda2)
        bp1_all.append(best_period(close_end_Actual, afd_end, confidence_end))
        bp3_all.append(best_period3(close_end_Actual, afd_end))
    # Final out-of-sample year, traded with the averaged holding periods.
    afd_end, confidence_end, filtered_updating = _segment_signals(
        close_2018, close_2019, TYPE, best_lambda1, best_lambda2)
    bp1 = int(np.mean(bp1_all))
    bp3 = int(np.mean(bp3_all))
    Ac_list1 = strategy(afd_end, 0, bp1, confidence_end)
    Ac_list3 = strategy3(afd_end, 0, bp3)
    print("######################################################################")
    print(bp1_all[0], bp1_all[1], bp1_all[2])
    print(bp3_all[0], bp3_all[1], bp3_all[2])
    print(bp1, bp3)
    revenue1 = calculate_revenue(Ac_list1, close_2019_Actual, bp1)
    revenue2 = calculate_revenue(Ac_list3, close_2019_Actual, bp3)
    benchmark = 100000 / close_2019_Actual[0] * close_2019_Actual[-1]
    print(revenue1)
    print(revenue2)
    print(benchmark)
    return (revenue1, revenue2, benchmark, filtered_updating)


def main():
    """Download SSE Composite data, tune each filter's lambda by CV, and
    compare the two trading methods against buy-and-hold for every filter."""
    start_date = '2015-01-05'
    end_date = '2019-12-31'
    df = yf.download('000001.SS', start_date, end_date)
    # df = yf.download('^GSPC', start_date, end_date)
    # df = yf.download('GC=F', start_date, end_date)
    df['log Close'] = np.log(df['Close'])
    close_15to18 = df['log Close'].loc["2015-01-04":"2018-12-31"]
    close_2015 = df['log Close'].loc["2015-01-04":"2015-12-31"]
    close_2016_Actual = np.array(df['Adj Close'].loc["2016-01-04":"2016-12-31"])
    close_2017_Actual = np.array(df['Adj Close'].loc["2017-01-04":"2017-12-31"])
    close_2018_Actual = np.array(df['Adj Close'].loc["2018-01-04":"2018-12-31"])
    close_2019_Actual = np.array(df['Adj Close'].loc["2019-01-04":"2019-12-31"])
    leng_of_training = 80
    leng_of_test = 20
    best_lamda_L1T = cross_validation(close_15to18, 0, 10, 20, leng_of_training, leng_of_test, "L1T")
    print("Best lamda for L1-T filter:" + str(best_lamda_L1T))  # ~2.6316 in the original run
    best_lamda_L1C = cross_validation(close_15to18, 0, 10, 20, leng_of_training, leng_of_test, "L1C")
    print("Best lamda for L1-C filter:" + str(best_lamda_L1C))  # ~1.0526
    best_lamda_L2 = cross_validation(close_15to18, 50, 500, 30, leng_of_training, leng_of_test, "L2")
    print("Best lamda for L2 filter:" + str(best_lamda_L2))  # ~282.76
    time = [close_2015, close_2016_Actual, close_2017_Actual, close_2018_Actual, close_2019_Actual]
    alltype = ["L1T", "L1C", "L2", "L1TC", "Wave"]
    allbestlambda = [best_lamda_L1T, best_lamda_L1C, best_lamda_L2]
    result1 = calculate_return(df, alltype[0], allbestlambda[0], time)
    result2 = calculate_return(df, alltype[1], allbestlambda[1], time)
    result3 = calculate_return(df, alltype[2], allbestlambda[2], time)
    result4 = calculate_return(df, alltype[3], allbestlambda[0], time, allbestlambda[1])
    result5 = calculate_return(df, alltype[4], 0, time)
    # Grouped bar chart: method 1 vs method 2 per filter, plus the benchmark bar.
    x = np.arange(5)
    allresult = np.array([result1[0], result2[0], result3[0], result4[0], result5[0]])
    allresult2 = np.array([result1[1], result2[1], result3[1], result4[1], result5[1]])
    y = np.array([5])
    plt.bar(y, result1[2], label="benchmark", color="red")
    plt.xticks(y, ["benchmark"])
    total_width, n = 0.8, 2
    width = total_width / n
    x = x - (total_width - width) / 2
    plt.bar(x, allresult, width=width, label="method 1", color='orange')
    plt.bar(x + width, np.array(allresult2), width=width, color='blue', label="method 2")
    plt.yticks(range(0, 170000, 10000))
    plt.xticks(x, ["L1T", "L1C", "L2", "L1TC", "Wavelet"])
    plt.legend()
    plt.show()


if __name__ == "__main__":
    main()  # the scraped snippet is truncated here; assumed to call main()

Full Screen

Full Screen

donchian.py

Source:donchian.py Github

copy

Full Screen

import requests
import re
import locale
import time as tm
import pickle
import numpy as np
import yfinance as yf
from sklearn.linear_model import LinearRegression
from math import floor
from datetime import datetime, timedelta, time
from bs4 import BeautifulSoup
from dbhelper import DBHelper

db = DBHelper()  # module-level DB helper shared by every method below


class donchianCeV:
    """Donchian-channel screener for B3 SMLL-index stocks.

    Scrapes the index constituents, batch-downloads their price history from
    Yahoo Finance, caches everything as pickles under obj/, and exposes the
    buy/portfolio analysis entry points.  The analysis bodies themselves are
    placeholders in this snippet ("Analysis method goes here").

    NOTE(review): this source was recovered from a scrape that destroyed the
    original newlines; statement order is preserved but a few indentation
    choices (flagged below) are reconstructions -- confirm against the repo.
    """

    def avglist(self, num):
        """Return the arithmetic mean of the numbers in *num*."""
        sumOfNumbers = 0
        for t in num:
            sumOfNumbers = sumOfNumbers + t
        avg = sumOfNumbers / len(num)
        return avg

    def save_obj(self, obj, name):
        """Pickle *obj* to obj/<name>.pkl (overwriting any previous cache)."""
        with open('obj/' + name + '.pkl', 'wb+') as f:
            pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)

    def load_obj(self, name):
        """Unpickle and return obj/<name>.pkl."""
        with open('obj/' + name + '.pkl', 'rb') as f:
            return pickle.load(f)

    def gather_stock_list(self, dbname):
        """Scrape the SMLL index constituents from the B3 website.

        Retries up to 300 times (1 s apart).  On success the fresh list is
        written back to the DB; on total failure it falls back to the list
        cached in the DB and then returns a (stockList, stockListRaw) tuple
        instead of just the list -- callers must type-check the result.
        """
        stockList = []
        trials = 1
        while stockList == [] and trials < 300:
            page = requests.get('http://bvmf.bmfbovespa.com.br/indices/ResumoCarteiraTeorica.aspx?Indice=SMLL&idioma=pt-br')
            resultado = BeautifulSoup(page.content, 'html.parser')
            for stock in resultado.findAll('td', {'class': 'rgSorted'}):
                stockText = stock.findAll(text=True)
                stockText = [x for x in stockText if x != '\n']  # drop whitespace nodes
                stockList.extend(stockText)
            trials += 1
            tm.sleep(1)
        day_nowDB = str(datetime.now().date())
        if stockList == []:
            # scrape failed -- use the last list stored in the database
            stockListRaw = db.get_stocks("SMALL", dbname)
            stockList.extend(stockListRaw[0][0].split(' '))
        else:
            stocks = ' '.join(stockList)
            db.upd_stocks(stocks, day_nowDB, dbname)
        # 'stockListRaw' only exists when the DB fallback ran
        if 'stockListRaw' in locals():
            return stockList, stockListRaw
        else:
            return stockList

    def gather_EOD(self, stockList):
        """Download ~320 days of daily OHLC for every ticker in *stockList*.

        Tickers are fetched in batches of ``jump`` (yfinance returns a flat
        frame for one ticker, a column-MultiIndex frame for several -- hence
        the two branches).  For each ticker it stores [highs, lows, (h+l)/2]
        in ``history_all`` and the last non-zero/non-NaN close in
        ``close_all``; both dicts are pickled and returned.
        """
        day_before = str(datetime.now().date()-timedelta(days=320))
        day_now = str(datetime.now().date()+timedelta(days=2))
        i = 1
        iF = len(stockList)
        k = 1
        history_all = {}
        close_all = {}
        jump = 10  # batch size per yfinance request
        while i <= iF:
            m = i  # remember batch start so a failed batch can be retried
            frstocks = []
            j = k*jump
            while i <= j and i <= iF:
                frstocks.append(f'{stockList[i-1]}.SA')  # B3 tickers need the .SA suffix
                i += 1
            frstocksY = ' '.join(frstocks)
            try:
                dataY = yf.download(frstocksY, interval='1d', auto_adjust=True, start=day_before, end=day_now)
                data = dataY.to_dict()
                if len(frstocks) == 1:
                    stockhigh = [float('%.2f' % data['High'][x]) for x in data['High']]
                    stocklow = [float('%.2f' % data['Low'][x]) for x in data['Low']]
                    avg_list = [stockhigh, stocklow]
                    stockavg = [(x+y)/2 for x,y in zip(*avg_list)]
                    history_all[frstocks[0].split('.')[0]] = [stockhigh, stocklow, stockavg]
                    closing = float('%.2f' % dataY['Close'][-1])
                    # fall back to the previous close when today's is 0/NaN
                    if closing == 0 or np.isnan(closing):
                        close_all[frstocks[0].split('.')[0]] = float('%.2f' % dataY['Close'][-2])
                    else:
                        close_all[frstocks[0].split('.')[0]] = closing
                else:
                    for stock in frstocks:
                        stockhigh = [float('%.2f' % data[('High', stock)][x]) for x in data[('High', stock)]]
                        stocklow = [float('%.2f' % data[('Low', stock)][x]) for x in data[('Low', stock)]]
                        avg_list = [stockhigh, stocklow]
                        stockavg = [(x+y)/2 for x,y in zip(*avg_list)]
                        history_all[stock.split('.')[0]] = [stockhigh, stocklow, stockavg]
                        closing = float('%.2f' % dataY[('Close', stock)][-1])
                        if closing == 0 or np.isnan(closing):
                            close_all[stock.split('.')[0]] = float('%.2f' % dataY[('Close', stock)][-2])
                        else:
                            close_all[stock.split('.')[0]] = closing
                k += 1
            except:
                # NOTE(review): bare except retries the batch forever on a
                # persistent error (e.g. a delisted ticker) -- worth narrowing.
                i = m
                tm.sleep(1)
        self.save_obj(history_all, 'history')
        self.save_obj(close_all, 'close')
        return history_all, close_all

    def donch_Compra_func(self, user, dbname, sameday, manual):
        """Prepare the data needed to screen buy candidates.

        Loads the cached history (downloading everything if the cache is
        missing), then either reuses the cached closes or, during market
        hours, re-downloads fresh closing prices in batches.  The actual
        screening logic is a placeholder; returns ``resultado_final``.
        """
        resultado_final = []
        day_before_close = str(datetime.now().date()-timedelta(days=4))
        day_now = str(datetime.now().date()+timedelta(days=2))

        stockList_all = self.gather_stock_list(dbname)
        if type(stockList_all) == tuple:
            stockList = stockList_all[0]
            stockListRaw = stockList_all[1]
        else:
            stockList = stockList_all
        try:
            history_all = self.load_obj('history')
        except:
            # no cache yet -- full download (also fills close_all)
            history_all, close_all = self.gather_EOD(stockList)
        # NOTE(review): indentation reconstructed -- this block is placed at
        # function level (not inside the except above); otherwise close_all
        # would be undefined whenever the history cache loads successfully.
        if not (datetime.now().time() >= time(13,0) \
        and datetime.now().time() < time(21,0)):
            # outside market hours: the cached closes are good enough
            close_all = self.load_obj('close')
        else:
            # market hours: refresh the latest closes, in batches of ``jump``
            i = 1
            iF = len(stockList)
            k = 1
            close_all = {}
            jump = 15
            while i <= iF:
                m = i
                frstocks = []
                j = k*jump
                while i <= j and i <= iF:
                    frstocks.append(f'{stockList[i-1]}.SA')
                    i += 1
                frstocksY = ' '.join(frstocks)
                try:
                    dataY = yf.download(frstocksY, interval='1d', auto_adjust=True, start=day_before_close, end=day_now)
                    data = dataY.to_dict()
                    if len(frstocks) == 1:
                        closing = float('%.2f' % dataY['Close'][-1])
                        if closing == 0 or np.isnan(closing):
                            close_all[frstocks[0].split('.')[0]] = float('%.2f' % dataY['Close'][-2])
                        else:
                            close_all[frstocks[0].split('.')[0]] = closing
                    else:
                        for stock in frstocks:
                            closing = float('%.2f' % dataY[('Close', stock)][-1])
                            if closing == 0 or np.isnan(closing):
                                close_all[stock.split('.')[0]] = float('%.2f' % dataY[('Close', stock)][-2])
                            else:
                                close_all[stock.split('.')[0]] = closing
                    k += 1
                except:
                    i = m
                    tm.sleep(1)
        # ---------------------------- Analysis method goes here ---------------------------------

        return resultado_final

    def donch_Carteira(self, user, dbname):
        """Analyse the user's current portfolio (carteira).

        Downloads ~55 days of lows for each held stock; ``resultado_final``
        is 1 when the portfolio is empty.  The analysis itself is a
        placeholder.  ``sameday`` is computed but unused in this snippet.
        """
        if (datetime.now().time() <= time(13,21) \
        or datetime.now().time() > time(21,0)):
            sameday = True
        else:
            sameday = False
        carteira = db.get_carteira(user, dbname)
        if carteira == []:
            resultado_final = 1
        else:
            resultado_final = []

        day_before = str(datetime.now().date()-timedelta(days=55))
        day_now = str(datetime.now().date()+timedelta(days=2))
        #13 = ultimo item da lista de stocks  (last item of the stock list)
        i = 1 #1-13
        iF = len(carteira)
        k = 1 #1-3
        history_all = {}
        close_all = {}
        jump = 10
        while i <= iF:
            m = i
            frstocks = []
            j = k*jump
            while i <= j and i <= iF:
                frstocks.append(f'{carteira[i-1]}.SA')
                i += 1
            frstocksY = ' '.join(frstocks)
            try:
                dataY = yf.download(frstocksY, interval='1d', auto_adjust=True, start=day_before, end=day_now)
                data = dataY.to_dict()
                if len(frstocks) == 1:
                    # single ticker: flat columns; only the lows are kept here
                    stocklow = [float('%.2f' % data['Low'][x]) for x in data['Low']]
                    history_all[frstocks[0].split('.')[0]] = stocklow
                    closing = float('%.2f' % dataY['Close'][-1])
                    if closing == 0 or np.isnan(closing):
                        close_all[frstocks[0].split('.')[0]] = float('%.2f' % dataY['Close'][-2])
                    else:
                        close_all[frstocks[0].split('.')[0]] = closing
                else:
                    for stock in frstocks:
                        stocklow = [float('%.2f' % data[('Low', stock)][x]) for x in data[('Low', stock)]]
                        history_all[stock.split('.')[0]] = stocklow
                        closing = float('%.2f' % dataY[('Close', stock)][-1])
                        if closing == 0 or np.isnan(closing):
                            close_all[stock.split('.')[0]] = float('%.2f' % dataY[('Close', stock)][-2])
                        else:
                            close_all[stock.split('.')[0]] = closing
                k += 1
            except:
                i = m
                tm.sleep(1)

        # ---------------------------- Analysis method goes here ---------------------------------

        return resultado_final

    def donch_Compra(self, user, dbname, manual):
        """Entry point for buy analysis: derives *sameday* from the clock and
        delegates to donch_Compra_func.

        NOTE(review): this window (10:21/18:00) differs from the 13:21/21:00
        window in donch_Carteira -- presumably local vs UTC time; confirm.
        """
        if (datetime.now().time() <= time(10,21) \
        or datetime.now().time() > time(18,0)):
            sameday = True
        else:
            sameday = False
        analysis = self.donch_Compra_func(user, dbname, sameday, manual)
        # (the scraped snippet is truncated here)

Full Screen

Full Screen

oracle数据接口测试.py

Source:oracle数据接口测试.py Github

copy

Full Screen

...4 def __init__(self):5 self.conn = cx_Oracle.connect('TEST/123@localhost/orcl')6 self.cursor = self.conn.cursor()7 # 关闭连接8 def close_all(self):9 self.cursor.close()10 self.conn.close()11 # 全查12 def query_all(self):13 self.cursor.execute('select id, name, phone, address from users')14 res = self.cursor.fetchall()15 self.close_all()16 return res17 # 按id查询18 def query_by_name(self, id):19 self.cursor.execute('select id, name, phone, address from users where id={}'.format(id))20 res = self.cursor.fetchall()21 self.close_all()22 return res23 # 新增记录24 def insert(self, data):25 sql = 'insert into users values(\'{}\',\'{}\',\'{}\',\'{}\',\'{}\')'.format(*data)26 try:27 self.cursor.execute(sql)28 self.conn.commit()29 self.close_all()30 print('新增成功')31 return32 except Exception as e:33 # 添加后失败回滚34 self.conn.rollback()35 self.close_all()36 print('新增失败')37 return38 # 删除记录39 def delete(self, id):40 sql = 'delete from users where id={}'.format(id)41 try:42 self.cursor.execute(sql)43 self.conn.commit()44 self.close_all()45 print('删除成功')46 return47 except Exception as e:48 self.conn.rollback()49 self.close_all()50 print('删除失败')51 return52 # 修改记录53 def update(self, id, phone):54 sql = 'update users set phone={} where id={}'.format(phone, id)55 print(sql)56 try:57 self.cursor.execute(sql)58 self.conn.commit()59 self.close_all()60 print('修改成功')61 return62 except Exception as e:63 self.conn.rollback()64 self.close_all()65 print('修改失败')66 return67if __name__ == '__main__':68 a = Api()69 # re = a.query_by_name('1')70 # re = a.query_all()71 data = ['3', '测试', '男', '123456', '上海']72 re = a.insert(data)73 # print(re)74 # a.delete('3')...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run robotframework-appiumlibrary automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful