How to use close method in mountebank

Best JavaScript code snippet using mountebank

indicators.py

Source:indicators.py Github

copy

Full Screen

1# coding:utf-82#3# The MIT License (MIT)4#5# Copyright (c) 2016-2021 yutiansut/QUANTAXIS6#7# Permission is hereby granted, free of charge, to any person obtaining a copy8# of this software and associated documentation files (the "Software"), to deal9# in the Software without restriction, including without limitation the rights10# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell11# copies of the Software, and to permit persons to whom the Software is12# furnished to do so, subject to the following conditions:13#14# The above copyright notice and this permission notice shall be included in all15# copies or substantial portions of the Software.16#17# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR18# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,19# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE20# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER21# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,22# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE23# SOFTWARE.24from functools import reduce25import numpy as np26import pandas as pd27from QUANTAXIS.QAIndicator.base import *28"""29DataFrame 类30以下的函数都可以被直接add_func31"""32"""331. 趋向指标 34又叫趋势跟踪类指标,主要用于跟踪并预测股价的发展趋势35包含的主要指标361. 移动平均线 MA372. 指数平滑移动平均线 MACD383. 趋向指标 DMI394. 瀑布线 PBX405. 平均线差 DMA416. 动力指标(动量线) MTM427. 指数平均线 EXPMA438. 佳庆指标 CHO44"""45def QA_indicator_MA(DataFrame,*args,**kwargs):46 """MA47 48 Arguments:49 DataFrame {[type]} -- [description]50 51 Returns:52 [type] -- [description]53 """54 CLOSE = DataFrame['close']55 return pd.DataFrame({'MA{}'.format(N): MA(CLOSE, N) for N in list(args)})56def QA_indicator_MA_VOL(DataFrame,*args,**kwargs):57 """MA_VOLU58 59 Arguments:60 DataFrame {[type]} -- [description]61 62 Returns:63 [type] -- [description]64 """65 VOL = DataFrame['volume']66 return pd.DataFrame({'MA_VOL{}'.format(N): MA(VOL, N) for N in list(args)})67def QA_indicator_EMA(DataFrame, N):68 CLOSE = DataFrame['close']69 return pd.DataFrame({'EMA': EMA(CLOSE, N)})70def QA_indicator_SMA(DataFrame, N):71 CLOSE = DataFrame['close']72 return pd.DataFrame({'SMA': SMA(CLOSE, N)})73def QA_indicator_MACD(DataFrame, short=12, long=26, mid=9):74 """75 MACD CALC76 """77 CLOSE = DataFrame['close']78 DIF = EMA(CLOSE, short)-EMA(CLOSE, long)79 DEA = EMA(DIF, mid)80 MACD = (DIF-DEA)*281 return pd.DataFrame({'DIF': DIF, 'DEA': DEA, 'MACD': MACD})82def QA_indicator_DMI(DataFrame, M1=14, M2=6):83 """84 趋向指标 DMI85 """86 HIGH = DataFrame.high87 LOW = DataFrame.low88 CLOSE = DataFrame.close89 OPEN = DataFrame.open90 TR = SUM(MAX(MAX(HIGH-LOW, ABS(HIGH-REF(CLOSE, 1))),91 ABS(LOW-REF(CLOSE, 1))), M1)92 HD = HIGH-REF(HIGH, 1)93 LD = REF(LOW, 1)-LOW94 DMP = SUM(IFAND(HD>0,HD>LD,HD,0), M1)95 DMM = SUM(IFAND(LD>0,LD>HD,LD,0), M1)96 DI1 = DMP*100/TR97 DI2 = DMM*100/TR98 ADX = MA(ABS(DI2-DI1)/(DI1+DI2)*100, M2)99 ADXR = (ADX+REF(ADX, M2))/2100 return pd.DataFrame({101 'DI1': DI1, 'DI2': DI2,102 'ADX': ADX, 'ADXR': ADXR103 })104def QA_indicator_PBX(DataFrame, N1=3, N2=5, N3=8, N4=13, N5=18, N6=24):105 '瀑布线'106 C = DataFrame['close']107 PBX1 = (EMA(C, N1) + EMA(C, 2 * N1) + EMA(C, 4 * N1)) / 3108 PBX2 = (EMA(C, N2) + EMA(C, 2 * N2) + EMA(C, 4 * N2)) / 3109 PBX3 = (EMA(C, N3) + EMA(C, 2 * N3) + EMA(C, 4 * N3)) / 3110 PBX4 = (EMA(C, N4) + EMA(C, 2 * N4) + EMA(C, 4 * N4)) / 3111 PBX5 = (EMA(C, N5) + EMA(C, 2 * N5) + EMA(C, 4 * N5)) / 3112 PBX6 = (EMA(C, N6) + EMA(C, 2 * N6) + EMA(C, 4 * N6)) / 3113 DICT = {'PBX1': PBX1, 'PBX2': PBX2, 'PBX3': PBX3,114 'PBX4': PBX4, 'PBX5': PBX5, 'PBX6': PBX6}115 return pd.DataFrame(DICT)116def QA_indicator_DMA(DataFrame, M1=10, M2=50, M3=10):117 """118 平均线差 DMA119 """120 CLOSE = DataFrame.close121 DDD = MA(CLOSE, M1) - MA(CLOSE, M2)122 AMA = MA(DDD, M3)123 return pd.DataFrame({124 'DDD': DDD, 'AMA': AMA125 })126def QA_indicator_MTM(DataFrame, N=12, M=6):127 '动量线'128 C = DataFrame.close129 mtm = C - REF(C, N)130 MTMMA = MA(mtm, M)131 DICT = {'MTM': mtm, 'MTMMA': MTMMA}132 return pd.DataFrame(DICT)133def QA_indicator_EXPMA(DataFrame, P1=5, P2=10, P3=20, P4=60):134 """ 指数平均线 EXPMA"""135 CLOSE = DataFrame.close136 MA1 = EMA(CLOSE, P1)137 MA2 = EMA(CLOSE, P2)138 MA3 = EMA(CLOSE, P3)139 MA4 = EMA(CLOSE, P4)140 return pd.DataFrame({141 'MA1': MA1, 'MA2': MA2, 'MA3': MA3, 'MA4': MA4142 })143def QA_indicator_CHO(DataFrame, N1=10, N2=20, M=6):144 """145 佳庆指标 CHO146 """147 HIGH = DataFrame.high148 LOW = DataFrame.low149 CLOSE = DataFrame.close150 VOL = DataFrame.volume151 MID = SUM(VOL*(2*CLOSE-HIGH-LOW)/(HIGH+LOW), 0)152 CHO = MA(MID, N1)-MA(MID, N2)153 MACHO = MA(CHO, M)154 return pd.DataFrame({155 'CHO': CHO, 'MACHO': MACHO156 })157"""1582. 反趋向指标159主要捕捉趋势的转折点160随机指标KDJ161乖离率 BIAS162变动速率 ROC163顺势指标 CCI164威廉指标 W&R165震荡量(变动速率) OSC166相对强弱指标 RSI167动态买卖指标 ADTM168"""169def QA_indicator_KDJ(DataFrame, N=9, M1=3, M2=3):170 C = DataFrame['close']171 H = DataFrame['high']172 L = DataFrame['low']173 RSV = ((C - LLV(L, N)) / (HHV(H, N) - LLV(L, N)) * 100).groupby('code').fillna(method='ffill')174 K = SMA(RSV, M1)175 D = SMA(K, M2)176 J = 3 * K - 2 * D177 DICT = {'KDJ_K': K, 'KDJ_D': D, 'KDJ_J': J}178 return pd.DataFrame(DICT)179def QA_indicator_BIAS(DataFrame, N1, N2, N3):180 '乖离率'181 CLOSE = DataFrame['close']182 BIAS1 = (CLOSE - MA(CLOSE, N1)) / MA(CLOSE, N1) * 100183 BIAS2 = (CLOSE - MA(CLOSE, N2)) / MA(CLOSE, N2) * 100184 BIAS3 = (CLOSE - MA(CLOSE, N3)) / MA(CLOSE, N3) * 100185 DICT = {'BIAS1': BIAS1, 'BIAS2': BIAS2, 'BIAS3': BIAS3}186 return pd.DataFrame(DICT)187def QA_indicator_ROC(DataFrame, N=12, M=6):188 '变动率指标'189 C = DataFrame['close']190 roc = 100 * (C - REF(C, N)) / REF(C, N)191 ROCMA = MA(roc, M)192 DICT = {'ROC': roc, 'ROCMA': ROCMA}193 return pd.DataFrame(DICT)194def QA_indicator_CCI(DataFrame, N=14):195 """196 TYP:=(HIGH+LOW+CLOSE)/3;197 CCI:(TYP-MA(TYP,N))/(0.015*AVEDEV(TYP,N));198 """199 typ = (DataFrame['high'] + DataFrame['low'] + DataFrame['close']) / 3200 ## 此处AVEDEV可能为0值 因此导致出错 +0.0000000000001201 cci = ((typ - MA(typ, N)) / (0.015 * AVEDEV(typ, N) + 0.00000001))202 a = 100203 b = -100204 return pd.DataFrame({205 'CCI': cci, 'a': a, 'b': b206 })207def QA_indicator_WR(DataFrame, N, N1):208 '威廉指标'209 HIGH = DataFrame['high']210 LOW = DataFrame['low']211 CLOSE = DataFrame['close']212 WR1 = 100 * (HHV(HIGH, N) - CLOSE) / (HHV(HIGH, N) - LLV(LOW, N))213 WR2 = 100 * (HHV(HIGH, N1) - CLOSE) / (HHV(HIGH, N1) - LLV(LOW, N1))214 DICT = {'WR1': WR1, 'WR2': WR2}215 return pd.DataFrame(DICT)216def QA_indicator_OSC(DataFrame, N=20, M=6):217 """变动速率线218 震荡量指标OSC,也叫变动速率线。属于超买超卖类指标,是从移动平均线原理派生出来的一种分析指标。219 它反应当日收盘价与一段时间内平均收盘价的差离值,从而测出股价的震荡幅度。220 按照移动平均线原理,根据OSC的值可推断价格的趋势,如果远离平均线,就很可能向平均线回归。221 """222 C = DataFrame['close']223 OS = (C - MA(C, N)) * 100224 MAOSC = EMA(OS, M)225 DICT = {'OSC': OS, 'MAOSC': MAOSC}226 return pd.DataFrame(DICT)227def QA_indicator_RSI(DataFrame, N1=12, N2=26, N3=9):228 '相对强弱指标RSI1:SMA(MAX(CLOSE-LC,0),N1,1)/SMA(ABS(CLOSE-LC),N1,1)*100;'229 CLOSE = DataFrame['close']230 LC = REF(CLOSE, 1)231 RSI1 = SMA(MAX(CLOSE - LC, 0), N1) / SMA(ABS(CLOSE - LC), N1) * 100232 RSI2 = SMA(MAX(CLOSE - LC, 0), N2) / SMA(ABS(CLOSE - LC), N2) * 100233 RSI3 = SMA(MAX(CLOSE - LC, 0), N3) / SMA(ABS(CLOSE - LC), N3) * 100234 DICT = {'RSI1': RSI1, 'RSI2': RSI2, 'RSI3': RSI3}235 return pd.DataFrame(DICT)236def QA_indicator_ADTM(DataFrame, N=23, M=8):237 '动态买卖气指标'238 HIGH = DataFrame.high239 LOW = DataFrame.low240 OPEN = DataFrame.open241 DTM = IF(OPEN > REF(OPEN, 1), MAX((HIGH - OPEN), (OPEN - REF(OPEN, 1))), 0)242 DBM = IF(OPEN < REF(OPEN, 1), MAX((OPEN - LOW), (OPEN - REF(OPEN, 1))), 0)243 STM = SUM(DTM, N)244 SBM = SUM(DBM, N)245 ADTM1 = IF(STM > SBM, (STM - SBM) / STM,246 IF(STM != SBM, (STM - SBM) / SBM, 0))247 MAADTM = MA(ADTM1, M)248 DICT = {'ADTM': ADTM1, 'MAADTM': MAADTM}249 return pd.DataFrame(DICT)250"""2513. 量能指标252通过成交量的大小和变化研判趋势变化253容量指标 VR254量相对强弱 VRSI255能量指标 CR256人气意愿指标 ARBR257成交量标准差 VSTD"""258def QA_indicator_VR(DataFrame, M1=26, M2=100, M3=200):259 VOL = DataFrame.volume260 CLOSE = DataFrame.close261 LC = REF(CLOSE, 1)262 VR = SUM(IF(CLOSE > LC, VOL, 0), M1)/SUM(IF(CLOSE <= LC, VOL, 0), M1)*100263 a = M2264 b = M3265 return pd.DataFrame({266 'VR': VR, 'a': a, 'b': b267 })268def QA_indicator_VRSI(DataFrame, N=6):269 VOL = DataFrame.volume270 vrsi = SMA(MAX(VOL-REF(VOL, 1), 0), N, 1) / \271 SMA(ABS(VOL-REF(VOL, 1)), N, 1)*100272 return pd.DataFrame({'VRSI': vrsi})273def QA_indicator_CR(DataFrame, N=26, M1=5, M2=10, M3=20):274 HIGH = DataFrame.high275 LOW = DataFrame.low276 CLOSE = DataFrame.close277 VOL = DataFrame.volume278 MID = (HIGH+LOW+CLOSE)/3279 CR = SUM(MAX(0, HIGH-REF(MID, 1)), N)/SUM(MAX(0, REF(MID, 1)-LOW), N)*100280 MA1 = REF(MA(CR, M1), M1/2.5+1)281 MA2 = REF(MA(CR, M2), M2/2.5+1)282 MA3 = REF(MA(CR, M3), M3/2.5+1)283 return pd.DataFrame({284 'CR': CR, 'MA1': MA1, 'MA2': MA2, 'MA3': MA3285 })286def QA_indicator_ARBR(DataFrame, M1=26, M2=70, M3=150):287 HIGH = DataFrame.high288 LOW = DataFrame.low289 CLOSE = DataFrame.close290 OPEN = DataFrame.open291 AR = SUM(HIGH-OPEN, M1)/SUM(OPEN-LOW, M1)*100292 BR = SUM(MAX(0, HIGH-REF(CLOSE, 1)), M1) / \293 SUM(MAX(0, REF(CLOSE, 1)-LOW), M1)*100294 a = M2295 b = M3296 return pd.DataFrame({297 'AR': AR, 'BR': BR, 'a': a, 'b': b298 })299def QA_indicator_VSTD(DataFrame, N=10):300 VOL = DataFrame.volume301 vstd = STD(VOL, N)302 return pd.DataFrame({'VSTD': vstd})303"""3044. 量价指标305通过成交量和股价变动关系分析未来趋势306震荡升降指标ASI307价量趋势PVT308能量潮OBV309量价趋势VPT310"""311def QA_indicator_ASI(DataFrame, M1=26, M2=10):312 """313 LC=REF(CLOSE,1);314 AA=ABS(HIGH-LC);315 BB=ABS(LOW-LC);316 CC=ABS(HIGH-REF(LOW,1));317 DD=ABS(LC-REF(OPEN,1));318 R=IF(AA>BB AND AA>CC,AA+BB/2+DD/4,IF(BB>CC AND BB>AA,BB+AA/2+DD/4,CC+DD/4));319 X=(CLOSE-LC+(CLOSE-OPEN)/2+LC-REF(OPEN,1));320 SI=16*X/R*MAX(AA,BB);321 ASI:SUM(SI,M1);322 ASIT:MA(ASI,M2);323 """324 CLOSE = DataFrame['close']325 HIGH = DataFrame['high']326 LOW = DataFrame['low']327 OPEN = DataFrame['open']328 LC = REF(CLOSE, 1)329 AA = ABS(HIGH - LC)330 BB = ABS(LOW-LC)331 CC = ABS(HIGH - REF(LOW, 1))332 DD = ABS(LC - REF(OPEN, 1))333 R = IFAND(AA > BB, AA > CC, AA+BB/2+DD/4,334 IFAND(BB > CC, BB > AA, BB+AA/2+DD/4, CC+DD/4))335 X = (CLOSE - LC + (CLOSE - OPEN) / 2 + LC - REF(OPEN, 1))336 SI = 16*X/R*MAX(AA, BB)337 ASI = SUM(SI, M1)338 ASIT = MA(ASI, M2)339 return pd.DataFrame({340 'ASI': ASI, 'ASIT': ASIT341 })342def QA_indicator_PVT(DataFrame):343 CLOSE = DataFrame.close344 VOL = DataFrame.volume345 PVT = SUM((CLOSE-REF(CLOSE, 1))/REF(CLOSE, 1)*VOL, 0)346 return pd.DataFrame({'PVT': PVT})347def QA_indicator_OBV(DataFrame):348 """能量潮"""349 VOL = DataFrame.volume350 CLOSE = DataFrame.close351 return pd.DataFrame({352 'OBV': np.cumsum(IF(CLOSE > REF(CLOSE, 1), VOL, IF(CLOSE < REF(CLOSE, 1), -VOL, 0)))/10000353 })354def QA_indicator_VPT(DataFrame, N=51, M=6):355 VOL = DataFrame.volume356 CLOSE = DataFrame.close357 VPT = SUM(VOL*(CLOSE-REF(CLOSE, 1))/REF(CLOSE, 1), 0)358 MAVPT = MA(VPT, M)359 return pd.DataFrame({360 'VPT': VPT, 'MAVPT': MAVPT361 })362"""3635. 压力支撑指标364主要用于分析股价目前收到的压力和支撑365布林带 BOLL366麦克指标 MIKE367"""368def QA_indicator_BOLL(DataFrame, N=20, P=2):369 '布林线'370 C = DataFrame['close']371 boll = MA(C, N)372 UB = boll + P * STD(C, N)373 LB = boll - P * STD(C, N)374 DICT = {'BOLL': boll, 'UB': UB, 'LB': LB}375 return pd.DataFrame(DICT)376def QA_indicator_MIKE(DataFrame, N=12):377 """378 MIKE指标379 指标说明380 MIKE是另外一种形式的路径指标。381 买卖原则382 1 WEAK-S,MEDIUM-S,STRONG-S三条线代表初级、中级、强力支撑。383 2 WEAK-R,MEDIUM-R,STRONG-R三条线代表初级、中级、强力压力。384 """385 HIGH = DataFrame.high386 LOW = DataFrame.low387 CLOSE = DataFrame.close388 TYP = (HIGH+LOW+CLOSE)/3389 LL = LLV(LOW, N)390 HH = HHV(HIGH, N)391 WR = TYP+(TYP-LL)392 MR = TYP+(HH-LL)393 SR = 2*HH-LL394 WS = TYP-(HH-TYP)395 MS = TYP-(HH-LL)396 SS = 2*LL-HH397 return pd.DataFrame({398 'WR': WR, 'MR': MR, 'SR': SR,399 'WS': WS, 'MS': MS, 'SS': SS400 })401def QA_indicator_BBI(DataFrame, N1=3, N2=6, N3=12, N4=24):402 '多空指标'403 C = DataFrame['close']404 bbi = (MA(C, N1) + MA(C, N2) + MA(C, N3) + MA(C, N4)) / 4405 DICT = {'BBI': bbi}406 return pd.DataFrame(DICT)407def QA_indicator_MFI(DataFrame, N=14):408 """409 资金指标410 TYP := (HIGH + LOW + CLOSE)/3;411 V1:=SUM(IF(TYP>REF(TYP,1),TYP*VOL,0),N)/SUM(IF(TYP<REF(TYP,1),TYP*VOL,0),N);412 MFI:100-(100/(1+V1));413 赋值: (最高价 + 最低价 + 收盘价)/3414 V1赋值:如果TYP>1日前的TYP,返回TYP*成交量(手),否则返回0的N日累和/如果TYP<1日前的TYP,返回TYP*成交量(手),否则返回0的N日累和415 输出资金流量指标:100-(100/(1+V1))416 """417 C = DataFrame['close']418 H = DataFrame['high']419 L = DataFrame['low']420 VOL = DataFrame['volume']421 TYP = (C + H + L) / 3422 V1 = SUM(IF(TYP > REF(TYP, 1), TYP * VOL, 0), N) / \423 SUM(IF(TYP < REF(TYP, 1), TYP * VOL, 0), N)424 mfi = 100 - (100 / (1 + V1))425 DICT = {'MFI': mfi}426 return pd.DataFrame(DICT)427def QA_indicator_ATR(DataFrame, N=14):428 """429 输出TR:(最高价-最低价)和昨收-最高价的绝对值的较大值和昨收-最低价的绝对值的较大值430 输出真实波幅:TR的N日简单移动平均431 算法:今日振幅、今日最高与昨收差价、今日最低与昨收差价中的最大值,为真实波幅,求真实波幅的N日移动平均432 参数:N 天数,一般取14433 """434 C = DataFrame['close']435 H = DataFrame['high']436 L = DataFrame['low']437 TR = MAX(MAX((H - L), ABS(REF(C, 1) - H)), ABS(REF(C, 1) - L))438 atr = MA(TR, N)439 return pd.DataFrame({'TR': TR, 'ATR': atr})440def QA_indicator_SKDJ(DataFrame, N=9, M=3):441 """442 1.指标>80 时,回档机率大;指标<20 时,反弹机率大;443 2.K在20左右向上交叉D时,视为买进信号参考; 444 3.K在80左右向下交叉D时,视为卖出信号参考;445 4.SKDJ波动于50左右的任何讯号,其作用不大。446 """447 CLOSE = DataFrame['close']448 LOWV = LLV(DataFrame['low'], N)449 HIGHV = HHV(DataFrame['high'], N)450 RSV = EMA((CLOSE - LOWV) / (HIGHV - LOWV) * 100, M)451 K = EMA(RSV, M)452 D = MA(K, M)453 DICT = {'RSV': RSV, 'SKDJ_K': K, 'SKDJ_D': D}454 return pd.DataFrame(DICT)455def QA_indicator_DDI(DataFrame, N=13, N1=26, M=1, M1=5):456 """457 '方向标准离差指数'458 分析DDI柱状线,由红变绿(正变负),卖出信号参考;由绿变红,买入信号参考。459 """460 H = DataFrame['high']461 L = DataFrame['low']462 DMZ = IF((H + L) > (REF(H, 1) + REF(L, 1)), 463 MAX(ABS(H - REF(H, 1)), ABS(L - REF(L, 1))), 0)464 DMF = IF((H + L) < (REF(H, 1) + REF(L, 1)),465 MAX(ABS(H - REF(H, 1)), ABS(L - REF(L, 1))), 0)466 DIZ = SUM(DMZ, N) / (SUM(DMZ, N) + SUM(DMF, N))467 DIF = SUM(DMF, N) / (SUM(DMF, N) + SUM(DMZ, N))468 ddi = DIZ - DIF469 ADDI = SMA(ddi, N1, M)470 AD = MA(ADDI, M1)471 DICT = {'DDI': ddi, 'ADDI': ADDI, 'AD': AD}472 return pd.DataFrame(DICT)473def QA_indicator_shadow(DataFrame):474 """475 上下影线指标476 """477 return {478 'LOW': lower_shadow(DataFrame), 'UP': upper_shadow(DataFrame),479 'BODY': body(DataFrame), 'BODY_ABS': body_abs(DataFrame), 'PRICE_PCG': price_pcg(DataFrame)480 }481def lower_shadow(DataFrame): # 下影线482 return abs(DataFrame.low - MIN(DataFrame.open, DataFrame.close))483def upper_shadow(DataFrame): # 上影线484 return abs(DataFrame.high - MAX(DataFrame.open, DataFrame.close))485def body_abs(DataFrame):486 return abs(DataFrame.open - DataFrame.close)487def body(DataFrame):488 return DataFrame.close - DataFrame.open489def price_pcg(DataFrame):490 return body(DataFrame) / DataFrame.open491def amplitude(DataFrame):492 return (DataFrame.high - DataFrame.low) / DataFrame.low493"""4946. 大盘指标495通过涨跌家数研究大盘指数的走势496涨跌比率 ADR497绝对幅度指标 ABI498新三价率 TBR499腾落指数 ADL500广量冲力指标501指数平滑广量 STIX...

Full Screen

Full Screen

notes.py

Source:notes.py Github

copy

Full Screen

...34 def set_mon_open(self, open):35 self.__mon_open = open36 def get_mon_open(self):37 return self.__mon_open38 def set_mon_close(self, close):39 self.__mon_close = close40 def get_mon_close(self):41 return self.__mon_close42 def set_mon_break_start(self, bstart):43 self.__mon_bstart = bstart44 def get_mon_break_start(self):45 return self.__mon_bstart46 def set_mon_break_end(self, bend):47 self.__mon_bend = bend48 def get_mon_break_end(self):49 return self.__mon_bend50 def set_tues_open(self, open):51 self.__tues_open = open52 def get_tues_open(self):53 return self.__tues_open54 def set_tues_close(self, close):55 self.__tues_close = close56 def get_tues_close(self):57 return self.__tues_close58 def set_tues_break_start(self, bstart):59 self.__tues_bstart = bstart60 def get_tues_break_start(self):61 return self.__tues_bstart62 def set_tues_break_end(self, bend):63 self.__tues_bend = bend64 def get_tues_break_end(self):65 return self.__tues_bend66 def set_wed_open(self, open):67 self.__wed_open = open68 def get_wed_open(self):69 return self.__wed_open70 def set_wed_close(self, close):71 self.__wed_close = close72 def get_wed_close(self):73 return self.__wed_close74 def set_wed_break_start(self, bstart):75 self.__wed_bstart = bstart76 def get_wed_break_start(self):77 return self.__wed_bstart78 def set_wed_break_end(self, bend):79 self.__wed_bend = bend80 def get_wed_break_end(self):81 return self.__wed_bend82 def set_thur_open(self, open):83 self.__thur_open = open84 def get_thur_open(self):85 return self.__thur_open86 def set_thur_close(self, close):87 self.__thur_close = close88 def get_thur_close(self):89 return self.__thur_close90 def set_thur_break_start(self, bstart):91 self.__thur_bstart = bstart92 def get_thur_break_start(self):93 return self.__thur_bstart94 def set_thur_break_end(self, bend):95 self.__thur_bend = bend96 def get_thur_break_end(self):97 return self.__thur_bend98 def set_fri_open(self, open):99 self.__fri_open = open100 def get_fri_open(self):101 return self.__fri_open102 def set_fri_close(self, close):103 self.__fri_close = close104 def get_fri_close(self):105 return self.__fri_close106 def set_fri_break_start(self, bstart):107 self.__fri_bstart = bstart108 def get_fri_break_start(self):109 return self.__fri_bstart110 def set_fri_break_end(self, bend):111 self.__fri_bend = bend112 def get_fri_break_end(self):113 return self.__fri_bend114 def set_sat_open(self, open):115 self.__sat_open = open116 def get_sat_open(self):117 return self.__sat_open118 def set_sat_close(self, close):119 self.__sat_close = close120 def get_sat_close(self):121 return self.__sat_close122 def set_sat_break_start(self, bstart):123 self.__sat_bstart = bstart124 def get_sat_break_start(self):125 return self.__sat_bstart126 def set_sat_break_end(self, bend):127 self.__sat_bend = bend128 def get_sat_break_end(self):129 return self.__sat_bend130 def set_sun_open(self, open):131 self.__sun_open = open132 def get_sun_open(self):133 return self.__sun_open134 def set_sun_close(self, close):135 self.__sun_close = close136 def get_sun_close(self):137 return self.__sun_close138 def set_sun_break_start(self, bstart):139 self.__sun_bstart = bstart140 def get_sun_break_start(self):141 return self.__sun_bstart142 def set_sun_break_end(self, bend):143 self.__sun_bend = bend144 def get_sun_break_end(self):145 return self.__sun_bend146OperatingHour.__init__(self, mon_open, mon_close, mon_bstart, mon_bend, tues_open, tues_close, tues_bstart,147 tues_bend, wed_open, wed_close, wed_bstart, wed_bend, thur_open, thur_close, thur_bstart,148 thur_bend, fri_open, fri_close, fri_bstart, fri_bend, sat_open, sat_close, sat_bstart,149 sat_bend, sun_open, sun_close, sun_bstart, sun_bend)150mon_open = TimeField('Opening Time:', [validators.Optional()])151mon_close = TimeField('Closing Time:', [validators.Optional()])152mon_break_start = TimeField('Break Start Time:', [validators.Optional()])153mon_break_end = TimeField('Break End Time:', [validators.Optional()])154tues_open = TimeField('Opening Time:', [validators.Optional()])155tues_close = TimeField('Closing Time:', [validators.Optional()])156tues_break_start = TimeField('Break Start Time:', [validators.Optional()])157tues_break_end = TimeField('Break End Time:', [validators.Optional()])158wed_open = TimeField('Opening Time:', [validators.Optional()])159wed_close = TimeField('Closing Time:', [validators.Optional()])160wed_break_start = TimeField('Break Start Time:', [validators.Optional()])161wed_break_end = TimeField('Break End Time:', [validators.Optional()])162thur_open = TimeField('Opening Time:', [validators.Optional()])163thur_close = TimeField('Closing Time:', [validators.Optional()])164thur_break_start = TimeField('Break Start Time:', [validators.Optional()])165thur_break_end = TimeField('Break End Time:', [validators.Optional()])166fri_open = TimeField('Opening Time:', [validators.Optional()])167fri_close = TimeField('Closing Time:', [validators.Optional()])168fri_break_start = TimeField('Break Start Time:', [validators.Optional()])169fri_break_end = TimeField('Break End Time:', [validators.Optional()])170sat_open = TimeField('Opening Time:', [validators.Optional()])171sat_close = TimeField('Closing Time:', [validators.Optional()])172sat_break_start = TimeField('Break Start Time:', [validators.Optional()])173sat_break_end = TimeField('Break End Time:', [validators.Optional()])174sun_open = TimeField('Opening Time:', [validators.Optional()])175sun_close = TimeField('Closing Time:', [validators.Optional()])176sun_break_start = TimeField('Break Start Time:', [validators.Optional()])177sun_break_end = TimeField('Break End Time:', [validators.Optional()])178def edit_clinic_info():179 # create_user_form = ClinicInfoForm(request.form)180 # if request.method == 'POST' and create_user_form.validate():181 # users_dict = {}182 # db = shelve.open('clinic_storage.db', 'c')183 #184 # try:185 # users_dict = db['Users']186 # except:187 # print("Error in retrieving Users from storage.db.")188 #189 # user = User.ClinicInfo(create_user_form.mon_open.data, create_user_form.mon_close.data, create_user_form.mon_break_start.data, create_user_form.mon_break_end.data,190 # create_user_form.tues_open.data, create_user_form.tues_close.data, create_user_form.tues_break_start.data, create_user_form.tues_break_end.data,191 # create_user_form.wed_open.data, create_user_form.wed_close.data, create_user_form.wed_break_start.data, create_user_form.wed_break_end.data,192 # create_user_form.thur_open.data, create_user_form.thur_close.data, create_user_form.thur_break_start.data, create_user_form.thur_break_end.data,193 # create_user_form.fri_open.data, create_user_form.fri_close.data, create_user_form.fri_break_start.data, create_user_form.fri_break_end.data,194 # create_user_form.sat_open.data, create_user_form.sat_close.data, create_user_form.sat_break_start.data, create_user_form.sat_break_end.data,195 # create_user_form.sun_open.data, create_user_form.sun_close.data, create_user_form.sun_break_start.data, create_user_form.sun_break_end.data,196 # create_user_form.off_start.data, create_user_form.off_end.data, create_user_form.off_reason.data, create_user_form.mon_break_end.data,197 # create_user_form.password.data, create_user_form.cfm_password.data, create_user_form.name.data,198 # create_user_form.address.data, create_user_form.email.data, create_user_form.phone.data)199 # users_dict[user.get_clinic_id()] = user200 # db['Users'] = users_dict201 #202 # # Test codes203 # users_dict = db['Users']204 # user = users_dict[user.get_clinic_id()]205 # print(user.get_name(), "was stored in storage.db successfully with Clinic ID =", user.get_clinic_id())206 #207 # db.close()208 # return redirect(url_for('login_clinic'))...

Full Screen

Full Screen

utils.py

Source:utils.py Github

copy

Full Screen

1# -*- coding:utf-8 -*-2# editor: gmj3# Date: 2019-12-03 14:144# desc: 辅助方法集合5def cut_short_period(index_list):6 # 除去序号值差值小于4的区间7 while 1:8 max_index = len(index_list) - 19 # 只有一个值 或者以0开头的区间分割index列表10 if max_index == 0 or (index_list[0] == 0 and max_index == 1):11 break12 for k, v in enumerate(index_list):13 if k < max_index:14 if k == 0:15 if index_list[1] - index_list[0] < 3:16 del index_list[k + 1]17 continue18 if index_list[k + 1] - index_list[k] < 4:19 if k == max_index - 1:20 # 如果是第一段数据少于4,则只删除index_list[k + 1]21 del index_list[k + 1]22 else:23 del index_list[k + 1]24 del index_list[k]25 break26 if max_index + 1 == len(index_list):27 break28def deviation_judge(section_1: tuple, section_2: tuple = None):29 """30 根据价格 和 MACD kdj_D 来判断是否背离31 以及判断振幅和量能的异常32 :param section_1: 离现在最近的区间 (DateFrame数据,index 列表)33 :param section_2: 倒数第三个区间34 :return: 背离状态35 """36 deviation_status = {}37 if section_2:38 data_df1, peak_index1 = section_139 up_sign = 1 if data_df1['MACD'].iloc[peak_index1[0]] > 0 else -140 section_1_close_macd = (data_df1['close'].iloc[peak_index1[0]], data_df1['MACD'].iloc[peak_index1[0]],41 data_df1['kdj_D'].iloc[peak_index1[0]])42 data_df3, peak_index3 = section_243 section_2_close_macd = (data_df3['close'].iloc[peak_index3[0]], data_df3['MACD'].iloc[peak_index3[0]],44 data_df3['kdj_D'].iloc[peak_index3[0]])45 if len(peak_index3) > 1:46 # 判断出最大的macd值的index47 close_macd_list = []48 macd_list = []49 for i in peak_index3:50 close_macd_list.append((data_df3['close'].iloc[i], data_df3['MACD'].iloc[i], data_df3['kdj_D'].iloc[i]))51 macd_list.append(abs(data_df3['MACD'].iloc[i]))52 max_macd = max(macd_list)53 for close_macd in close_macd_list:54 if close_macd[1] == max_macd:55 section_2_close_macd = close_macd56 # 判断背离57 if (section_1_close_macd[0] - section_2_close_macd[0]) * (58 section_1_close_macd[1] - section_2_close_macd[1]) < 0:59 if up_sign > 0:60 deviation_status['macd_deviation_status'] = '上涨MACD背离'61 else:62 deviation_status['macd_deviation_status'] = '下跌MACD背离'63 if (section_1_close_macd[0] - section_2_close_macd[0]) * (64 section_1_close_macd[2] - section_2_close_macd[2]) < 0:65 if up_sign > 0:66 deviation_status['kdj_deviation_status'] = '上涨KDJ背离'67 else:68 deviation_status['kdj_deviation_status'] = '下跌KDJ背离'69 else:70 data_df1, peak_index = section_171 up_sign = 1 if data_df1['MACD'].iloc[peak_index[0]] > 0 else -172 if (data_df1['close'].iloc[peak_index[0]] - data_df1['close'].iloc[peak_index[1]]) * \73 (data_df1['MACD'].iloc[peak_index[0]] - data_df1['MACD'].iloc[peak_index[1]]) < 0:74 if up_sign > 0:75 deviation_status['macd_deviation_status'] = '上涨MACD背离'76 else:77 deviation_status['macd_deviation_status'] = '下跌MACD背离'78 if (data_df1['close'].iloc[peak_index[0]] - data_df1['close'].iloc[peak_index[1]]) * \79 (data_df1['kdj_D'].iloc[peak_index[0]] - data_df1['kdj_D'].iloc[peak_index[1]]) < 0:80 if up_sign > 0:81 deviation_status['kdj_deviation_status'] = '上涨KDJ背离'82 else:83 deviation_status['kdj_deviation_status'] = '下跌KDJ背离'84 # 量能异常 和 振幅异常提示85 warning_res = swing_vol_warning(data_df1)86 if warning_res:87 deviation_status['swing_vol_warning'] = warning_res88 # deviation_status['macd_trend']='MACD 上涨趋势' if up_sign else 'MACD 下跌趋势'89 return deviation_status90def swing_vol_warning(data_df):91 # 量能异常 和 振幅异常提示92 for row in range(data_df.shape[0]):93 row_data = data_df.iloc[row]94 if (row_data['vol'] / row_data['vol_MA_10']) >= 2 and row_data['swing'] > 0.05:95 return f"{row_data['trade_date']}-振幅和量能异常"96 else:97 return None98def get_peak_index(macd_section):99 """100 :param macd_section: 一个macd指标值区间的DataFrame101 :return: 该区间的极值index102 """103 # macd_section=macd_sec['MACD']104 peak_index = []105 # 判断符号106 # macd_section['MACD'].sum()107 for s_i in range(1, macd_section['MACD'].shape[0] - 1):108 # 判断极值 如果 该值与两侧值的差值乘积符号为正,则为极值109 if (macd_section['MACD'].iloc[s_i] - macd_section['MACD'].iloc[s_i - 1]) * (110 macd_section['MACD'].iloc[s_i] - macd_section['MACD'].iloc[s_i + 1]) > 0:111 # 上升找极大值 下跌找极小值112 if (macd_section['MACD'].iloc[s_i] - macd_section['MACD'].iloc[s_i - 1]) * macd_section[113 'MACD'].sum() > 0: # 剔除凹值114 peak_index.append(s_i)115 # 第一区间的值还在持续变化,那么极值就是第一值116 if not peak_index:117 peak_index.append(0)118 cut_short_period(peak_index)119 return peak_index120def deviation_macd_judge(section_1: tuple, section_2: tuple = None):121 """122 根据价格 和 MACD 来判断是否背离123 以及判断振幅和量能的异常124 :param section_1: 离现在最近的区间 (DateFrame数据,index 列表)125 :param section_2: 倒数第三个区间126 :return: 背离状态127 """128 deviation_status = {}129 if section_2:130 data_df1, peak_index1 = section_1131 up_sign = 1 if data_df1['MACD'].iloc[peak_index1[0]] > 0 else -1132 section_1_close_macd = (data_df1['close'].iloc[peak_index1[0]], data_df1['MACD'].iloc[peak_index1[0]])133 data_df3, peak_index3 = section_2134 section_2_close_macd = (data_df3['close'].iloc[peak_index3[0]], data_df3['MACD'].iloc[peak_index3[0]])135 if len(peak_index3) > 1:136 # 判断出最大的macd值的index137 close_macd_list = []138 macd_list = []139 for i in peak_index3:140 close_macd_list.append((data_df3['close'].iloc[i], data_df3['MACD'].iloc[i], data_df3['kdj_D'].iloc[i]))141 macd_list.append(abs(data_df3['MACD'].iloc[i]))142 max_macd = max(macd_list)143 for close_macd in close_macd_list:144 if close_macd[1] == max_macd:145 section_2_close_macd = close_macd146 # 判断背离147 if (section_1_close_macd[0] - section_2_close_macd[0]) * (148 section_1_close_macd[1] - section_2_close_macd[1]) < 0:149 if up_sign > 0:150 deviation_status['macd_deviation_status'] = '上涨MACD背离'151 else:152 deviation_status['macd_deviation_status'] = '下跌MACD背离'153 else:154 data_df1, peak_index = section_1155 up_sign = 1 if data_df1['MACD'].iloc[peak_index[0]] > 0 else -1156 if (data_df1['close'].iloc[peak_index[0]] - data_df1['close'].iloc[peak_index[1]]) * \157 (data_df1['MACD'].iloc[peak_index[0]] - data_df1['MACD'].iloc[peak_index[1]]) < 0:158 if up_sign > 0:159 deviation_status['macd_deviation_status'] = '上涨MACD背离'160 else:161 deviation_status['macd_deviation_status'] = '下跌MACD背离'162 # 量能异常 和 振幅异常提示163 warning_res = swing_vol_warning(data_df1)164 if warning_res:165 deviation_status['swing_vol_warning'] = warning_res...

Full Screen

Full Screen

test_early_close.py

Source:test_early_close.py Github

copy

Full Screen

...32 self.assertRaises(db.DBNoSuchFileError, d2.open,33 self.filename+"2", db.DB_BTREE, db.DB_THREAD, 0666)34 d.put("test","this is a test")35 self.assertEqual(d.get("test"), "this is a test", "put!=get")36 dbenv.close() # This "close" should close the child db handle also37 self.assertRaises(db.DBError, d.get, "test")38 def test02_close_dbenv_before_dbcursor(self):39 dbenv = db.DBEnv()40 dbenv.open(self.homeDir,41 db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,42 0666)43 d = db.DB(dbenv)44 d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)45 d.put("test","this is a test")46 d.put("test2","another test")47 d.put("test3","another one")48 self.assertEqual(d.get("test"), "this is a test", "put!=get")49 c=d.cursor()50 c.first()51 c.next()52 d.close() # This "close" should close the child db handle also53 # db.close should close the child cursor54 self.assertRaises(db.DBError,c.next)55 d = db.DB(dbenv)56 d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)57 c=d.cursor()58 c.first()59 c.next()60 dbenv.close()61 # The "close" should close the child db handle also, with cursors62 self.assertRaises(db.DBError, c.next)63 def test03_close_db_before_dbcursor_without_env(self):64 import os.path65 path=os.path.join(self.homeDir,self.filename)66 d = db.DB()67 d.open(path, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)68 d.put("test","this is a test")69 d.put("test2","another test")70 d.put("test3","another one")71 self.assertEqual(d.get("test"), "this is a test", "put!=get")72 c=d.cursor()73 c.first()74 c.next()75 d.close()76 # The "close" should close the child db handle also77 self.assertRaises(db.DBError, c.next)78 def test04_close_massive(self):79 dbenv = db.DBEnv()80 dbenv.open(self.homeDir,81 db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,82 0666)83 dbs=[db.DB(dbenv) for i in xrange(16)]84 cursors=[]85 for i in dbs :86 i.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)87 dbs[10].put("test","this is a test")88 dbs[10].put("test2","another test")89 dbs[10].put("test3","another one")90 self.assertEqual(dbs[4].get("test"), "this is a test", "put!=get")91 for i in dbs :92 cursors.extend([i.cursor() for j in xrange(32)])93 for i in dbs[::3] :94 i.close()95 for i in cursors[::3] :96 i.close()97 # Check for missing exception in DB! (after DB close)98 self.assertRaises(db.DBError, dbs[9].get, "test")99 # Check for missing exception in DBCursor! (after DB close)100 self.assertRaises(db.DBError, cursors[101].first)101 cursors[80].first()102 cursors[80].next()103 dbenv.close() # This "close" should close the child db handle also104 # Check for missing exception! (after DBEnv close)105 self.assertRaises(db.DBError, cursors[80].next)106 def test05_close_dbenv_delete_db_success(self):107 dbenv = db.DBEnv()108 dbenv.open(self.homeDir,109 db.DB_INIT_CDB| db.DB_CREATE |db.DB_THREAD|db.DB_INIT_MPOOL,110 0666)111 d = db.DB(dbenv)112 d.open(self.filename, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)113 dbenv.close() # This "close" should close the child db handle also114 del d115 try:116 import gc117 except ImportError:118 gc = None119 if gc:120 # force d.__del__ [DB_dealloc] to be called121 gc.collect()122 def test06_close_txn_before_dup_cursor(self) :123 dbenv = db.DBEnv()124 dbenv.open(self.homeDir,db.DB_INIT_TXN | db.DB_INIT_MPOOL |125 db.DB_INIT_LOG | db.DB_CREATE)126 d = db.DB(dbenv)127 txn = dbenv.txn_begin()128 d.open(self.filename, dbtype = db.DB_HASH, flags = db.DB_CREATE,129 txn=txn)130 d.put("XXX", "yyy", txn=txn)131 txn.commit()132 txn = dbenv.txn_begin()133 c1 = d.cursor(txn)134 c2 = c1.dup()135 self.assertEqual(("XXX", "yyy"), c1.first())136 # Not interested in warnings about implicit close.137 import warnings138 if sys.version_info < (2, 6) :139 # Completely resetting the warning state is140 # problematic with python >=2.6 with -3 (py3k warning),141 # because some stdlib modules selectively ignores warnings.142 warnings.simplefilter("ignore")143 txn.commit()144 warnings.resetwarnings()145 else :146 # When we drop support for python 2.4147 # we could use: (in 2.5 we need a __future__ statement)148 #149 # with warnings.catch_warnings():150 # warnings.simplefilter("ignore")151 # txn.commit()152 #153 # We can not use "with" as is, because it would be invalid syntax154 # in python 2.4 and (with no __future__) 2.5.155 # Here we simulate "with" following PEP 343 :156 w = warnings.catch_warnings()157 w.__enter__()158 try :159 warnings.simplefilter("ignore")160 txn.commit()161 finally :162 w.__exit__()163 self.assertRaises(db.DBCursorClosedError, c2.first)164 def test07_close_db_before_sequence(self):165 import os.path166 path=os.path.join(self.homeDir,self.filename)167 d = db.DB()168 d.open(path, db.DB_BTREE, db.DB_CREATE | db.DB_THREAD, 0666)169 dbs=db.DBSequence(d)170 d.close() # This "close" should close the child DBSequence also171 dbs.close() # If not closed, core dump (in Berkeley DB 4.6.*)172#----------------------------------------------------------------------173def test_suite():174 suite = unittest.TestSuite()175 suite.addTest(unittest.makeSuite(DBEnvClosedEarlyCrash))176 return suite177if __name__ == '__main__':...

Full Screen

Full Screen

data_manager.py

Source:data_manager.py Github

copy

Full Screen

1import pandas as pd2import numpy as np3COLUMNS_CHART_DATA = ['date', 'open', 'high', 'low', 'close', 'volume']4COLUMNS_TRAINING_DATA_V1 = [5 'open_lastclose_ratio', 'high_close_ratio', 'low_close_ratio',6 'close_lastclose_ratio', 'volume_lastvolume_ratio',7 'close_ma5_ratio', 'volume_ma5_ratio',8 'close_ma10_ratio', 'volume_ma10_ratio',9 'close_ma20_ratio', 'volume_ma20_ratio',10 'close_ma60_ratio', 'volume_ma60_ratio',11 'close_ma120_ratio', 'volume_ma120_ratio',12]13COLUMNS_TRAINING_DATA_V1_RICH = [14 'open_lastclose_ratio', 'high_close_ratio', 'low_close_ratio',15 'close_lastclose_ratio', 'volume_lastvolume_ratio',16 'close_ma5_ratio', 'volume_ma5_ratio',17 'close_ma10_ratio', 'volume_ma10_ratio',18 'close_ma20_ratio', 'volume_ma20_ratio',19 'close_ma60_ratio', 'volume_ma60_ratio',20 'close_ma120_ratio', 'volume_ma120_ratio',21 'inst_lastinst_ratio', 'frgn_lastfrgn_ratio',22 'inst_ma5_ratio', 'frgn_ma5_ratio',23 'inst_ma10_ratio', 'frgn_ma10_ratio',24 'inst_ma20_ratio', 'frgn_ma20_ratio',25 'inst_ma60_ratio', 'frgn_ma60_ratio',26 'inst_ma120_ratio', 'frgn_ma120_ratio',27]28COLUMNS_TRAINING_DATA_V2 = [29 'per', 'pbr', 'roe',30 'open_lastclose_ratio', 'high_close_ratio', 'low_close_ratio',31 'close_lastclose_ratio', 'volume_lastvolume_ratio',32 'close_ma5_ratio', 'volume_ma5_ratio',33 'close_ma10_ratio', 'volume_ma10_ratio',34 'close_ma20_ratio', 'volume_ma20_ratio',35 'close_ma60_ratio', 'volume_ma60_ratio',36 'close_ma120_ratio', 'volume_ma120_ratio',37 'market_kospi_ma5_ratio', 'market_kospi_ma20_ratio', 38 'market_kospi_ma60_ratio', 'market_kospi_ma120_ratio', 39 'bond_k3y_ma5_ratio', 'bond_k3y_ma20_ratio', 40 'bond_k3y_ma60_ratio', 'bond_k3y_ma120_ratio'41]42def preprocess(data, ver='v1'):43 windows = [5, 10, 20, 60, 120]44 for window in windows:45 data['close_ma{}'.format(window)] = \46 data['close'].rolling(window).mean()47 data['volume_ma{}'.format(window)] = \48 data['volume'].rolling(window).mean()49 data['close_ma%d_ratio' % window] = \50 (data['close'] - data['close_ma%d' % window]) \51 / data['close_ma%d' % window]52 data['volume_ma%d_ratio' % window] = \53 (data['volume'] - data['volume_ma%d' % window]) \54 / data['volume_ma%d' % window]55 56 if ver == 'v1.rich':57 data['inst_ma{}'.format(window)] = \58 data['close'].rolling(window).mean()59 data['frgn_ma{}'.format(window)] = \60 data['volume'].rolling(window).mean()61 data['inst_ma%d_ratio' % window] = \62 (data['close'] - data['inst_ma%d' % window]) \63 / data['inst_ma%d' % window]64 data['frgn_ma%d_ratio' % window] = \65 (data['volume'] - data['frgn_ma%d' % window]) \66 / data['frgn_ma%d' % window]67 data['open_lastclose_ratio'] = np.zeros(len(data))68 data.loc[1:, 'open_lastclose_ratio'] = \69 (data['open'][1:].values - data['close'][:-1].values) \70 / data['close'][:-1].values71 data['high_close_ratio'] = \72 (data['high'].values - data['close'].values) \73 / data['close'].values74 data['low_close_ratio'] = \75 (data['low'].values - data['close'].values) \76 / data['close'].values77 data['close_lastclose_ratio'] = np.zeros(len(data))78 data.loc[1:, 'close_lastclose_ratio'] = \79 (data['close'][1:].values - data['close'][:-1].values) \80 / data['close'][:-1].values81 data['volume_lastvolume_ratio'] = np.zeros(len(data))82 data.loc[1:, 'volume_lastvolume_ratio'] = \83 (data['volume'][1:].values - data['volume'][:-1].values) \84 / data['volume'][:-1] \85 .replace(to_replace=0, method='ffill') \86 .replace(to_replace=0, method='bfill').values87 if ver == 'v1.rich':88 data['inst_lastinst_ratio'] = np.zeros(len(data))89 data.loc[1:, 'inst_lastinst_ratio'] = \90 (data['inst'][1:].values - data['inst'][:-1].values) \91 / data['inst'][:-1] \92 .replace(to_replace=0, method='ffill') \93 .replace(to_replace=0, method='bfill').values94 data['frgn_lastfrgn_ratio'] = np.zeros(len(data))95 data.loc[1:, 'frgn_lastfrgn_ratio'] = \96 (data['frgn'][1:].values - data['frgn'][:-1].values) \97 / data['frgn'][:-1] \98 .replace(to_replace=0, method='ffill') \99 .replace(to_replace=0, method='bfill').values100 return data101def load_data(fpath, date_from, date_to, ver='v1'):102 header = None if ver == 'v1' else 0103 data = pd.read_csv(fpath, thousands=',', header=header, 104 converters={'date': lambda x: str(x)})105 if ver == 'v1':106 data.columns = ['date', 'open', 'high', 'low', 'close', 'volume']107 # 날짜 오름차순 정렬108 data = data.sort_values(by='date').reset_index()109 # 데이터 전처리110 data = preprocess(data)111 112 # 기간 필터링113 data['date'] = data['date'].str.replace('-', '')114 data = data[(data['date'] >= date_from) & (data['date'] <= date_to)]115 data = data.dropna()116 # 차트 데이터 분리117 chart_data = data[COLUMNS_CHART_DATA]118 # 학습 데이터 분리119 training_data = None120 if ver == 'v1':121 training_data = data[COLUMNS_TRAINING_DATA_V1]122 elif ver == 'v1.rich':123 training_data = data[COLUMNS_TRAINING_DATA_V1_RICH]124 elif ver == 'v2':125 data.loc[:, ['per', 'pbr', 'roe']] = \126 data[['per', 'pbr', 'roe']].apply(lambda x: x / 100)127 training_data = data[COLUMNS_TRAINING_DATA_V2]128 training_data = training_data.apply(np.tanh)129 else:130 raise Exception('Invalid version.')131 ...

Full Screen

Full Screen

alphas.py

Source:alphas.py Github

copy

Full Screen

1import numpy as np2import pandas as pd 3#设定基础函数4def get_delta(x,N):5 lst=[]6 for i in range(1,N+2):7 lst.append(x)8 x=x.diff()9 return lst[-1]10def rank(array):11 s = pd.Series(array)12 return s.rank(ascending=False)[len(s)-1]13#单只股票的time series分析指标14def get_alpha1(df):15 df['a1']=(df['high']-df['low'])/abs(df['close']-df['open'])16 return df['a1']17def get_alpha2(df):18 df['a2']=(df['high']-df['close'])/(df['close']-df['low'])19 return df['a2']20def get_alpha3(df):21 df['a3']=(df['high']-df['open'])/(df['open']-df['close'])22 return df['a3']23# Alpha#1: (SignedPower(((returns < 0) ? stddev(returns, 20) : close), 2.)24def get_alpha4(df):25 filter4_1=df['returns'].rolling(window=20).std().where(df['returns']<0)26 df['ralpha4']=filter4_1.fillna(df['close'])27 return df['ralpha4']**228#Alpha#3:correlation(high,volume,5)29def get_alpha5(df):30 return df['high'].rolling(window=5).corr(df['volume'])31#Alpha#4:correlation(open,volume,10)32def get_alpha6(df):33 return df['open'].rolling(window=10).corr(df['volume'])34 35#Alpha#5:delta(sum(open,5)*sum(return,5),10)36def get_alpha7(df):37 return get_delta(df['open'].rolling(window=5).sum()*df['returns'].rolling(window=5).sum(),10)38#Alpha#9: ((0 < ts_min(delta(close, 1), 5)) ? delta(close, 1) : ((ts_max(delta(close, 1), 5) < 0) ? delta(close, 1) : (-1 * delta(close, 1)))) 39def get_alpha8(df):40 filter9_1=df['close'].diff().where((df['close'].diff().rolling(window=5).min())<0)41 filter9_2=filter9_1.fillna(df['close'].diff().where((df['close'].diff().rolling(window=5).max())>0))42 return filter9_2.fillna(-df['close'].diff())43#Alpha#12: (delta(volume, 1)) * (-1 * delta(close, 1))44def get_alpha9(df):45 return -1*df['volume'].diff()*df['close'].diff()46#Alpha#18: stddev(abs((close - open)), 5) + (close - open)) + correlation(close, open, 10)47def get_alpha10(df):48 return abs(df['close']-df['open']).rolling(window=5).std()+(df['close']-df['open'])+get_alpha7(df)49#Alpha#23: (((sum(high, 20) / 20) < high) ? (-1 * delta(high, 2)) : 0) 50def get_alpha11(df):51 df['alpha12']=-1*get_delta(df['high'],2).where(df['high'].rolling(window=20).mean()<df['high'])52 return df['alpha12'].fillna(0)53#Alpha#24: ((((delta((sum(close, 100) / 100), 100) / delay(close, 100)) < 0.05) || ((delta((sum(close, 100) / 100), 100) / delay(close, 100)) == 0.05)) ? (-1 * (close - ts_min(close, 100))) : (-1 * delta(close, 3))) 54def get_alpha12(df):55 nofilter13=df['close'].rolling(window=100).min()-df['close']56 filter13_1=nofilter13.where(get_delta(df['close'].rolling(window=100).mean(),100)/(df['close']-get_delta(df['close'],100))<0.05)57 filter13_2=filter13_1.fillna(nofilter13.where(get_delta(df['close'].rolling(window=100).mean(),100)/(df['close']-get_delta(df['close'],100))==0.05))58 return filter13_2.fillna(-get_delta(df['close'],3))59#Alpha#26: (-1 * ts_max(correlation(ts_rank(volume, 5), ts_rank(high, 5), 5), 3)) 60def get_alpha13(df):61 return df['volume'].rolling(window=5).apply(func=rank).rolling(window=5).corr(df['high'].rolling(window=5).apply(func=rank)).rolling(window=3).max()62#Alpha#35: ((Ts_Rank(volume, 32) * (1 - Ts_Rank(((close + high) - low), 16))) * (1 - Ts_Rank(returns, 32))) 63def get_alpha14(df):64 return df['volume'].rolling(window=32).apply(func=rank)*(1-(df['close']+df['high']-df['low']).rolling(window=32).apply(func=rank))*(1-df['returns'].rolling(window=32).apply(func=rank))65#Alpha#43: (ts_rank((volume / adv20), 20) * ts_rank((-1 * delta(close, 7)), 8)) 66def get_alpha15(df):67 return df['volume']/df['close']*df['volume'].rolling(window=20).mean().rolling(window=20).apply(func=rank)*-get_delta(df['close'],7).rolling(window=8).apply(func=rank)68#Alpha#55: correlation((close - ts_min(low, 12)) / (ts_max(high, 12) - ts_min(low, 12))), volume, 6)69def get_alpha16(df):70 return df['volume'].rolling(window=6).corr((df['close']-df['low'].rolling(window=12).min())/(df['high'].rolling(window=12).max()-df['low'].rolling(window=12).min()))71#Alpha#101: ((close - open) / ((high - low) + .001)) 72def get_alpha17(df):73 return (df['close']-df['open'])/(df['high']-df['low']+0.01)74#Alpha#28: (correlation(adv20, low, 5) + ((high + low) / 2)) - close75def get_alpha18(df):76 return df['low'].rolling(window=5).corr(df['close']*df['volume'].rolling(window=20).mean())+(df['high']+df['low'])/2-df['close']77#EMV:A=(今日最高+今日最低)/2;B=(前日最高+前日最低)/2;C=今日最高-今日最低;2.EM=(A-B)*C/今日成交额;3.EMV=N日内EM的累和;4.MAEMV=EMV的M日简单移动平均.参数N为14,参数M为978def get_alpha19(df):79 return get_delta((df['high']+df['low'])/2,1).rolling(window=14).sum().rolling(window=9).mean()80#MTMMA动量指标81def get_alpha20(df):82 return get_delta(df['close'],12).rolling(window=6).mean()83#横截面分析指标84#Alpha#41: (((high * low)^0.5) - vwap) 85#Alpha#25: rank(((((-1 * returns) * adv20) * vwap) * (high - close))) 86#Alpha#84: SignedPower(Ts_Rank((vwap - ts_max(vwap, 15.3217)), 20.7127), delta(close, 4.96796)) ...

Full Screen

Full Screen

python.py

Source:python.py Github

copy

Full Screen

...23 },24 'write' : {25 'call' : 'evaluate',26 'write' : """open("%(path)s", 'ab+').write(__import__("base64").urlsafe_b64decode('%(chunk_b64)s'))""",27 'truncate' : """open("%(path)s", 'w').close()"""28 },29 'read' : {30 'call': 'evaluate',31 'read' : """__import__("base64").b64encode(open("%(path)s", "rb").read())"""32 },33 'md5' : {34 'call': 'evaluate',35 'md5': """__import__("hashlib").md5(open("%(path)s", 'rb').read()).hexdigest()"""36 },37 'evaluate' : {38 'call': 'render',39 'evaluate': """%(code)s""",40 'test_os': """'-'.join([__import__('os').name, __import__('sys').platform])""",41 'test_os_expected': '^[\w-]+$'...

Full Screen

Full Screen

test1.py

Source:test1.py Github

copy

Full Screen

1import pandas as pd2import numpy as np3import keras4import tensorflow as tf5from keras.preprocessing.sequence import TimeseriesGenerator6import plotly.graph_objects as go7#Implementation8filename = "GOOG.csv"9df = pd.read_csv(filename)10print(df.info())11df['Date'] = pd.to_datetime(df['Date'])12df.set_axis(df['Date'], inplace=True)13df.drop(columns=['Open', 'High', 'Low', 'Volume'], inplace=True)14#Data Preprocessing15close_data = df['Close'].values16close_data = close_data.reshape((-1,1))17split_percent = 0.8018split = int(split_percent * len(close_data))19close_train = close_data[:split]20close_test = close_data[split:]21date_train = df['Date'][:split]22date_test = df['Date'][split:]23print(len(close_train))24print(len(close_test))25look_back = 1526train_generator = TimeseriesGenerator(close_train, close_train, length=look_back, batch_size=20) 27test_generator = TimeseriesGenerator(close_test, close_test, length=look_back, batch_size=1)28#Neural Network29from keras.models import Sequential30from keras.layers import LSTM, Dense31model = Sequential()32model.add(33 LSTM(10,34 activation = 'relu',35 input_shape=(look_back,1))36)37model.add(Dense(1))38model.compile(optimizer = 'adam', loss = 'mse')39num_epochs = 2540model.fit_generator(train_generator, epochs = num_epochs, verbose = 1)41prediction = model.predict_generator(test_generator)42close_train = close_train.reshape((-1))43close_test = close_test.reshape((-1))44prediction = prediction.reshape((-1))45#print("Predicted:", [float(f"{x:.1f}") for x in prediction])46trace1 = go.Scatter(47 x = date_train,48 y = close_train,49 mode = 'lines',50 name = 'Data'51)52trace2 = go.Scatter(53 x = date_test,54 y = prediction,55 mode = 'lines',56 name = 'Prediction'57)58trace3 = go.Scatter(59 x = date_test,60 y = close_test,61 mode='lines',62 name = 'Ground Truth'63)64layout = go.Layout(65 title = "Google Stock",66 xaxis = {'title' : "Date"},67 yaxis = {'title' : "Close"}68)69fig = go.Figure(data=[trace1, trace2, trace3], layout=layout)70fig.show()71close_data = close_data.reshape((-1))72def predict(num_prediction, model):73 prediction_list = close_data[-look_back:]74 75 for _ in range(num_prediction):76 x = prediction_list[-look_back:]77 x = x.reshape((1, look_back, 1))78 out = model.predict(x)[0][0]79 prediction_list = np.append(prediction_list, out)80 prediction_list = prediction_list[look_back-1:]81 82 return prediction_list83 84def predict_dates(num_prediction):85 last_date = df['Date'].values[-1]86 prediction_dates = pd.date_range(last_date, periods=num_prediction+1).tolist()87 return prediction_dates88num_prediction = 3089forecast = predict(num_prediction, model)...

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2var imposter = {3 {4 {5 is: {6 }7 }8 }9};10mb.create(imposter).then(function (server) {11 console.log('server started');12 server.close();13});14var mb = require('mountebank');15var imposter = {16 {17 {18 is: {19 }20 }21 }22};23mb.create(imposter).then(function (server) {24 console.log('server started');25 server.close();26});

Full Screen

Using AI Code Generation

copy

Full Screen

1const mb = require('mountebank');2const imposter = {3 stubs: [{responses: [{is: {body: 'Hello World!'}}]}]4};5mb.create(imposter).then(function (server) {6 console.log('Server started on port', server.port);7 server.close();8});9const mb = require('mountebank');10const imposter = {11 stubs: [{responses: [{is: {body: 'Hello World!'}}]}]12};13mb.create(imposter).then(function (server) {14 console.log('Server started on port', server.port);15 server.close();16});17const mb = require('mountebank');18const imposter = {19 stubs: [{responses: [{is: {body: 'Hello World!'}}]}]20};21mb.create(imposter).then(function (server) {22 console.log('Server started on port', server.port);23 server.close();24});25const mb = require('mountebank');26const imposter = {27 stubs: [{responses: [{is: {body: 'Hello World!'}}]}]28};29mb.create(imposter).then(function (server) {30 console.log('Server started on port', server.port);31 server.close();32});33const mb = require('mountebank');34const imposter = {35 stubs: [{responses: [{is: {body: 'Hello World!'}}]}]36};37mb.create(imposter).then(function (server) {38 console.log('Server started on port', server.port);39 server.close();40});41const mb = require('mountebank');42const imposter = {43 stubs: [{responses

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2var mbHelper = require('mountebank-helper');3var fs = require('fs');4var imposters = JSON.parse(fs.readFileSync('imposters.json', 'utf8'));5mbHelper.createImposters(imposters, function (error, imposters) {6 if (error) {7 console.error(error);8 } else {9 console.log('Imposters created successfully');10 setTimeout(function () {11 mbHelper.closeImposters(imposters, function (error) {12 if (error) {13 console.error(error);14 } else {15 console.log('Imposters closed successfully');16 }17 });18 }, 5000);19 }20});21 {22 {23 {24 "is": {25 "headers": {26 },27 }28 }29 }30 },31 {32 {33 {34 "is": {35 "headers": {36 },37 }38 }39 }40 }

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2var mbHelper = mb.create({host: 'localhost', port: 2525});3mbHelper.close();4var mb = require('mountebank');5var mbHelper = mb.create({host: 'localhost', port: 2525});6mbHelper.close();7var mb = require('mountebank');8var mbHelper = mb.create({host: 'localhost', port: 2525});9mbHelper.close();10var mb = require('mountebank');11var mbHelper = mb.create({host: 'localhost', port: 2525});12mbHelper.close();13var mb = require('mountebank');14var mbHelper = mb.create({host: 'localhost', port: 2525});15mbHelper.close();16var mb = require('mountebank');17var mbHelper = mb.create({host: 'localhost', port: 2525});18mbHelper.close();19var mb = require('mountebank');20var mbHelper = mb.create({host: 'localhost', port: 2525});21mbHelper.close();22var mb = require('mountebank');23var mbHelper = mb.create({host: 'localhost', port: 2525});24mbHelper.close();25var mb = require('mountebank');26var mbHelper = mb.create({host: 'localhost', port: 2525});27mbHelper.close();28var mb = require('mountebank');29var mbHelper = mb.create({host: 'localhost', port: 2525});30mbHelper.close();31var mb = require('mountebank');32var mbHelper = mb.create({host:

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2var mbHelper = require('mountebank-helper');3var mbProcess = mb.create({port:2525, pidfile: 'mb.pid', logfile: 'mb.log', protofile: 'mb.proto'});4mbProcess.start();5mbProcess.stop();6mbProcess.remove();7var mbHelper = require('mountebank-helper');8mbHelper.stop();9mbHelper.remove();10var mbHelper = require('mountebank-helper');11mbHelper.stop();12mbHelper.remove();13var mbHelper = require('mountebank-helper');14mbHelper.stop();15mbHelper.remove();16var mbHelper = require('mountebank-helper');17mbHelper.stop();18mbHelper.remove();19var mbHelper = require('mountebank-helper');20mbHelper.stop();21mbHelper.remove();22var mbHelper = require('mountebank-helper');23mbHelper.stop();24mbHelper.remove();25var mbHelper = require('mountebank-helper');26mbHelper.stop();27mbHelper.remove();28var mbHelper = require('mountebank-helper');29mbHelper.stop();30mbHelper.remove();31var mbHelper = require('mountebank-helper');32mbHelper.stop();33mbHelper.remove();34var mbHelper = require('mountebank-helper');35mbHelper.stop();36mbHelper.remove();37var mbHelper = require('mountebank-helper');38mbHelper.stop();39mbHelper.remove();40var mbHelper = require('mountebank-helper');41mbHelper.stop();42mbHelper.remove();43var mbHelper = require('mountebank-helper');44mbHelper.stop();45mbHelper.remove();46var mbHelper = require('mountebank-helper');47mbHelper.stop();48mbHelper.remove();

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2mb.close();3var mb = require('mountebank');4mb.close();5I am trying to use the Mountebank library in my JavaScript project. I am using the following code to import the library:6var mb = require('mountebank');7I am trying to use the Mountebank library in my JavaScript project. I am using the following code to import the library:8var mb = require('mountebank');9I am trying to use the Mountebank library in my JavaScript project. I am using the following code to import the library:10var mb = require('mountebank');

Full Screen

Using AI Code Generation

copy

Full Screen

1var mb = require('mountebank');2var mbHelper = require('mountebank-helper');3helper.close().then(function() {4 console.log('mountebank closed');5});6var mb = require('mountebank');7var mbHelper = require('mountebank-helper');8helper.create({9}).then(function() {10 console.log('mountebank created');11});12var mb = require('mountebank');13var mbHelper = require('mountebank-helper');14helper.create({15}).then(function() {16 console.log('mountebank created');17});18var mb = require('mountebank');19var mbHelper = require('mountebank-helper');20helper.create({21}).then(function() {22 console.log('mountebank created');23});24var mb = require('mountebank');25var mbHelper = require('mountebank-helper');26helper.create({27}).then(function() {28 console.log('mountebank created');29});30var mb = require('mountebank');31var mbHelper = require('mountebank-helper');32helper.create({

Full Screen

Using AI Code Generation

copy

Full Screen

1const request = require('request');2const mb = require('mountebank');3const imposters = 2525;4const imposter = 2526;5const imposter2 = 2527;6const imposter3 = 2528;7const imposter4 = 2529;8const imposter5 = 2530;9const imposter6 = 2531;10const imposter7 = 2532;11const imposter8 = 2533;12const imposter9 = 2534;13const imposter10 = 2535;14const imposter11 = 2536;15const imposter12 = 2537;16const imposter13 = 2538;17const imposter14 = 2539;18const imposter15 = 2540;19const imposter16 = 2541;20const imposter17 = 2542;21const imposter18 = 2543;22const imposter19 = 2544;23const imposter20 = 2545;24const imposter21 = 2546;25const imposter22 = 2547;26const imposter23 = 2548;27const imposter24 = 2549;28const imposter25 = 2550;29const imposter26 = 2551;30const imposter27 = 2552;31const imposter28 = 2553;32const imposter29 = 2554;33const imposter30 = 2555;34const imposter31 = 2556;35const imposter32 = 2557;36const imposter33 = 2558;37const imposter34 = 2559;38const imposter35 = 2560;39const imposter36 = 2561;40const imposter37 = 2562;41const imposter38 = 2563;42const imposter39 = 2564;43const imposter40 = 2565;44const imposter41 = 2566;45const imposter42 = 2567;46const imposter43 = 2568;47const imposter44 = 2569;48const imposter45 = 2570;49const imposter46 = 2571;50const imposter47 = 2572;51const imposter48 = 2573;52const imposter49 = 2574;53const imposter50 = 2575;54const imposter51 = 2576;55const imposter52 = 2577;

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. LambdaTest Learning Hubs compile a list of step-by-step guides to help you be proficient with different test automation frameworks i.e. Selenium, Cypress, TestNG etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run mountebank automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

NotHelpful