How to use log method in mochawesome

Best JavaScript code snippet using mochawesome

save_tdx.py

Source: save_tdx.py (GitHub)


# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2021 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import concurrent
import datetime
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import json

import pandas as pd
import pymongo

from QUANTAXIS.QAFetch import QA_fetch_get_stock_block
from QUANTAXIS.QAFetch.QATdx import (
    QA_fetch_get_option_day,
    QA_fetch_get_option_min,
    QA_fetch_get_index_day,
    QA_fetch_get_index_min,
    QA_fetch_get_stock_day,
    QA_fetch_get_stock_info,
    QA_fetch_get_stock_list,
    QA_fetch_get_future_list,
    QA_fetch_get_index_list,
    QA_fetch_get_future_day,
    QA_fetch_get_future_min,
    QA_fetch_get_stock_min,
    QA_fetch_get_stock_transaction,
    QA_fetch_get_index_transaction,
    QA_fetch_get_stock_xdxr,
    QA_fetch_get_bond_day,
    QA_fetch_get_bond_list,
    QA_fetch_get_bond_min,
    select_best_ip,
    QA_fetch_get_hkstock_day,
    QA_fetch_get_hkstock_list,
    QA_fetch_get_hkstock_min,
    QA_fetch_get_usstock_list,
    QA_fetch_get_usstock_day,
    QA_fetch_get_usstock_min,
)
from QUANTAXIS.QAFetch.QATdx import (
    QA_fetch_get_commodity_option_AL_contract_time_to_market,
    QA_fetch_get_commodity_option_AU_contract_time_to_market,
    QA_fetch_get_commodity_option_CU_contract_time_to_market,
    QA_fetch_get_commodity_option_SR_contract_time_to_market,
    QA_fetch_get_commodity_option_M_contract_time_to_market,
    QA_fetch_get_commodity_option_RU_contract_time_to_market,
    QA_fetch_get_commodity_option_CF_contract_time_to_market,
    QA_fetch_get_commodity_option_C_contract_time_to_market,
    QA_fetch_get_option_50etf_contract_time_to_market,
    QA_fetch_get_option_300etf_contract_time_to_market,
    QA_fetch_get_option_all_contract_time_to_market,
    QA_fetch_get_option_list,
)
from QUANTAXIS.QAUtil import (
    DATABASE,
    QA_util_get_next_day,
    QA_util_get_real_date,
    QA_util_log_info,
    QA_util_to_json_from_pandas,
    trade_date_sse,
)
from QUANTAXIS.QAData.data_fq import _QA_data_stock_to_fq
from QUANTAXIS.QAFetch.QAQuery import QA_fetch_stock_day
from QUANTAXIS.QAUtil import Parallelism
from QUANTAXIS.QAFetch.QATdx import ping, get_ip_list_by_multi_process_ping, stock_ip_list
from multiprocessing import cpu_count

# ip = select_best_ip()


def now_time():
    # Before 15:00 use the previous trading day stamped ' 17:00:00';
    # otherwise use the current trading day stamped ' 15:00:00'.
    return str(QA_util_get_real_date(
        str(datetime.date.today() - datetime.timedelta(days=1)), trade_date_sse, -1
    )) + ' 17:00:00' if datetime.datetime.now().hour < 15 else str(QA_util_get_real_date(
        str(datetime.date.today()), trade_date_sse, -1
    )) + ' 15:00:00'
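# A quick illustration of the cutoff above (a sketch, not from the source):
# called before 15:00 on a trading day, now_time() returns the previous trading
# day with a ' 17:00:00' suffix; called at or after 15:00 it returns the current
# trading day with a ' 15:00:00' suffix. The savers below keep only the date
# part via str(now_time())[0:10] (or the datetime part via [0:19]).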
def QA_SU_save_single_stock_day(code: str, client=DATABASE, ui_log=None):
    '''
    save single stock_day
    Save daily bar data for a single stock.
    :param code: the stock code to save
    :param client:
    :param ui_log: for the Qt GUI
    :param ui_progress: for the Qt GUI
    '''
    # stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        try:
            QA_util_log_info('##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)), ui_log)
            # First check whether the database already has data for this code
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # The database already has this code, so continue incrementally.
            # This check exists because a newly listed stock has no rows yet,
            # which would otherwise cause a negative-index error.
            if ref.count() > 0:
                # Resume updating from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), QA_util_get_next_day(start_date), end_date, '00')))
            # No data for this code yet: download the full history from 1990-01-01
            else:
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date, end_date, '00')))
        except Exception as error0:
            print(error0)
            err.append(str(code))

    __saving_work(code, coll_stock_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)


def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save stock_day
    Save daily bar data for the whole stock list.
    :param client:
    :param ui_log: for the Qt GUI
    :param ui_progress: for the Qt GUI
    :param ui_progress_int_value: for the Qt GUI
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        try:
            QA_util_log_info('##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)), ui_log)
            # First check whether the database already has data for this code
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # Incremental update when data already exists (see the note in
            # QA_SU_save_single_stock_day about newly listed stocks).
            if ref.count() > 0:
                # Resume updating from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), QA_util_get_next_day(start_date), end_date, '00')))
            # Otherwise download the full history from 1990-01-01
            else:
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date, end_date, '00')))
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)))
        strProgressToLog = 'DOWNLOAD PROGRESS {} {}'.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%', ui_log)
        intProgressToLog = int(float(item / len(stock_list) * 100))
        QA_util_log_info(strProgressToLog, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgressToLog)
        __saving_work(stock_list[item], coll_stock_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
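# Minimal usage sketch for the two day savers above (assumes a reachable
# MongoDB behind QUANTAXIS's DATABASE client; the module path below is the
# usual location of this file and may differ in your install):
#
#   from QUANTAXIS.QASU.save_tdx import QA_SU_save_single_stock_day, QA_SU_save_stock_day
#   QA_SU_save_single_stock_day('000001')   # incremental daily bars for one code
#   QA_SU_save_stock_day()                  # incremental sweep over the whole list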
start_date,236 end_date,237 '00'238 )239 )240 )241 except Exception as error0:242 print(error0)243 err.append(str(code))244 for item in range(len(stock_list)):245 QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)))246 strProgressToLog = 'DOWNLOAD PROGRESS {} {}'.format(247 str(float(item / len(stock_list) * 100))[0:4] + '%',248 ui_log249 )250 intProgressToLog = int(float(item / len(stock_list) * 100))251 QA_util_log_info(252 strProgressToLog,253 ui_log=ui_log,254 ui_progress=ui_progress,255 ui_progress_int_value=intProgressToLog256 )257 __saving_work(stock_list[item], coll_stock_day)258 if len(err) < 1:259 QA_util_log_info('SUCCESS save stock day ^_^', ui_log)260 else:261 QA_util_log_info('ERROR CODE \n ', ui_log)262 QA_util_log_info(err, ui_log)263def gen_param(codelist, start_date=None, end_date=None, if_fq='00', frequence='day', IPList=[]):264 # 生成QA.QAFetch.QATdx.QQA_fetch_get_stock_day多进程处理的参数265 count = len(IPList)266 my_iterator = iter(range(len(codelist)))267 start_date = str(start_date)[0:10]268 end_date = str(end_date)[0:10]269 return [(code, start_date, end_date, if_fq, frequence, IPList[i % count]['ip'], IPList[i % count]['port'])270 for code, i in [(code, next(my_iterator) % count) for code in codelist]]271def QA_SU_save_stock_week(client=DATABASE, ui_log=None, ui_progress=None):272 """save stock_week273 Keyword Arguments:274 client {[type]} -- [description] (default: {DATABASE})275 """276 stock_list = QA_fetch_get_stock_list().code.unique().tolist()277 coll_stock_week = client.stock_week278 coll_stock_week.create_index(279 [("code",280 pymongo.ASCENDING),281 ("date_stamp",282 pymongo.ASCENDING)]283 )284 err = []285 def __saving_work(code, coll_stock_week):286 try:287 QA_util_log_info(288 '##JOB01 Now Saving STOCK_WEEK==== {}'.format(str(code)),289 ui_log=ui_log290 )291 ref = coll_stock_week.find({'code': str(code)[0:6]})292 end_date = str(now_time())[0:10]293 if ref.count() > 0:294 # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现295 start_date = ref[ref.count() - 1]['date']296 QA_util_log_info(297 'UPDATE_STOCK_WEEK \n Trying updating {} from {} to {}'298 .format(code,299 start_date,300 end_date),301 ui_log=ui_log302 )303 if start_date != end_date:304 coll_stock_week.insert_many(305 QA_util_to_json_from_pandas(306 QA_fetch_get_stock_day(307 str(code),308 QA_util_get_next_day(start_date),309 end_date,310 '00',311 frequence='week'312 )313 )314 )315 else:316 start_date = '1990-01-01'317 QA_util_log_info(318 'UPDATE_STOCK_WEEK \n Trying updating {} from {} to {}'319 .format(code,320 start_date,321 end_date),322 ui_log=ui_log323 )324 if start_date != end_date:325 coll_stock_week.insert_many(326 QA_util_to_json_from_pandas(327 QA_fetch_get_stock_day(328 str(code),329 start_date,330 end_date,331 '00',332 frequence='week'333 )334 )335 )336 except:337 err.append(str(code))338 for item in range(len(stock_list)):339 QA_util_log_info(340 'The {} of Total {}'.format(item,341 len(stock_list)),342 ui_log=ui_log343 )344 strProgress = 'DOWNLOAD PROGRESS {} '.format(345 str(float(item / len(stock_list) * 100))[0:4] + '%'346 )347 intProgress = int(float(item / len(stock_list) * 100))348 QA_util_log_info(349 strProgress,350 ui_log=ui_log,351 ui_progress=ui_progress,352 ui_progress_int_value=intProgress353 )354 __saving_work(stock_list[item], coll_stock_week)355 if len(err) < 1:356 QA_util_log_info('SUCCESS', ui_log=ui_log)357 else:358 QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)359 QA_util_log_info(err, ui_log=ui_log)360def QA_SU_save_stock_month(client=DATABASE, ui_log=None, 
def QA_SU_save_stock_month(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_month
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_month = client.stock_month
    coll_stock_month.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_month):
        try:
            QA_util_log_info('##JOB01 Now Saving STOCK_MONTH==== {}'.format(str(code)), ui_log=ui_log)
            ref = coll_stock_month.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # Guards against the negative-index problem for newly listed stocks
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_STOCK_MONTH \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    coll_stock_month.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), QA_util_get_next_day(start_date), end_date, '00', frequence='month')))
            else:
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_STOCK_MONTH \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    coll_stock_month.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date, end_date, '00', frequence='month')))
        except:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)), ui_log=ui_log)
        strProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(stock_list) * 100))[0:4] + '%')
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(strProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgress)
        __saving_work(stock_list[item], coll_stock_month)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_stock_year(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_year
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_year = client.stock_year
    coll_stock_year.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_year):
        try:
            QA_util_log_info('##JOB01 Now Saving STOCK_YEAR==== {}'.format(str(code)), ui_log=ui_log)
            ref = coll_stock_year.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # Guards against the negative-index problem for newly listed stocks
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), QA_util_get_next_day(start_date), end_date, '00', frequence='year')))
            else:
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date, end_date, '00', frequence='year')))
        except:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)), ui_log=ui_log)
        strProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(stock_list) * 100))[0:4] + '%')
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(strProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgress)
        __saving_work(stock_list[item], coll_stock_year)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_stock_xdxr(client=DATABASE, ui_log=None, ui_progress=None):
    """[summary]
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    # client.drop_collection('stock_xdxr')
    try:
        coll = client.stock_xdxr
        coll.create_index([('code', pymongo.ASCENDING), ('date', pymongo.ASCENDING)], unique=True)
        coll_adj = client.stock_adj
        coll_adj.create_index([('code', pymongo.ASCENDING), ('date', pymongo.ASCENDING)], unique=True)
    except:
        client.drop_collection('stock_xdxr')
        coll = client.stock_xdxr
        coll.create_index([('code', pymongo.ASCENDING), ('date', pymongo.ASCENDING)], unique=True)
        client.drop_collection('stock_adj')
        coll_adj = client.stock_adj
        coll_adj.create_index([('code', pymongo.ASCENDING), ('date', pymongo.ASCENDING)], unique=True)
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            xdxr = QA_fetch_get_stock_xdxr(str(code))
            try:
                coll.insert_many(QA_util_to_json_from_pandas(xdxr), ordered=False)
            except:
                pass
            try:
                data = QA_fetch_stock_day(str(code), '1990-01-01', str(datetime.date.today()), 'pd')
                qfq = _QA_data_stock_to_fq(data, xdxr, 'qfq')
                qfq = qfq.assign(date=qfq.date.apply(lambda x: str(x)[0:10]))
                adjdata = QA_util_to_json_from_pandas(qfq.loc[:, ['date', 'code', 'adj']])
                coll_adj.delete_many({'code': code})
                # print(adjdata)
                coll_adj.insert_many(adjdata)
            except Exception as e:
                print(e)
        except Exception as e:
            print(e)
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)), ui_log=ui_log)
        strLogInfo = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(stock_list) * 100))
        QA_util_log_info(strLogInfo, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_], coll)
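# Note on QA_SU_save_stock_xdxr above: besides the raw ex-dividend/ex-rights
# records (stock_xdxr), it rebuilds the forward-adjustment factors: daily bars
# are adjusted with _QA_data_stock_to_fq(data, xdxr, 'qfq') and only the
# ['date', 'code', 'adj'] columns are stored in stock_adj, so 'adj' can later
# be joined onto raw prices to produce forward-adjusted (qfq) series.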
def QA_SU_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB03 Now Saving STOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data)[1::])
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=4)
    # executor.map((__saving_work, stock_list[i_], coll), URLS)
    res = {executor.submit(__saving_work, stock_list[i_], coll) for i_ in range(len(stock_list))}
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(stock_list)), ui_log=ui_log)
        strProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(stock_list) * 100))[0:4] + '%')
        intProgress = int(count / len(stock_list) * 10000.0)
        QA_util_log_info(strProgress, ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
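# All of the minute-bar savers reuse the fan-out pattern shown in
# QA_SU_save_stock_min above: one __saving_work(code, coll) job per code is
# submitted to a ThreadPoolExecutor, and completions are counted with
# as_completed. A stripped-down sketch of the same idiom (save_one and codes
# are hypothetical stand-ins):
#
#   from concurrent.futures import ThreadPoolExecutor, as_completed
#
#   def save_one(code):
#       ...  # fetch the bars for one code and insert_many into MongoDB
#
#   with ThreadPoolExecutor(max_workers=4) as pool:
#       futures = {pool.submit(save_one, c) for c in codes}
#       for n, _ in enumerate(as_completed(futures), 1):
#           print('{} of {}'.format(n, len(codes)))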
def QA_SU_save_single_stock_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single stock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    stock_list = [code]
    coll = client.stock_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB03 Now Saving STOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data)[1::])
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_stock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=4)
    # executor.map((__saving_work, stock_list[i_], coll), URLS)
    res = {executor.submit(__saving_work, stock_list[i_], coll) for i_ in range(len(stock_list))}
    count = 1
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(stock_list)), ui_log=ui_log)
        strProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(stock_list) * 100))[0:4] + '%')
        intProgress = int(count / len(stock_list) * 10000.0)
        QA_util_log_info(strProgress, ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_single_index_day(code: str, client=DATABASE, ui_log=None):
    """save index_day
    Keyword Arguments:
        code : single index code
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __index_list = QA_fetch_get_stock_list('index')
    coll = client.index_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log)
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), QA_util_get_next_day(start_time), end_time)))
            else:
                try:
                    start_time = '1990-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                        ui_log=ui_log)
                    coll.insert_many(
                        QA_util_to_json_from_pandas(QA_fetch_get_index_day(str(code), start_time, end_time)))
                except:
                    start_time = '2009-01-01'
                    QA_util_log_info(
                        '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                        ui_log=ui_log)
                    coll.insert_many(
                        QA_util_to_json_from_pandas(QA_fetch_get_index_day(str(code), start_time, end_time)))
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(str(code))
            QA_util_log_info(err, ui_log=ui_log)

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
"""save index_day946 Keyword Arguments:947 client {[type]} -- [description] (default: {DATABASE})948 """949 __index_list = QA_fetch_get_stock_list('index')950 coll = client.index_day951 coll.create_index(952 [('code',953 pymongo.ASCENDING),954 ('date_stamp',955 pymongo.ASCENDING)]956 )957 err = []958 def __saving_work(code, coll):959 try:960 ref_ = coll.find({'code': str(code)[0:6]})961 end_time = str(now_time())[0:10]962 if ref_.count() > 0:963 start_time = ref_[ref_.count() - 1]['date']964 QA_util_log_info(965 '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'966 .format(code,967 start_time,968 end_time),969 ui_log=ui_log970 )971 if start_time != end_time:972 coll.insert_many(973 QA_util_to_json_from_pandas(974 QA_fetch_get_index_day(975 str(code),976 QA_util_get_next_day(start_time),977 end_time978 )979 )980 )981 else:982 try:983 start_time = '1990-01-01'984 QA_util_log_info(985 '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'986 .format(code,987 start_time,988 end_time),989 ui_log=ui_log990 )991 coll.insert_many(992 QA_util_to_json_from_pandas(993 QA_fetch_get_index_day(994 str(code),995 start_time,996 end_time997 )998 )999 )1000 except:1001 start_time = '2009-01-01'1002 QA_util_log_info(1003 '##JOB04 Now Saving INDEX_DAY==== \n Trying updating {} from {} to {}'1004 .format(code,1005 start_time,1006 end_time),1007 ui_log=ui_log1008 )1009 coll.insert_many(1010 QA_util_to_json_from_pandas(1011 QA_fetch_get_index_day(1012 str(code),1013 start_time,1014 end_time1015 )1016 )1017 )1018 except Exception as e:1019 QA_util_log_info(e, ui_log=ui_log)1020 err.append(str(code))1021 QA_util_log_info(err, ui_log=ui_log)1022 for i_ in range(len(__index_list)):1023 # __saving_work('000001')1024 QA_util_log_info(1025 'The {} of Total {}'.format(i_,1026 len(__index_list)),1027 ui_log=ui_log1028 )1029 strLogProgress = 'DOWNLOAD PROGRESS {} '.format(1030 str(float(i_ / len(__index_list) * 100))[0:4] + '%'1031 )1032 intLogProgress = int(float(i_ / len(__index_list) * 10000.0))1033 QA_util_log_info(1034 strLogProgress,1035 ui_log=ui_log,1036 ui_progress=ui_progress,1037 ui_progress_int_value=intLogProgress1038 )1039 __saving_work(__index_list.index[i_][0], coll)1040 if len(err) < 1:1041 QA_util_log_info('SUCCESS', ui_log=ui_log)1042 else:1043 QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)1044 QA_util_log_info(err, ui_log=ui_log)1045def QA_SU_save_index_min(client=DATABASE, ui_log=None, ui_progress=None):1046 """save index_min1047 Keyword Arguments:1048 client {[type]} -- [description] (default: {DATABASE})1049 """1050 __index_list = QA_fetch_get_stock_list('index')1051 coll = client.index_min1052 coll.create_index(1053 [1054 ('code',1055 pymongo.ASCENDING),1056 ('time_stamp',1057 pymongo.ASCENDING),1058 ('date_stamp',1059 pymongo.ASCENDING)1060 ]1061 )1062 err = []1063 def __saving_work(code, coll):1064 QA_util_log_info(1065 '##JOB05 Now Saving Index_MIN ==== {}'.format(str(code)),1066 ui_log=ui_log1067 )1068 try:1069 for type in ['1min', '5min', '15min', '30min', '60min']:1070 ref_ = coll.find({'code': str(code)[0:6], 'type': type})1071 end_time = str(now_time())[0:19]1072 if ref_.count() > 0:1073 start_time = ref_[ref_.count() - 1]['datetime']1074 QA_util_log_info(1075 '##JOB05.{} Now Saving {} from {} to {} =={} '.format(1076 ['1min',1077 '5min',1078 '15min',1079 '30min',1080 '60min'].index(type),1081 str(code),1082 start_time,1083 end_time,1084 type1085 ),1086 ui_log=ui_log1087 )1088 if start_time != end_time:1089 __data = QA_fetch_get_index_min(1090 
def QA_SU_save_index_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save index_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('index')
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB05 Now Saving Index_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {executor.submit(__saving_work, __index_list.index[i_][0], coll)
           for i_ in range(len(__index_list))}  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info('The {} of Total {}'.format(count, len(__index_list)), ui_log=ui_log)
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_single_index_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single index_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __index_list = QA_fetch_get_stock_list('index')
    __index_list = [code]
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB05 Now Saving Index_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB05.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {executor.submit(__saving_work, __index_list[i_], coll)
           for i_ in range(len(__index_list))}  # multi index ./.
    count = 1
    for i_ in concurrent.futures.as_completed(res):
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info('The {} of Total {}'.format(count, len(__index_list)), ui_log=ui_log)
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_single_etf_day(code: str, client=DATABASE, ui_log=None):
    """save etf_day
    Keyword Arguments:
        code : single etf code
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log)
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), QA_util_get_next_day(start_time), end_time)))
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log)
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(QA_fetch_get_index_day(str(code), start_time, end_time)))
        except:
            err.append(str(code))

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_etf_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_day
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log)
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), QA_util_get_next_day(start_time), end_time)))
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log)
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(QA_fetch_get_index_day(str(code), start_time, end_time)))
        except:
            err.append(str(code))

    for i_ in range(len(__index_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(__index_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(__index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(__index_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(__index_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {executor.submit(__saving_work, __index_list.index[i_][0], coll)
           for i_ in range(len(__index_list))}  # multi index ./.
    count = 1
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__index_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_single_etf_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single etf_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __index_list = QA_fetch_get_stock_list('etf')
    __index_list = [code]
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {executor.submit(__saving_work, __index_list[i_], coll)
           for i_ in range(len(__index_list))}  # multi index ./.
    count = 1
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__index_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_list
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    client.drop_collection('stock_list')
    coll = client.stock_list
    coll.create_index('code')
    try:
        # 🛠todo this should be the first task (JOB01): refresh the stock list before anything else!!
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000)
        stock_list_from_tdx = QA_fetch_get_stock_list()
        pandas_data = QA_util_to_json_from_pandas(stock_list_from_tdx)
        coll.insert_many(pandas_data)
        QA_util_log_info('Finished fetching the stock list', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000)
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_list exception!")
        pass


def QA_SU_save_etf_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_list
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    try:
        QA_util_log_info('##JOB16 Now Saving ETF_LIST ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000)
        etf_list_from_tdx = QA_fetch_get_stock_list(type_="etf")
        pandas_data = QA_util_to_json_from_pandas(etf_list_from_tdx)
        if len(pandas_data) > 0:
            # Only drop the collection once data has actually been fetched
            client.drop_collection('etf_list')
            coll = client.etf_list
            coll.create_index('code')
            coll.insert_many(pandas_data)
        QA_util_log_info('Finished fetching the ETF list', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000)
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_etf_list exception!")
        pass


def QA_SU_save_stock_block(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_block
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    client.drop_collection('stock_block')
    coll = client.stock_block
    coll.create_index('code')
    try:
        QA_util_log_info('##JOB09 Now Saving STOCK_BLOCK ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000)
        coll.insert_many(QA_util_to_json_from_pandas(QA_fetch_get_stock_block('tdx')))
        QA_util_log_info('tdx Block ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000)
        # # 🛠todo fix here: fetch tonghuashun (ths) blocks instead of calling tdx again
        # coll.insert_many(QA_util_to_json_from_pandas(QA_fetch_get_stock_block('ths')))
        # QA_util_log_info('ths Block ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=8000)
        # tushare block data includes CSI 500 constituents, so fetch those as well -- acai
        coll.insert_many(QA_util_to_json_from_pandas(QA_fetch_get_stock_block('tushare')))
        QA_util_log_info('tushare Block ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=9000)
        QA_util_log_info('Finished fetching stock blocks=', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000)
        # coll.insert_many(QA_util_to_json_from_pandas(QA_fetch_get_stock_block('QA')))
        # QA_util_log_info('QA_Select Block ====', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=8000)
        QA_util_log_info('Finished fetching stock blocks=', ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000)
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_block exception!")
        pass
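# Usage sketch for the list/block savers above (same assumptions as earlier;
# per the todo in QA_SU_save_stock_list, refreshing the list is meant to run
# before the per-code download jobs):
#
#   QA_SU_save_stock_list()    # drop and rebuild the stock_list collection
#   QA_SU_save_etf_list()      # rebuild etf_list only if the fetch returned rows
#   QA_SU_save_stock_block()   # tdx + tushare block/constituent data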
def QA_SU_save_stock_info(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_info
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    client.drop_collection('stock_info')
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_info
    coll.create_index('code')
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB10 Now Saving STOCK INFO ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            coll.insert_many(QA_util_to_json_from_pandas(QA_fetch_get_stock_info(str(code))))
        except:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_stock_transaction(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_transaction
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_transaction
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code):
        QA_util_log_info('##JOB11 Now Saving STOCK_TRANSACTION ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # 🛠todo str(stock_list[code]) wrong argument?
                    QA_fetch_get_stock_transaction(str(code), '2019-01-01', str(now_time())[0:10])))
        except:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_])
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_index_transaction(client=DATABASE, ui_log=None, ui_progress=None):
    """save index_transaction
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    index_list = QA_fetch_get_index_list().code.unique().tolist()
    coll = client.index_transaction
    coll.create_index([('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code):
        QA_util_log_info('##JOB11 Now Saving INDEX_TRANSACTION ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # 🛠todo str(stock_list[code]) wrong argument?
                    QA_fetch_get_index_transaction(str(code), '2019-01-01', str(now_time())[0:10])))
        except:
            err.append(str(code))

    for i_ in range(len(index_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(index_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(index_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(index_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(index_list[i_])
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


########################################################################################################
def _save_option_commodity_ru_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### RU natural rubber ############################################################################
    option_ru_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    coll_option_commodity_ru_day = client.option_commodity_ru_day
    coll_option_commodity_ru_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_option_commodity_ru_day):
        try:
            QA_util_log_info('##JOB12 Now Saving OPTION_DAY_COMMODITY_RU natural rubber ==== {}'.format(str(code)), ui_log=ui_log)
            # First check whether the database already has data for this code
            ref = coll_option_commodity_ru_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # The database already has this contract, so continue incrementally.
            # (Guards against the negative-index problem for newly listed contracts.)
            if ref.count() > 0:
                # Resume from the last fetched date
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' The last saved date for RU (natural rubber) option daily data is {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info('UPDATE_OPTION_RU_DAY \n Resuming from the last download. Trying update {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info("From {} to {}, contract {} returned {} rows; writing them to the database".format(start_date0, end_date, code, retCount), ui_log=ui_log)
                    coll_option_commodity_ru_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^Data for this day has already been fetched^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_M_OPTION_DAY \n Starting a fresh download. Trying update {} from {} to {}'.format(code, start_date, end_date), ui_log=ui_log)
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info("From {} to {}, contract {} fetched {} rows; writing them to the database ^_^ ".format(start_date, end_date, code, retCount), ui_log=ui_log)
                    coll_option_commodity_ru_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*Data for this day has already been fetched* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_ru_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_ru_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_ru_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_ru_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_ru_contract_list[item].code, coll_option_commodity_ru_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option ru day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_c_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### C corn #####################
    option_c_contract_list = QA_fetch_get_commodity_option_C_contract_time_to_market()
    coll_option_commodity_c_day = client.option_commodity_c_day
    coll_option_commodity_c_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_c_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_C 玉米 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            ref = coll_option_commodity_c_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取 期权C 玉米 日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_C_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_c_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_C_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_c_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_c_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_c_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_c_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_c_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_c_contract_list[item].code, coll_option_commodity_c_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option c day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_cf_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### CF cotton #####################
    option_cf_contract_list = QA_fetch_get_commodity_option_CF_contract_time_to_market()
    coll_option_commodity_cf_day = client.option_commodity_cf_day
    coll_option_commodity_cf_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_cf_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_CF 棉花 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            ref = coll_option_commodity_cf_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取 期权CF 棉花 日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_CF_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cf_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_CF_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cf_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_cf_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_cf_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_cf_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_cf_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_cf_contract_list[item].code, coll_option_commodity_cf_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option cf day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_sr_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### SR white sugar #####################
    option_sr_contract_list = QA_fetch_get_commodity_option_SR_contract_time_to_market()
    coll_option_commodity_sr_day = client.option_commodity_sr_day
    coll_option_commodity_sr_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_sr_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_SR 白糖 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            ref = coll_option_commodity_sr_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权sr白糖日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_SR_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_sr_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_SR_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_sr_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_sr_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_sr_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_sr_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_sr_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_sr_contract_list[item].code, coll_option_commodity_sr_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option sr day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_m_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### M soybean meal #####################
    option_m_contract_list = QA_fetch_get_commodity_option_M_contract_time_to_market()
    coll_option_commodity_m_day = client.option_commodity_m_day
    coll_option_commodity_m_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_m_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_M 豆粕 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            # codes follow the M XXXXXX format
            ref = coll_option_commodity_m_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权M豆粕日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_M_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_m_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_M_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_m_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_m_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_m_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_m_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_m_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_m_contract_list[item].code, coll_option_commodity_m_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option m day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
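
# Editor's note: Cursor.count() and numeric cursor indexing, used throughout these savers to find
# the most recent stored date, only exist in pymongo 3.x; both were removed in pymongo 4. A hedged
# equivalent for newer drivers would sort on the indexed date_stamp field instead, e.g. inside
# any of the __saving_work helpers:
#
# last = coll_option_commodity_m_day.find_one({'code': str(code)[0:8]},
#                                             sort=[('date_stamp', pymongo.DESCENDING)])
# start_date = last['date'] if last is not None else '1990-01-01'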


def _save_option_commodity_al_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### AL aluminium #####################
    option_al_contract_list = QA_fetch_get_commodity_option_AL_contract_time_to_market()
    coll_option_commodity_al_day = client.option_commodity_al_day
    coll_option_commodity_al_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_al_day):
        try:
            QA_util_log_info(
                '##JOB13 Now Saving OPTION_DAY_COMMODITY_AL 铝 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            # option codes are numbered from 10000001, e.g. 10001228
            ref = coll_option_commodity_al_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权AL日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_AL_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_al_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_AL_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_al_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_al_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_al_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_al_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_al_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_al_contract_list[item].code, coll_option_commodity_al_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option al day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_cu_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### CU copper #####################
    option_cu_contract_list = QA_fetch_get_commodity_option_CU_contract_time_to_market()
    coll_option_commodity_cu_day = client.option_commodity_cu_day
    coll_option_commodity_cu_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_cu_day):
        try:
            QA_util_log_info(
                '##JOB12 Now Saving OPTION_DAY_COMMODITY_CU 铜 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            # option codes are numbered from 10000001, e.g. 10001228
            ref = coll_option_commodity_cu_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权CU日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_CU_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cu_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_CU_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_cu_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_cu_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_cu_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_cu_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_cu_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_cu_contract_list[item].code, coll_option_commodity_cu_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option cu day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_au_day(client=DATABASE, ui_log=None, ui_progress=None):
    ##################### AU gold #####################
    option_au_contract_list = QA_fetch_get_commodity_option_AU_contract_time_to_market()
    coll_option_commodity_au_day = client.option_commodity_au_day
    coll_option_commodity_au_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll_option_commodity_au_day):
        try:
            QA_util_log_info(
                '##JOB13 Now Saving OPTION_DAY_COMMODITY_AU 金 ==== {}'.format(str(code)),
                ui_log=ui_log
            )
            # first check whether the database already has rows for this code
            # option codes are numbered from 10000001, e.g. 10001228
            ref = coll_option_commodity_au_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权AU日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_AU_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_au_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_AU_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_commodity_au_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_au_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_au_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_au_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_au_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_au_contract_list[item].code, coll_option_commodity_au_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option au day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_option_commodity_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    _save_option_commodity_cu_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_m_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_sr_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_ru_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_cf_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_c_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_au_day(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_al_day(client=client, ui_log=ui_log, ui_progress=ui_progress)


'''
Option minute bars
todo: this code needs refactoring -- merge the duplicated functions (a sketch follows below)
'''
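
# Editor's sketch of the refactor the todo above asks for (hypothetical -- the names
# _VARIETY_MIN_SOURCES and _save_option_commodity_min_any are not in the original file). One driver,
# parameterised by the contract-list fetcher and the target collection, could replace the eight
# near-identical _save_option_commodity_*_min functions that follow:
_VARIETY_MIN_SOURCES = {
    'cu': (QA_fetch_get_commodity_option_CU_contract_time_to_market, 'option_commodity_cu_min', 'CU 铜'),
    'sr': (QA_fetch_get_commodity_option_SR_contract_time_to_market, 'option_commodity_sr_min', 'SR 白糖'),
    'm': (QA_fetch_get_commodity_option_M_contract_time_to_market, 'option_commodity_m_min', 'M 豆粕'),
    'ru': (QA_fetch_get_commodity_option_RU_contract_time_to_market, 'option_commodity_ru_min', 'RU 天然橡胶'),
    'cf': (QA_fetch_get_commodity_option_CF_contract_time_to_market, 'option_commodity_cf_min', 'CF 棉花'),
    'c': (QA_fetch_get_commodity_option_C_contract_time_to_market, 'option_commodity_c_min', 'C 玉米'),
    'au': (QA_fetch_get_commodity_option_AU_contract_time_to_market, 'option_commodity_au_min', 'AU 金'),
    'al': (QA_fetch_get_commodity_option_AL_contract_time_to_market, 'option_commodity_al_min', 'AL 铝'),
}


def _save_option_commodity_min_any(variety, client=DATABASE, ui_log=None, ui_progress=None):
    # generic minute-bar saver; behaviour mirrors the per-variety functions below
    fetch_contracts, coll_name, label = _VARIETY_MIN_SOURCES[variety]
    option_contract_list = fetch_contracts()
    coll = client[coll_name]
    coll.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code):
        QA_util_log_info('## Now Saving Option {} MIN ==== {}'.format(label, str(code)), ui_log=ui_log)
        try:
            for frequence in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': frequence})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # incremental: resume after the last stored bar, dropping the overlapping row
                    start_time = ref_[ref_.count() - 1]['datetime']
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, frequence)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, frequence)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {executor.submit(__saving_work, option_contract_list[i_]["code"])
           for i_ in range(len(option_contract_list))}
    for count, _ in enumerate(concurrent.futures.as_completed(res)):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)),
                         ui_log=ui_log, ui_progress=ui_progress,
                         ui_progress_int_value=int(count / len(option_contract_list) * 10000.0))
    if err:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)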


def _save_option_commodity_ru_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    coll_option_min = client.option_commodity_ru_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option RU 天然橡胶 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option RU 天然橡胶 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option RU 天然橡胶 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_c_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_C_contract_time_to_market()
    coll_option_min = client.option_commodity_c_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option C 玉米 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option C 玉米 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option C 玉米 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_cf_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_CF_contract_time_to_market()
    coll_option_min = client.option_commodity_cf_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option CF 棉花 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option CF 棉花 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option CF 棉花 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_ru_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    coll_option_min = client.option_commodity_ru_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option RU 天然橡胶 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option RU 天然橡胶 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option RU 天然橡胶 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_cu_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_CU_contract_time_to_market()
    coll_option_min = client.option_commodity_cu_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option CU 铜 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option CU 铜 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option CU 铜 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_au_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_AU_contract_time_to_market()
    coll_option_min = client.option_commodity_au_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option AU 金 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option AU 金 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option AU 金 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_al_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_AL_contract_time_to_market()
    coll_option_min = client.option_commodity_al_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB20 Now Saving Option AL 铝 MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB20.{} Now Saving Option AL 铝 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB20.{} Now Option AL 铝 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_sr_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_SR_contract_time_to_market()
    coll_option_min = client.option_commodity_sr_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option SR 白糖 ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option SR 白糖 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option SR 白糖 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def _save_option_commodity_m_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :param ui_log:
    :param ui_progress:
    :return:
    '''
    option_contract_list = QA_fetch_get_commodity_option_M_contract_time_to_market()
    coll_option_min = client.option_commodity_m_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option M 豆粕 ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option M 豆粕 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option M 豆粕 {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_option_commodity_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    # observed in testing: fetching everything in one go tends to fail -- after each variety,
    # switch the server IP and carry on?
    _save_option_commodity_cu_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_sr_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_m_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_ru_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_cf_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_c_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_au_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
    _save_option_commodity_al_min(client=client, ui_log=ui_log, ui_progress=ui_progress)
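
# Editor's note: a minimal usage sketch (assumes a reachable MongoDB behind DATABASE and a working
# TDX connection). Both batch entry points accept the same optional client/ui_log/ui_progress
# arguments:
#
# if __name__ == '__main__':
#     QA_SU_save_option_commodity_day()   # daily bars, all eight commodity option varieties
#     QA_SU_save_option_commodity_min()   # then minute bars, one variety at a time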


def QA_SU_save_option_50etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    option_contract_list = QA_fetch_get_option_50etf_contract_time_to_market()
    coll_option_min = client.option_day_min
    coll_option_min.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option 50ETF MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option 50ETF {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增历史合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option 50ETF {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" 写入 新增合约记录数 {} ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)


def QA_SU_save_option_50etf_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    option_contract_list = QA_fetch_get_option_50etf_contract_time_to_market()
    coll_option_day = client.option_day
    coll_option_day.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)]
    )
    err = []  # index by code

    def __saving_work(code, coll_option_day):
        try:
            QA_util_log_info('##JOB12 Now Saving OPTION_DAY==== {}'.format(str(code)), ui_log=ui_log)
            # first check whether the database already has rows for this code
            # option codes are numbered from 10000001, e.g. 10001228
            ref = coll_option_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # if it does, continue with an incremental update; the check also guards against the
            # negative-index error a newly listed contract (no rows yet) would otherwise trigger
            if ref.count() > 0:
                # resume from the date of the last saved record
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' 上次获取期权日线数据的最后日期是 {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从上一次下载数据开始继续 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n 从新开始下载数据 Trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date,
                                                  frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_contract_list[item].code, coll_option_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save option day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
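
# Editor's note: QA_SU_save_option_50etf_min and the 300 ETF functions below write into the shared
# option_day_min / option_day collections, so ETF option bars for both underlyings land in the same
# place. Usage mirrors the commodity savers:
#
# QA_SU_save_option_50etf_day()
# QA_SU_save_option_50etf_min()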
def QA_SU_save_option_300etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    option_contract_list = QA_fetch_get_option_300etf_contract_time_to_market()
    coll_option_min = client.option_day_min
    coll_option_min.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    # indexed by code
    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Option shanghai sse 300 ETF MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Option shanghai 300ETF {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" wrote {} new historical contract rows ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Option shanghai sse 300ETF {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" wrote {} new contract rows ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # one task per contract
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_option_300etf_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    option_contract_list = QA_fetch_get_option_300etf_contract_time_to_market()
    coll_option_day = client.option_day
    coll_option_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    # indexed by code
    def __saving_work(code, coll_option_day):
        try:
            QA_util_log_info('##JOB12 Now Saving shanghai sse 300 etf OPTION_DAY==== {}'.format(str(code)), ui_log=ui_log)
            # First check whether the database already has data for this code.
            # Option codes are numbered from 10000001, e.g. 10001228.
            ref = coll_option_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # If the database already holds this code, continue incrementally.
            # The guard avoids a negative-index error for freshly listed contracts.
            if ref.count() > 0:
                # resume from the date of the last fetch
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' the last saved option daily bar is dated {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_DAY shanghai sse 300 etf \n resuming previous download, trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "from {} to {} , contract {} , fetched {} rows , writing to database".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^ data for this day already fetched ^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY shanghai sse 300 etf \n downloading from scratch, trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "from {} to {} , contract {} , fetched {} rows , writing to database ^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("* data for this day already fetched * {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_contract_list[item].code, coll_option_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option day ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_contract_list(client=DATABASE, ui_log=None, ui_progress=None):
    rows_of_option = QA_fetch_get_option_all_contract_time_to_market()
    # rows_cu = QA_fetch_get_commodity_option_CU_contract_time_to_market()
    # rows_m = QA_fetch_get_commodity_option_M_contract_time_to_market()
    # rows_sr = QA_fetch_get_commodity_option_SR_contract_time_to_market()
    # rows_cf = QA_fetch_get_commodity_option_CF_contract_time_to_market()
    # rows_ru = QA_fetch_get_commodity_option_RU_contract_time_to_market()
    # rows_c = QA_fetch_get_commodity_option_C_contract_time_to_market()
    try:
        # 🛠todo this should really be the first task, JOB01: refresh the list first!!
        QA_util_log_info(
            '##JOB15 Now Saving OPTION_CONTRACT_LIST ====',
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000
        )
        coll = client.option_contract_list
        coll.create_index([('desc', pymongo.ASCENDING)], unique=True)
        # todo fixhere
        # from_items is deprecated. Please use DataFrame.from_dict(dict(items), ...) instead.
        try:
            df = pd.DataFrame.from_items([(s.desc, s) for s in rows_of_option])
            df = df.T
            js = QA_util_to_json_from_pandas(df)
            # result0 = coll.insert_many(js)
            for a_js_row in js:
                # print(a_js_row)
                akey = a_js_row['desc']
                id0 = coll.find_one({'desc': akey})
                if id0 is None:
                    # Collection.insert() was removed in pymongo 3+; insert_one
                    # is the equivalent single-document call.
                    coll.insert_one(a_js_row)
            # print(result0)
        except pymongo.errors.BulkWriteError as e:
            # https://ask.helplib.com/python/post_12740530
            # keep only the errors that are not duplicate-key (code 11000)
            panic = filter(lambda x: x['code'] != 11000, e.details['writeErrors'])
            # if len(panic) > 0:
            #     print("really panic")
        QA_util_log_info(
            "contract list update complete",
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_option_contract_list exception!")
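# --- editor's note (not part of the original file): pd.DataFrame.from_items,
# flagged by the `todo fixhere` above, was removed in pandas 1.0. A sketch of
# the replacement the deprecation message itself suggests, with the same
# rows_of_option input (one row per contract, keyed by its desc):
#
#     df = pd.DataFrame.from_dict(
#         {s.desc: s for s in rows_of_option},
#         orient='index',  # rows directly; no transpose step needed
#     )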
def QA_SU_save_option_day_all(client=DATABASE, ui_log=None, ui_progress=None):
    option_contract_list = QA_fetch_get_option_all_contract_time_to_market()
    coll_option_day = client.option_day_all
    coll_option_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    # indexed by code
    def __saving_work(code, coll_option_day):
        try:
            QA_util_log_info('##JOB12 Now Saving OPTION_DAY ALL ==== {}'.format(str(code)), ui_log=ui_log)
            # First check whether the database already has data for this code.
            # Option codes are numbered from 10000001, e.g. 10001228.
            ref = coll_option_day.find({'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]
            # If the database already holds this code, continue incrementally.
            # The guard avoids a negative-index error for freshly listed contracts.
            if ref.count() > 0:
                # resume from the date of the last fetch
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(' the last saved option daily bar is dated {}'.format(start_date), ui_log=ui_log)
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n resuming previous download, trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date0, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "from {} to {} , contract {} , fetched {} rows , writing to database".format(start_date0, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^ data for this day already fetched ^ {}".format(start_date), ui_log=ui_log)
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n downloading from scratch, trying update {} from {} to {}'.format(code, start_date, end_date),
                    ui_log=ui_log
                )
                if start_date != end_date:
                    df0 = QA_fetch_get_option_day(code=code, start_date=start_date, end_date=end_date, frequence='day', ip=None, port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "from {} to {} , contract {} , fetched {} rows , writing to database ^_^ ".format(start_date, end_date, code, retCount),
                        ui_log=ui_log
                    )
                    coll_option_day.insert_many(QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("* data for this day already fetched * {}".format(start_date), ui_log=ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(option_contract_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(item / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(item / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(option_contract_list[item].code, coll_option_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save option day all contract ^_^ ', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_option_min_all(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    :param client:
    :return:
    '''
    option_contract_list = QA_fetch_get_option_all_contract_time_to_market()
    coll_option_min = client.option_min_all
    coll_option_min.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    # indexed by code
    def __saving_work(code, coll):
        QA_util_log_info('##JOB15 Now Saving Option ALL MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:8], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB99.{} Now Saving Option ALL MIN {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" wrote {} new historical contract rows ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB15.{} Now Option ALL MIN {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            QA_util_log_info(" wrote {} new contract rows ".format(len(__data)))
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, option_contract_list[i_]["code"], coll_option_min)
        for i_ in range(len(option_contract_list))
    }  # one task per contract
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(option_contract_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(option_contract_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(option_contract_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

#######################################################################################

def QA_SU_save_future_list(client=DATABASE, ui_log=None, ui_progress=None):
    future_list = QA_fetch_get_future_list()
    coll_future_list = client.future_list
    coll_future_list.create_index("code", unique=True)
    try:
        coll_future_list.insert_many(QA_util_to_json_from_pandas(future_list), ordered=False)
    except Exception:
        pass

def QA_SU_save_index_list(client=DATABASE, ui_log=None, ui_progress=None):
    index_list = QA_fetch_get_index_list()
    coll_index_list = client.index_list
    coll_index_list.create_index("code", unique=True)
    try:
        coll_index_list.insert_many(QA_util_to_json_from_pandas(index_list), ordered=False)
    except Exception:
        pass

def QA_SU_save_single_future_day(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save single_future_day
    Save the daily bars of a single future contract.
    :param client:
    :param ui_log: for the Qt GUI
    :param ui_progress: for the Qt GUI
    :param ui_progress_int_value: for the Qt GUI
    :return:
    '''
    coll_future_day = client.future_day
    coll_future_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_future_day):
        try:
            QA_util_log_info('##JOB12 Now Saving Future_DAY==== {}'.format(str(code)), ui_log)
            # First check whether the database already has data for this code.
            ref = coll_future_day.find({'code': str(code)[0:4]})
            end_date = str(now_time())[0:10]
            # If the database already holds this code, continue incrementally.
            # The guard avoids a negative-index error for freshly listed contracts.
            if ref.count() > 0:
                # resume from the date of the last fetch
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), QA_util_get_next_day(start_date), end_date)
                        )
                    )
            # No data for this code yet: download the full history (from 2001-01-01).
            else:
                start_date = '2001-01-01'
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), start_date, end_date)
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    __saving_work(code, coll_future_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save future day ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_future_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save future_day
    Save daily bar data.
    :param client:
    :param ui_log: for the Qt GUI
    :param ui_progress: for the Qt GUI
    :param ui_progress_int_value: for the Qt GUI
    :return:
    '''
    future_list = [
        item for item in QA_fetch_get_future_list().code.unique().tolist()
        if str(item)[-2:] in ['L8', 'L9']
    ]
    coll_future_day = client.future_day
    coll_future_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_future_day):
        try:
            QA_util_log_info('##JOB12 Now Saving Future_DAY==== {}'.format(str(code)), ui_log)
            # First check whether the database already has data for this code.
            ref = coll_future_day.find({'code': str(code)[0:4]})
            end_date = str(now_time())[0:10]
            # If the database already holds this code, continue incrementally.
            # The guard avoids a negative-index error for freshly listed contracts.
            if ref.count() > 0:
                # resume from the date of the last fetch
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), QA_util_get_next_day(start_date), end_date)
                        )
                    )
            # No data for this code yet: download the full history (from 2001-01-01).
            else:
                start_date = '2001-01-01'
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), start_date, end_date)
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(future_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(future_list)))
        strProgressToLog = 'DOWNLOAD PROGRESS {} {}'.format(str(float(item / len(future_list) * 100))[0:4] + '%', ui_log)
        intProgressToLog = int(float(item / len(future_list) * 100))
        QA_util_log_info(strProgressToLog, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgressToLog)
        __saving_work(future_list[item], coll_future_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save future day ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_future_day_all(client=DATABASE, ui_log=None, ui_progress=None):
    '''
    save future_day_all
    Save daily bar data (all contracts, including single-month contracts).
    :param client:
    :param ui_log: for the Qt GUI
    :param ui_progress: for the Qt GUI
    :param ui_progress_int_value: for the Qt GUI
    :return:
    '''
    future_list = QA_fetch_get_future_list().code.unique().tolist()
    coll_future_day = client.future_day
    coll_future_day.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_future_day):
        try:
            QA_util_log_info('##JOB12 Now Saving Future_DAY==== {}'.format(str(code)), ui_log)
            # First check whether the database already has data for this code.
            ref = coll_future_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # If the database already holds this code, continue incrementally.
            # The guard avoids a negative-index error for freshly listed contracts.
            if ref.count() > 0:
                # resume from the date of the last fetch
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), QA_util_get_next_day(start_date), end_date)
                        )
                    )
            # No data for this code yet: download the full history (from 2001-01-01).
            else:
                start_date = '2001-01-01'
                QA_util_log_info('UPDATE_Future_DAY \n Trying updating {} from {} to {}'.format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_future_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_future_day(str(code), start_date, end_date)
                        )
                    )
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(future_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(future_list)))
        strProgressToLog = 'DOWNLOAD PROGRESS {} {}'.format(str(float(item / len(future_list) * 100))[0:4] + '%', ui_log)
        intProgressToLog = int(float(item / len(future_list) * 100))
        QA_util_log_info(strProgressToLog, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intProgressToLog)
        __saving_work(future_list[item], coll_future_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save future day ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)

def QA_SU_save_single_future_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single_future_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    coll = client.future_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Future_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_future_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save future_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    future_list = [
        item for item in QA_fetch_get_future_list().code.unique().tolist()
        if str(item)[-2:] in ['L8', 'L9']
    ]
    coll = client.future_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Future_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, future_list[i_], coll)
        for i_ in range(len(future_list))
    }  # one task per contract
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(future_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(future_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(future_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
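# --- editor's note (not part of the original file): the ThreadPoolExecutor
# fan-out used by the *_min savers is never shut down. A sketch of the same
# pattern wrapped in a context manager, which joins the workers even if the
# progress loop raises (names reuse the listing's; the wrapper is ours):
def _fan_out_sketch(__saving_work, future_list, coll, ui_log=None):
    with ThreadPoolExecutor(max_workers=4) as executor:  # shutdown() guaranteed
        futures = {
            executor.submit(__saving_work, future_list[i_], coll)
            for i_ in range(len(future_list))
        }
        for count, _ in enumerate(concurrent.futures.as_completed(futures)):
            QA_util_log_info('The {} of Total {}'.format(count, len(future_list)), ui_log=ui_log)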
def QA_SU_save_future_min_all(client=DATABASE, ui_log=None, ui_progress=None):
    """save future_min_all (all contracts, including single-month contracts)
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    future_list = QA_fetch_get_future_list().code.unique().tolist()
    coll = client.future_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB13 Now Saving Future_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB13.{} Now Saving Future {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_future_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, future_list[i_], coll)
        for i_ in range(len(future_list))
    }  # one task per contract
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(future_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(future_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(future_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_single_bond_day(code: str, client=DATABASE, ui_log=None):
    """save bond_day
    Keyword Arguments:
        code : single bond code
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __bond_list = QA_fetch_get_stock_list('bond')
    coll = client.bond_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving BOND_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_bond_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving BOND_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_bond_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_bond_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save bond_day
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __bond_list = QA_fetch_get_bond_list()
    coll = client.bond_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving BOND_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_bond_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving BOND_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_bond_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    for i_ in range(len(__bond_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(__bond_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(__bond_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(__bond_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(__bond_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_bond_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save bond_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __bond_list = QA_fetch_get_bond_list()
    coll = client.bond_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving BOND_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_bond_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_bond_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __bond_list.index[i_][0], coll)
        for i_ in range(len(__bond_list))
    }  # one task per bond
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__bond_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__bond_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__bond_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_single_bond_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single bond_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __bond_list = QA_fetch_get_stock_list('bond')
    __bond_list = [code]
    coll = client.bond_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving BOND_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_bond_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_bond_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __bond_list[i_], coll)
        for i_ in range(len(__bond_list))
    }  # one task per bond
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__bond_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__bond_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__bond_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_bond_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save bond_list
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    try:
        QA_util_log_info(
            '##JOB16 Now Saving BOND_LIST ====',
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000
        )
        bond_list_from_tdx = QA_fetch_get_bond_list()
        pandas_data = QA_util_to_json_from_pandas(bond_list_from_tdx)
        if len(pandas_data) > 0:
            # only drop the collection once new data has actually been fetched
            client.drop_collection('bond_list')
            coll = client.bond_list
            coll.create_index('code')
            coll.insert_many(pandas_data)
        QA_util_log_info(
            "bond list download complete",
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_bond_list exception!")
        pass

###############################################################
# HKSTOCK
###############################################################

def QA_SU_save_single_hkstock_day(code: str, client=DATABASE, ui_log=None):
    """save hkstock_day
    Keyword Arguments:
        code : single hkstock code
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __hkstock_list = QA_fetch_get_hkstock_list()
    coll = client.hkstock_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving HKSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_hkstock_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving HKSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_hkstock_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_hkstock_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save hkstock_day
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __hkstock_list = QA_fetch_get_hkstock_list().code.unique().tolist()
    coll = client.hkstock_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving HKSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_hkstock_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving HKSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_hkstock_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    for i_ in range(len(__hkstock_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(__hkstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(__hkstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(__hkstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(__hkstock_list[i_], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_hkstock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save hkstock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __hkstock_list = QA_fetch_get_hkstock_list().code.unique().tolist()
    coll = client.hkstock_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving HKSTOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_hkstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_hkstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __hkstock_list[i_], coll)
        for i_ in range(len(__hkstock_list))
    }  # one task per code
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__hkstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__hkstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__hkstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_single_hkstock_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single hkstock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __bond_list = QA_fetch_get_stock_list('bond')
    __hkstock_list = [code]
    coll = client.hkstock_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving HKSTOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_hkstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_hkstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __hkstock_list[i_], coll)
        for i_ in range(len(__hkstock_list))
    }  # one task per code
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__hkstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__hkstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__hkstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_hkstock_list(client=DATABASE, ui_log=None, ui_progress=None):
    """save hkstock_list
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    try:
        QA_util_log_info(
            '##JOB16 Now Saving HKSTOCK_LIST ====',
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=5000
        )
        hkstock_list_from_tdx = QA_fetch_get_hkstock_list()
        pandas_data = QA_util_to_json_from_pandas(hkstock_list_from_tdx)
        if len(pandas_data) > 0:
            # only drop the collection once new data has actually been fetched
            client.drop_collection('hkstock_list')
            coll = client.hkstock_list
            coll.create_index('code')
            coll.insert_many(pandas_data)
        QA_util_log_info(
            "HKSTOCK list download complete",
            ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=10000
        )
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_hkstock_list exception!")
        pass

###############################################################
# USSTOCK
###############################################################

def QA_SU_save_single_usstock_day(code: str, client=DATABASE, ui_log=None):
    """save usstock_day
    Keyword Arguments:
        code : single usstock code
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __hkstock_list = QA_fetch_get_hkstock_list()
    coll = client.usstock_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            # the longest code in the current US-stock list is 7 characters
            ref_ = coll.find({'code': str(code)[0:7]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB08 Now Saving USSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_usstock_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB08 Now Saving USSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_usstock_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_usstock_day(client=DATABASE, ui_log=None, ui_progress=None):
    """save usstock_day
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __usstock_list = QA_fetch_get_usstock_list().code.unique().tolist()
    coll = client.usstock_day
    coll.create_index([('code', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            # the longest code in the current US-stock list is 7 characters
            ref_ = coll.find({'code': str(code)[0:7]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB08 Now Saving USSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_usstock_day(str(code), QA_util_get_next_day(start_time), end_time)
                        )
                    )
            else:
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB08 Now Saving USSTOCK_DAY==== \n Trying updating {} from {} to {}'.format(code, start_time, end_time),
                    ui_log=ui_log
                )
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_usstock_day(str(code), start_time, end_time)
                        )
                    )
        except Exception:
            err.append(str(code))

    for i_ in range(len(__usstock_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(__usstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(i_ / len(__usstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(__usstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        __saving_work(__usstock_list[i_], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_usstock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save usstock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __usstock_list = QA_fetch_get_usstock_list().code.unique().tolist()
    coll = client.usstock_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB09 Now Saving USSTOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB09.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_usstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB09.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_usstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __usstock_list[i_], coll)
        for i_ in range(len(__usstock_list))
    }  # one task per code
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__usstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__usstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__usstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)

def QA_SU_save_single_usstock_min(code: str, client=DATABASE, ui_log=None, ui_progress=None):
    """save single usstock_min
    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    # __bond_list = QA_fetch_get_stock_list('bond')
    __usstock_list = [code]
    coll = client.usstock_min
    coll.create_index(
        [('code', pymongo.ASCENDING), ('time_stamp', pymongo.ASCENDING), ('date_stamp', pymongo.ASCENDING)]
    )
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving USSTOCK_MIN ==== {}'.format(str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB09.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_usstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data[1::]))
                else:
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB09.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min', '60min'].index(type),
                            str(code), start_time, end_time, type
                        ),
                        ui_log=ui_log
                    )
                    if start_time != end_time:
                        __data = QA_fetch_get_usstock_min(str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(QA_util_to_json_from_pandas(__data))
        except Exception:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    res = {
        executor.submit(__saving_work, __usstock_list[i_], coll)
        for i_ in range(len(__usstock_list))
    }  # one task per code
    count = 1
    for _ in concurrent.futures.as_completed(res):
        QA_util_log_info('The {} of Total {}'.format(count, len(__usstock_list)), ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(str(float(count / len(__usstock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(count / len(__usstock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log, ui_progress=ui_progress, ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
len(pandas_data) > 0:6935 # 获取到数据后才进行drop collection 操作6936 client.drop_collection('usstock_list')6937 coll = client.usstock_list6938 coll.create_index('code')6939 coll.insert_many(pandas_data)6940 QA_util_log_info(6941 "完成USSTOCK列表获取",6942 ui_log=ui_log,6943 ui_progress=ui_progress,6944 ui_progress_int_value=100006945 )6946 except Exception as e:6947 QA_util_log_info(e, ui_log=ui_log)6948 print(" Error save_tdx.QA_SU_save_usstock_list exception!")6949 pass6950if __name__ == '__main__':6951 # QA_SU_save_stock_day()6952 # QA_SU_save_stock_xdxr()6953 # QA_SU_save_stock_min()6954 # QA_SU_save_stock_transaction()6955 # QA_SU_save_index_day()6956 # QA_SU_save_stock_list()6957 # QA_SU_save_index_min()6958 # QA_SU_save_index_list()6959 # QA_SU_save_future_list()6960 #QA_SU_save_future_day()6961 #QA_SU_save_future_min()6962 # QA_SU_save_hkstock_list()6963 # QA_SU_save_hkstock_day()6964 #QA_SU_save_hkstock_min()6965 #QA_SU_save_single_hkstock_min(code="00338")6966 # QA_SU_save_usstock_list()6967 # QA_SU_save_single_usstock_day(code ='YDEC')6968 # QA_SU_save_usstock_day()...
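For orientation, these savers are typically driven list-first, then bars, mirroring the commented-out __main__ block above. A minimal driver sketch, assuming QUANTAXIS is installed, a MongoDB instance is reachable through DATABASE, and the module path QUANTAXIS.QASU.save_tdx (the path is an assumption, not shown in this excerpt):

    # hypothetical driver script; module path and run order are assumptions
    from QUANTAXIS.QASU.save_tdx import (
        QA_SU_save_usstock_list,
        QA_SU_save_usstock_day,
        QA_SU_save_usstock_min,
    )

    if __name__ == '__main__':
        QA_SU_save_usstock_list()   # refresh the usstock_list collection first
        QA_SU_save_usstock_day()    # then daily bars for every listed code
        QA_SU_save_usstock_min()    # then 1/5/15/30/60-minute bars

Each saver follows the same incremental pattern: find the last record in MongoDB, fetch only the missing date range from TDX, and insert_many the delta.
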


bounds.py

Source:bounds.py Github


...
        tf.summary.scalar("weight_entropy/%d" % t, weight_entropy)
      log_weights.append(log_weight_acc)

  # Compute the lower bound on the log evidence.
  log_p_hat = (tf.reduce_logsumexp(log_weight_acc, axis=0) -
               tf.log(tf.cast(num_samples, observation.dtype))) / num_timesteps
  loss = -tf.reduce_mean(log_p_hat)
  losses = [Loss("log_p_hat", loss)]
  # we clip off the initial state before returning.
  # there are no emas for iwae, so we return a noop for that
  return log_p_hat, losses, tf.no_op(), states[1:], log_weights


def multinomial_resampling(log_weights, states, n, b):
  """Resample states with multinomial resampling.

  Args:
    log_weights: A (n x b) Tensor representing a batch of b logits for n-ary
      Categorical distribution.
    states: A list of (b*n x d) Tensors that will be resampled from the groups
      of every n-th row.

  Returns:
    resampled_states: A list of (b*n x d) Tensors resampled via multinomial sampling.
    log_probs: A (n x b) Tensor of the log probabilities of the ancestry decisions.
    resampling_parameters: The Tensor of parameters of the resampling distribution.
    ancestors: An (n x b) Tensor of integral indices representing the ancestry decisions.
    resampling_dist: The distribution object for resampling.
  """
  log_weights = tf.convert_to_tensor(log_weights)
  states = [tf.convert_to_tensor(state) for state in states]

  resampling_parameters = tf.transpose(log_weights, perm=[1, 0])
  resampling_dist = tf.contrib.distributions.Categorical(logits=resampling_parameters)
  ancestors = tf.stop_gradient(resampling_dist.sample(sample_shape=n))
  log_probs = resampling_dist.log_prob(ancestors)

  offset = tf.expand_dims(tf.range(b), 0)
  ancestor_inds = tf.reshape(ancestors * b + offset, [-1])

  resampled_states = []
  for state in states:
    resampled_states.append(tf.gather(state, ancestor_inds))
  return resampled_states, log_probs, resampling_parameters, ancestors, resampling_dist


def stratified_resampling(log_weights, states, n, b):
  """Resample states with stratified resampling.

  Args:
    log_weights: A (n x b) Tensor representing a batch of b logits for n-ary
      Categorical distribution.
    states: A list of (b*n x d) Tensors that will be resampled from the groups
      of every n-th row.

  Returns:
    resampled_states: A list of (b*n x d) Tensors resampled via stratified sampling.
    log_probs: A (n x b) Tensor of the log probabilities of the ancestry decisions.
    resampling_parameters: The Tensor of parameters of the resampling distribution.
    ancestors: An (n x b) Tensor of integral indices representing the ancestry decisions.
    resampling_dist: The distribution object for resampling.
  """
  log_weights = tf.convert_to_tensor(log_weights)
  states = [tf.convert_to_tensor(state) for state in states]

  log_weights = tf.transpose(log_weights, perm=[1, 0])

  probs = tf.nn.softmax(
      tf.tile(tf.expand_dims(log_weights, axis=1), [1, n, 1])
  )

  cdfs = tf.concat([tf.zeros((b, n, 1), dtype=probs.dtype),
                    tf.cumsum(probs, axis=2)], 2)

  bins = tf.range(n, dtype=probs.dtype) / n
  bins = tf.tile(tf.reshape(bins, [1, -1, 1]), [b, 1, n + 1])

  strat_cdfs = tf.minimum(tf.maximum((cdfs - bins) * n, 0.0), 1.0)
  resampling_parameters = strat_cdfs[:, :, 1:] - strat_cdfs[:, :, :-1]

  resampling_dist = tf.contrib.distributions.Categorical(
      probs=resampling_parameters,
      allow_nan_stats=False)

  ancestors = tf.stop_gradient(resampling_dist.sample())
  log_probs = resampling_dist.log_prob(ancestors)

  ancestors = tf.transpose(ancestors, perm=[1, 0])
  log_probs = tf.transpose(log_probs, perm=[1, 0])

  offset = tf.expand_dims(tf.range(b), 0)
  ancestor_inds = tf.reshape(ancestors * b + offset, [-1])

  resampled_states = []
  for state in states:
    resampled_states.append(tf.gather(state, ancestor_inds))
  return resampled_states, log_probs, resampling_parameters, ancestors, resampling_dist


def systematic_resampling(log_weights, states, n, b):
  """Resample states with systematic resampling.

  Args:
    log_weights: A (n x b) Tensor representing a batch of b logits for n-ary
      Categorical distribution.
    states: A list of (b*n x d) Tensors that will be resampled from the groups
      of every n-th row.

  Returns:
    resampled_states: A list of (b*n x d) Tensors resampled via systematic sampling.
    log_probs: A (n x b) Tensor of the log probabilities of the ancestry decisions.
    resampling_parameters: The Tensor of parameters of the resampling distribution.
    ancestors: An (n x b) Tensor of integral indices representing the ancestry decisions.
    resampling_dist: The distribution object for resampling.
  """
  log_weights = tf.convert_to_tensor(log_weights)
  states = [tf.convert_to_tensor(state) for state in states]

  log_weights = tf.transpose(log_weights, perm=[1, 0])

  probs = tf.nn.softmax(
      tf.tile(tf.expand_dims(log_weights, axis=1), [1, n, 1])
  )

  cdfs = tf.concat([tf.zeros((b, n, 1), dtype=probs.dtype),
                    tf.cumsum(probs, axis=2)], 2)

  bins = tf.range(n, dtype=probs.dtype) / n
  bins = tf.tile(tf.reshape(bins, [1, -1, 1]), [b, 1, n + 1])

  strat_cdfs = tf.minimum(tf.maximum((cdfs - bins) * n, 0.0), 1.0)
  resampling_parameters = strat_cdfs[:, :, 1:] - strat_cdfs[:, :, :-1]

  resampling_dist = tf.contrib.distributions.Categorical(
      probs=resampling_parameters,
      allow_nan_stats=True)

  U = tf.random_uniform((b, 1, 1), dtype=probs.dtype)

  ancestors = tf.stop_gradient(
      tf.reduce_sum(tf.to_float(U > strat_cdfs[:, :, 1:]), axis=-1))
  log_probs = resampling_dist.log_prob(ancestors)

  ancestors = tf.transpose(ancestors, perm=[1, 0])
  log_probs = tf.transpose(log_probs, perm=[1, 0])

  offset = tf.expand_dims(tf.range(b, dtype=probs.dtype), 0)
  ancestor_inds = tf.reshape(ancestors * b + offset, [-1])

  resampled_states = []
  for state in states:
    resampled_states.append(tf.gather(state, ancestor_inds))
  return resampled_states, log_probs, resampling_parameters, ancestors, resampling_dist


def log_blend(inputs, weights):
  """Blends state in the log space.

  Args:
    inputs: A set of scalar states, one for each particle in each particle filter.
      Should be [num_samples, batch_size].
    weights: A set of weights used to blend the state. Each set of weights
      should be of dimension [num_samples] (one weight for each previous particle).
      There should be one set of weights for each new particle in each particle filter.
      Thus the shape should be [num_samples, batch_size, num_samples] where
      the first axis indexes new particle and the last axis indexes old particles.

  Returns:
    blended: The blended states, a tensor of shape [num_samples, batch_size].
  """
  raw_max = tf.reduce_max(inputs, axis=0, keepdims=True)
  my_max = tf.stop_gradient(
      tf.where(tf.is_finite(raw_max), raw_max, tf.zeros_like(raw_max))
  )
  # Don't ask.
  blended = tf.log(tf.einsum("ijk,kj->ij", weights, tf.exp(inputs - raw_max))) + my_max
  return blended


def relaxed_resampling(log_weights, states, num_samples, batch_size,
                       log_r_x=None, blend_type="log", temperature=0.5,
                       straight_through=False):
  """Resample states with relaxed resampling.

  Args:
    log_weights: A (n x b) Tensor representing a batch of b logits for n-ary
      Categorical distribution.
    states: A list of (b*n x d) Tensors that will be resampled from the groups
      of every n-th row.

  Returns:
    resampled_states: A list of (b*n x d) Tensors resampled via relaxed sampling.
    log_probs: A (n x b) Tensor of the log probabilities of the ancestry decisions.
    resampling_parameters: The Tensor of parameters of the resampling distribution.
    ancestors: An (n x b x n) Tensor of relaxed one hot representations of the ancestry decisions.
    resampling_dist: The distribution object for resampling.
  """
  assert blend_type in ["log", "linear"], "Blend type must be 'log' or 'linear'."
  log_weights = tf.convert_to_tensor(log_weights)
  states = [tf.convert_to_tensor(state) for state in states]
  state_dim = states[0].get_shape().as_list()[-1]
  # weights are num_samples by batch_size, so we transpose to get a
  # set of batch_size distributions over [0, num_samples).
  resampling_parameters = tf.transpose(log_weights, perm=[1, 0])
  resampling_dist = tf.contrib.distributions.RelaxedOneHotCategorical(
      temperature,
      logits=resampling_parameters)

  # sample num_samples samples from the distribution, resulting in a
  # [num_samples, batch_size, num_samples] Tensor that represents a set of
  # [num_samples, batch_size] blending weights. The dimensions represent
  # [sample index, batch index, blending weight index]
  ancestors = resampling_dist.sample(sample_shape=num_samples)
  if straight_through:
    # Forward pass discrete choices, backwards pass soft choices
    hard_ancestor_indices = tf.argmax(ancestors, axis=-1)
    hard_ancestors = tf.one_hot(hard_ancestor_indices, num_samples,
                                dtype=ancestors.dtype)
    ancestors = tf.stop_gradient(hard_ancestors - ancestors) + ancestors
  log_probs = resampling_dist.log_prob(ancestors)

  if log_r_x is not None and blend_type == "log":
    log_r_x = tf.reshape(log_r_x, [num_samples, batch_size])
    log_r_x = log_blend(log_r_x, ancestors)
    log_r_x = tf.reshape(log_r_x, [num_samples*batch_size])
  elif log_r_x is not None and blend_type == "linear":
    # If blend type is linear just add log_r to the states that will be blended
    # linearly.
    states.append(log_r_x)

  # transpose the 'indices' to be [batch_index, blending weight index, sample index]
  ancestor_inds = tf.transpose(ancestors, perm=[1, 2, 0])

  resampled_states = []
  for state in states:
    # state is currently [num_samples * batch_size, state_dim] so we reshape
    # to [num_samples, batch_size, state_dim] and then transpose to
    # [batch_size, state_size, num_samples]
    state = tf.transpose(tf.reshape(state, [num_samples, batch_size, -1]), perm=[1, 2, 0])
    # state is now (batch_size, state_size, num_samples)
    # and ancestor is (batch index, blending weight index, sample index)
    # multiplying these gives a matrix of size [batch_size, state_size, num_samples]
    next_state = tf.matmul(state, ancestor_inds)
    # transpose the state to be [num_samples, batch_size, state_size]
    # and then reshape it to match the state format.
    next_state = tf.reshape(tf.transpose(next_state, perm=[2, 0, 1]),
                            [num_samples*batch_size, state_dim])
    resampled_states.append(next_state)

  new_dist = tf.contrib.distributions.Categorical(
      logits=resampling_parameters)

  if log_r_x is not None and blend_type == "linear":
    # If blend type is linear pop off log_r that we added to the states.
    log_r_x = tf.squeeze(resampled_states[-1])
    resampled_states = resampled_states[:-1]

  return resampled_states, log_probs, log_r_x, resampling_parameters, ancestors, new_dist


def fivo(model,
         observation,
         num_timesteps,
         resampling_schedule,
         num_samples=1,
         use_resampling_grads=True,
         resampling_type="multinomial",
         resampling_temperature=0.5,
         aux=True,
         summarize=False):
  """Compute the FIVO evidence lower bound.

  Args:
    model: A callable that computes one timestep of the model.
    observation: A shape [batch_size*num_samples, state_size] Tensor
      containing z_n, the observation for each sequence in the batch.
    num_timesteps: The number of timesteps in each sequence, an integer.
    resampling_schedule: A list of booleans of length num_timesteps, contains
      True if a resampling should occur on a specific timestep.
    num_samples: The number of samples to use to compute the IWAE bound.
    use_resampling_grads: Whether or not to include the resampling gradients
      in loss.
    resampling_type: The type of resampling, one of "multinomial", "stratified",
      "relaxed-logblend", "relaxed-linearblend", "relaxed-stateblend", or
      "systematic".
    resampling_temperature: A positive temperature only used for relaxed
      resampling.
    aux: If true, compute the FIVO-AUX bound.

  Returns:
    log_p_hat: The IWAE estimator of the lower bound on the log marginal.
    loss: A tensor that you can perform gradient descent on to optimize the
      bound.
    maintain_ema_op: An op to update the baseline ema used for the resampling
      gradients.
    states: The sequence of states sampled.
  """
  # Initialization
  num_instances = tf.cast(tf.shape(observation)[0], tf.int32)
  batch_size = tf.cast(num_instances / num_samples, tf.int32)
  states = [model.zero_state(num_instances)]
  prev_state = states[0]
  log_weight_acc = tf.zeros(shape=[num_samples, batch_size], dtype=observation.dtype)
  prev_log_r_zt = tf.zeros([num_instances], dtype=observation.dtype)
  log_weights = []
  log_weights_all = []
  log_p_hats = []
  resampling_log_probs = []

  for t in xrange(num_timesteps):
    # run the model for one timestep
    (zt, log_q_zt, log_p_zt, log_p_x_given_z, log_r_zt) = model(
        prev_state, observation, t)
    # update accumulators
    states.append(zt)
    log_weight = log_p_zt + log_p_x_given_z - log_q_zt
    if aux:
      if t == num_timesteps - 1:
        log_weight -= prev_log_r_zt
      else:
        log_weight += log_r_zt - prev_log_r_zt
      prev_log_r_zt = log_r_zt
    log_weight_acc += tf.reshape(log_weight, [num_samples, batch_size])
    log_weights_all.append(log_weight_acc)

    if resampling_schedule[t]:
      # These objects will be resampled
      to_resample = [states[-1]]
      if aux and "relaxed" not in resampling_type:
        to_resample.append(prev_log_r_zt)

      # do the resampling
      if resampling_type == "multinomial":
        (resampled,
         resampling_log_prob,
         _, _, _) = multinomial_resampling(log_weight_acc,
                                           to_resample,
                                           num_samples,
                                           batch_size)
      elif resampling_type == "stratified":
        (resampled,
         resampling_log_prob,
         _, _, _) = stratified_resampling(log_weight_acc,
                                          to_resample,
                                          num_samples,
                                          batch_size)
      elif resampling_type == "systematic":
        (resampled,
         resampling_log_prob,
         _, _, _) = systematic_resampling(log_weight_acc,
                                          to_resample,
                                          num_samples,
                                          batch_size)
      elif "relaxed" in resampling_type:
        if aux:
          if resampling_type == "relaxed-logblend":
            (resampled,
             resampling_log_prob,
             prev_log_r_zt,
             _, _, _) = relaxed_resampling(log_weight_acc,
                                           to_resample,
                                           num_samples,
                                           batch_size,
                                           temperature=resampling_temperature,
                                           log_r_x=prev_log_r_zt,
                                           blend_type="log")
          elif resampling_type == "relaxed-linearblend":
            (resampled,
             resampling_log_prob,
             prev_log_r_zt,
             _, _, _) = relaxed_resampling(log_weight_acc,
                                           to_resample,
                                           num_samples,
                                           batch_size,
                                           temperature=resampling_temperature,
                                           log_r_x=prev_log_r_zt,
                                           blend_type="linear")
          elif resampling_type == "relaxed-stateblend":
            (resampled,
             resampling_log_prob,
             _, _, _, _) = relaxed_resampling(log_weight_acc,
                                              to_resample,
                                              num_samples,
                                              batch_size,
                                              temperature=resampling_temperature)
            # Calculate prev_log_r_zt from the post-resampling state
            prev_r_zt = model.r.r_xn(resampled[0], t)
            prev_log_r_zt = tf.reduce_sum(
                prev_r_zt.log_prob(observation), axis=[1])
          elif resampling_type == "relaxed-stateblend-st":
            (resampled,
             resampling_log_prob,
             _, _, _, _) = relaxed_resampling(log_weight_acc,
                                              to_resample,
                                              num_samples,
                                              batch_size,
                                              temperature=resampling_temperature,
                                              straight_through=True)
            # Calculate prev_log_r_zt from the post-resampling state
            prev_r_zt = model.r.r_xn(resampled[0], t)
            prev_log_r_zt = tf.reduce_sum(
                prev_r_zt.log_prob(observation), axis=[1])
        else:
          (resampled,
           resampling_log_prob,
           _, _, _, _) = relaxed_resampling(log_weight_acc,
                                            to_resample,
                                            num_samples,
                                            batch_size,
                                            temperature=resampling_temperature)
      #if summarize:
      #  resampling_entropy = resampling_dist.entropy()
      #  resampling_entropy = tf.reduce_mean(resampling_entropy)
      #  tf.summary.scalar("weight_entropy/%d" % t, resampling_entropy)

      resampling_log_probs.append(tf.reduce_sum(resampling_log_prob, axis=0))
      prev_state = resampled[0]
      if aux and "relaxed" not in resampling_type:
        # Squeeze out the extra dim potentially added by resampling.
        # prev_log_r_zt should always be [num_instances]
        prev_log_r_zt = tf.squeeze(resampled[1])
      # Update the log p hat estimate, taking a log sum exp over the sample
      # dimension. The appended tensor is [batch_size].
      log_p_hats.append(
          tf.reduce_logsumexp(log_weight_acc, axis=0) - tf.log(
              tf.cast(num_samples, dtype=observation.dtype)))
      # reset the weights
      log_weights.append(log_weight_acc)
      log_weight_acc = tf.zeros_like(log_weight_acc)
    else:
      prev_state = states[-1]

  # Compute the final weight update. If we just resampled this will be zero.
  final_update = (tf.reduce_logsumexp(log_weight_acc, axis=0) -
                  tf.log(tf.cast(num_samples, dtype=observation.dtype)))
  # If we ever resampled, then sum up the previous log p hat terms
  if len(log_p_hats) > 0:
    log_p_hat = tf.reduce_sum(log_p_hats, axis=0) + final_update
  else:  # otherwise, log_p_hat only comes from the final update
    log_p_hat = final_update

  if use_resampling_grads and any(resampling_schedule):
    # compute the rewards
    # cumsum([a, b, c]) => [a, a+b, a+b+c]
    # learning signal at timestep t is
    #   [sum from i=t+1 to T of log_p_hat_i for t=1:T]
    # so we will compute (sum from i=1 to T of log_p_hat_i)
    # and at timestep t will subtract off (sum from i=1 to t of log_p_hat_i)
    # rewards is a [num_resampling_events, batch_size] Tensor
    rewards = tf.stop_gradient(
        tf.expand_dims(log_p_hat, 0) - tf.cumsum(log_p_hats, axis=0))
    batch_avg_rewards = tf.reduce_mean(rewards, axis=1)
    # compute ema baseline.
    # centered_rewards is [num_resampling_events, batch_size]
    baseline_ema = tf.train.ExponentialMovingAverage(decay=0.94)
    maintain_baseline_op = baseline_ema.apply([batch_avg_rewards])
    baseline = tf.expand_dims(baseline_ema.average(batch_avg_rewards), 1)
    centered_rewards = rewards - baseline
    if summarize:
      summ.summarize_learning_signal(rewards, "rewards")
      summ.summarize_learning_signal(centered_rewards, "centered_rewards")
    # compute the loss tensor.
    resampling_grads = tf.reduce_sum(
        tf.stop_gradient(centered_rewards) * resampling_log_probs, axis=0)
    losses = [Loss("log_p_hat", -tf.reduce_mean(log_p_hat)/num_timesteps),
              Loss("resampling_grads", -tf.reduce_mean(resampling_grads)/num_timesteps)]
  else:
    losses = [Loss("log_p_hat", -tf.reduce_mean(log_p_hat)/num_timesteps)]
    maintain_baseline_op = tf.no_op()

  log_p_hat /= num_timesteps
  # we clip off the initial state before returning.
  return log_p_hat, losses, maintain_baseline_op, states[1:], log_weights_all


def fivo_aux_td(
    model,
    observation,
    num_timesteps,
    resampling_schedule,
    num_samples=1,
    summarize=False):
  """Compute the FIVO_AUX evidence lower bound."""
  # Initialization
  num_instances = tf.cast(tf.shape(observation)[0], tf.int32)
  batch_size = tf.cast(num_instances / num_samples, tf.int32)
  states = [model.zero_state(num_instances)]
  prev_state = states[0]
  log_weight_acc = tf.zeros(shape=[num_samples, batch_size], dtype=observation.dtype)
  prev_log_r = tf.zeros([num_instances], dtype=observation.dtype)
  # must be pre-resampling
  log_rs = []
  # must be post-resampling
  r_tilde_params = [model.r_tilde.r_zt(states[0], observation, 0)]
  log_r_tildes = []
  log_p_xs = []
  # contains the weight at each timestep before resampling only on resampling timesteps
  log_weights = []
  # contains weight at each timestep before resampling
  log_weights_all = []
  log_p_hats = []

  for t in xrange(num_timesteps):
    # run the model for one timestep
    # zt is state, [num_instances, state_dim]
    # log_q_zt, log_p_x_given_z is [num_instances]
    # r_tilde_mu, r_tilde_sigma is [num_instances, state_dim]
    # p_ztplus1 is a normal distribution on [num_instances, state_dim]
    (zt, log_q_zt, log_p_zt, log_p_x_given_z,
     r_tilde_mu, r_tilde_sigma_sq, p_ztplus1) = model(prev_state, observation, t)

    # Compute the log weight without log r.
    log_weight = log_p_zt + log_p_x_given_z - log_q_zt

    # Compute log r.
    if t == num_timesteps - 1:
      log_r = tf.zeros_like(prev_log_r)
    else:
      p_mu = p_ztplus1.mean()
      p_sigma_sq = p_ztplus1.variance()
      log_r = (tf.log(r_tilde_sigma_sq) -
               tf.log(r_tilde_sigma_sq + p_sigma_sq) -
               tf.square(r_tilde_mu - p_mu)/(r_tilde_sigma_sq + p_sigma_sq))
      log_r = 0.5*tf.reduce_sum(log_r, axis=-1)

    #log_weight += tf.stop_gradient(log_r - prev_log_r)
    log_weight += log_r - prev_log_r
    log_weight_acc += tf.reshape(log_weight, [num_samples, batch_size])

    # Update accumulators
    states.append(zt)
    log_weights_all.append(log_weight_acc)
    log_p_xs.append(log_p_x_given_z)
    log_rs.append(log_r)

    # Compute log_r_tilde as [num_instances] Tensor.
    prev_r_tilde_mu, prev_r_tilde_sigma_sq = r_tilde_params[-1]
    prev_log_r_tilde = -0.5*tf.reduce_sum(
        tf.square(zt - prev_r_tilde_mu)/prev_r_tilde_sigma_sq, axis=-1)
    #tf.square(tf.stop_gradient(zt) - r_tilde_mu)/r_tilde_sigma_sq, axis=-1)
    #tf.square(zt - r_tilde_mu)/r_tilde_sigma_sq, axis=-1)
    log_r_tildes.append(prev_log_r_tilde)

    # optionally resample
    if resampling_schedule[t]:
      # These objects will be resampled
      if t < num_timesteps - 1:
        to_resample = [zt, log_r, r_tilde_mu, r_tilde_sigma_sq]
      else:
        to_resample = [zt, log_r]
      (resampled,
       _, _, _, _) = multinomial_resampling(log_weight_acc,
                                            to_resample,
                                            num_samples,
                                            batch_size)
      prev_state = resampled[0]
      # Squeeze out the extra dim potentially added by resampling.
      # prev_log_r_zt and log_r_tilde should always be [num_instances]
      prev_log_r = tf.squeeze(resampled[1])
      if t < num_timesteps - 1:
        r_tilde_params.append((resampled[2], resampled[3]))
      # Update the log p hat estimate, taking a log sum exp over the sample
      # dimension. The appended tensor is [batch_size].
      log_p_hats.append(
          tf.reduce_logsumexp(log_weight_acc, axis=0) - tf.log(
              tf.cast(num_samples, dtype=observation.dtype)))
      # reset the weights
      log_weights.append(log_weight_acc)
      log_weight_acc = tf.zeros_like(log_weight_acc)
    else:
      prev_state = zt
      prev_log_r = log_r
      if t < num_timesteps - 1:
        r_tilde_params.append((r_tilde_mu, r_tilde_sigma_sq))

  # Compute the final weight update. If we just resampled this will be zero.
  final_update = (tf.reduce_logsumexp(log_weight_acc, axis=0) -
                  tf.log(tf.cast(num_samples, dtype=observation.dtype)))
  # If we ever resampled, then sum up the previous log p hat terms
  if len(log_p_hats) > 0:
    log_p_hat = tf.reduce_sum(log_p_hats, axis=0) + final_update
  else:  # otherwise, log_p_hat only comes from the final update
    log_p_hat = final_update

  # Compute the bellman loss.
  # Will remove the first timestep as it is not used.
  # log p(x_t|z_t) is in row t-1.
  log_p_x = tf.reshape(tf.stack(log_p_xs),
                       [num_timesteps, num_samples, batch_size])
  # log r_t is contained in row t-1.
  # last column is zeros (because at timestep T (num_timesteps) r is 1).
  log_r = tf.reshape(tf.stack(log_rs),
                     [num_timesteps, num_samples, batch_size])
...
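The index arithmetic shared by the three discrete resamplers — ancestors * b + offset — flattens a (particle, batch) pair back into a row of the (b*n x d) state matrix. A hedged NumPy sketch of multinomial resampling under the same layout; the function and variable names here are illustrative and not part of the original module:

    import numpy as np

    def multinomial_resampling_np(log_weights, states, n, b, rng=None):
        """Toy NumPy analogue: log_weights is (n, b); each state is (b*n, d)."""
        rng = rng or np.random.default_rng()
        # Normalize per batch element: softmax over the particle axis.
        w = np.exp(log_weights - log_weights.max(axis=0, keepdims=True))
        w /= w.sum(axis=0, keepdims=True)                              # (n, b)
        # Draw n ancestors per batch element.
        ancestors = np.stack(
            [rng.choice(n, size=n, p=w[:, j]) for j in range(b)], axis=1)  # (n, b)
        # Row k*b + j of a state holds particle k of batch element j, so
        # ancestor index a for batch element j lives at flat row a*b + j.
        flat = (ancestors * b + np.arange(b)[None, :]).reshape(-1)
        return [s[flat] for s in states], ancestors

    # tiny smoke test: 3 particles, 2 batch elements, 1-d state
    lw = np.log(np.array([[0.7, 0.1], [0.2, 0.1], [0.1, 0.8]]))
    state = np.arange(6, dtype=float).reshape(6, 1)
    resampled, anc = multinomial_resampling_np(lw, [state], n=3, b=2)
    print(anc.shape, resampled[0].shape)   # (3, 2) (6, 1)
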


log_stream_test.py

Source:log_stream_test.py Github


#!/usr/bin/env python3
#
#   Copyright 2018 - The Android Open Source Project
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

import logging
import os
import unittest

import mock

from acts import context
from acts.libs.logging import log_stream
from acts.libs.logging.log_stream import AlsoToLogHandler
from acts.libs.logging.log_stream import InvalidStyleSetError
from acts.libs.logging.log_stream import LogStyles
from acts.libs.logging.log_stream import _LogStream


class TestClass(object):
    """Dummy class for TestEvents"""

    def __init__(self):
        self.test_name = self.test_case.__name__

    def test_case(self):
        """Dummy test case for test events."""


class LogStreamTest(unittest.TestCase):
    """Tests the _LogStream class in acts.libs.logging.log_stream."""

    @staticmethod
    def patch(imported_name, *args, **kwargs):
        return mock.patch('acts.libs.logging.log_stream.%s' % imported_name,
                          *args, **kwargs)

    @classmethod
    def setUpClass(cls):
        # logging.log_path only exists if logger._setup_test_logger is called.
        # Here we set it to a value that is likely to not exist so file IO is
        # not executed (an error is raised instead of creating the file).
        logging.log_path = '/f/a/i/l/p/a/t/h'

    def setUp(self):
        log_stream._log_streams = dict()

    # __init__

    @mock.patch('os.makedirs')
    def test_init_adds_null_handler(self, *_):
        """Tests that a NullHandler is added to the logger upon initialization.

        This ensures that no log output is generated when a test class is not
        running.
        """
        debug_monolith_log = LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG
        with self.patch('MovableFileHandler'):
            log = log_stream.create_logger(self._testMethodName,
                                           log_styles=debug_monolith_log)

        self.assertTrue(isinstance(log.handlers[0], logging.NullHandler))

    # __validate_style

    @mock.patch('os.makedirs')
    def test_validate_styles_raises_when_same_location_set_multiple_times(
            self, *_):
        """Tests that a style is invalid if it sets the same handler twice.

        If the error is NOT raised, then a LogStream can create a Logger that
        has multiple LogHandlers trying to write to the same file.
        """
        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(
                self._testMethodName,
                log_styles=[LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])

        self.assertTrue(
            'has been set multiple' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

    @mock.patch('os.makedirs')
    def test_validate_styles_raises_when_multiple_file_outputs_set(self, *_):
        """Tests that a style is invalid if more than one of MONOLITH_LOG,
        TESTCLASS_LOG, and TESTCASE_LOG is set for the same log level.

        If the error is NOT raised, then a LogStream can create a Logger that
        has multiple LogHandlers trying to write to the same file.
        """
        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(
                self._testMethodName,
                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG])

        self.assertTrue(
            'More than one of' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(
                self._testMethodName,
                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])

        self.assertTrue(
            'More than one of' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(
                self._testMethodName,
                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])

        self.assertTrue(
            'More than one of' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

    @mock.patch('os.makedirs')
    def test_validate_styles_raises_when_no_level_exists(self, *_):
        """Tests that a style is invalid if it does not contain a log level.

        If the style does not contain a log level, then there is no way to
        pass the information coming from the logger to the correct file.
        """
        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(self._testMethodName,
                                     log_styles=[LogStyles.MONOLITH_LOG])

        self.assertTrue(
            'log level' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

    @mock.patch('os.makedirs')
    def test_validate_styles_raises_when_no_location_exists(self, *_):
        """Tests that a style is invalid if it does not contain a log location.

        If the style does not contain a log location, then there is no way to
        pass the information coming from the logger to the correct file.
        """
        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(self._testMethodName,
                                     log_styles=[LogStyles.LOG_INFO])

        self.assertTrue(
            'log location' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

    @mock.patch('os.makedirs')
    def test_validate_styles_raises_when_rotate_logs_no_file_handler(self, *_):
        """Tests that a LogStyle cannot set ROTATE_LOGS without a *_LOG flag.

        If the LogStyle contains ROTATE_LOGS, it must be associated with a log
        that is rotatable. TO_ACTS_LOG and TO_STDOUT are not rotatable logs,
        since those are both controlled by another object/process. The user
        must specify MONOLITHIC_LOG or TESTCASE_LOG.
        """
        with self.assertRaises(InvalidStyleSetError) as catch:
            log_stream.create_logger(
                self._testMethodName,
                # Added LOG_DEBUG here to prevent the no_level_exists raise
                # from occurring.
                log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS])

        self.assertTrue(
            'log type' in catch.exception.args[0],
            msg='__validate_styles did not raise the expected error message')

    # __handle_style

    @mock.patch('os.makedirs')
    def test_handle_style_to_acts_log_creates_handler(self, *_):
        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG

        log = log_stream.create_logger(self._testMethodName,
                                       log_styles=info_acts_log)

        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))

    @mock.patch('os.makedirs')
    def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self, *_):
        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
        that is set to the lowest LogStyles level."""
        info_acts_log = (LogStyles.LOG_DEBUG + LogStyles.LOG_INFO +
                         LogStyles.TO_ACTS_LOG)

        log = log_stream.create_logger(self._testMethodName,
                                       log_styles=info_acts_log)

        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
        self.assertEqual(log.handlers[1].level, logging.DEBUG)

    @mock.patch('os.makedirs')
    def test_handle_style_to_stdout_creates_stream_handler(self, *_):
        """Tests that using the flag TO_STDOUT creates a StreamHandler."""
        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT

        log = log_stream.create_logger(self._testMethodName,
                                       log_styles=info_acts_log)

        self.assertTrue(isinstance(log.handlers[1], logging.StreamHandler))

    @mock.patch('os.makedirs')
    def test_handle_style_creates_file_handler(self, *_):
        """Tests handle_style creates a MovableFileHandler for the MONOLITH_LOG."""
        info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG

        expected = mock.MagicMock()
        with self.patch('MovableFileHandler', return_value=expected):
            log = log_stream.create_logger(self._testMethodName,
                                           log_styles=info_acts_log)

        self.assertEqual(log.handlers[1], expected)

    @mock.patch('os.makedirs')
    def test_handle_style_creates_rotating_file_handler(self, *_):
        """Tests handle_style creates a MovableRotatingFileHandler for ROTATE_LOGS."""
        info_acts_log = (LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS +
                         LogStyles.MONOLITH_LOG)

        expected = mock.MagicMock()
        with self.patch('MovableRotatingFileHandler', return_value=expected):
            log = log_stream.create_logger(self._testMethodName,
                                           log_styles=info_acts_log)

        self.assertEqual(log.handlers[1], expected)

    # __create_rotating_file_handler

    def test_create_rotating_file_handler_does_what_it_says_it_does(self):
        """Tests that __create_rotating_file_handler does exactly that."""
        expected = mock.MagicMock()

        with self.patch('MovableRotatingFileHandler', return_value=expected):
            # Through name-mangling, this function is automatically renamed. See
            # https://docs.python.org/3/tutorial/classes.html#private-variables
            fh = _LogStream._LogStream__create_rotating_file_handler('')

        self.assertEqual(expected, fh,
                         'The function did not return a MovableRotatingFileHandler.')

    # __get_file_handler_creator

    def test_get_file_handler_creator_returns_rotating_file_handler(self):
        """Tests the function returns a MovableRotatingFileHandler when the
        log_style has LogStyle.ROTATE_LOGS."""
        expected = mock.MagicMock()

        with self.patch('_LogStream._LogStream__create_rotating_file_handler',
                        return_value=expected):
            # Through name-mangling, this function is automatically renamed. See
            # https://docs.python.org/3/tutorial/classes.html#private-variables
            fh_creator = _LogStream._LogStream__get_file_handler_creator(
                LogStyles.ROTATE_LOGS)

        self.assertEqual(expected, fh_creator('/d/u/m/m/y/p/a/t/h'),
                         'The function did not return a MovableRotatingFileHandler.')

    def test_get_file_handler_creator_returns_file_handler(self):
        """Tests the function returns a MovableFileHandler when the log_style
        does NOT have LogStyle.ROTATE_LOGS."""
        expected = mock.MagicMock()

        with self.patch('MovableFileHandler', return_value=expected):
            # Through name-mangling, this function is automatically renamed. See
            # https://docs.python.org/3/tutorial/classes.html#private-variables
            handler = _LogStream._LogStream__get_file_handler_creator(
                LogStyles.NONE)()

        self.assertTrue(isinstance(handler, mock.Mock))

    # __get_lowest_log_level

    def test_get_lowest_level_gets_lowest_level(self):
        """Tests __get_lowest_level returns the lowest LogStyle level given."""
        level = _LogStream._LogStream__get_lowest_log_level(
            LogStyles.ALL_LEVELS)
        self.assertEqual(level, LogStyles.LOG_DEBUG)

    # __get_current_output_dir

    @mock.patch('os.makedirs')
    def test_get_current_output_dir_gets_correct_path(self, *_):
        """Tests __get_current_output_dir gets the correct path from the
        context."""
        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG

        base_path = "BASEPATH"
        subcontext = "SUBCONTEXT"
        with self.patch('MovableFileHandler'):
            logstream = log_stream._LogStream(
                self._testMethodName, log_styles=info_monolith_log,
                base_path=base_path, subcontext=subcontext)

        expected = os.path.join(base_path, subcontext)
        self.assertEqual(
            logstream._LogStream__get_current_output_dir(), expected)

    # __create_handler

    @mock.patch('os.makedirs')
    def test_create_handler_creates_handler_at_correct_path(self, *_):
        """Tests that __create_handler calls the handler creator with the
        correct absolute path to the log file.
        """
        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
        base_path = 'BASEPATH'

        with self.patch('MovableFileHandler') as file_handler:
            log_stream.create_logger(
                self._testMethodName, log_styles=info_monolith_log,
                base_path=base_path)
            expected = os.path.join(
                base_path, '%s_%s.txt' % (self._testMethodName, 'info'))
            file_handler.assert_called_with(expected)

    # __remove_handler

    @mock.patch('os.makedirs')
    def test_remove_handler_removes_a_handler(self, *_):
        """Tests that __remove_handler removes the handler from the logger and
        closes the handler.
        """
        dummy_obj = mock.Mock()
        dummy_obj.logger = mock.Mock()
        handler = mock.Mock()
        _LogStream._LogStream__remove_handler(dummy_obj, handler)

        self.assertTrue(dummy_obj.logger.removeHandler.called)
        self.assertTrue(handler.close.called)

    # update_handlers

    @mock.patch('os.makedirs')
    def test_update_handlers_updates_filehandler_target(self, _):
        """Tests that update_handlers invokes the underlying
        MovableFileHandler.set_file method on the correct path.
        """
        info_testclass_log = LogStyles.LOG_INFO + LogStyles.TESTCLASS_LOG
        file_name = 'FILENAME'

        with self.patch('MovableFileHandler'):
            log = log_stream.create_logger(
                self._testMethodName, log_styles=info_testclass_log)
            handler = log.handlers[-1]
            handler.baseFilename = file_name
            stream = log_stream._log_streams[log.name]
            stream._LogStream__get_current_output_dir = (
                lambda: 'BASEPATH/TestClass'
            )

            stream.update_handlers(context.NewTestClassContextEvent())

            handler.set_file.assert_called_with('BASEPATH/TestClass/FILENAME')

    # cleanup

    @mock.patch('os.makedirs')
    def test_cleanup_removes_all_handlers(self, *_):
        """Tests that cleanup removes all handlers in the logger, except
        the NullHandler.
        """
        info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
        with self.patch('MovableFileHandler'):
            log_stream.create_logger(self._testMethodName,
                                     log_styles=info_testcase_log)

        created_log_stream = log_stream._log_streams[self._testMethodName]
        created_log_stream.cleanup()

        self.assertEqual(len(created_log_stream.logger.handlers), 1)


class LogStreamModuleTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # logging.log_path only exists if logger._setup_test_logger is called.
        # Here we set it to a value that is likely to not exist so file IO is
        # not executed (an error is raised instead of creating the file).
        logging.log_path = '/f/a/i/l/p/a/t/h'

    def setUp(self):
        log_stream._log_streams = {}

    # _update_handlers

    @staticmethod
    def create_new_context_event():
        return context.NewContextEvent()

    def test_update_handlers_delegates_calls_to_log_streams(self):
        """Tests _update_handlers calls update_handlers on each log_stream."""
        log_stream._log_streams = {
            'a': mock.Mock(),
            'b': mock.Mock()
        }

        log_stream._update_handlers(self.create_new_context_event())

        self.assertTrue(log_stream._log_streams['a'].update_handlers.called)
        self.assertTrue(log_stream._log_streams['b'].update_handlers.called)

    # _set_logger

    def test_set_logger_overwrites_previous_logger(self):
        """Tests that calling set_logger overwrites the previous logger within
        log_stream._log_streams.
        """
        previous = mock.Mock()
        log_stream._log_streams = {
            'a': previous
        }
        expected = mock.Mock()
        expected.name = 'a'
        log_stream._set_logger(expected)

        self.assertEqual(log_stream._log_streams['a'], expected)


if __name__ == '__main__':
...
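For orientation, the API these tests exercise is driven roughly as follows — a sketch, assuming an ACTS checkout where acts.libs.logging.log_stream is importable and the test runner has configured a writable log path:

    from acts.libs.logging import log_stream
    from acts.libs.logging.log_stream import LogStyles

    # One logger, two sinks: INFO+ to a single monolithic file, and
    # DEBUG+ echoed to stdout. Mixing locations per level is legal;
    # repeating the same file location per level is what raises
    # InvalidStyleSetError in the tests above.
    log = log_stream.create_logger(
        'my_test_logger',
        log_styles=[LogStyles.LOG_INFO | LogStyles.MONOLITH_LOG,
                    LogStyles.LOG_DEBUG | LogStyles.TO_STDOUT])
    log.info('visible in the file and on stdout')
    log.debug('visible on stdout only')
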


log_stream.py

Source:log_stream.py Github


...
    def __init__(self, to_logger=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._log = logging.getLogger(to_logger)

    def emit(self, record):
        self._log.log(record.levelno, record.getMessage())


class MovableFileHandler(FileHandler):
    """FileHandler implementation that allows the output file to be changed
    during operation.
    """

    def set_file(self, file_name):
        """Set the target output file to file_name.

        Args:
            file_name: path to the new output file
        """
        self.baseFilename = os.path.abspath(file_name)
        if self.stream is not None:
            new_stream = self._open()
            # An atomic operation redirects the output and closes the old file
            os.dup2(new_stream.fileno(), self.stream.fileno())
...
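The os.dup2 call is the heart of set_file: it atomically repoints the OS-level file descriptor, so any code still holding the old stream object transparently writes into the new file. A standalone sketch of just that trick (the temp files and variable names are illustrative, not part of the module):

    import os
    import tempfile

    # After dup2, writes through the old descriptor land in the new file.
    old = tempfile.NamedTemporaryFile(mode='w', delete=False)
    new = tempfile.NamedTemporaryFile(mode='w', delete=False)
    os.dup2(new.fileno(), old.fileno())   # old's fd now refers to new's file
    old.write('this line lands in the second file\n')
    old.flush()
    print(open(new.name).read())          # -> this line lands in the second file
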


rdp_accountant.py

Source:rdp_accountant.py Github


...
  a, b = min(logx, logy), max(logx, logy)
  if a == -np.inf:  # adding 0
    return b
  # Use exp(a) + exp(b) = (exp(a - b) + 1) * exp(b)
  return math.log1p(math.exp(a - b)) + b  # log1p(x) = log(x + 1)


def _log_sub(logx, logy):
  """Subtract two numbers in the log space. Answer must be positive."""
  if logy == -np.inf:  # subtracting 0
    return logx
  assert logx > logy
  try:
    # Use exp(x) - exp(y) = (exp(x - y) - 1) * exp(y).
    return math.log(math.expm1(logx - logy)) + logy  # expm1(x) = exp(x) - 1
  except OverflowError:
    return logx


def _log_print(logx):
  """Pretty print."""
  if logx < math.log(sys.float_info.max):
    return "{}".format(math.exp(logx))
  else:
    return "exp({})".format(logx)


def _compute_log_a_int(q, sigma, alpha):
  """Compute log(A_alpha) for integer alpha."""
  assert isinstance(alpha, (int, long))
  # The first and second terms of A_alpha in the log space:
  log_a1, log_a2 = -np.inf, -np.inf

  for i in range(alpha + 1):
    # Compute in the log space. Extra care needed for q = 0 or 1.
    log_coef_i = math.log(special.binom(alpha, i))
    if q > 0:
      log_coef_i += i * math.log(q)
    elif i > 0:
      continue  # The term is 0, skip the rest.

    if q < 1.0:
      log_coef_i += (alpha - i) * math.log(1 - q)
    elif i < alpha:
      continue  # The term is 0, skip the rest.

    s1 = log_coef_i + (i * i - i) / (2.0 * (sigma ** 2))
    s2 = log_coef_i + (i * i + i) / (2.0 * (sigma ** 2))
    log_a1 = _log_add(log_a1, s1)
    log_a2 = _log_add(log_a2, s2)

  log_a = _log_add(math.log(1 - q) + log_a1, math.log(q) + log_a2)
  if FLAGS.rdp_verbose:
    print("A: by binomial expansion {} = {} + {}".format(
        _log_print(log_a),
        _log_print(math.log(1 - q) + log_a1), _log_print(math.log(q) + log_a2)))
  return float(log_a)


def _compute_log_a_frac(q, sigma, alpha):
  """Compute log(A_alpha) for fractional alpha."""
  # The four parts of A_alpha in the log space:
  log_a11, log_a12 = -np.inf, -np.inf
  log_a21, log_a22 = -np.inf, -np.inf
  i = 0

  z0, _ = _compute_zs(sigma, q)

  while True:  # do ... until loop
    coef = special.binom(alpha, i)
    log_coef = math.log(abs(coef))
    j = alpha - i

    log_t1 = log_coef + i * math.log(q) + j * math.log(1 - q)
    log_t2 = log_coef + j * math.log(q) + i * math.log(1 - q)

    log_e11 = math.log(.5) + _log_erfc((i - z0) / (math.sqrt(2) * sigma))
    log_e12 = math.log(.5) + _log_erfc((z0 - j) / (math.sqrt(2) * sigma))
    log_e21 = math.log(.5) + _log_erfc((i - (z0 - 1)) / (math.sqrt(2) * sigma))
    log_e22 = math.log(.5) + _log_erfc((z0 - 1 - j) / (math.sqrt(2) * sigma))

    log_s11 = log_t1 + (i * i - i) / (2 * (sigma ** 2)) + log_e11
    log_s12 = log_t2 + (j * j - j) / (2 * (sigma ** 2)) + log_e12
    log_s21 = log_t1 + (i * i + i) / (2 * (sigma ** 2)) + log_e21
    log_s22 = log_t2 + (j * j + j) / (2 * (sigma ** 2)) + log_e22

    if coef > 0:
      log_a11 = _log_add(log_a11, log_s11)
      log_a12 = _log_add(log_a12, log_s12)
      log_a21 = _log_add(log_a21, log_s21)
      log_a22 = _log_add(log_a22, log_s22)
    else:
      log_a11 = _log_sub(log_a11, log_s11)
      log_a12 = _log_sub(log_a12, log_s12)
      log_a21 = _log_sub(log_a21, log_s21)
      log_a22 = _log_sub(log_a22, log_s22)

    i += 1
    # note: the upstream code checks log_s21 twice here; log_s12 was
    # presumably intended, so the duplicate is corrected below.
    if max(log_s11, log_s12, log_s21, log_s22) < -30:
      break

  log_a = _log_add(
      math.log(1. - q) + _log_add(log_a11, log_a12),
      math.log(q) + _log_add(log_a21, log_a22))
  return log_a


def _compute_log_a(q, sigma, alpha):
  """Compute log(A_alpha) for any positive finite alpha."""
  if float(alpha).is_integer():
    return _compute_log_a_int(q, sigma, int(alpha))
  else:
    return _compute_log_a_frac(q, sigma, alpha)


def _log_erfc(x):
  # Can be replaced with a single call to log_ntdr if available:
  #   return np.log(2.) + special.log_ntdr(-x * 2**.5)
  r = special.erfc(x)
  if r == 0.0:
    # Using the Laurent series at infinity for the tail of the erfc function:
    #   erfc(x) ~ exp(-x^2-.5/x^2+.625/x^4)/(x*pi^.5)
    # To verify in Mathematica:
    #   Series[Log[Erfc[x]] + Log[x] + Log[Pi]/2 + x^2, {x, Infinity, 6}]
    return (-math.log(math.pi) / 2 - math.log(x) - x ** 2 - .5 * x ** -2 +
            .625 * x ** -4 - 37. / 24. * x ** -6 + 353. / 64. * x ** -8)
  else:
    return math.log(r)


def _compute_zs(sigma, q):
  z0 = sigma ** 2 * math.log(1 / q - 1) + .5
  z1 = min(z0 - 2, z0 / 2)
  return z0, z1


def _compute_log_b0(sigma, q, alpha, z1):
  """Return an approximation to log(B0) or None if failed to converge."""
  z0, _ = _compute_zs(sigma, q)
  s, log_term, log_b0, k, sign, max_log_term = 0, 1., 0, 0, 1, -np.inf
  # Keep adding new terms until precision is no longer preserved.
  # Don't stop on the negative.
  while (k < alpha or (log_term > max_log_term - 36 and log_term > -30) or
         sign < 0.):
    log_b1 = k * (k - 2 * z0) / (2 * sigma ** 2)
    log_b2 = _log_erfc((k - z1) / (math.sqrt(2) * sigma))
    log_term = log_b0 + log_b1 + log_b2
    max_log_term = max(max_log_term, log_term)
    s += sign * math.exp(log_term)
    k += 1
    # Maintain invariant: sign * exp(log_b0) = {-alpha choose k}
    log_b0 += math.log(abs(-alpha - k + 1)) - math.log(k)
    sign *= -1

  if s == 0:  # May happen if all terms are < 1e-324.
    return -np.inf
  if s < 0 or math.log(s) < max_log_term - 25:  # The series failed to converge.
    return None
  c = math.log(.5) - math.log(1 - q) * alpha
  return c + math.log(s)


def _bound_log_b1(sigma, q, alpha, z1):
  log_c = _log_add(math.log(1 - q),
                   math.log(q) + (2 * z1 - 1.) / (2 * sigma ** 2))
  return math.log(.5) - log_c * alpha + _log_erfc(z1 / (math.sqrt(2) * sigma))


def _bound_log_b(q, sigma, alpha):
  """Compute a numerically stable bound on log(B_alpha)."""
  if q == 1.:  # If the sampling rate is 100%, A and B are symmetric.
    return _compute_log_a(q, sigma, alpha)
  z0, z1 = _compute_zs(sigma, q)
  log_b_bound = np.inf

  # Puts a lower bound on B1: it cannot be less than its value at z0.
  log_lb_b1 = _bound_log_b1(sigma, q, alpha, z0)

  while z0 - z1 > 1e-3:
    m = (z0 + z1) / 2
    log_b0 = _compute_log_b0(sigma, q, alpha, m)
    if log_b0 is None:
      z0 = m
      continue
    log_b1 = _bound_log_b1(sigma, q, alpha, m)
    log_b_bound = min(log_b_bound, _log_add(log_b0, log_b1))
    log_b_min_bound = _log_add(log_b0, log_lb_b1)
    if (log_b_bound < 0 or
        log_b_min_bound < 0 or
        log_b_bound > log_b_min_bound + .01):
      # If the bound is likely to be too loose, move z1 closer to z0 and repeat.
      z1 = m
    else:
      break

  return log_b_bound


def _log_bound_b_elementary(q, alpha):
  return -math.log(1 - q) * alpha


def _compute_delta(orders, rdp, eps):
  """Compute delta given an RDP curve and target epsilon.

  Args:
    orders: An array (or a scalar) of orders.
    rdp: A list (or a scalar) of RDP guarantees.
    eps: The target epsilon.

  Returns:
    Pair of (delta, optimal_order).

  Raises:
    ValueError: If input is malformed.
  """
  orders_vec = np.atleast_1d(orders)
  rdp_vec = np.atleast_1d(rdp)

  if len(orders_vec) != len(rdp_vec):
    raise ValueError("Input lists must have the same length.")

  deltas = np.exp((rdp_vec - eps) * (orders_vec - 1))
  idx_opt = np.argmin(deltas)
  return min(deltas[idx_opt], 1.), orders_vec[idx_opt]


def _compute_eps(orders, rdp, delta):
  """Compute epsilon given an RDP curve and target delta.

  Args:
    orders: An array (or a scalar) of orders.
    rdp: A list (or a scalar) of RDP guarantees.
    delta: The target delta.

  Returns:
    Pair of (eps, optimal_order).

  Raises:
    ValueError: If input is malformed.
  """
  orders_vec = np.atleast_1d(orders)
  rdp_vec = np.atleast_1d(rdp)

  if len(orders_vec) != len(rdp_vec):
    raise ValueError("Input lists must have the same length.")

  eps = rdp_vec - math.log(delta) / (orders_vec - 1)
  idx_opt = np.nanargmin(eps)  # Ignore NaNs
  return eps[idx_opt], orders_vec[idx_opt]


def _compute_rdp(q, sigma, alpha):
  """Compute RDP of the Gaussian mechanism with sampling at order alpha.

  Args:
    q: The sampling rate.
    sigma: The std of the additive Gaussian noise.
    alpha: The order at which RDP is computed.

  Returns:
    RDP at alpha, can be np.inf.
  """
  if np.isinf(alpha):
    return np.inf
  log_moment_a = _compute_log_a(q, sigma, alpha - 1)
...
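_log_add is the workhorse here: it computes log(exp(logx) + exp(logy)) without ever leaving log space, which is what keeps the accountant stable for exponents far beyond float range. A minimal self-contained illustration that mirrors the function above rather than importing it:

    import math

    def log_add(logx, logy):
        """Stable log(exp(logx) + exp(logy)), mirroring _log_add above."""
        a, b = min(logx, logy), max(logx, logy)
        if a == -math.inf:  # adding 0
            return b
        return math.log1p(math.exp(a - b)) + b

    # exp(1000) overflows a float, but the log-space sum is exact:
    print(log_add(1000.0, 1000.0))                  # 1000.6931... == 1000 + log(2)
    # agrees with the naive computation where that computation is feasible:
    print(math.log(math.exp(50) + math.exp(50)), log_add(50.0, 50.0))
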


_stan_builtins.py

Source:_stan_builtins.py Github


# -*- coding: utf-8 -*-
"""
    pygments.lexers._stan_builtins
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    This file contains the names of functions for Stan used by
    ``pygments.lexers.math.StanLexer``. This is for Stan language version 2.5.0.

    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

KEYWORDS = (
    'else', 'for', 'if', 'in', 'increment_log_prob', 'integrate_ode',
    'lp__', 'print', 'reject', 'return', 'while'
)

TYPES = (
    'cholesky_factor_corr', 'cholesky_factor_cov', 'corr_matrix', 'cov_matrix',
    'int', 'matrix', 'ordered', 'positive_ordered', 'real', 'row_vector',
    'simplex', 'unit_vector', 'vector', 'void'
)

FUNCTIONS = (
    'Phi', 'Phi_approx', 'abs', 'acos', 'acosh', 'append_col', 'append_row',
    'asin', 'asinh', 'atan', 'atan2', 'atanh',
    'bernoulli_ccdf_log', 'bernoulli_cdf', 'bernoulli_cdf_log', 'bernoulli_log',
    'bernoulli_logit_log', 'bernoulli_rng',
    'bessel_first_kind', 'bessel_second_kind',
    'beta_binomial_ccdf_log', 'beta_binomial_cdf', 'beta_binomial_cdf_log',
    'beta_binomial_log', 'beta_binomial_rng',
    'beta_ccdf_log', 'beta_cdf', 'beta_cdf_log', 'beta_log', 'beta_rng',
    'binary_log_loss',
    'binomial_ccdf_log', 'binomial_cdf', 'binomial_cdf_log',
    'binomial_coefficient_log', 'binomial_log', 'binomial_logit_log',
    'binomial_rng', 'block',
    'categorical_log', 'categorical_logit_log', 'categorical_rng',
    'cauchy_ccdf_log', 'cauchy_cdf', 'cauchy_cdf_log', 'cauchy_log',
    'cauchy_rng', 'cbrt', 'ceil',
    'chi_square_ccdf_log', 'chi_square_cdf', 'chi_square_cdf_log',
    'chi_square_log', 'chi_square_rng',
    'cholesky_decompose', 'col', 'cols', 'columns_dot_product',
    'columns_dot_self', 'cos', 'cosh', 'crossprod', 'cumulative_sum',
    'determinant', 'diag_matrix', 'diag_post_multiply', 'diag_pre_multiply',
    'diagonal', 'digamma', 'dims', 'dirichlet_log', 'dirichlet_rng',
    'distance', 'dot_product', 'dot_self',
    'double_exponential_ccdf_log', 'double_exponential_cdf',
    'double_exponential_cdf_log', 'double_exponential_log',
    'double_exponential_rng',
    'e', 'eigenvalues_sym', 'eigenvectors_sym', 'erf', 'erfc', 'exp', 'exp2',
    'exp_mod_normal_ccdf_log', 'exp_mod_normal_cdf', 'exp_mod_normal_cdf_log',
    'exp_mod_normal_log', 'exp_mod_normal_rng', 'expm1',
    'exponential_ccdf_log', 'exponential_cdf', 'exponential_cdf_log',
    'exponential_log', 'exponential_rng',
    'fabs', 'falling_factorial', 'fdim', 'floor', 'fma', 'fmax', 'fmin',
    'fmod',
    'frechet_ccdf_log', 'frechet_cdf', 'frechet_cdf_log', 'frechet_log',
    'frechet_rng',
    'gamma_ccdf_log', 'gamma_cdf', 'gamma_cdf_log', 'gamma_log', 'gamma_p',
    'gamma_q', 'gamma_rng', 'gaussian_dlm_obs_log', 'get_lp',
    'gumbel_ccdf_log', 'gumbel_cdf', 'gumbel_cdf_log', 'gumbel_log',
    'gumbel_rng',
    'head', 'hypergeometric_log', 'hypergeometric_rng', 'hypot', 'if_else',
    'int_step', 'inv',
    'inv_chi_square_ccdf_log', 'inv_chi_square_cdf', 'inv_chi_square_cdf_log',
    'inv_chi_square_log', 'inv_chi_square_rng', 'inv_cloglog',
    'inv_gamma_ccdf_log', 'inv_gamma_cdf', 'inv_gamma_cdf_log',
    'inv_gamma_log', 'inv_gamma_rng',
    'inv_logit', 'inv_sqrt', 'inv_square', 'inv_wishart_log',
    'inv_wishart_rng', 'inverse', 'inverse_spd', 'is_inf', 'is_nan',
    'lbeta', 'lgamma',
    'lkj_corr_cholesky_log', 'lkj_corr_cholesky_rng', 'lkj_corr_log',
    'lkj_corr_rng', 'lkj_cov_log', 'lmgamma',
    'log', 'log10', 'log1m', 'log1m_exp', 'log1m_inv_logit', 'log1p',
    'log1p_exp', 'log2', 'log_determinant', 'log_diff_exp',
    'log_falling_factorial', 'log_inv_logit', 'log_rising_factorial',
    'log_softmax', 'log_sum_exp',
    'logistic_ccdf_log', 'logistic_cdf', 'logistic_cdf_log', 'logistic_log',
    'logistic_rng', 'logit',
    'lognormal_ccdf_log', 'lognormal_cdf', 'lognormal_cdf_log',
    'lognormal_log', 'lognormal_rng',
    'machine_precision', 'max', 'mdivide_left_tri_low',
    'mdivide_right_tri_low', 'mean', 'min',
    'modified_bessel_first_kind', 'modified_bessel_second_kind',
    'multi_gp_log',
    'multi_normal_cholesky_log', 'multi_normal_cholesky_rng',
    'multi_normal_log', 'multi_normal_prec_log', 'multi_normal_rng',
    'multi_student_t_log', 'multi_student_t_rng',
    'multinomial_log', 'multinomial_rng',
    'multiply_log', 'multiply_lower_tri_self_transpose',
    'neg_binomial_2_log', 'neg_binomial_2_log_log', 'neg_binomial_2_log_rng',
    'neg_binomial_2_rng',
    'neg_binomial_ccdf_log', 'neg_binomial_cdf', 'neg_binomial_cdf_log',
    'neg_binomial_log', 'neg_binomial_rng',
    'negative_infinity',
    'normal_ccdf_log', 'normal_cdf', 'normal_cdf_log', 'normal_log',
    'normal_rng',
    'not_a_number', 'num_elements', 'ordered_logistic_log',
    'ordered_logistic_rng', 'owens_t',
    'pareto_ccdf_log', 'pareto_cdf', 'pareto_cdf_log', 'pareto_log',
    'pareto_rng',
    'pareto_type_2_ccdf_log', 'pareto_type_2_cdf', 'pareto_type_2_cdf_log',
    'pareto_type_2_log', 'pareto_type_2_rng',
    'pi',
    'poisson_ccdf_log', 'poisson_cdf', 'poisson_cdf_log', 'poisson_log',
    'poisson_log_log', 'poisson_rng',
    'positive_infinity', 'pow', 'prod', 'qr_Q', 'qr_R',
    'quad_form', 'quad_form_diag', 'quad_form_sym', 'rank',
    'rayleigh_ccdf_log', 'rayleigh_cdf', 'rayleigh_cdf_log', 'rayleigh_log',
    'rayleigh_rng',
    'rep_array', 'rep_matrix', 'rep_row_vector', 'rep_vector',
    'rising_factorial', 'round', 'row', 'rows', 'rows_dot_product',
    'rows_dot_self',
    'scaled_inv_chi_square_ccdf_log', 'scaled_inv_chi_square_cdf',
    'scaled_inv_chi_square_cdf_log', 'scaled_inv_chi_square_log',
    'scaled_inv_chi_square_rng',
    'sd', 'segment', 'sin', 'singular_values', 'sinh', 'size',
    'skew_normal_ccdf_log', 'skew_normal_cdf', 'skew_normal_cdf_log',
    'skew_normal_log', 'skew_normal_rng',
    'softmax', 'sort_asc', 'sort_desc', 'sort_indices_asc',
    'sort_indices_desc',
    'sqrt', 'sqrt2', 'square', 'squared_distance', 'step',
    'student_t_ccdf_log', 'student_t_cdf', 'student_t_cdf_log',
    'student_t_log', 'student_t_rng',
    'sub_col', 'sub_row', 'sum', 'tail', 'tan', 'tanh', 'tcrossprod',
    'tgamma',
    'to_array_1d', 'to_array_2d', 'to_matrix', 'to_row_vector', 'to_vector',
    'trace', 'trace_gen_quad_form', 'trace_quad_form', 'trigamma', 'trunc',
    'uniform_ccdf_log', 'uniform_cdf', 'uniform_cdf_log', 'uniform_log',
    'uniform_rng',
    'variance', 'von_mises_log', 'von_mises_rng',
    'weibull_ccdf_log', 'weibull_cdf', 'weibull_cdf_log', 'weibull_log',
    'weibull_rng',
    'wishart_log', 'wishart_rng'
)

DISTRIBUTIONS = (
    'bernoulli', 'bernoulli_logit', 'beta', 'beta_binomial', 'binomial',
    'binomial_logit', 'categorical', 'categorical_logit', 'cauchy',
    'chi_square', 'dirichlet', 'double_exponential', 'exp_mod_normal',
    'exponential', 'frechet', 'gamma', 'gaussian_dlm_obs', 'gumbel',
    'hypergeometric', 'inv_chi_square', 'inv_gamma', 'inv_wishart',
    'lkj_corr', 'lkj_corr_cholesky', 'lkj_cov', 'logistic', 'lognormal',
    'multi_gp', 'multi_normal', 'multi_normal_cholesky', 'multi_normal_prec',
    'multi_student_t', 'multinomial', 'neg_binomial', 'neg_binomial_2',
    'neg_binomial_2_log', 'normal', 'ordered_logistic', 'pareto',
    'pareto_type_2', 'poisson', 'poisson_log', 'rayleigh',
    'scaled_inv_chi_square', 'skew_normal', 'student_t', 'uniform',
    'von_mises', 'weibull', 'wishart'
)

RESERVED = (
    'alignas', 'alignof', 'and', 'and_eq', 'asm', 'auto', 'bitand', 'bitor',
    'bool', 'break', 'case', 'catch', 'char', 'char16_t', 'char32_t',
    'class', 'compl', 'const', 'const_cast', 'constexpr', 'continue',
    'decltype', 'default', 'delete', 'do', 'double', 'dynamic_cast', 'enum',
    'explicit', 'export', 'extern', 'false', 'false', 'float', 'friend',
    'fvar', 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'new',
    'noexcept', 'not', 'not_eq', 'nullptr', 'operator', 'or', 'or_eq',
    'private', 'protected', 'public', 'register', 'reinterpret_cast',
    'repeat', 'short', 'signed', 'sizeof', 'static', 'static_assert',
    'static_cast', 'struct', 'switch', 'template', 'then', 'this',
    'thread_local', 'throw', 'true', 'true', 'try', 'typedef', 'typeid',
    'typename', 'union', 'unsigned', 'until', 'using', 'var', 'virtual',
    'void', 'volatile', 'wchar_t', 'xor', 'xor_eq'
...
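These tuples are plain keyword tables consumed by Pygments' StanLexer when highlighting Stan code. A hedged sketch of querying them directly for a crude token classification, assuming a Pygments release that still ships this module path:

    from pygments.lexers._stan_builtins import KEYWORDS, FUNCTIONS, DISTRIBUTIONS

    def classify(token):
        """Crude classification of a Stan identifier against the builtin tables."""
        if token in KEYWORDS:
            return 'keyword'
        if token in FUNCTIONS:
            return 'builtin function'
        if token in DISTRIBUTIONS:
            return 'distribution'
        return 'identifier'

    print(classify('log_sum_exp'))   # builtin function
    print(classify('normal'))        # distribution
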


rotation_term.py

Source:rotation_term.py Github


...
        return np.empty(0), np.empty(0)

    def get_complex_coefficients(self, params):
        log_a, log_Q1, mix_par, log_Q2, log_period = params
        Q = np.exp(log_Q2) + np.exp(log_Q1)
        log_Q1 = np.log(Q)
        P = np.exp(log_period)
        log_omega1 = np.log(4*np.pi*Q) - np.log(P) - 0.5*np.log(4.0*Q*Q-1.0)
        log_S1 = log_a - log_omega1 - log_Q1
        mix = -np.log(1.0 + np.exp(-mix_par))
        Q = np.exp(log_Q2)
        P = 0.5*np.exp(log_period)
        log_omega2 = np.log(4*np.pi*Q) - np.log(P) - 0.5*np.log(4.0*Q*Q-1.0)
        log_S2 = mix + log_a - log_omega2 - log_Q2
        c1 = super(MixtureOfSHOsTerm, self).get_complex_coefficients([
            log_S1, log_Q1, log_omega1,
        ])
        c2 = super(MixtureOfSHOsTerm, self).get_complex_coefficients([
            log_S2, log_Q2, log_omega2,
        ])
        return [np.array([a, b]) for a, b in zip(c1, c2)]

    def log_prior(self):
        lp = super(MixtureOfSHOsTerm, self).log_prior()
        if not np.isfinite(lp):
            return -np.inf
        mix = 1.0 / (1.0 + np.exp(-self.mix_par))
        return lp + np.log(mix) + np.log(1.0 - mix)


class MixtureTerm(terms.Term):
    parameter_names = ("log_a1", "log_b1", "log_f1", "log_P",
                       "mix_par", "log_b2", "log_f2")

    def get_real_coefficients(self, params):
        log_a1, log_b1, log_f1, log_P, mix_par, log_b2, log_f2 = params
        mix = 1.0 / (1.0 + np.exp(-mix_par))
        a1 = np.exp(log_a1)
        b1 = np.exp(log_b1)
        f1 = np.exp(log_f1)
        mx1 = np.sqrt(1+f1**2)
        c1 = 2*np.pi*f1*np.exp(-log_P) / (1.0 + b1 + mx1)
        a2 = np.exp(log_a1) * mix
        b2 = np.exp(log_b2)
        f2 = np.exp(log_f2)
        mx2 = np.sqrt(1+f2**2)
        c2 = 4*np.pi*f2*np.exp(-log_P) / (1.0 + b2 + mx2)
        return (
            np.array([
                a1 * (mx1 + b1) / (1.0 + mx1 + b1),
                a2 * (mx2 + b2) / (1.0 + mx2 + b2),
            ]),
            np.array([c1, c2]),
        )

    def get_complex_coefficients(self, params):
        log_a1, log_b1, log_f1, log_P, mix_par, log_b2, log_f2 = params
        mix = 1.0 / (1.0 + np.exp(-mix_par))
        a1 = np.exp(log_a1)
        b1 = np.exp(log_b1)
        f1 = np.exp(log_f1)
        mx1 = np.sqrt(1+f1**2)
        c1 = 2*np.pi*f1*np.exp(-log_P) / (1.0 + b1 + mx1)
        factor1 = a1 / (1.0 + mx1 + b1)
        a2 = np.exp(log_a1) * mix
        b2 = np.exp(log_b2)
        f2 = np.exp(log_f2)
        mx2 = np.sqrt(1+f2**2)
        c2 = 4*np.pi*f2*np.exp(-log_P) / (1.0 + b2 + mx2)
        factor2 = a2 / (1.0 + mx2 + b2)
        return (
            np.array([factor1, factor2]),
            np.array([factor1*np.exp(log_f1), factor2*np.exp(log_f2)]),
            np.array([c1, c2]),
            np.array([2*np.pi*np.exp(-log_P), 4*np.pi*np.exp(-log_P)])
        )

    def log_prior(self):
        lp = super(MixtureTerm, self).log_prior()
        if not np.isfinite(lp):
            return -np.inf
        mix = 1.0 / (1.0 + np.exp(-self.mix_par))
        return lp + np.log(mix) + np.log(1.0 - mix)

    def grad_log_prior(self):
        g = np.zeros(self.full_size)
        theta = np.exp(-self.mix_par)
        g[4] += (theta - 1) / (1 + theta)
...


test_log_manager.py

Source:test_log_manager.py Github


...
    print 'LogManager instance1: ', lm1
    print 'LogManager instance2: ', lm2
    assert lm1 == lm2

@needs_config
def test_trace_log():
    lm = LogManager.get_instance()
    lm.set_file_log_level('trace')
    logger = lm.get_logger('TraceLogger')
    log_msg = 'This is a trace log message'
    logger.trace(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_config
def test_debug_log():
    lm = LogManager.get_instance()
    logger = lm.get_logger('DebugLogger')
    log_msg = 'This is a debug log message'
    logger.debug(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_config
def test_warn_log():
    lm = LogManager.get_instance()
    logger = lm.get_logger('WarnLogger')
    log_msg = 'This is a warn log message'
    logger.warn(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_config
def test_info_log():
    lm = LogManager.get_instance()
    logger = lm.get_logger('WarnLogger')
    log_msg = 'This is an info log message'
    logger.info(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_config
def test_error_log():
    lm = LogManager.get_instance()
    logger = lm.get_logger('WarnLogger')
    log_msg = 'This is an error log message'
    logger.error(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_config
def test_critical_log():
    lm = LogManager.get_instance()
    logger = lm.get_logger('WarnLogger')
    log_msg = 'This is a critical log message'
    logger.critical(log_msg)
    last_log_line = read_last_log_line()
    print 'Looking for log message: ', log_msg
    print 'Last log line: ', last_log_line
    assert last_log_line.find(log_msg) >= 0

@needs_cleanup
def test_cleanup():
    pass

# Testing
if __name__ == '__main__':
    pass


Using AI Code Generation


// mochawesome/addContext has a single default export, so it is imported
// directly rather than destructured as { log } or { addContext }.
const addContext = require('mochawesome/addContext');
const { expect } = require('chai');
const { Given, When, Then, setDefaultTimeout } = require('cucumber');
const { Builder, By, Key, until } = require('selenium-webdriver');

setDefaultTimeout(60 * 1000);

Given(/^I am on the "([^"]*)" page$/, async function (page) {
  this.driver = await new Builder().forBrowser('chrome').build();
  // The original snippet elides the driver.get(...) navigation URL.
  await this.driver.wait(until.elementLocated(By.name('q')), 1000);
  await this.driver.findElement(By.name('q')).sendKeys(page, Key.RETURN);
  await this.driver.wait(until.elementLocated(By.id('resultStats')), 1000);
  await this.driver.sleep(5000);
  // addContext expects the Mocha test context as its first argument; awaiting
  // the screenshot avoids the promise callback, where `this` would be lost.
  const image = await this.driver.takeScreenshot();
  addContext(this, { title: 'Title', value: 'Value' });
  addContext(this, 'data:image/png;base64,' + image);
});

When(/^I search for "([^"]*)"$/, async function (searchTerm) {
  await this.driver.findElement(By.name('q')).sendKeys(searchTerm, Key.RETURN);
  await this.driver.wait(until.elementLocated(By.id('resultStats')), 1000);
  await this.driver.sleep(5000);
  const image = await this.driver.takeScreenshot();
  addContext(this, { title: 'Title', value: 'Value' });
  addContext(this, 'data:image/png;base64,' + image);
});

Then(/^I should see "([^"]*)" in the results$/, async function (searchTerm) {
  // `results` was never defined in the original; locate the result elements
  // first (the CSS selector here is an assumption).
  const results = await this.driver.findElements(By.css('#search .g'));
  expect(results.length).to.be.above(0);
  const image = await this.driver.takeScreenshot();
  addContext(this, { title: 'Title', value: 'Value' });
  addContext(this, 'data:image/png;base64,' + image);
});

Then(/^I close the browser$/, async function () {
  await this.driver.quit();
});
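
For entries like these to show up anywhere, the suite has to run under Mocha with mochawesome set as the reporter (cucumber-js on its own uses its own formatters). A minimal sketch of enabling the reporter through a .mocharc.js file; the reportDir and reportFilename values are placeholders:

// .mocharc.js — minimal sketch; option values are placeholders
module.exports = {
  reporter: 'mochawesome',
  'reporter-option': [
    'reportDir=reports',      // where the HTML/JSON report is written
    'reportFilename=index',   // base name of the generated files
  ],
};

The same options can be passed on the command line via --reporter mochawesome --reporter-options reportDir=reports,reportFilename=index.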


Using AI Code Generation


const addContext = require('mochawesome/addContext'); // default export
const assert = require('assert');
const fs = require('fs');
const webdriver = require('selenium-webdriver');
const By = webdriver.By;

describe('Test Suite', function () {
  // Timeouts are set on the suite context, not via require('mocha').
  this.timeout(200000);

  it('Test Case', async function () {
    const driver = await new webdriver.Builder().forBrowser('chrome').build();
    await driver.manage().setTimeouts({ implicit: 10000 });
    await driver.manage().window().maximize();
    // The original snippet elides the driver.get(...) navigation URL.
    await driver.findElement(By.name('q')).sendKeys('Selenium');
    await driver.findElement(By.name('btnK')).click();
    // takeScreenshot resolves with a single base64 string (no error argument).
    const image = await driver.takeScreenshot();
    fs.writeFileSync('./screenshots/screenshot.png', image, 'base64');
    // addContext takes the Mocha test context, not the driver.
    addContext(this, 'Screenshot taken');
    const title = await driver.getTitle();
    assert.strictEqual(title, 'Selenium - Google Search');
    await driver.quit();
  });
});
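
Writing the screenshot to disk is not required for the report: Selenium's takeScreenshot() already resolves with a base64-encoded PNG, which can be attached to the test entry directly. A small sketch of that variation; attachScreenshot is a hypothetical helper name:

const addContext = require('mochawesome/addContext');

// Call from inside an `it` declared with `function () {}` so that `this`
// is the Mocha test context mochawesome expects.
async function attachScreenshot(testCtx, driver, title) {
  const image = await driver.takeScreenshot(); // base64-encoded PNG
  addContext(testCtx, {
    title,
    value: 'data:image/png;base64,' + image, // rendered inline in the HTML report
  });
}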


Using AI Code Generation


// addContext is its own module; it is not a property of the main mochawesome export.
const log = require('mochawesome/addContext');
const { expect } = require('chai');

describe('My First Test', () => {
  // A regular function is needed so `this` is the Mocha test context.
  it('Does not do much!', function () {
    log(this, 'This is a log message');
    expect(true).to.equal(true);
  });
});

// In a separate script: mochawesome-merge exports an async `merge` that takes
// an options object with file globs, not (source, destination) arguments.
const { merge } = require('mochawesome-merge');
merge({ files: ['./mochawesome-report/*.json'] });
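
The documented mochawesome-merge flow is slightly longer than the one-liner above: merge() resolves with the combined report object, which is then rendered to HTML with mochawesome-report-generator (marge). A sketch, with the file glob and output directory as placeholders:

const { merge } = require('mochawesome-merge');
const marge = require('mochawesome-report-generator');

(async () => {
  // Combine the per-run JSON outputs into a single report object.
  const report = await merge({ files: ['./mochawesome-report/*.json'] });
  // Render the merged object to an HTML report.
  await marge.create(report, { reportDir: './mochawesome-report' });
})();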


Using AI Code Generation


const { expect } = require('chai'); // `expect` was never imported in the original

describe('Mochawesome test', function () {
  it('should pass', function () {
    expect(true).to.be.true;
  });
});

describe('Mochawesome test', function () {
  it('should fail', function () {
    expect(false).to.be.true;
  });
});

// ...the original repeats the failing suite eight more times, presumably to
// populate the report with multiple results.
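
Suites like these mainly exercise the pass/fail stats and charts in the generated HTML, and those elements are driven by reporter options. A sketch of a few commonly used mochawesome options (the values are just examples):

// Passed via --reporter-options key=value,... or a reporter config
const reporterOptions = {
  reportDir: 'reports',      // output directory for HTML/JSON
  reportFilename: 'results', // base name of the report files
  charts: true,              // render the pass/fail charts
  inlineAssets: true,        // bundle CSS/JS into a single HTML file
  overwrite: false,          // keep earlier reports instead of replacing them
};
module.exports = reporterOptions;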


Using AI Code Generation


// addContext's default export; it expects a Mocha test context as its first
// argument, so `this` (the cucumber world) is passed below rather than the driver.
const log = require('mochawesome/addContext');
const { expect } = require('chai');
const { Given, When, Then } = require('cucumber');
const puppeteer = require('puppeteer');
const { getDriver } = require('./driver');
const { getLogin } = require('./login');
const { getHome } = require('./home');
const { getSearch } = require('./search');
const { getCreate } = require('./create');
const { getEdit } = require('./edit');
const { getDelete } = require('./delete');
const { getLogout } = require('./logout');

let driver;
let login;
let home;
let search;
let create;
let edit;
let del;
let logout;

Given('I am on the login page', async function () {
  driver = await getDriver();
  login = await getLogin(driver);
  await login.open();
  log(this, 'I am on the login page');
});

When('I enter valid credentials', async function () {
  await login.login('admin', 'admin');
  log(this, 'I enter valid credentials');
});

Then('I should be logged in', async function () {
  home = await getHome(driver);
  const isAtHomePage = await home.isAt();
  expect(isAtHomePage).to.be.true;
  log(this, 'I should be logged in');
});

Given('I am on the home page', async function () {
  driver = await getDriver();
  home = await getHome(driver);
  await home.open();
  log(this, 'I am on the home page');
});

When('I click on the search button', async function () {
  await home.search();
  log(this, 'I click on the search button');
});

Then('I should be on the search page', async function () {
  search = await getSearch(driver);
  const isAtSearchPage = await search.isAt();
  expect(isAtSearchPage).to.be.true;
  log(this, 'I should be on the search page');
});

Given('I am on the search page', async function () {
  driver = await getDriver();
  search = await getSearch(driver);
  await search.open();
  log(this, 'I am on the search page');
});

When('I enter valid search criteria', async function () {
  await search.search('test');
  log(this, 'I enter valid search criteria');
});

Then('I should see the search...
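
The getDriver, getLogin, and page-object helpers required at the top are local modules the snippet does not show. Purely as an illustration of what the call sites assume, here is a hypothetical ./driver module built on the puppeteer import the snippet already has:

// driver.js — hypothetical helper matching the getDriver() call sites above
const puppeteer = require('puppeteer');

let browser;
let page;

async function getDriver() {
  // Reuse a single browser/page across steps, as the step definitions assume.
  if (!page) {
    browser = await puppeteer.launch({ headless: true });
    page = await browser.newPage();
  }
  return page;
}

module.exports = { getDriver };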


Using AI Code Generation


// `log` was called without being defined; mochawesome's context helper is the
// addContext module, and it needs the Mocha test context as its first argument.
const log = require('mochawesome/addContext');

describe('My Test', () => {
  it('should do something', function () {
    log(this, 'some text');
  });
});

The reporter-options fragment that follows the test in the original (the trailing comma has been removed so the JSON parses):

{
  "chartsOptions": {
    "legendBackgroundColor": "rgba(0,0,0,0)",
    "legendTemplate": "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<datasets.length; i++){%><li><span style=\"background-color:<%=datasets[i].fillColor%>\">&nbsp;&nbsp;&nbsp;&nbsp;</span><%if(datasets[i].label){%><%=datasets[i].label%><%}%></li><%}%></ul>"
  }
}


Using AI Code Generation


const log = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    log(this, 'some context');
  });
});

const context = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    context(this, 'some context');
  });
});

const addContext = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    addContext(this, 'some context');
  });
});

const context = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    context(this, { title: 'some title', value: 'some context' });
  });
});

const addContext = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    addContext(this, { title: 'some title', value: 'some context' });
  });
});

const context = require('mochawesome/addContext');
describe('Mochawesome', function () {
  it('should add context to test', function () {
    context(this, { title: 'some title', value: 'some context', screenshot: 'base64encoded...
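
A common place to call addContext outside an individual test is an afterEach hook, where this.currentTest exposes the test that just finished. A sketch that attaches a screenshot only on failure; it assumes a WebDriver-style driver variable is in scope:

const addContext = require('mochawesome/addContext');

afterEach(async function () {
  // `this.currentTest` is the Mocha test that just ran.
  if (this.currentTest.state === 'failed') {
    const image = await driver.takeScreenshot(); // assumes a `driver` in scope
    addContext(this, {
      title: 'Failure screenshot: ' + this.currentTest.title,
      value: 'data:image/png;base64,' + image,
    });
  }
});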


Using AI Code Generation


// addContext must receive the Mocha test context, so the test callback is a
// regular function rather than an arrow function.
const log = require('mochawesome/addContext');

describe('Testing mochawesome', () => {
  it('should log to mochawesome', function () {
    log(this, 'this is a log message');
  });
});


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run mochawesome automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 automation testing minutes free!

