How to use the archive_results method in autotest

Best Python code snippet using autotest_python
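
The snippets below come from three public GitHub projects, and each uses the name archive_results in a different role: a boolean command-line flag that gates zipping of output (FilterCSVs.py), the value returned by a repository deploy step and passed through to a Docker build (cli.py), and a dictionary of per-archive statistics (control.py). The most common pattern is a flag that decides whether a results directory gets packed into an archive; a minimal, self-contained sketch of that pattern, using only the standard library (all names here are illustrative, not taken from the snippets):

import shutil
import sys

OUTPUT_DIR = "results"  # illustrative results directory

def archive_results(output_dir: str) -> str:
    """Zip output_dir and return the archive path (shutil adds the .zip suffix)."""
    return shutil.make_archive(output_dir, 'zip', root_dir=output_dir)

if __name__ == '__main__':
    # Interpret an optional CLI argument as the archive flag. Note that a bare
    # bool(sys.argv[1]) would be True for any non-empty string, even "false".
    want_archive = len(sys.argv) > 1 and sys.argv[1].lower() in ('true', '1', 'yes')
    if want_archive:
        print(f'archived to {archive_results(OUTPUT_DIR)}')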

FilterCSVs.py

Source: FilterCSVs.py (GitHub)

import SimulateTrade
from utils import ensure_dir_exist, archive_dir_folders
import pandas as pd
import time
import os
import zipfile
import datetime
import sys

DEST_DIR = ".\\FilteredCSVs"

if __name__ == '__main__':
    src_dir = SimulateTrade.SOURCE_DIR
    archive_results = False
    if len(sys.argv) > 1:
        src_dir = sys.argv[1]
    if len(sys.argv) > 2:
        archive_results = bool(sys.argv[2].lower())
    start_time = time.time()
    snp_500_symbols = SimulateTrade.get_snp_symbols(SimulateTrade.SNP_SYMBOLS_FILE_PATH)
    zip_files = SimulateTrade.get_zip_files_in_folder(src_dir)
    for curr_file in zip_files:
        files_by_zip = {}
        file_path = os.path.join(src_dir, curr_file)
        files_by_zip[file_path] = SimulateTrade.get_files_from_zip_by_date(file_path)
        for zip_file in files_by_zip:
            print(f'Filtering {zip_file}')
            dir_path = DEST_DIR if not archive_results else os.path.join(DEST_DIR, os.path.basename(os.path.splitext(zip_file)[0]))
            ensure_dir_exist(dir_path)
            zip_file_obj = zipfile.ZipFile(zip_file)
            for curr_date in files_by_zip[zip_file]:
                file_time = time.time()
                date_info = files_by_zip[zip_file][curr_date]
                day = date_info['day']
                month = date_info['month']
                year = date_info['year']

                stock_quotes_file = date_info['stockquotes']
                stock_quotes_data = pd.read_csv(zip_file_obj.open(stock_quotes_file))
                snp_quotes = SimulateTrade.filter_equity_snp_symbols(stock_quotes_data, snp_500_symbols)
                snp_quotes.to_csv(os.path.join(dir_path, f'stockquotes_{year}{month:02}{day:02}.csv'),
                                  index=False)
                print(f'Filtering {zip_file}\\{stock_quotes_file} took {time.time() - file_time} seconds')

                options_file = date_info['options']
                options_data = pd.read_csv(zip_file_obj.open(options_file))
                snp_options = SimulateTrade.filter_snp_symbols(options_data, snp_500_symbols)
                snp_options['Expiration'] = pd.to_datetime(snp_options['Expiration'], format='%m/%d/%Y')
                zip_date = datetime.datetime(year=year, month=month, day=day)
                snp_options = SimulateTrade.filter_tradable_options(snp_options, zip_date, 0, 8, 4)
                snp_options.to_csv(os.path.join(dir_path, f'options_{year}{month:02}{day:02}.csv'),
                                   index=False)
                print(f'Filtering {zip_file}\\{options_file} took {time.time() - file_time} seconds')

    if archive_results:
        print('archiving output...')
        archive_dir_folders(DEST_DIR)
    end_time = time.time()
    ...
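
Here archive_results is read from the second command-line argument and decides both the per-zip output layout and whether the destination folder is archived at the end, so an invocation would look roughly like python FilterCSVs.py <src_dir> true. Two things worth noting: bool(sys.argv[2].lower()) is True for any non-empty string (passing "false" still enables archiving), and archive_dir_folders comes from the project's utils module, which is not shown. A plausible stand-in for that helper, zipping each subfolder of the destination directory, might look like this (a sketch under assumptions, not the project's actual code):

import os
import shutil

def archive_dir_folders(dest_dir: str) -> list:
    """Hypothetical stand-in for utils.archive_dir_folders: zip every subfolder
    of dest_dir in place and return the paths of the created archives."""
    archives = []
    for entry in os.listdir(dest_dir):
        folder = os.path.join(dest_dir, entry)
        if os.path.isdir(folder):
            # make_archive(folder, 'zip', root_dir=folder) writes <folder>.zip next to the folder.
            archives.append(shutil.make_archive(folder, 'zip', root_dir=folder))
    return archives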

cli.py

Source: cli.py (GitHub)

from collections import defaultdict
import os
import shutil
import arrow
import click
from dsr_test_staging.config import settings
import logging
from dsr_test_staging.docker import Docker
from dsr_test_staging.init_logging import init_logging
from dsr_test_staging.repo import Repo
from dsr_test_staging.constants import R_LIB_DIRNAME
import json
from dotenv import load_dotenv

logger = logging.getLogger(__name__)
load_dotenv()

@click.group()
def cli():
    pass

@cli.command()
def list_versions():
    for version in settings.target_versions:
        print(version)

@cli.command()
@click.option(
    '--output', '-o',
    type=str,
    default='',
    required=True
)
@click.option(
    '--version', '-v',
    type=str,
    default='',
    required=False,
)
def test_dsr(output, version):
    results = defaultdict(dict)
    if version == '':
        versions = settings.target_versions
    else:
        versions = [version]
    for version in versions:
        try:
            results[version] = test_dsr_version(version)
            results[version]['pass'] = True
        except Exception:
            results[version]['pass'] = False
    with open(output, 'w') as fp:
        fp.write(json.dumps(results))

def test_dsr_version(version):
    init_logging()
    r_lib_cache_dir = os.path.join(
        settings.docker_dir,
        R_LIB_DIRNAME,
    )
    archive_dir = os.path.join(
        settings.docker_dir,
        f't{arrow.now().int_timestamp}',
    )
    os.mkdir(archive_dir)
    # deploy repos
    logger.info(f'Deploy repos from: {settings.repos_dir} to: {archive_dir}')
    archive_results = Repo.run(
        repos_dir=settings.repos_dir,
        archive_dir=archive_dir,
    )
    logger.info(f'{archive_results}')
    # copy R script
    shutil.copytree('R', os.path.join(archive_dir, 'R'))
    # init Dockerfile
    docker = Docker(
        version=version,
        archive_dir=archive_dir,
        archive_results=archive_results,
        r_lib_cache_dir=r_lib_cache_dir,
    )
    docker.generate()
    # build docker
    docker.build()
    # test courses
    docker.run()
    return archive_results

if __name__ == '__main__':
    ...
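
In this CLI, archive_results is whatever Repo.run returns for the deployed repositories: it is logged, handed to the Docker wrapper, and returned from test_dsr_version so that test_dsr can record a pass/fail entry per version in a JSON report. A small example of driving the command and reading that report afterwards (the option names come from the snippet; the output file name is illustrative, and with recent Click versions the command name defaults to test-dsr):

import json
import subprocess
import sys

# Run the click command defined above and let it write its JSON report.
subprocess.run(
    [sys.executable, 'cli.py', 'test-dsr', '--output', 'results.json'],
    check=True,
)

# The report is a JSON object keyed by version, each entry carrying a 'pass' flag.
with open('results.json') as fp:
    results = json.load(fp)

for version, info in results.items():
    print(f"{version}: {'PASS' if info.get('pass') else 'FAIL'}")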

control.py

Source: control.py (GitHub)

# write your code here
import gzip
from collections import Counter

archive_files = []
archive_results = {}
for archives in range(3):
    archive_files.append(input())
for file_name in archive_files:
    with gzip.open(file_name, 'r') as data:
        lines = []
        for line in data:
            line = line.decode('UTF-8')
            lines.append(line.rstrip())
    sequences = []
    cg_total = 0
    sequence_length = 0
    reads = 0
    for index in range(len(lines)):
        if '@SRR' in lines[index]:
            line = lines[index + 1]
            sequences.append(line)
            length = len(line)
            sequence_length += length
            reads += 1
            cg = (line.count('G') + line.count('C')) / length
            cg_total += cg
    ave_length = round(sequence_length / reads)
    cg_ave = round((cg_total / reads) * 100, 2)
    duplicates = Counter(sequences)
    repeats = sum(duplicates[i] - 1 for i in duplicates if duplicates[i] - 1 > 0)
    ns = [n.count('N') / len(n) * 100 for n in duplicates if 'N' in n]
    reads_with_ns = len(ns)
    ns_per_read = round(sum(ns) / reads, 2)
    archive_results[file_name] = [reads, ave_length, repeats, reads_with_ns, cg_ave, ns_per_read]
combined_read_n = {key: repeats + reads_with_ns for key in archive_results}
best = min(combined_read_n, key=combined_read_n.get)
reads, average_length, repeats, reads_with_ns, cg_average, ns_per_read = archive_results[best]
print(f'Reads in the file = {reads}:')
print(f'Reads sequence average length = {average_length}')
print(f'Repeats = {repeats}')
print(f'Reads with Ns = {reads_with_ns}')
print(f'GC content average = {cg_average}%')
...
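
In this last snippet archive_results maps each FASTQ archive name to its statistics list [reads, ave_length, repeats, reads_with_ns, cg_ave, ns_per_read]. One thing to watch: the combined_read_n comprehension reuses repeats and reads_with_ns from the final loop iteration, so every key ends up with the same score. If the intent is to rank each file by its own repeat and N counts, the values already stored in archive_results can be used instead, for example:

# Toy archive_results in the same shape as above:
# [reads, ave_length, repeats, reads_with_ns, cg_ave, ns_per_read]
archive_results = {
    'sample1.fastq.gz': [100, 151, 12, 3, 48.5, 0.02],
    'sample2.fastq.gz': [100, 151, 4, 1, 50.1, 0.01],
}

# Rank each archive by its own repeats (index 2) plus reads-with-N count (index 3),
# instead of reusing the last loop iteration's locals.
combined_read_n = {
    file_name: stats[2] + stats[3]
    for file_name, stats in archive_results.items()
}
best = min(combined_read_n, key=combined_read_n.get)
print(best)  # -> sample2.fastq.gz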

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 minutes of automation testing FREE!

Next-Gen App & Browser Testing Cloud
