How to use basic_run method in SeleniumBase

Best Python code snippet using basic_run

dotted.py

Source: dotted.py (GitHub)


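The snippet below comes from dotted.py, a benchmark post-processing script in the DottedDB repository. Here basic_run is not a method call but the name of a YCSB results file: get_ycsb() downloads basic_run.csv (and dotted_run.csv) from the benchmark machine over SFTP, and plot_ycsb() later loads it with NumPy to compare BasicDB/MerkleDB against DottedDB. The script targets Python 2 and has three parts: configuration variables, helpers that collect stats and basho_bench/YCSB output from local dev nodes or a remote cluster, and matplotlib plotting functions that write their figures to PDF.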
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import pysftp
from datetime import datetime
import os
import os.path
import sys
from subprocess import call, check_call
import subprocess
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm # recommended import according to the docs
from matplotlib.ticker import ScalarFormatter
import matplotlib.ticker as ticker
from os import walk
from matplotlib.backends.backend_pdf import PdfPages
import pandas as pd
import operator
from matplotlib import rc
from collections import OrderedDict
from collections import Counter

plt.style.use('fivethirtyeight')

################################################################################
#### VARIABLES
################################################################################
bench_ip = '192.168.112.37'
cluster_ip = '192.168.112.'
machines = ['38', '39', '40', '55', '56']
cluster_user = 'gsd'
cluster_private_key = '/Users/ricardo/.ssh/gsd_private_key'
bb_summary_path = '/Users/ricardo/github/DottedDB/benchmarks/priv/summary.r'
local_bb_path = '/Users/ricardo/github/basho_bench/tests/current/'
cluster_bb_path = '/home/gsd/basho_bench/tests/current/'
cluster_ycsb_path = '/home/gsd/YCSB/'
cluster_dotted_path = '/home/gsd/DottedDB/_build/default/rel/dotted_db/data/stats/current/'
cluster_basic_path = '/home/gsd/BasicDB/_build/default/rel/basic_db/data/stats/current/'
cluster_dotted_dstat = '/home/gsd/DottedDB/benchmarks/tests/dstat.csv'
cluster_basic_dstat = '/home/gsd/BasicDB/benchmarks/tests/dstat.csv'
cluster_path = '/users/ricardo/github/DottedDB/benchmarks/tests/cluster/'
local_path = '/Users/ricardo/github/DottedDB/benchmarks/tests/local/'
current_dotted_dir = '/Users/ricardo/github/DottedDB/benchmarks/tests/current_dotted'
current_basic_dir = '/Users/ricardo/github/DottedDB/benchmarks/tests/current_basic'
test_path = '/Users/ricardo/github/DottedDB/benchmarks/tests/'
current_dir = test_path + 'current'
dotted_dev1_path = '/Users/ricardo/github/DottedDB/_build/dev/dev1/dotted_db/data/stats/current/'
dotted_dev2_path = '/Users/ricardo/github/DottedDB/_build/dev/dev2/dotted_db/data/stats/current/'
dotted_dev3_path = '/Users/ricardo/github/DottedDB/_build/dev/dev3/dotted_db/data/stats/current/'
dotted_dev4_path = '/Users/ricardo/github/DottedDB/_build/dev/dev4/dotted_db/data/stats/current/'
basic_dev1_path = '/Users/ricardo/github/BasicDB/_build/dev/dev1/basic_db/data/stats/current/'
basic_dev2_path = '/Users/ricardo/github/BasicDB/_build/dev/dev2/basic_db/data/stats/current/'
basic_dev3_path = '/Users/ricardo/github/BasicDB/_build/dev/dev3/basic_db/data/stats/current/'
basic_dev4_path = '/Users/ricardo/github/BasicDB/_build/dev/dev4/basic_db/data/stats/current/'

""" Create a new folder for the incoming files.
Also, make the folder current a symlink to the new folder.
"""
def create_folder(type=""):
    if type == 'local_dotted' or type == 'local_basic':
        new = local_path + type + '/' + get_folder_time() + '/'
        os.makedirs(new)
        print "New local directory: " + new
    elif type == 'cluster_dotted' or type == 'cluster_basic':
        new = cluster_path + type + '/' + get_folder_time() + '/'
        os.makedirs(new)
        print "New cluster directory: " + new
    else:
        print "Error creating dir: " + type + "!"
        sys.exit(1)
    if type == 'cluster_dotted' or type == 'local_dotted':
        change_current_dotted(new)
    elif type == 'cluster_basic' or type == 'local_basic':
        change_current_basic(new)
    else:
        print "Error creating dir: " + type + "!"
        sys.exit(1)
    change_current(new)

def change_current(f):
    if not os.path.exists(f):
        print "Folder " + f + " does not exist!"
        sys.exit(1)
    else:
        call(["rm", "-f", current_dir])
        call(["ln", "-s", f, current_dir])
        print "\'current\' now points to: " + f

def change_current_basic(f):
    if not os.path.exists(f):
        print "Folder " + f + " does not exist!"
        sys.exit(1)
    else:
        call(["rm", "-f", current_basic_dir])
        call(["ln", "-s", f, current_basic_dir])
        print "\'basic_current\' now points to: " + f

def change_current_dotted(f):
    if not os.path.exists(f):
        print "Folder " + f + " does not exist!"
        sys.exit(1)
    else:
        call(["rm", "-f", current_dotted_dir])
        call(["ln", "-s", f, current_dotted_dir])
        print "\'current_dotted\' now points to: " + f

def get_folder_time():
    now = datetime.now()
    return '%4d%02d%02d_%02d%02d%02d' % (now.year, now.month, now.day, now.hour, now.minute, now.second)

""" Get stat files from the local dev folders
"""
def get_local_files(type):
    print "Getting files from local folders"
    if type != 'local_dotted' and type != 'local_basic':
        print "ERROR: get_files error in path"
        sys.exit(1)
    elif type == 'local_dotted':
        dev1_path = dotted_dev1_path
        dev2_path = dotted_dev2_path
        dev3_path = dotted_dev3_path
        dev4_path = dotted_dev4_path
    elif type == 'local_basic':
        dev1_path = basic_dev1_path
        dev2_path = basic_dev2_path
        dev3_path = basic_dev3_path
        dev4_path = basic_dev4_path
    print "Getting stats files from: dev1"
    os.makedirs(current_dir + '/dev1')
    call(["cp", "-r", dev1_path, current_dir + '/dev1'])
    print "Getting stats files from: dev2"
    os.makedirs(current_dir + '/dev2')
    call(["cp", "-r", dev2_path, current_dir + '/dev2'])
    print "Getting stats files from: dev3"
    os.makedirs(current_dir + '/dev3')
    call(["cp", "-r", dev3_path, current_dir + '/dev3'])
    print "Getting stats files from: dev4"
    os.makedirs(current_dir + '/dev4')
    call(["cp", "-r", dev4_path, current_dir + '/dev4'])

def get_local_bb():
    print "Getting basho bench files from local machine"
    os.makedirs(current_dir + '/basho_bench')
    call(["cp", "-r", local_bb_path, current_dir + '/basho_bench'])

""" Get stat files from the machines in the cluster
"""
def get_cluster_files(type):
    print "Getting files from remote cluster"
    if type != 'cluster_dotted' and type != 'cluster_basic':
        print "ERROR: get_files error in path"
        sys.exit(1)
    i = 0
    for m in machines:
        i += 1
        machine = cluster_ip + m
        print "Getting stats files from: ", machine
        s = pysftp.Connection(host=machine, username=cluster_user, private_key=cluster_private_key)
        os.makedirs(current_dir + "/node%s/" % i)
        if type == 'cluster_dotted':
            s.get_d(cluster_dotted_path, current_dir + "/node%s/" % i)
            s.get(cluster_dotted_dstat, current_dir + "/node%s/dstat.csv" % i)
        if type == 'cluster_basic':
            s.get_d(cluster_basic_path, current_dir + "/node%s/" % i)
            s.get(cluster_basic_dstat, current_dir + "/node%s/dstat.csv" % i)
        s.close()

def get_cluster_bb():
    print "Getting basho bench files from cluster bench machine"
    s = pysftp.Connection(host=bench_ip, username=cluster_user, private_key=cluster_private_key)
    os.makedirs(current_dir + "/basho_bench")
    s.get_d(cluster_bb_path, current_dir + "/basho_bench")
    s.close()

def get_ycsb(type):
    print "Getting YCSB files from cluster bench machine"
    s = pysftp.Connection(host=bench_ip, username=cluster_user, private_key=cluster_private_key)
    os.makedirs(current_dir + "/ycsb")
    if type == 'cluster_dotted':
        s.get(cluster_ycsb_path + 'dotteddb/dotted_cluster.props', current_dir + "/ycsb/cluster.props")
        s.get(cluster_ycsb_path + 'dotteddb/workload', current_dir + "/ycsb/workload.props")
        s.get(cluster_ycsb_path + 'dotted_load.csv', current_dir + "/ycsb/dotted_load.csv")
        s.get(cluster_ycsb_path + 'dotted_run.csv', current_dir + "/ycsb/dotted_run.csv")
    if type == 'cluster_basic':
        s.get(cluster_ycsb_path + 'mybasicdb/basic_cluster.props', current_dir + "/ycsb/cluster.props")
        s.get(cluster_ycsb_path + 'mybasicdb/workload', current_dir + "/ycsb/workload.props")
        s.get(cluster_ycsb_path + 'basic_load.csv', current_dir + "/ycsb/basic_load.csv")
        s.get(cluster_ycsb_path + 'basic_run.csv', current_dir + "/ycsb/basic_run.csv")
    s.close()

""" Do bb stuff
"""
def do_bashobench(f=''):
    if f == '':
        call(["Rscript", "--vanilla", bb_summary_path, "-i", current_dir + "/basho_bench"])
    else:
        call(["Rscript", "--vanilla", bb_summary_path, "-i", f + "/basho_bench"])

################################
## PLOTTING
################################
def do_plot(type):
    initial_offset = 20
    if type == 'cluster':
        dotted = np.loadtxt((current_dotted_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    elif type == 'local':
        dotted = np.loadtxt((current_dotted_dir + '/dev1/bench_file.csv'), delimiter=':', usecols=[1])
    DS = int(dotted[0]/5) - initial_offset
    DE = int(dotted[1]/5) + 5
    if type == 'cluster':
        bench = np.loadtxt((current_basic_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    elif type == 'local':
        bench = np.loadtxt((current_basic_dir + '/dev1/bench_file.csv'), delimiter=':', usecols=[1])
    BS = int(bench[0]/5) - initial_offset
    BE = int(bench[1]/5) + 5
    NVnodes = int(bench[6])
    RF = int(bench[7])
    print " "
    print "Dotted: start", DS*5, " end", DE*5
    print "Basic : start", BS*5, " end", BE*5
    print "Vnodes:", NVnodes
    print "RF:\t", RF
    # print "Plot: Entries per Clock"
    # clock_entries_plot(type, DS,DE,BS,BE)
    # print "Plot: sync transferred size"
    # sync_size_plot(type, DS,DE,BS,BE)
    # print "Plot: Write Latency"
    # repair_latency_plot(type, DS,DE,BS,BE)
    print "Plot: Strip Latency"
    strip_latency_plot(type, DS,DE,BS,BE)
    # print "Plot: Total number of keys"
    # number_keys_plot(type, DS,DE,BS,BE,NVnodes,RF)
    # print "Plot: Sync Hit Ratio"
    # sync_hit_ratio_plot(type, DS,DE,BS,BE)
    # print "Plot: Node Metadata"
    # node_metadate_plot(type, DS,DE,BS,BE,bench)
    # print "Plot: dstat"
    # dstat_plot()

def dstat_plot():
    basic = load_dstat_csv(current_basic_dir)
    dotted = load_dstat_csv(current_dotted_dir)
    print basic, "\nshape: ", basic.shape
    basic2 = mean_matrix(basic)
    print basic2, "\nshape: ", basic2.shape
    dotted2 = mean_matrix(dotted)
    basic3 = basic2[basic2[:,0].argsort()]
    dotted3 = dotted2[dotted2[:,0].argsort()]
    # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
    # "system", "total cpu usage",,,,,, "dsk/total",, "net/total",, "paging",, "system",, "load avg",,, "memory usage",,,, "swap",
    # "time", "usr", "sys", "idl", "wai", "hiq", "siq", "read", "writ", "recv", "send", "in", "out", "int", "csw", "1m", "5m", "15m", "used", "buff", "cach", "free", "used", "free"
    plt.style.use('fivethirtyeight')
    plt.figure()
    plt.title("Dstat")
    i = 0
    for t in basic3[:,0]:
        # plt.plot(i, basic3[i,1], linewidth=2, label='Basic', c='r', marker='^')
        basic3[i,0] = i
        i = i+1
    i = 0
    for t in dotted3[:,0]:
        # plt.plot(i, dotted3[i,1], linewidth=2, label='Dotted', c='g', marker='o')
        dotted3[i,0] = i
        i = i+1
    plt.plot(basic3[:,0], basic3[:,1], linewidth=1, label='Basic', c='r')
    plt.plot(dotted3[:,0], dotted3[:,1], linewidth=1, label='Dotted', c='g')
    plt.xlabel('Time (s)')
    plt.ylabel('%')
    plt.legend(loc='upper right')
    # plt.ylim(ymin=-150.0)
    # plt.ylim((-0.2,5))
    # plt.xlim(xmin=-5.0)
    # plt.xlim(xmax=(DE-DS)*5)
    plt.xlim(xmax=750)
    # plt.xlim((0,700))
    # save in PDF
    pp = PdfPages(current_dotted_dir + '/dstat.pdf')
    pp.savefig()
    pp.close()

def plot_ycsb():
    print "YCSB"
    basic = np.loadtxt((current_basic_dir + '/ycsb/basic_run.csv'), delimiter=',', skiprows=2)
    dotted = np.loadtxt((current_dotted_dir + '/ycsb/dotted_run.csv'), delimiter=',', skiprows=2)
    basic_overall = np.array(filter(lambda x: x[0] == '[OVERALL]', basic))
    dotted_overall = np.array(filter(lambda x: x[0] == '[OVERALL]', dotted))
    for op in ['[READ]', '[DELETE]', '[UPDATE]']:
        basic2 = np.array(filter(lambda x: x[0] == op, basic))
        dotted = np.array(filter(lambda x: x[0] == op, dotted))

def plot_bb():
    print "Throughput"
    ops_plot()
    print "Latencies"
    for op in ['delete', 'get', 'update', 'put']:
        latencies_plot(op)

def clock_entries_paper():
    change_current_basic(cluster_path + 'cluster_basic/entries_rf3/')
    basic2 = load_cluster_basic_csv('entries-per-clock_hist.csv', True)
    basic_rf3 = mean_matrix(basic2)
    change_current_dotted(cluster_path + 'cluster_dotted/entries_rf3/')
    dotted2 = load_cluster_dotted_csv('entries-per-clock_hist.csv', True)
    dotted_rf3 = mean_matrix(dotted2)
    initial_offset_rf3 = 15
    final_offset_rf3 = 10
    dotted = np.loadtxt((current_dotted_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    DS3 = int(dotted[0]/5) - initial_offset_rf3
    DE3 = int(dotted[1]/5) + final_offset_rf3
    bench = np.loadtxt((current_basic_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    BS3 = int(bench[0]/5) - initial_offset_rf3
    BE3 = int(bench[1]/5) + final_offset_rf3
    change_current_basic(cluster_path + 'cluster_basic/entries_rf6/')
    basic2 = load_cluster_basic_csv('entries-per-clock_hist.csv', True)
    basic_rf6 = mean_matrix(basic2)
    change_current_dotted(cluster_path + 'cluster_dotted/entries_rf6/')
    dotted2 = load_cluster_dotted_csv('entries-per-clock_hist.csv', True)
    dotted_rf6 = mean_matrix(dotted2)
    initial_offset_rf6 = 20
    final_offset_rf6 = 10
    dotted = np.loadtxt((current_dotted_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    DS6 = int(dotted[0]/5) - initial_offset_rf6
    DE6 = int(dotted[1]/5) + final_offset_rf6
    bench = np.loadtxt((current_basic_dir + '/node1/bench_file.csv'), delimiter=':', usecols=[1])
    BS6 = int(bench[0]/5) - initial_offset_rf6
    BE6 = int(bench[1]/5) + final_offset_rf6
    # NVnodes = int(bench[6])
    # RF = int(bench[7])
    plt.style.use('fivethirtyeight')
    fig = plt.figure()
    fig.add_axes([0.10, 0.10, 0.87, 0.85])
    # plt.rcParams.update({'font.size': 13})
    # plt.title("Number of Clock Entries")
    me = 0.1
    msz = 12
    plt.plot(basic_rf3[BS3:BE3,0]-5*BS3, basic_rf3[BS3:BE3,4], linewidth=2, label='MerkleDB, Replication=3', color='r', marker='^', markevery=me, ms=msz, alpha=0.6)
    plt.plot(basic_rf6[BS6:BE6,0]-5*BS6, basic_rf6[BS6:BE6,4], linewidth=2, label='MerkleDB, Replication=6', color='r', marker='o', markevery=me, ms=msz, alpha=0.6)
    plt.plot(dotted_rf3[DS3:DE3,0]-5*DS3, dotted_rf3[DS3:DE3,4], linewidth=2, label='DottedDB, Replication=3', color='g', marker='s', 
markevery=me, ms=msz, alpha=0.6)321 plt.plot(dotted_rf6[DS6:DE6,0]-5*DS6, dotted_rf6[DS6:DE6,4], linewidth=2, label='DottedDB, Replication=6', color='g', marker='D', markevery=me, ms=msz, alpha=0.6)322 plt.xlabel('Time (s)')323 plt.ylabel('# Entries per Object Clock')324 plt.ylim(ymin=-0.2)325 # plt.ylim((-0.2,5))326 plt.xlim(xmin=0)327 # plt.xlim(xmax=(BE-BS)*5)328 plt.xlim(xmax=1280)329 plt.legend(loc='upper left')330 # plt.xlim((0,700))331 # save in PDF332 pp = PdfPages(test_path + 'entries_per_clock_paper.pdf')333 pp.savefig()334 pp.close()335def clock_entries_plot(type, DS,DE,BS,BE):336 if type == 'cluster':337 basic2 = load_cluster_basic_csv('entries-per-clock_hist.csv', True)338 dotted2 = load_cluster_dotted_csv('entries-per-clock_hist.csv', True)339 elif type == 'local':340 basic2 = load_local_basic_csv('entries-per-clock_hist.csv', True)341 dotted2 = load_local_dotted_csv('entries-per-clock_hist.csv', True)342 basic = mean_matrix(basic2)343 dotted = mean_matrix(dotted2)344 # print dotted345 # print mean_matrix(dotted)346 print "\n before: " + str(dotted2.shape) + "\n after: " + str(dotted.shape)347 plt.style.use('fivethirtyeight')348 plt.figure()349 plt.title("Number of Clock Entries")350 # plt.plot(f[:,0], f[:,c[0]], label=c[1], linewidth=3)351 # plt.scatter(basic[:,0], basic[:,4], s=(basic[:,2]/20), label='Basic', c='r', marker='^')352 # plt.scatter(basic[:,0], basic[:,4], s=15, label='Basic', c='r', marker='^')353 # plt.scatter(dotted[:,0], dotted[:,4], s=15, label='Dotted', c='g', marker='o')354 plt.plot(basic[BS:BE,0]-5*BS, basic[BS:BE,4], linewidth=2, label='Basic', c='r', marker='^')355 plt.plot(dotted[DS:DE,0]-5*DS, dotted[DS:DE,4], linewidth=2, label='Dotted', c='g', marker='o')356 plt.xlabel('Time')357 plt.ylabel('Entries per Clock')358 plt.legend()359 plt.ylim(ymin=-0.2)360 # plt.ylim((-0.2,5))361 plt.xlim(xmin=0)362 plt.xlim(xmax=(BE-BS)*5)363 plt.legend(loc='center right')364 # plt.xlim((0,700))365 # save in PDF366 pp = PdfPages(current_dotted_dir + '/entries_per_clock.pdf')367 pp.savefig()368 pp.close()369## Sync Size Transferred370def sync_paper4(371 types = ['hhh','lhh','hlh','llh','hhl','lhl','hll','lll'],372 # types = ['hhh','hlh','lhh','llh','hhl','hll','lhl','lll'],373 # ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],374 types2 = ['hh','lh','hl','ll'],375 # ['hh','hl','lh','ll'],376 filename_ext=''):377 # Basic378 basic_mt = OrderedDict()379 basic = OrderedDict()380 BS = OrderedDict()381 BE = OrderedDict()382 DS = OrderedDict()383 DE = OrderedDict()384 num_keys = OrderedDict()385 vnodes = OrderedDict()386 RF = OrderedDict()387 hash_size = OrderedDict()388 key_size = OrderedDict()389 mt = OrderedDict()390 basic_ctx = OrderedDict()391 basic_md = OrderedDict()392 basic_pl = OrderedDict()393 basic_total = OrderedDict()394 basic_ctx_mean = OrderedDict()395 basic_ctx_std = OrderedDict()396 basic_md_mean = OrderedDict()397 basic_md_std = OrderedDict()398 basic_pl_mean = OrderedDict()399 basic_pl_std = OrderedDict()400 dotted_ctx = OrderedDict()401 dotted_md = OrderedDict()402 dotted_pl = OrderedDict()403 dotted_total = OrderedDict()404 dotted_ctx_mean = OrderedDict()405 dotted_ctx_std = OrderedDict()406 dotted_md_mean = OrderedDict()407 dotted_md_std = OrderedDict()408 dotted_pl_mean = OrderedDict()409 dotted_pl_std = OrderedDict()410 factor = 1/(5*1024.0)411 initial_offset= -1412 final_offset= 0413 # types = ['hhh','hlh','lhh','llh','hhl','hll','lhl','lll']414 for t in types:415 change_current_basic(cluster_path + 'cluster_basic/sync_'+t+'/')416 basic_bench = 
np.loadtxt((current_basic_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])417 BS[t] = int(basic_bench[0]/5)-initial_offset418 BE[t] = int(basic_bench[1]/5)+final_offset419 num_keys[t] = int(basic_bench[2])420 vnodes[t] = int(basic_bench[6])421 RF[t] = int(basic_bench[7])422 hash_size[t] = int(basic_bench[8])423 key_size[t] = int(basic_bench[9])424 mt[t] = int(basic_bench[10])425 basic_ctx2 = load_cluster_basic_csv('sync-context-size_hist.csv', False)426 basic_md2 = load_cluster_basic_csv('sync-metadata-size_hist.csv', False)427 basic_pl2 = load_cluster_basic_csv('sync-payload-size_hist.csv', False)428 basic_ctx[t] = mean_matrix(basic_ctx2)429 basic_md[t] = mean_matrix(basic_md2)430 basic_pl[t] = mean_matrix(basic_pl2)431 basic_ctx_mean[t] = np.mean(basic_ctx[t][BS[t]:BE[t],10] * factor)432 basic_ctx_std[t] = np.std(basic_ctx[t][BS[t]:BE[t],10]* factor)433 basic_md_mean[t] = np.mean(basic_md[t][BS[t]:BE[t],10] * factor)434 basic_md_std[t] = np.std(basic_md[t][BS[t]:BE[t],10]* factor)435 basic_pl_mean[t] = np.mean(basic_pl[t][BS[t]:BE[t],10] * factor)436 basic_pl_std[t] = np.std(basic_pl[t][BS[t]:BE[t],10]* factor)437 basic_total[t] = (basic_ctx[t][BS[t]:BE[t],10] + basic_md[t][BS[t]:BE[t],10] + basic_pl[t][BS[t]:BE[t],10])438 # types2 = ['hh','lh','hl','ll']439 for t in types2:440 change_current_dotted(cluster_path + 'cluster_dotted/sync_'+t+'/')441 dotted_bench = np.loadtxt((current_dotted_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])442 DS[t] = int(dotted_bench[0]/5)-initial_offset443 DE[t] = int(dotted_bench[1]/5)+final_offset444 dotted_ctx2 = load_cluster_dotted_csv('sync-context-size_hist.csv', False)445 dotted_md2 = load_cluster_dotted_csv('sync-metadata-size_hist.csv', False)446 dotted_pl2 = load_cluster_dotted_csv('sync-payload-size_hist.csv', False)447 dotted_ctx[t] = mean_matrix(dotted_ctx2)448 dotted_md[t] = mean_matrix(dotted_md2)449 dotted_pl[t] = mean_matrix(dotted_pl2)450 print "\n ctx: " + str(dotted_ctx[t].shape) + "\n md: " + str(dotted_md[t].shape)+ "\n pl: " + str(dotted_pl[t].shape)451 dotted_ctx_mean[t] = np.mean(dotted_ctx[t][DS[t]:DE[t],10] * factor)452 dotted_ctx_std[t] = np.std(dotted_ctx[t][DS[t]:DE[t],10]* factor)453 dotted_md_mean[t] = np.mean(dotted_md[t][DS[t]:DE[t],10] * factor)454 dotted_md_std[t] = np.std(dotted_md[t][DS[t]:DE[t],10]* factor)455 dotted_pl_mean[t] = np.mean(dotted_pl[t][DS[t]:DE[t],10] * factor)456 dotted_pl_std[t] = np.std(dotted_pl[t][DS[t]:DE[t],10]* factor)457 dotted_total[t] = (dotted_ctx[t][DS[t]:DE[t],10] + dotted_md[t][DS[t]:DE[t],10] + dotted_pl[t][DS[t]:DE[t],10])458 labels = ('DottedDB HH', 'DottedDB LH', 'DottedDB HL', 'DottedDB LL',' ', 'MerkleDB HHH','MerkleDB LHH','MerkleDB HLH','MerkleDB LLH','MerkleDB HHL','MerkleDB LHL','MerkleDB HLL','MerkleDB LLL')459## Absolute460 plt.rcParams.update({'font.size': 13})461 plt.style.use('fivethirtyeight')462 my_dpi = 96463 fig = plt.figure(figsize=(800/my_dpi, 600/my_dpi), dpi=my_dpi)464 # plt.title("Traffic Sent for Sync")465 fig.add_axes([0.10, 0.19, 0.96, 0.72])466 N = 13467 ind = np.arange(N) # the x locations for the groups468 width = 0.85 # the width of the bars: can also be len(x) sequence469 pl_mean = tuple(dotted_pl_mean.values() + [0] + basic_pl_mean.values())470 pl_std = tuple(dotted_pl_std.values() + [0] + basic_pl_std.values())471 md_mean = tuple(dotted_md_mean.values() + [0] + basic_md_mean.values())472 md_std = tuple(dotted_md_std.values() + [0] + basic_md_std.values())473 ctx_mean = tuple(dotted_ctx_mean.values() + [0] + basic_ctx_mean.values())474 
ctx_std = tuple(dotted_ctx_std.values() + [0] + basic_ctx_std.values())475 # dotted does 2-way sync, while basic does 1-way476 dotted_pl_mean2 = map(lambda x: x/2.0, dotted_pl_mean.values())477 dotted_pl_std2 = map(lambda x: x/2.0, dotted_pl_std.values())478 dotted_md_mean2 = map(lambda x: x/2.0, dotted_md_mean.values())479 dotted_md_std2 = map(lambda x: x/2.0, dotted_md_std.values())480 dotted_ctx_mean2 = map(lambda x: x/2.0, dotted_ctx_mean.values())481 dotted_ctx_std2 = map(lambda x: x/2.0, dotted_ctx_std.values())482 pl_mean2 = tuple(dotted_pl_mean2 + [0] + basic_pl_mean.values())483 pl_std2 = tuple(dotted_pl_std2 + [0] + basic_pl_std.values())484 md_mean2 = tuple(dotted_md_mean2 + [0] + basic_md_mean.values())485 md_std2 = tuple(dotted_md_std2 + [0] + basic_md_std.values())486 ctx_mean2 = tuple(dotted_ctx_mean2 + [0] + basic_ctx_mean.values())487 ctx_std2 = tuple(dotted_ctx_std2 + [0] + basic_ctx_std.values())488 # plt.bar(ind, pl_mean, width, color='g', yerr=pl_std, label='Object Payload')489 # plt.bar(ind, md_mean, width, color='r', yerr=md_std, bottom=pl_mean, label='Sync Metadata')490 # plt.bar(ind, ctx_mean, width, color='b', yerr=ctx_std, bottom=map(operator.add, pl_mean, md_mean), label='Object Metadata')491 AL = 0.6492 ec = 'white'493 plt.bar(ind, pl_mean2, width, color='g', yerr=pl_std2, label='Object Data', alpha=AL, fill=True, edgecolor=ec, hatch='*')494 plt.bar(ind, ctx_mean2, width, color='b', yerr=ctx_std2, bottom=pl_mean2, label='Object Metadata', alpha=AL, fill=True, edgecolor=ec, hatch='x')495 plt.bar(ind, md_mean2, width, color='r', yerr=md_std2, bottom=map(operator.add, pl_mean2, ctx_mean2), label='Sync Metadata', alpha=AL, fill=True, edgecolor=ec, hatch='o')496 # plt.bar(ind, pl_mean2, width, color='g', yerr=pl_std2, label='Object Payload', alpha=AL, fill=True, edgecolor=ec, hatch='*')497 # plt.bar(ind, md_mean2, width, color='r', yerr=md_std2, bottom=pl_mean2, label='Sync Metadata', alpha=AL, fill=True, edgecolor=ec, hatch='o')498 # plt.bar(ind, ctx_mean2, width, color='b', yerr=ctx_std2, bottom=map(operator.add, pl_mean2, md_mean2), label='Object Metadata', alpha=AL, fill=True, edgecolor=ec, hatch='x')499 plt.ylabel('Network usage (KB/s)')500 plt.legend(bbox_to_anchor=(-0.06, 1.02, 1.0, .102), loc=3, ncol=3, mode="expand", borderaxespad=0.)501 plt.ylim((-1,752))502 plt.yticks(np.arange(0, 751, 50))503 # plt.xticks(ind + width/2., tuple(types2 + types))504 plt.xticks(ind + width/1.25, labels)505 # rotate axis labels506 plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')507 # plt.xlim(xmin=-5)508 # plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 - 40)509 # plt.ylim(ymin=-0.5)510 plt.savefig(test_path + 'sync_size_bar_abs_paper' + filename_ext + '.png')511 pp = PdfPages(test_path + 'sync_size_bar_abs_paper' + filename_ext + '.pdf')512 pp.savefig()513 pp.close()514## Absolute LOG515 plt.rcParams.update({'font.size': 10})516 fig = plt.figure()517 # plt.title("Traffic Sent for Sync")518 fig.add_axes([0.08, 0.14, 0.90, 0.8])519 N = 13520 ind = np.arange(N) # the x locations for the groups521 width = 0.85 # the width of the bars: can also be len(x) sequence522 pl_mean = tuple(dotted_pl_mean.values() + [0] + basic_pl_mean.values())523 pl_std = tuple(dotted_pl_std.values() + [0] + basic_pl_std.values())524 md_mean = tuple(dotted_md_mean.values() + [0] + basic_md_mean.values())525 md_std = tuple(dotted_md_std.values() + [0] + basic_md_std.values())526 ctx_mean = tuple(dotted_ctx_mean.values() + [0] + basic_ctx_mean.values())527 ctx_std = 
tuple(dotted_ctx_std.values() + [0] + basic_ctx_std.values())528 # dotted does 2-way sync, while basic does 1-way529 dotted_pl_mean2 = map(lambda x: x/2.0, dotted_pl_mean.values())530 dotted_pl_std2 = map(lambda x: x/2.0, dotted_pl_std.values())531 dotted_md_mean2 = map(lambda x: x/2.0, dotted_md_mean.values())532 dotted_md_std2 = map(lambda x: x/2.0, dotted_md_std.values())533 dotted_ctx_mean2 = map(lambda x: x/2.0, dotted_ctx_mean.values())534 dotted_ctx_std2 = map(lambda x: x/2.0, dotted_ctx_std.values())535 pl_mean2 = tuple(dotted_pl_mean2 + [0] + basic_pl_mean.values())536 pl_std2 = tuple(dotted_pl_std2 + [0] + basic_pl_std.values())537 md_mean2 = tuple(dotted_md_mean2 + [0] + basic_md_mean.values())538 md_std2 = tuple(dotted_md_std2 + [0] + basic_md_std.values())539 ctx_mean2 = tuple(dotted_ctx_mean2 + [0] + basic_ctx_mean.values())540 ctx_std2 = tuple(dotted_ctx_std2 + [0] + basic_ctx_std.values())541 # plt.bar(ind, pl_mean, width, color='g', yerr=pl_std, label='Object Payload')542 # plt.bar(ind, md_mean, width, color='r', yerr=md_std, bottom=pl_mean, label='Sync Metadata')543 # plt.bar(ind, ctx_mean, width, color='b', yerr=ctx_std, bottom=map(operator.add, pl_mean, md_mean), label='Object Metadata')544 AL = 0.6545 plt.bar(ind, pl_mean2, width, color='g', yerr=pl_std2, label='Object Payload', alpha=AL, hatch='xx')546 plt.bar(ind, md_mean2, width, color='r', yerr=md_std2, bottom=pl_mean2, label='Sync Metadata', alpha=AL, hatch='//')547 plt.bar(ind, ctx_mean2, width, color='b', yerr=ctx_std2, bottom=map(operator.add, pl_mean2, md_mean2), label='Object Metadata', alpha=AL, hatch='\\\\')548 plt.ylabel('Traffic Size (KB/s)')549 plt.legend(bbox_to_anchor=(0., 1.02, 0.9, .102), loc=3, ncol=3, mode="expand", borderaxespad=0.)550 # plt.yticks(np.arange(0, 751, 50))551 # plt.xticks(ind + width/2., tuple(types2 + types))552 plt.xticks(ind + width/1.25, labels)553 # rotate axis labels554 plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')555 # plt.xlim(xmin=-5)556 # plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 - 40)557 # plt.ylim(ymin=-0.5)558 # plt.ylim(ymax=1000)559 # plt.ylim((-1,752))560 plt.yscale('log')561 pp = PdfPages(test_path + 'sync_size_bar_abs_paper_LOG' + filename_ext + '.pdf')562 pp.savefig()563 pp.close()564 plt.rcParams.update({'font.size': 13})565 fig = plt.figure()566 # plt.title("Traffic Sent for Sync")567 # fig.add_axes([0.08, 0.12, 0.90, 0.8])568 my_dpi = 96569 fig = plt.figure(figsize=(800/my_dpi, 600/my_dpi), dpi=my_dpi)570 fig.add_axes([0.10, 0.19, 0.96, 0.72])571 ind = np.arange(N) # the x locations for the groups572 width = 0.85 # the width of the bars: can also be len(x) sequence573 # pl_mean = tuple(dotted_pl_mean.values() + basic_pl_mean.values())574 # md_mean = tuple(dotted_md_mean.values() + basic_md_mean.values())575 # ctx_mean = tuple(dotted_ctx_mean.values() + basic_ctx_mean.values())576 totals = map(operator.add, map(operator.add, pl_mean, md_mean), ctx_mean)577 pl_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(pl_mean, totals)]578 md_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(md_mean, totals)]579 ctx_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(ctx_mean, totals)]580 plt.bar(ind, pl_mean2, width, color='g', label='Object Data', alpha=AL, hatch='*')581 plt.bar(ind, ctx_mean2, width, color='b', bottom=pl_mean2, label='Object Metadata', alpha=AL, hatch='x')582 plt.bar(ind, md_mean2, width, color='r', bottom=map(operator.add, pl_mean2, ctx_mean2), label='Sync Metadata', alpha=AL, hatch='o')583 # plt.bar(ind, pl_mean2, 
width, color='g', label='Object Payload')584 # plt.bar(ind, md_mean2, width, color='r', bottom=pl_mean2, label='Sync Metadata')585 # plt.bar(ind, ctx_mean2, width, color='b', bottom=map(operator.add, pl_mean2, md_mean2), label='Object Metadata')586 plt.ylabel('Percentage')587 # plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=3, mode="expand", borderaxespad=0.)588 plt.legend(bbox_to_anchor=(-0.06, 1.02, 1.0, .102), loc=3, ncol=3, mode="expand", borderaxespad=0.)589 plt.ylim((-1,101))590 plt.yticks(np.arange(0, 101, 10))591 plt.xticks(ind + width/1.25, labels)592 # rotate axis labels593 plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')594 # plt.xlim(xmin=-5)595 # plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 - 40)596 # plt.ylim(ymin=-0.5)597 plt.savefig(test_path + 'sync_size_pct_bar_paper' + filename_ext + '.png')598 pp = PdfPages(test_path + 'sync_size_pct_bar_paper' + filename_ext + '.pdf')599 pp.savefig()600 pp.close()601######### Metadata vs Payload BAR602 plt.rcParams.update({'font.size': 13})603 # fig = plt.figure()604 # fig.add_axes([0.09, 0.18, 0.97, 0.74])605 my_dpi = 96606 fig = plt.figure(figsize=(800/my_dpi, 600/my_dpi), dpi=my_dpi)607 fig.add_axes([0.10, 0.19, 0.96, 0.72])608 ind = np.arange(N) # the x locations for the groups609 width = 0.85 # the width of the bars: can also be len(x) sequence610 # pl_mean = tuple(dotted_pl_mean.values() + basic_pl_mean.values())611 # md_mean = tuple(dotted_md_mean.values() + basic_md_mean.values())612 # ctx_mean = tuple(dotted_ctx_mean.values() + basic_ctx_mean.values())613 totals = map(operator.add, pl_mean, ctx_mean)614 pl_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(pl_mean, totals)]615 ctx_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(ctx_mean, totals)]616 plt.bar(ind, pl_mean2, width, color='g', label='Object Payload', alpha=AL, hatch='*')617 plt.bar(ind, ctx_mean2, width, color='b', bottom=pl_mean2, label='Object Metadata', alpha=AL, hatch='x')618 plt.ylabel('Percentage')619 plt.legend(bbox_to_anchor=(0., 1.02, 0.9, .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)620 plt.ylim((-1,101))621 plt.yticks(np.arange(0, 101, 10))622 plt.xticks(ind + width/1.25, labels)623 # rotate axis labels624 plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')625 # plt.xlim(xmin=-5)626 # plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 - 40)627 # plt.ylim(ymin=-0.5)628 plt.savefig(test_path + 'sync_size_pct_bar_ctx_paper' + filename_ext + '.png')629 pp = PdfPages(test_path + 'sync_size_pct_bar_ctx_paper' + filename_ext + '.pdf')630 pp.savefig()631 pp.close()632######### Metadata vs Payload BAR633 # plt.rcParams.update({'font.size': 13})634 # fig = plt.figure()635 # fig.add_axes([0.09, 0.18, 0.97, 0.74])636 my_dpi = 96637 fig = plt.figure(figsize=(800/my_dpi, 600/my_dpi), dpi=my_dpi)638 fig.add_axes([0.10, 0.19, 0.96, 0.72])639 ind = np.arange(N) # the x locations for the groups640 width = 0.85 # the width of the bars: can also be len(x) sequence641 # pl_mean = tuple(dotted_pl_mean.values() + basic_pl_mean.values())642 # md_mean = tuple(dotted_md_mean.values() + basic_md_mean.values())643 # ctx_mean = tuple(dotted_ctx_mean.values() + basic_ctx_mean.values())644 totals = map(operator.add, pl_mean, md_mean)645 pl_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(pl_mean, totals)]646 md_mean2 = [i / (max(0.000001,j)) * 100 for i,j in zip(md_mean, totals)]647 plt.bar(ind, pl_mean2, width, color='g', label='Object Payload', alpha=AL, hatch='*')648 plt.bar(ind, md_mean2, width, color='r', 
bottom=pl_mean2, label='Sync Metadata', alpha=AL, hatch='o')649 plt.ylabel('Percentage')650 plt.legend(bbox_to_anchor=(0., 1.02, 0.9, .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)651 plt.ylim((-1,101))652 plt.yticks(np.arange(0, 101, 10))653 plt.xticks(ind + width/1.25, labels)654 # rotate axis labels655 plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')656 # plt.annotate(0.4, -0.12, 'MerkleDB')657 # plt.figtext(0.23, 0.05, 'DottedDB', horizontalalignment='right')658 # plt.figtext(0.66, 0.05, 'MerkleDB', horizontalalignment='right')659 # plt.xlim(xmin=-5)660 # plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 - 40)661 # plt.ylim(ymin=-0.5)662 plt.savefig(test_path + 'sync_size_pct_bar_md_paper' + filename_ext + '.png')663 pp = PdfPages(test_path + 'sync_size_pct_bar_md_paper' + filename_ext + '.pdf')664 pp.savefig()665 pp.close()666 lw = 2667 msz = 6668 ms = ['s','o','<','>','^','v','*','D','p']669 plt.style.use('fivethirtyeight')670 plt.rcParams.update({'font.size': 8})671 fig = plt.figure()672 plt.title("Sync Traffic for MerkleDB")673 # n_lines, _ = dotted1['hh'].shape674 # basic_total = np.empty(n_lines)675 # basic_total.fill(basic_size['hhh'])676 # print str(basic_size['hhh']/1024.0) + " KB\n"677 # plt.plot(dotted1['hh'][DS['hh']:DE['hh'],0]-DS['hh']*5, basic_total[DS['hh']:DE['hh']]/1024.0, linewidth=2, label='MT Theoretical Size', c='r', linestyle='--')678 i = 0679 for t in types:680 plt.plot(basic_pl[t][BS[t]:BE[t],0]-5*BS[t], basic_total[t]*factor, linewidth=lw, label='MerkleDB, '+t.upper(), c='r', marker=ms[i],markersize=msz, markevery=7)681 i = i + 1682 plt.xlabel('Time (Seconds)')683 plt.ylabel('Network Traffic (KB/s)')684 plt.legend(loc='upper right')685 # plt.ylim((-1,62))686 plt.xlim(xmin=-5)687 plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5)688 plt.ylim(ymin=-5)689 pp = PdfPages(test_path + 'sync_size_basic_paper' + filename_ext + '.pdf')690 pp.savefig()691 pp.close()692 plt.style.use('fivethirtyeight')693 plt.rcParams.update({'font.size': 8})694 fig = plt.figure()695 plt.title("Sync Traffic for DottedDB")696 i = 0697 for t in types2:698 plt.plot(dotted_pl[t][DS[t]:DE[t],0]-5*DS[t], dotted_total[t]*factor, linewidth=lw, label='DottedDB, '+t.upper(), c='g', marker=ms[i], markersize=msz,markevery=7)699 i = i + 1700 plt.xlabel('Time (Seconds)')701 plt.ylabel('Network Traffic (KB/s)')702 plt.legend(loc='upper right')703 # plt.ylim((-1,62))704 plt.xlim(xmin=-5)705 plt.xlim(xmax=(DE[types2[0]]-DS[types2[0]])*5)706 plt.ylim(ymin=-5)707 pp = PdfPages(test_path + 'sync_size_dotted_paper' + filename_ext + '.pdf')708 pp.savefig()709 pp.close()710def sync_size_plot(type, DS,DE,BS,BE):711 if type == 'cluster':712 basic_ctx2 = load_cluster_basic_csv('sync-context-size_hist.csv', False)713 basic_md2 = load_cluster_basic_csv('sync-metadata-size_hist.csv', False)714 basic_pl2 = load_cluster_basic_csv('sync-payload-size_hist.csv', False)715 dotted_ctx2 = load_cluster_dotted_csv('sync-context-size_hist.csv', False)716 dotted_md2 = load_cluster_dotted_csv('sync-metadata-size_hist.csv', False)717 dotted_pl2 = load_cluster_dotted_csv('sync-payload-size_hist.csv', False)718 elif type == 'local':719 basic_ctx2 = load_local_basic_csv('sync-context-size_hist.csv', False)720 basic_md2 = load_local_basic_csv('sync-metadata-size_hist.csv', False)721 basic_pl2 = load_local_basic_csv('sync-payload-size_hist.csv', False)722 dotted_ctx2 = load_local_dotted_csv('sync-context-size_hist.csv', False)723 dotted_md2 = load_local_dotted_csv('sync-metadata-size_hist.csv', False)724 dotted_pl2 = 
load_local_dotted_csv('sync-payload-size_hist.csv', False)725 basic_ctx = mean_matrix(basic_ctx2)726 basic_md = mean_matrix(basic_md2)727 basic_pl = mean_matrix(basic_pl2)728 dotted_ctx = mean_matrix(dotted_ctx2)729 dotted_md = mean_matrix(dotted_md2)730 dotted_pl = mean_matrix(dotted_pl2)731 plt.style.use('fivethirtyeight')732 # plt.style.use('ggplot')733 # plt.style.use('dark_background')734 plt.figure()735 plt.title("Traffic in Node Synchronization")736 ms = 7737 lw = 3738 factor = (4/1024.0)739 # plt.plot(basic_ctx[:,0], basic_ctx[:,10]*factor, linewidth=2, label='Basic Context', c='g', marker='^', markersize=ms)740 # plt.plot(basic_md[:,0], basic_md[:,10]*factor, linewidth=2, label='Basic Metadata', c='r', marker='^', markersize=ms)741 # plt.plot(basic_pl[:,0], basic_pl[:,10]*factor, linewidth=2, label='Basic Payload', c='b', marker='^', markersize=ms)742 # plt.plot(dotted_ctx[:,0], dotted_ctx[:,10]*factor, linewidth=2, label='Dotted Context', c='g', marker='o', markersize=ms)743 # plt.plot(dotted_md[:,0], dotted_md[:,10]*factor, linewidth=2, label='Dotted Metadata', c='r', marker='o', markersize=ms)744 # plt.plot(dotted_pl[:,0], dotted_pl[:,10]*factor, linewidth=2, label='Dotted Payload', c='b', marker='o', markersize=ms)745 basic_total = (basic_ctx[BS:BE,10] + basic_md[BS:BE,10] + basic_pl[BS:BE,10])746 dotted_total = (dotted_ctx[DS:DE,10] + dotted_md[DS:DE,10] + dotted_pl[DS:DE,10])747 plt.plot(basic_pl[BS:BE,0]-5*BS, basic_total*factor, linewidth=lw, label='MerkleDB', color='r', marker='^', markersize=ms, markevery=5)748 plt.plot(dotted_pl[DS:DE,0]-5*DS, dotted_total*factor, linewidth=lw, label='DottedDB', color='g', marker='o', markersize=ms, markevery=5)749 # plt.plot(basic_pl[:,0]-1*interval, basic_total*factor, linewidth=lw, label='Basic Total', color='r')750 # plt.plot(dotted_pl[:,0]-6*interval, dotted_total*factor, linewidth=lw, label='Dotted Total', color='g')751 plt.xlabel('Time')752 plt.ylabel('Size (KBytes)')753 plt.legend()754 plt.ylim(ymin=-0.5)755 # plt.ylim(ymax=1000)756 # plt.ylim((-0.2,5))757 plt.xlim(xmin=0)758 plt.xlim(xmax=(DE-DS)*5)759 # plt.show()760 # plt.xlim((0,700))761 # save in PDF762 pp = PdfPages(current_dotted_dir + '/sync_size.pdf')763 pp.savefig()764 pp.close()765## Repair Latency766def sync_paper3(767 types = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],768 types2 = ['hh','hl','lh','ll'],769 filename_ext=''):770 basic = {}771 basicX = {}772 basicY = {}773 basicY2 = {}774 basic_ecdf = {}775 dotted = {}776 dottedX = {}777 dottedY = {}778 dottedY2 = {}779 dotted_ecdf = {}780 # types = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll']781 for t in types:782 change_current_basic(cluster_path + 'cluster_basic/sync_'+t+'/')783 basic[t] = load_cluster_basic_csv('write-latency_gauge.csv', False)784 basicY[t] = basic[t][:,1]785 basic_ecdf[t] = sm.distributions.ECDF(basicY[t])786 basicX[t] = np.linspace(min(basicY[t]), max(basicY[t]))787 basicY2[t] = basic_ecdf[t](basicX[t])788 # types2 = ['hh','hl','lh','ll']789 for t in types2:790 change_current_dotted(cluster_path + 'cluster_dotted/sync_'+t+'/')791 dotted[t] = load_cluster_dotted_csv('write-latency_gauge.csv', False)792 dottedY[t] = dotted[t][:,1]793 dotted_ecdf[t] = sm.distributions.ECDF(dottedY[t])794 dottedX[t] = np.linspace(min(dottedY[t]), max(dottedY[t]))795 print "min ", min(dottedY[t])796 dottedY2[t] = dotted_ecdf[t](dottedX[t])797 ms = ['s','o','<','>','^','v','*','D','p']798 l = 4799 axes = [0.09, 0.20, 0.85, 0.75]800 msize = 17801 font_size = 27802 plt.style.use('fivethirtyeight')803 
plt.rcParams.update({'font.size': font_size})804 fig = plt.figure()805 fig.add_axes(axes)806 # plt.title("10000 ms Sync Interval, 100% Replication Loss")807 t1 = 'hhh'808 t2 = 'lhh'809 # t3 = 'hh'810 t3 = types2[0]811 plt.step(basicX[t1]/1000.0, basicY2[t1], label="MerkleDB, HHH", lw=l, ms=msize, marker=ms[0], alpha=0.6, color='b', markevery=0.08)812 plt.step(basicX[t2]/1000.0, basicY2[t2], label="MerkleDB, LHH", lw=l, ms=msize, marker=ms[1], alpha=0.6, color='r', markevery=0.10)813 plt.step(dottedX[t3]/1000.0, dottedY2[t3], label="DottedDB HH", lw=l, ms=msize, marker=ms[3], alpha=0.6, color='g', markevery=0.12)814 plt.xlabel('Time from request to storage (s)')815 # plt.ylabel('Size (KB)')816 plt.legend(loc='lower right')817 plt.xlim(xmin=-0.5)818 plt.xlim(xmax=990)819 # plt.ylim(ymax=1.05)820 plt.ylim((-0.02,1.02))821 pp = PdfPages(test_path + 'repair_latency_hh_paper' + filename_ext + '.pdf')822 pp.savefig()823 pp.close()824 plt.style.use('fivethirtyeight')825 plt.rcParams.update({'font.size': font_size})826 fig = plt.figure()827 fig.add_axes(axes)828 # plt.title("10000 ms Sync Interval, 20% Replication Loss")829 t1 = 'hhl'830 t2 = 'lhl'831 # t3 = 'hl'832 t3 = types2[1]833 plt.step(basicX[t1]/1000.0, basicY2[t1], label="MerkleDB, HHL", lw=l, ms=msize, marker=ms[0], alpha=0.6, color='b', markevery=0.08)834 plt.step(basicX[t2]/1000.0, basicY2[t2], label="MerkleDB, LHL", lw=l, ms=msize, marker=ms[1], alpha=0.6, color='r', markevery=0.10)835 plt.step(dottedX[t3]/1000.0, dottedY2[t3], label="DottedDB, HL", lw=l, ms=msize, marker=ms[3], alpha=0.6, color='g', markevery=0.12)836 plt.xlabel('Time from request to storage (s)')837 # plt.ylabel('Size (KB)')838 plt.legend(loc='lower right')839 plt.xlim(xmin=-0.5)840 plt.xlim(xmax=195)841 # plt.ylim(ymax=1.05)842 plt.ylim((-0.02,1.02))843 pp = PdfPages(test_path + 'repair_latency_hl_paper' + filename_ext + '.pdf')844 pp.savefig()845 pp.close()846 plt.style.use('fivethirtyeight')847 plt.rcParams.update({'font.size': font_size})848 fig = plt.figure()849 fig.add_axes(axes)850 # plt.title("100 ms Sync Interval, 100% Replication Loss")851 t1 = 'hlh'852 t2 = 'llh'853 # t3 = 'lh'854 t3 = types2[2]855 plt.step(basicX[t1]/1000.0, basicY2[t1], label="MerkleDB, HLH", lw=l, ms=msize, marker=ms[0], alpha=0.6, color='b', markevery=0.08)856 plt.step(basicX[t2]/1000.0, basicY2[t2], label="MerkleDB, LLH", lw=l, ms=msize, marker=ms[1], alpha=0.6, color='r', markevery=0.08)857 plt.step(dottedX[t3]/1000.0, dottedY2[t3], label="DottedDB, LH", lw=l, ms=msize, marker=ms[3], alpha=0.6, color='g', markevery=0.08)858 plt.xlabel('Time from request to storage (s)')859 # plt.ylabel('Size (KB)')860 plt.legend(loc='lower right')861 plt.xlim(xmin=-0.5)862 plt.xlim(xmax=790)863 plt.ylim((-0.02,1.02))864 # plt.ylim(ymax=1.05)865 pp = PdfPages(test_path + 'repair_latency_lh_paper' + filename_ext + '.pdf')866 pp.savefig()867 pp.close()868 plt.style.use('fivethirtyeight')869 plt.rcParams.update({'font.size': font_size})870 fig = plt.figure()871 fig.add_axes(axes)872 # plt.title("100 ms Sync Interval, 20% Replication Loss")873 t1 = 'hll'874 t2 = 'lll'875 # t3 = 'll'876 t3 = types2[3]877 plt.step(basicX[t1]/1000.0, basicY2[t1], label="MerkleDB, HLL", lw=l, ms=msize, marker=ms[0], alpha=0.6, color='b', markevery=0.08)878 plt.step(basicX[t2]/1000.0, basicY2[t2], label="MerkleDB, LLL", lw=l, ms=msize, marker=ms[1], alpha=0.6, color='r', markevery=0.10)879 plt.step(dottedX[t3]/1000.0, dottedY2[t3], label="DottedDB, LL", lw=l, ms=msize, marker=ms[3], alpha=0.6, color='g', markevery=0.12)880 
plt.xlabel('Time from request to storage (s)')881 # plt.ylabel('Size (KB)')882 plt.legend(loc='lower right')883 # plt.yscale('log')884 # plt.xscale('log')885 plt.xlim(xmin=-0.5)886 plt.xlim(xmax=68)887 plt.ylim((-0.02,1.02))888 # plt.ylim(ymax=1.05)889 pp = PdfPages(test_path + 'repair_latency_ll_paper' + filename_ext + '.pdf')890 pp.savefig()891 pp.close()892def repair_latency_plot(type, DS,DE,BS,BE):893 if type == 'cluster':894 basic = load_cluster_basic_csv('write-latency_gauge.csv', False)895 dotted = load_cluster_dotted_csv('write-latency_gauge.csv', False)896 elif type == 'local':897 basic = load_local_basic_csv('write-latency_gauge.csv', False)898 dotted = load_local_dotted_csv('write-latency_gauge.csv', False)899 print "\n dotted: " + str(dotted.shape) + "\n basic: " + str(basic.shape)900 plt.style.use('fivethirtyeight')901 plt.figure()902 # plt.title("CDF of Replication Latency")903 # # Estimate the 2D histogram904 # nbins = 100905 # # H, xedges, yedges = np.histogram2d(dotted[:,0], dotted[:,1], bins=nbins)906 # H, xedges, yedges = np.histogram2d(basic[:,0], basic[:,1], bins=nbins)907 # # H needs to be rotated and flipped908 # H = np.rot90(H)909 # H = np.flipud(H)910 # # Mask zeros911 # Hmasked = np.ma.masked_where(H==0,H) # Mask pixels with a value of zero912 # # Plot 2D histogram using pcolor913 # plt.pcolormesh(xedges,yedges,Hmasked)914 basicY = basic[:,1]915 basic_ecdf = sm.distributions.ECDF(basicY)916 basicX = np.linspace(min(basicY), max(basicY))917 basicY2 = basic_ecdf(basicX)918 plt.step(basicX/1000.0, basicY2, label="MerkleDB")919 dottedY = dotted[:,1]920 dotted_ecdf = sm.distributions.ECDF(dottedY)921 dottedX = np.linspace(min(dottedY), max(dottedY))922 dottedY2 = dotted_ecdf(dottedX)923 plt.step(dottedX/1000.0, dottedY2, label="DottedDB")924 plt.xlabel('Time (s)')925 plt.xlim(xmin=-1.0)926 # plt.xlim(xmax=13)927 plt.legend()928 pp = PdfPages(current_dotted_dir + '/repair_latency_CDF.pdf')929 pp.savefig()930 pp.close()931def forceAspect(ax,aspect=1):932 im = ax.get_images()933 extent = im[0].get_extent()934 ax.set_aspect(abs((extent[1]-extent[0])/(extent[3]-extent[2]))/aspect)935def strip_paper():936 change_current_dotted(cluster_path + 'cluster_dotted/strip_hh/')937 delete_hh = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)938 write_hh = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)939 change_current_dotted(cluster_path + 'cluster_dotted/strip_hl/')940 delete_hl = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)941 write_hl = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)942 change_current_dotted(cluster_path + 'cluster_dotted/strip_mh/')943 delete_mh = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)944 write_mh = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)945 change_current_dotted(cluster_path + 'cluster_dotted/strip_ml/')946 delete_ml = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)947 write_ml = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)948 change_current_dotted(cluster_path + 'cluster_dotted/strip_lh/')949 delete_lh = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)950 write_lh = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)951 change_current_dotted(cluster_path + 'cluster_dotted/strip_ll/')952 delete_ll = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)953 write_ll = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)954 msz = 12955 mev = 0.1956 
plt.style.use('fivethirtyeight')957 fig = plt.figure()958 fig.add_axes([0.05, 0.1, 0.90, 0.85])959 plt.rcParams.update({'font.size': 13})960 # plt.subplot(111)961 # plt.title("CDF of Delete Latency")962 # if delete_lh.shape != (0,):963 deleteY = delete_hh[:,1]964 delete_ecdf = sm.distributions.ECDF(deleteY)965 deleteX = np.linspace(min(deleteY), max(deleteY))966 deleteY2 = delete_ecdf(deleteX)967 plt.step(deleteX/1000.0, deleteY2, label="10s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='o', alpha=0.6, color='b')968 deleteY = delete_hl[:,1]969 delete_ecdf = sm.distributions.ECDF(deleteY)970 deleteX = np.linspace(min(deleteY), max(deleteY))971 deleteY2 = delete_ecdf(deleteX)972 plt.step(deleteX/1000.0, deleteY2, label="10s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='*', alpha=0.6, color='b')973 deleteY = delete_mh[:,1]974 delete_ecdf = sm.distributions.ECDF(deleteY)975 deleteX = np.linspace(min(deleteY), max(deleteY))976 deleteY2 = delete_ecdf(deleteX)977 plt.step(deleteX/1000.0, deleteY2, label="1s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='s', alpha=0.6, color='g')978 deleteY = delete_ml[:,1]979 delete_ecdf = sm.distributions.ECDF(deleteY)980 deleteX = np.linspace(min(deleteY), max(deleteY))981 deleteY2 = delete_ecdf(deleteX)982 plt.step(deleteX/1000.0, deleteY2, label="1s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='^', alpha=0.6, color='g')983 deleteY = delete_lh[:,1]984 delete_ecdf = sm.distributions.ECDF(deleteY)985 deleteX = np.linspace(min(deleteY), max(deleteY))986 deleteY2 = delete_ecdf(deleteX)987 plt.step(deleteX/1000.0, deleteY2, label="0.1s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='v', alpha=0.6, color='r')988 deleteY = delete_ll[:,1]989 delete_ecdf = sm.distributions.ECDF(deleteY)990 deleteX = np.linspace(min(deleteY), max(deleteY))991 deleteY2 = delete_ecdf(deleteX)992 plt.step(deleteX/1000.0, deleteY2, label="0.1s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='D', alpha=0.6, color='r')993 # writeY = write_hh[:,1]994 # write_ecdf = sm.distributions.ECDF(writeY)995 # writeX = np.linspace(min(writeY), max(writeY))996 # writeY2 = write_ecdf(writeX)997 # plt.step(writeX/1000.0, writeY2, label="Writes")998 plt.xlabel('Time from update to delete (s)')999 # plt.legend(loc='lower right')1000 plt.legend(bbox_to_anchor=(0.30, 0, 0.35, 0.4), mode="expand", borderaxespad=0.)1001 plt.xlim(xmin=-0.5)1002 plt.xlim(xmax=20)1003 pp = PdfPages(test_path + '/delete_latency_paper.pdf')1004 pp.savefig()1005 pp.close()1006 plt.style.use('fivethirtyeight')1007 fig = plt.figure()1008 fig.add_axes([0.05, 0.1, 0.90, 0.85])1009 plt.rcParams.update({'font.size': 13})1010 # plt.title("CDF of Write Latency")1011 # if write.shape != (0,):1012 writeY = write_hh[:,1]1013 write_ecdf = sm.distributions.ECDF(writeY)1014 writeX = np.linspace(min(writeY), max(writeY))1015 writeY2 = write_ecdf(writeX)1016 plt.step(writeX/1000.0, writeY2, label="10s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='o', alpha=0.6, color='b')1017 writeY = write_hl[:,1]1018 write_ecdf = sm.distributions.ECDF(writeY)1019 writeX = np.linspace(min(writeY), max(writeY))1020 writeY2 = write_ecdf(writeX)1021 plt.step(writeX/1000.0, writeY2, label="10s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='*', alpha=0.6, color='b')1022 writeY = write_mh[:,1]1023 write_ecdf = 
sm.distributions.ECDF(writeY)1024 writeX = np.linspace(min(writeY), max(writeY))1025 writeY2 = write_ecdf(writeX)1026 plt.step(writeX/1000.0, writeY2, label="1s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='s', alpha=0.6, color='g')1027 writeY = write_ml[:,1]1028 write_ecdf = sm.distributions.ECDF(writeY)1029 writeX = np.linspace(min(writeY), max(writeY))1030 writeY2 = write_ecdf(writeX)1031 plt.step(writeX/1000.0, writeY2, label="1s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='^', alpha=0.6, color='g')1032 writeY = write_lh[:,1]1033 write_ecdf = sm.distributions.ECDF(writeY)1034 writeX = np.linspace(min(writeY), max(writeY))1035 writeY2 = write_ecdf(writeX)1036 plt.step(writeX/1000.0, writeY2, label="0.1s Strip Interval, 100% Replication Loss", ms=msz, markevery=mev, lw=2, marker='v', alpha=0.6, color='r')1037 writeY = write_ll[:,1]1038 write_ecdf = sm.distributions.ECDF(writeY)1039 writeX = np.linspace(min(writeY), max(writeY))1040 writeY2 = write_ecdf(writeX)1041 plt.step(writeX/1000.0, writeY2, label="0.1s Strip Interval, 10% Replication Loss", ms=msz, markevery=mev, lw=2, marker='D', alpha=0.6, color='r')1042 plt.xlabel('Time from update to strip (s)')1043 # plt.legend(loc='lower right')1044 plt.legend(bbox_to_anchor=(0.30, 0, 0.35, 0.4), mode="expand", borderaxespad=0.)1045 plt.xlim(xmin=-0.5)1046 plt.xlim(xmax=20)1047 pp = PdfPages(test_path + '/strip_paper.pdf')1048 pp.savefig()1049 pp.close()1050def strip_latency_plot(type, DS,DE,BS,BE):1051 if type == 'cluster':1052 delete = load_cluster_dotted_csv('strip-delete-latency_gauge.csv', False)1053 write = load_cluster_dotted_csv('strip-write-latency_gauge.csv', False)1054 elif type == 'local':1055 delete = load_local_dotted_csv('strip-delete-latency_gauge.csv', False)1056 write = load_local_dotted_csv('strip-write-latency_gauge.csv', False)1057 print "\n deleted: " + str(delete.shape) + "\n writes: " + str(write.shape)1058 plt.style.use('fivethirtyeight')1059 plt.figure()1060 plt.title("CDF of Strip Latency")1061 if delete.shape != (0,):1062 deleteY = delete[:,1]1063 delete_ecdf = sm.distributions.ECDF(deleteY)1064 deleteX = np.linspace(min(deleteY), max(deleteY))1065 deleteY2 = delete_ecdf(deleteX)1066 plt.step(deleteX/1000.0, deleteY2, label="Deletes")1067 writeY = write[:,1]1068 write_ecdf = sm.distributions.ECDF(writeY)1069 writeX = np.linspace(min(writeY), max(writeY))1070 writeY2 = write_ecdf(writeX)1071 plt.step(writeX/1000.0, writeY2, label="Writes")1072 plt.xlabel('Time (Seconds)')1073 plt.legend(loc='center right')1074 pp = PdfPages(current_dotted_dir + '/strip_latency.pdf')1075 pp.savefig()1076 pp.close()1077def deletes_paper():1078 change_current_basic(cluster_path + 'cluster_basic/deletes_50k/')1079 change_current_dotted(cluster_path + 'cluster_dotted/deletes_50k/')1080 basic_w = load_cluster_basic_csv('written-keys_hist.csv', True)1081 basic_d = load_cluster_basic_csv('deleted-keys_hist.csv', True)1082 dotted_wc = load_cluster_dotted_csv('write-completed_hist.csv', True)1083 dotted_wi = load_cluster_dotted_csv('write-incomplete_hist.csv', True)1084 dotted_d = load_cluster_dotted_csv('deletes-incomplete_hist.csv', True)1085 basic2 = np.concatenate([basic_d, basic_w], axis=0)1086 basic = mean_matrix(basic2)1087 dotted3 = np.concatenate([dotted_wc, dotted_wi], axis=0)1088 dotted1 = mean_matrix(dotted3)1089 dotted2 = np.concatenate([dotted_d, dotted_wc, dotted_wi], axis=0)1090 dotted = mean_matrix(dotted2)1091 initial_offset= 121092 final_offset= 351093 
dotted_bench = np.loadtxt((current_dotted_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1094 DS = int(dotted_bench[0]/5)-initial_offset1095 DE = int(dotted_bench[1]/5)+final_offset1096 basic_bench = np.loadtxt((current_basic_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1097 BS = int(basic_bench[0]/5)-initial_offset1098 BE = int(basic_bench[1]/5)+final_offset1099 NVnodes = int(basic_bench[6])1100 RF = int(basic_bench[7])1101 plt.style.use('fivethirtyeight')1102 fig = plt.figure()1103 fig.add_axes([0.15, 0.12, 0.85, 0.93])1104 # plt.title("Keys over Time with Deletes")1105 plt.rcParams.update({'font.size': 15})1106 me = 0.11107 msz = 121108 lw = 21109 plt.plot(basic[BS:BE,0]-BS*5, basic[BS:BE,10]/basic[BS:BE,2]*2*NVnodes/RF, linewidth=lw, label='MerkleDB', marker='o', color='r', markevery=me, markersize=msz, alpha=0.6)1110 # plt.plot(basic[:,0]-3*interval, basic[:,10]*2*(16/32.0), linewidth=2, label='Basic', c='r', marker='^')1111 # plt.plot(dotted[DS:DE,0]-DS*5, dotted[DS:DE,10]*3*(16/32.0), linewidth=2, label='Dotted', c='g', marker='o')1112 plt.plot(dotted[DS:DE,0]-DS*5, dotted[DS:DE,10]/dotted[DS:DE,2]*3*NVnodes/RF, linewidth=lw, label='DottedDB', marker='^', color='g', markevery=me-0.03, markersize=msz, alpha=0.6)1113 plt.plot(dotted1[DS:DE,0]-DS*5, dotted1[DS:DE,10]/dotted1[DS:DE,2]*2*NVnodes/RF, linewidth=lw, label='Ideal', marker='s', color='b', markevery=me+0.03, markersize=msz, alpha=0.6)1114 plt.xlabel('Time (s)')1115 plt.ylabel('# Objects in Storage')1116 plt.legend(loc='lower right')1117 plt.ylim(ymin=-0.2)1118 # plt.ylim((-0.2,5))1119 plt.xlim(xmin=0)1120 # plt.xlim(xmax=(BE-BS)*5)1121 plt.xlim(xmax=1375)1122 # plt.xlim((0,700))1123 # save in PDF1124 pp = PdfPages(test_path + 'total_number_keys_paper.pdf')1125 pp.savefig()1126 pp.close()1127def number_keys_plot(type, DS,DE,BS,BE,NVnodes,RF):1128 if type == 'cluster':1129 basic_w = load_cluster_basic_csv('written-keys_hist.csv', True)1130 basic_d = load_cluster_basic_csv('deleted-keys_hist.csv', True)1131 dotted_wc = load_cluster_dotted_csv('write-completed_hist.csv', True)1132 dotted_wi = load_cluster_dotted_csv('write-incomplete_hist.csv', True)1133 dotted_d = load_cluster_dotted_csv('deletes-incomplete_hist.csv', True)1134 elif type == 'local':1135 basic_w = load_local_basic_csv('written-keys_hist.csv', True)1136 basic_d = load_local_basic_csv('deleted-keys_hist.csv', True)1137 dotted_wc = load_local_dotted_csv('write-completed_hist.csv', True)1138 dotted_wi = load_local_dotted_csv('write-incomplete_hist.csv', True)1139 dotted_d = load_local_dotted_csv('deletes-incomplete_hist.csv', True)1140 basic2 = np.concatenate([basic_d, basic_w], axis=0)1141 basic = mean_matrix(basic2)1142 dotted3 = np.concatenate([dotted_wc, dotted_wi], axis=0)1143 dotted1 = mean_matrix(dotted3)1144 dotted2 = np.concatenate([dotted_d, dotted_wc, dotted_wi], axis=0)1145 dotted = mean_matrix(dotted2)1146 # print "\n basic before: " + str(basic2.shape) + "\n after: " + str(basic.shape)1147 # print "\n dotted before: " + str(dotted2.shape) + "\n after: " + str(dotted.shape)1148 plt.style.use('fivethirtyeight')1149 fig = plt.figure()1150 fig.add_axes([0.15, 0.10, 0.8, 0.8])1151 plt.title("Total Number of Keys in the Cluster")1152 # plt.plot(basic[:,0]-4*interval, basic[:,9]*2*4, linewidth=2, label='Basic', c='r', marker='^')1153 # plt.plot(dotted[:,0]-2*interval, dotted[:,9]*3*4, linewidth=2, label='Dotted', c='g', marker='o')1154 lw = 41155 plt.plot(basic[BS:BE,0]-BS*5, basic[BS:BE,10]/basic[BS:BE,2]*2*NVnodes/RF, ls='-.', 
linewidth=lw, label='MerkleDB', c='r')1156 # plt.plot(basic[:,0]-3*interval, basic[:,10]*2*(16/32.0), linewidth=2, label='Basic', c='r', marker='^')1157 # plt.plot(dotted[DS:DE,0]-DS*5, dotted[DS:DE,10]*3*(16/32.0), linewidth=2, label='Dotted', c='g', marker='o')1158 plt.plot(dotted[DS:DE,0]-DS*5, dotted[DS:DE,10]/dotted[DS:DE,2]*3*NVnodes/RF, ls='--',linewidth=lw, label='DottedDB', c='g')1159 plt.plot(dotted1[DS:DE,0]-DS*5, dotted1[DS:DE,10]/dotted1[DS:DE,2]*2*NVnodes/RF, ls='-', linewidth=2, label='Ideal', c='b')1160 plt.xlabel('Time (s)')1161 plt.ylabel('# Keys')1162 plt.legend(loc='lower right')1163 plt.ylim(ymin=-150.0)1164 # plt.ylim((-0.2,5))1165 plt.xlim(xmin=60)1166 plt.xlim(xmax=(DE-DS)*5+50)1167 # plt.xlim(xmax=400)1168 # plt.xlim((0,700))1169 # save in PDF1170 pp = PdfPages(current_dotted_dir + '/total_number_keys.pdf')1171 pp.savefig()1172 pp.close()1173def correct_hit_ratio(m):1174 for row in m:1175 if row[2] == 0:1176 row[4] = 1001177def perf_paper(1178 types1 = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1179 types2 = ['hh','hl','lh','ll'],1180 filename_ext=''):1181 basic_run = {}1182 dotted_run = {}1183 # types1 = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll']1184 # types1 = ['hlh','hll','llh','lll']1185 for t in types1:1186 change_current_basic(cluster_path + 'cluster_basic/sync_'+t+'/')1187 # basic_run[t] = np.loadtxt((current_basic_dir +'/ycsb/basic_run2.csv'), delimiter=',', skiprows=11, usecols=range(1,2))1188 basic_run[t] = np.genfromtxt((current_basic_dir +'/ycsb/basic_run2.csv'), delimiter=',', skip_header=11, skip_footer=1)1189 # types2 = ['lh','ll']1190 # types2 = ['hh','hl','lh','ll']1191 for t in types2:1192 change_current_dotted(cluster_path + 'cluster_dotted/sync_'+t+'/')1193 dotted_run[t] = np.genfromtxt((current_dotted_dir +'/ycsb/dotted_run2.csv'), delimiter=',', skip_header=11, skip_footer=1)1194 plt.style.use('fivethirtyeight')1195 plt.rcParams.update({'font.size': 10})1196 plt.rcParams.update({'text.usetex': True})1197 # plt.rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})1198 # plt.rc('text', usetex=True)1199 msz = 61200 fig = plt.figure()1201 plt.title("Operation Latencies in MerkleDB")1202 ms = ['s','o','<','>','^','v','*','D','p']1203 cl = ['r','b','y','b','c']1204 i = 01205 for t in types1:1206 plt.scatter(basic_run[t][:,1], basic_run[t][:,2], label=labels[t], c='r', marker=ms[i], alpha=.4, s=28)1207 # plt.plot(basic_run[t][:,1], basic_run[t][:,2], linewidth=1, label='MerkleDB, '+t.upper(), c='r', markersize=msz, marker=ms[i], markevery=7)1208 i = i + 11209 i=01210 # for t in ['lh','ll']:1211 for t in [types2[2],types2[3]]:1212 plt.scatter(dotted_run[t][:,1], dotted_run[t][:,2], label=labels[t], c='b', marker=ms[i], alpha=.4, s=28)1213 # plt.plot(dotted_run[t][:,1], dotted_run[t][:,2], linewidth=1, label='DottedDB, '+t.upper(), c='r', markersize=msz, marker=ms[i], markevery=7)1214 plt.plot(1,1,marker='o',markersize=0)1215 i = i + 11216 # for t in ['hh','hl']:1217 for t in [types2[0],types2[1]]:1218 plt.scatter(dotted_run[t][:,1], dotted_run[t][:,2], label=labels[t], c='g', marker=ms[i], alpha=.4, s=28)1219 # plt.plot(dotted_run[t][:,1], dotted_run[t][:,2], linewidth=1, label='DottedDB, '+t.upper(), c='r', markersize=msz, marker=ms[i], markevery=7)1220 plt.plot(1,1,marker='x',markersize=0)1221 i = i + 11222 plt.plot()1223 ####### make the y axis logarithmic1224 plt.yscale('log')1225 plt.xlabel('Latency (ms)')1226 plt.ylabel('Number of Operations')1227 plt.legend(loc='upper right')1228 # plt.ylim((-100,1000000))1229 
plt.ylim(ymin=-100)1230 plt.xlim(xmin=-15)1231 plt.xlim(xmax=800)1232 pp = PdfPages(test_path + 'perf_paper'+filename_ext+'.pdf')1233 pp.savefig()1234 pp.close()1235 plt.style.use('fivethirtyeight')1236 plt.rcParams.update({'font.size': 10})1237 plt.rcParams.update({'text.usetex': True})1238 # plt.rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})1239 # plt.rc('text', usetex=True)1240 msz = 61241 fig = plt.figure()1242 plt.title("Operation Latencies in MerkleDB")1243 ms = ['s','o','<','>','^','v','*','D','p']1244 cl = ['r','b','y','b','c']1245 i = 01246 for t in types1:1247 # plt.scatter(basic_run[t][:,1], basic_run[t][:,2], label=labels[t], c='r', marker=ms[i], alpha=.4, s=28)1248 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1249 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=2, label=labels[t], color='r', marker=ms[i], markevery=7, alpha=0.6)1250 # plt.plot(basic_run[t][:,1], basic_run[t][:,2], linewidth=1, label='MerkleDB, '+t.upper(), c='r', markersize=msz, marker=ms[i], markevery=7)1251 i = i + 11252 i=01253 # for t in ['lh','ll']:1254 for t in [types2[2],types2[3]]:1255 # plt.scatter(dotted_run[t][:,1], dotted_run[t][:,2], label=labels[t], c='b', marker=ms[i], alpha=.4, s=28)1256 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1257 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=2, label=labels[t], color='b', marker=ms[i], markevery=7, alpha=0.6)1258 # plt.plot(1,1,marker='o',markersize=0)1259 i = i + 11260 # for t in ['hh','hl']:1261 for t in [types2[0],types2[1]]:1262 # plt.scatter(dotted_run[t][:,1], dotted_run[t][:,2], label=labels[t], c='g', marker=ms[i], alpha=.4, s=28)1263 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1264 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=2, label=labels[t], color='g', marker=ms[i], markevery=7, alpha=0.6)1265 # plt.plot(1,1,marker='x',markersize=0)1266 i = i + 11267 plt.plot()1268 ####### make the y axis logarithmic1269 plt.xscale('log')1270 plt.xlabel('Latency (ms)')1271 # plt.ylabel('Number of Operations')1272 plt.legend(loc='lower right')1273 plt.ylim((0,1.02))1274 # plt.ylim(ymin=-200)1275 # plt.xlim(xmin=-2000)1276 # plt.xlim(xmax=800)1277 pp = PdfPages(test_path + 'perf_paper_CDF'+filename_ext+'.pdf')1278 pp.savefig()1279 pp.close()1280 ## CDFs of client request latencies1281 ms = ['s','o','<','>','^','v','*','D','p']1282 lw = 61283 axes = [0.09, 0.20, 0.85, 0.75]1284 msize = 171285 font_size = 271286 plt.tight_layout()1287 plt.style.use('fivethirtyeight')1288 plt.rcParams.update({'figure.autolayout': True})1289 plt.rcParams.update({'font.size': font_size})1290 plt.rcParams.update({'text.usetex': False})1291 fig = plt.figure()1292 fig.add_axes(axes)1293 t = 'hhh'1294 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1295 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB HHH", color='b', marker=ms[0], ms=msize, markevery=0.07, alpha=0.6)1296 t = 'lhh'1297 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1298 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB LHH", color='r', marker=ms[1], ms=msize, markevery=0.1, alpha=0.6)1299 t = types2[0]1300 print "HH == ", t1301 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1302 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=lw, label="DottedDB HH", color='g', marker=ms[2], ms=msize, markevery=0.13, alpha=0.6)1303 # plt.plot()1304 plt.xscale('log')1305 plt.xlabel('Update Latency (ms)')1306 plt.legend(loc='lower right')1307 plt.ylim((-0.02,1.02))1308 pp = PdfPages(test_path + 
'perf_latency_cdf_hh_paper'+filename_ext+'.pdf')1309 pp.savefig()1310 pp.close()1311 fig = plt.figure()1312 fig.add_axes(axes)1313 t = 'hhl'1314 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1315 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB HHL", color='b', marker=ms[0], ms=msize, markevery=0.07, alpha=0.6)1316 t = 'lhl'1317 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1318 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB LHL", color='r', marker=ms[1], ms=msize, markevery=0.1, alpha=0.6)1319 t = types2[1]1320 print "HL == ", t1321 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1322 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=lw, label="DottedDB HL", color='g', marker=ms[2], ms=msize, markevery=0.13, alpha=0.6)1323 # plt.plot()1324 plt.xscale('log')1325 plt.xlabel('Update Latency (ms)')1326 plt.legend(loc='lower right')1327 plt.ylim((-0.02,1.02))1328 pp = PdfPages(test_path + 'perf_latency_cdf_hl_paper'+filename_ext+'.pdf')1329 pp.savefig()1330 pp.close()1331 fig = plt.figure()1332 fig.add_axes(axes)1333 t = 'hlh'1334 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1335 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB HLH", color='b', marker=ms[0], ms=msize, markevery=0.07, alpha=0.6)1336 t = 'llh'1337 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1338 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB LLH", color='r', marker=ms[1], ms=msize, markevery=0.1, alpha=0.6)1339 t = types2[2]1340 print "LH == ", t1341 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1342 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=lw, label="DottedDB LH", color='g', marker=ms[2], ms=msize, markevery=0.13, alpha=0.6)1343 # plt.plot()1344 plt.xscale('log')1345 plt.xlabel('Update Latency (ms)')1346 plt.legend(loc='lower right')1347 plt.ylim((-0.02,1.02))1348 pp = PdfPages(test_path + 'perf_latency_cdf_lh_paper'+filename_ext+'.pdf')1349 pp.savefig()1350 pp.close()1351 fig = plt.figure()1352 fig.add_axes(axes)1353 t = 'hll'1354 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1355 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB HLL", color='b', marker=ms[0], ms=msize, markevery=0.07, alpha=0.6)1356 t = 'lll'1357 Y = basic_run[t][:,2] / (basic_run[t][:,2]).sum()1358 plt.plot(basic_run[t][:,1], np.cumsum(Y), linewidth=lw, label="MerkleDB LLL", color='r', marker=ms[1], ms=msize, markevery=0.1, alpha=0.6)1359 t = types2[3]1360 print "LL == ", t1361 Y = dotted_run[t][:,2] / (dotted_run[t][:,2]).sum()1362 plt.plot(dotted_run[t][:,1], np.cumsum(Y), linewidth=lw, label="DottedDB LL", color='g', marker=ms[2], ms=msize, markevery=0.13, alpha=0.6)1363 # plt.plot()1364 plt.xscale('log')1365 plt.xlabel('Update Latency (ms)')1366 plt.legend(loc='lower right')1367 plt.ylim((-0.02,1.02))1368 pp = PdfPages(test_path + 'perf_latency_cdf_ll_paper'+filename_ext+'.pdf')1369 pp.savefig()1370 pp.close()1371## Hit Ratio1372def sync_paper1(1373 types = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1374 types2 = ['hh','hl','lh','ll'],1375 filename_ext=''):1376 initial_offset= -11377 final_offset= 01378 # Basic1379 basic_m2 = {}1380 basic_tm2 = {}1381 basic_m = {}1382 basic_tm = {}1383 basic = {}1384 basic_pct0 = {}1385 basic_pct = {}1386 BS = {}1387 BE = {}1388 DS = {}1389 DE = {}1390 NVnodes = {}1391 RF = {}1392 dotted = {}1393 dotted2 = {}1394 dotted_m2 = {}1395 dotted_tm2 = {}1396 dotted_m = {}1397 dotted_tm = {}1398 dotted3 = {}1399 # types = 
['hhh','hhl','hlh','hll','lhh','lhl','llh','lll']1400 for t in types:1401 change_current_basic(cluster_path + 'cluster_basic/sync_'+t+'/')1402 basic_bench = np.loadtxt((current_basic_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1403 BS[t] = int(basic_bench[0]/5)-initial_offset1404 BE[t] = int(basic_bench[1]/5)+final_offset1405 NVnodes[t] = int(basic_bench[6])1406 RF[t] = int(basic_bench[7])1407 basic_m2[t] = load_cluster_basic_csv('sync-segment-keys-missing_hist.csv', False)1408 basic_tm2[t] = load_cluster_basic_csv('sync-segment-keys-truly-missing_hist.csv', False)1409 basic_m[t] = mean_matrix(basic_m2[t])1410 basic_tm[t] = mean_matrix(basic_tm2[t])1411 basic[t] = join_matrix(basic_m[t], basic_tm[t])1412 basic_pct0[t] = basic[t][BS[t]:BE[t],22]*100/(basic[t][BS[t]:BE[t],10]*1.0)1413 basic_pct[t] = np.array(map(lambda x: min(x,100), basic_pct0[t]))1414 # types2 = ['hh','hl','lh','ll']1415 for t in types2:1416 change_current_dotted(cluster_path + 'cluster_dotted/sync_'+t+'/')1417 dotted_bench = np.loadtxt((current_dotted_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1418 DS[t] = int(dotted_bench[0]/5)-initial_offset1419 DE[t] = int(dotted_bench[1]/5)+final_offset1420 dotted2[t] = load_cluster_dotted_csv('sync-hit-ratio_hist.csv', False)1421 correct_hit_ratio(dotted2[t])1422 dotted[t] = mean_matrix(dotted2[t])1423 # dotted_m2[t] = load_cluster_dotted_csv('sync-sent-missing_hist.csv', False)1424 # dotted_tm2[t] = load_cluster_dotted_csv('sync-sent-truly-missing_hist.csv', False)1425 # dotted_m[t] = mean_matrix(dotted_m2[t])1426 # dotted_tm[t] = mean_matrix(dotted_tm2[t])1427 # dotted3[t] = join_matrix(dotted_m[t], dotted_tm[t])1428 plt.style.use('fivethirtyeight')1429 plt.rcParams.update({'font.size': 13})1430 me = 0.11431 msz = 121432 fig = plt.figure()1433 # fig.add_axes([0.13, 0.10, 0.85, 0.8])1434 plt.title("Hit Ratio for MerkleDB")1435 ms = ['s','o','<','>','^','v','*','D','p']1436 i = 01437 for t in types:1438 plt.plot(basic[t][BS[t]:BE[t],0]-5*BS[t], basic_pct[t], linewidth=1, label='MerkleDB, '+t.upper(), c='r', markersize=msz, marker=ms[i], markevery=me)1439 i = i + 11440 plt.xlabel('Time (Seconds)')1441 plt.ylabel('Percentage (%)')1442 plt.legend(loc='center right')1443 plt.ylim((-1,62))1444 plt.xlim(xmin=-15)1445 plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 + 30)1446 pp = PdfPages(test_path + 'basic_hit_ratio_paper' + filename_ext + '.pdf')1447 pp.savefig()1448 pp.close()1449 fig = plt.figure()1450 # fig.add_axes([0.13, 0.10, 0.85, 0.8])1451 plt.title("Hit Ratio for DottedDB")1452 plt.rcParams.update({'font.size': 13})1453 ms = ['s','o','<','>','^','v','*','D','p']1454 i = 01455 for t in types2:1456 # plt.plot(dotted3[t][DS[t]:DE[t],0]-5*DS[t], dotted3[t][DS[t]:DE[t],22]*100/(dotted3[t][DS[t]:DE[t],10]*1.0), linewidth=1, label='Dotted3, '+t, c='b', markersize=4, marker=ms[i], markevery=7)1457 i = i + 11458 plt.plot(dotted[t][DS[t]:DE[t],0]-5*DS[t], dotted[t][DS[t]:DE[t],4], linewidth=1, label='DottedDB, '+t.upper(), c='g', markersize=msz, marker=ms[i], markevery=me)1459 i = i + 11460 plt.xlabel('Time (Seconds)')1461 plt.ylabel('Percentage (%)')1462 plt.legend(loc='lower right')1463 # plt.ylim(ymin=-150.0)1464 plt.ylim((75,100.3))1465 plt.xlim(xmin=-15)1466 plt.xlim(xmax=(DE[types2[0]]-DS[types2[0]])*5 + 30)1467 # plt.xlim(xmax=1375)1468 # save in PDF1469 pp = PdfPages(test_path + 'dotted_hit_ratio_paper' + filename_ext + '.pdf')1470 pp.savefig()1471 pp.close()1472 fig = plt.figure()1473 fig.add_axes([0.08, 0.1, 0.9, 0.76])1474 plt.rcParams.update({'font.size': 
13})1475 # plt.title("Hit Ratio")1476 ms = ['s','o','<','>','^','v','*','D','p','+','8','x']1477 i = 01478 for t in types:1479 plt.plot(basic[t][BS[t]:BE[t],0]-5*BS[t], basic_pct[t], linewidth=1, label='MerkleDB, '+t.upper(), color='r', markersize=msz, marker=ms[i], markevery=me, alpha=0.6)1480 i = i + 11481 # i = 01482 for t in types2:1483 # plt.plot(dotted3[t][DS[t]:DE[t],0]-5*DS[t], dotted3[t][DS[t]:DE[t],22]*100/(dotted3[t][DS[t]:DE[t],10]*1.0), linewidth=1, label='Dotted3, '+t, c='b', markersize=4, marker=ms[i], markevery=7)1484 plt.plot(dotted[t][DS[t]:DE[t],0]-5*DS[t], dotted[t][DS[t]:DE[t],4], linewidth=1, label='DottedDB, '+t[:2].upper(), color='g', markersize=msz, marker=ms[i], markevery=me, alpha=0.6)1485 i = i + 11486 plt.xlabel('Time (s)')1487 plt.ylabel('Hit Ratio (%)')1488 plt.legend(bbox_to_anchor=(0., 1.02, 1., .16), loc='upper center', ncol=3, mode="expand", borderaxespad=0.)1489 # plt.legend(loc='center right')1490 plt.ylim((-1,101))1491 plt.xlim(xmin=-15)1492 plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5 + 30)1493 pp = PdfPages(test_path + 'hit_ratio_paper' + filename_ext + '.pdf')1494 pp.savefig()1495 pp.close()1496 ## Hit Ratio CDF1497 fig = plt.figure()1498 fig.add_axes([0.05, 0.1, 0.92, 0.68])1499 plt.rcParams.update({'font.size': 13})1500 ms = ['s','o','<','>','^','v','*','D',r'$\lambda$',r'$\clubsuit$',r'$\circlearrowleft$',r'$\bowtie$',r'$\checkmark$']1501 i = 01502 for t in types:1503 writeY = basic_pct[t][BS[t]+10:BE[t]]1504 write_ecdf = sm.distributions.ECDF(writeY)1505 writeX = np.linspace(min(writeY), max(writeY))1506 writeY2 = write_ecdf(writeX)1507 special_me = me if (t == 'hll' or t == 'lll') else 11508 color_br = 'r' if (t[0] == 'h') else 'b'1509 plt.step(writeX, writeY2, label="MerkleDB, "+t.upper(), lw=2, marker=ms[i], ms=msz, markevery=special_me, alpha=0.6, color=color_br)1510 i = i + 11511 # i = 01512 for t in types2:1513 writeY = dotted[t][DS[t]:DE[t],4]1514 write_ecdf = sm.distributions.ECDF(writeY)1515 writeX = np.linspace(min(writeY), max(writeY))1516 writeY2 = write_ecdf(writeX)1517 plt.step(writeX, writeY2, label="DottedDB, "+t[:2].upper(), lw=2, marker=ms[i], ms=msz, markevery=me, alpha=0.6, color='g')1518 i = i + 11519 plt.xlabel('Hit Ratio (%)')1520 # plt.legend(bbox_to_anchor=[0.40, 0.8])1521 plt.legend(bbox_to_anchor=(0., 1.02, 1., .30), loc='upper center', ncol=3, mode="expand", borderaxespad=0.)1522 plt.ylim((-0.05,1.05))1523 plt.xlim(xmin=-2)1524 plt.xlim(xmax=101)1525 pp = PdfPages(test_path + '/hit_ratio_cdf' + filename_ext + '.pdf')1526 pp.savefig()1527 pp.close()1528def sync_hit_ratio_plot(type, DS,DE,BS,BE):1529 DS = DS-11530 BS = max(BS-1,0)1531 if type == 'cluster':1532 basic_m2 = load_cluster_basic_csv('sync-segment-keys-missing_hist.csv', False)1533 basic_tm2 = load_cluster_basic_csv('sync-segment-keys-truly-missing_hist.csv', False)1534 dotted2 = load_cluster_dotted_csv('sync-hit-ratio_hist.csv', False)1535 dotted_m2 = load_cluster_dotted_csv('sync-sent-missing_hist.csv', False)1536 dotted_tm2 = load_cluster_dotted_csv('sync-sent-truly-missing_hist.csv', False)1537 elif type == 'local':1538 basic_m2 = load_local_basic_csv('sync-segment-keys-missing_hist.csv', False)1539 basic_tm2 = load_local_basic_csv('sync-segment-keys-truly-missing_hist.csv', False)1540 dotted2 = load_local_dotted_csv('sync-hit-ratio_hist.csv', False)1541 dotted_m2 = load_local_dotted_csv('sync-sent-missing_hist.csv', False)1542 dotted_tm2 = load_local_dotted_csv('sync-sent-truly-missing_hist.csv', False)1543 basic_m = mean_matrix(basic_m2)1544 basic_tm = 
mean_matrix(basic_tm2)1545 basic = join_matrix(basic_m, basic_tm)1546 # print "\n basic before: " + str(basic.shape) + "\n after: " + str(basic_tm.shape) + "\n\n"1547 dotted = mean_matrix(dotted2)1548 # print "\n dotted before: " + str(dotted2.shape) + "\n after: " + str(dotted.shape)1549 dotted_m = mean_matrix(dotted_m2)1550 dotted_tm = mean_matrix(dotted_tm2)1551 dotted3 = join_matrix(dotted_m, dotted_tm)1552 # print "\n dotted3 before: " + str(dotted3.shape) + "\n after: " + str(dotted_tm.shape) + "\n\n"1553 plt.style.use('fivethirtyeight')1554 plt.figure()1555 plt.title("Sync Hit Ratio")1556 basic_pct0 = basic[BS:BE,22]*100/(basic[BS:BE,10]*1.0)1557 basic_pct = np.array(map(lambda x: min(x,100), basic_pct0))1558 plt.plot(basic[BS:BE,0]-5*BS, basic_pct, linewidth=2, label='MerkleDB', c='r', marker='^', markevery=5)1559 # plt.plot(dotted3[DS:DE,0]-5*DS, dotted3[DS:DE,22]*100/(dotted3[DS:DE,10]*1.0), linewidth=2, label='dotted3', c='b', marker='x', markevery=5)1560 plt.plot(dotted[DS:DE,0]-5*DS, dotted[DS:DE,4], linewidth=2, label='DottedDB', c='g', marker='o', markevery=5)1561 plt.xlabel('Time')1562 plt.ylabel('Percentage (%)')1563 plt.legend(loc='lower left')1564 # plt.ylim(ymin=-150.0)1565 plt.ylim((-1,102))1566 plt.xlim(xmin=-5.0)1567 plt.xlim(xmax=(DE-DS)*5)1568 # plt.xlim(xmax=400)1569 # plt.xlim((0,700))1570 # save in PDF1571 pp = PdfPages(current_dotted_dir + '/sync_hit_ratio.pdf')1572 pp.savefig()1573 pp.close()1574## Sync Metadata1575def sync_paper2(1576 types = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1577 types2 = ['hh','hl','lh','ll'],1578 filename_ext=''):1579 initial_offset= -11580 final_offset= 01581 # Basic1582 basic_total = {}1583 basic_mt = {}1584 basic = {}1585 BS = {}1586 BE = {}1587 DS = {}1588 DE = {}1589 num_keys = {}1590 vnodes = {}1591 RF = {}1592 hash_size = {}1593 key_size = {}1594 mt = {}1595 block_size = {}1596 basic_size = {}1597 dotted_bvv = {}1598 dotted_kl = {}1599 dotted_nsk = {}1600 dotted1 = {}1601 dotted2 = {}1602 dotted3 = {}1603 mt_metadata = 111604 # types = ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll']1605 for t in types:1606 change_current_basic(cluster_path + 'cluster_basic/sync_'+t+'/')1607 basic_bench = np.loadtxt((current_basic_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1608 BS[t] = int(basic_bench[0]/5)-initial_offset1609 BE[t] = int(basic_bench[1]/5)+final_offset1610 num_keys[t] = int(basic_bench[2])1611 vnodes[t] = int(basic_bench[6])1612 RF[t] = int(basic_bench[7])1613 hash_size[t] = int(basic_bench[8])1614 key_size[t] = int(basic_bench[9])1615 mt[t] = int(basic_bench[10])1616 block_size[t] = mt_metadata + hash_size[t] + key_size[t]1617 basic_size[t] = (block_size[t] + mt[t]*block_size[t] + (mt[t]**2)*block_size[t] + (RF[t]*num_keys[t]/(vnodes[t]*1.0))*block_size[t]) * RF[t]1618 basic_mt[t] = load_cluster_basic_csv('mt-size_hist.csv', True)1619 basic[t] = mean_matrix(basic_mt[t])1620 n_lines, _ = basic_mt[t].shape1621 basic_total[t] = np.empty(n_lines)1622 basic_total[t].fill(basic_size[t])1623 print str(basic_size[t]/1024.0) + " KB\n"1624 # types2 = ['hh','hl','lh','ll']1625 for t in types2:1626 change_current_dotted(cluster_path + 'cluster_dotted/sync_'+t+'/')1627 dotted_bench = np.loadtxt((current_dotted_dir +'/node1/bench_file.csv'), delimiter=':', usecols=[1])1628 DS[t] = int(dotted_bench[0]/5)-initial_offset1629 DE[t] = int(dotted_bench[1]/5)+final_offset1630 dotted_bvv[t] = load_cluster_dotted_csv('bvv-size_hist.csv', False)1631 dotted_kl[t] = load_cluster_dotted_csv('kl-size_hist.csv', False)1632 
dotted_nsk[t] = load_cluster_dotted_csv('nsk-size_hist.csv', False)1633 dotted1[t] = mean_matrix(dotted_bvv[t])1634 dotted2[t] = mean_matrix(dotted_kl[t])1635 dotted3[t] = mean_matrix(dotted_nsk[t])1636 plt.style.use('fivethirtyeight')1637 plt.rcParams.update({'font.size': 9})1638 fig = plt.figure()1639 # fig.add_axes([0.09, 0.09, 0.9, 0.76])1640 fig.add_axes([0.10, 0.09, 0.9, 0.68])1641 # fig.add_axes([0.05, 0.1, 0.92, 0.68])1642 plt.rcParams.update({'font.size': 13})1643 # plt.title("Node Metadata Size")1644 # n_lines, _ = dotted1['hh'].shape1645 # basic_total = np.empty(n_lines)1646 # basic_total.fill(basic_size['hhh'])1647 # print str(basic_size['hhh']/1024.0) + " KB\n"1648 # plt.plot(dotted1['hh'][DS['hh']:DE['hh'],0]-DS['hh']*5, basic_total[DS['hh']:DE['hh']]/1024.0, linewidth=2, label='MT Theoretical Size', c='r', linestyle='--')1649 ms = ['s','o','<','>','^','v','*','D',r'$\lambda$',r'$\clubsuit$',r'$\circlearrowleft$',r'$\bowtie$',r'$\checkmark$']1650 i = 01651 lw = 11652 msz = 121653 me = 0.11654 # for t in types:1655 # # plt.plot(basic[t][BS[t]:BE[t],0]-BS[t]*5, basic_size[t][BS[t]:BE[t]]/1024.0, linewidth=lw, label='MerkleDB, '+t.upper(), c='r', marker=ms[i+1],markersize=msz+4, markevery=3)1656 for t in ['hhh','hhl','hlh','hll']:1657 plt.plot(basic[t][BS[t]:BE[t],0]-BS[t]*5, basic[t][BS[t]:BE[t],7]/1024.0, linewidth=lw, label='MerkleDB, '+t.upper(), color='r', marker=ms[i],markersize=msz, markevery=me+0.02*i, alpha=0.6)1658 i = i + 11659 for t in ['lhh','lhl','llh','lll']:1660 plt.plot(basic[t][BS[t]:BE[t],0]-BS[t]*5, basic[t][BS[t]:BE[t],7]/1024.0, linewidth=lw, label='MerkleDB, '+t.upper(), color='b', marker=ms[i],markersize=msz, markevery=me+0.02*(i-4), alpha=0.6)1661 i = i + 11662 # i = 01663 for t in types2:1664 dotted_total = (dotted1[t][:,4] + dotted2[t][:,4] + dotted3[t][:,4])1665 plt.plot(dotted1[t][DS[t]:DE[t],0]-DS[t]*5, dotted_total[DS[t]:DE[t]]/1024.0, linewidth=lw, label='DottedDB, '+t[:2].upper(), color='g', marker=ms[i], markersize=msz, markevery=me+0.02*(i-8), alpha=0.6)1666 i = i + 11667 plt.xlabel('Time (s)')1668 plt.ylabel('Size (KB)')1669 # plt.legend(loc='upper right')1670 plt.legend(bbox_to_anchor=(0., 1.02, 1., .30), loc='upper center', ncol=3, mode="expand", borderaxespad=0.)1671 # plt.ylim((-1,62))1672 plt.xlim(xmin=-5)1673 plt.xlim(xmax=(BE['hhh']-BS['hhh'])*5)1674 plt.ylim(ymin=-5)1675 pp = PdfPages(test_path + 'sync_metadata_paper' + filename_ext + '.pdf')1676 pp.savefig()1677 pp.close()1678def node_metadate_plot(type, DS, DE, BS, BE, bench):1679 if type == 'cluster':1680 dotted_bvv = load_cluster_dotted_csv('bvv-size_hist.csv', True)1681 dotted_kl = load_cluster_dotted_csv('kl-size_hist.csv', True)1682 dotted_nsk = load_cluster_dotted_csv('nsk-size_hist.csv', True)1683 basic_mt = load_cluster_basic_csv('mt-size_hist.csv', True)1684 elif type == 'local':1685 dotted_bvv = load_local_dotted_csv('bvv-size_hist.csv', True)1686 dotted_kl = load_local_dotted_csv('kl-size_hist.csv', True)1687 dotted_nsk = load_local_dotted_csv('nsk-size_hist.csv', True)1688 basic_mt = load_local_basic_csv('mt-size_hist.csv', True)1689 dotted1 = mean_matrix(dotted_bvv)1690 dotted2 = mean_matrix(dotted_kl)1691 dotted3 = mean_matrix(dotted_nsk)1692 basic = mean_matrix(basic_mt)1693 plt.style.use('fivethirtyeight')1694 fig = plt.figure()1695 fig.add_axes([0.15, 0.10, 0.8, 0.8])1696 plt.title("Node Metadata Size")1697 print "tamanhos " + str(DS) + " "+ str(DE) + " "+ str(BS) + " "+ str(BE) + " "1698 num_keys = bench[2]1699 vnodes = bench[6]1700 RF = bench[7]1701 hash_size = 
bench[8]1702 key_size = bench[9]1703 mt = bench[10]1704 mt_metadata = 111705 block_size = mt_metadata + hash_size + key_size1706 basic_size = (block_size + mt*block_size + (mt**2)*block_size + (RF*num_keys/(vnodes*1.0))*block_size) * RF1707 n_lines, _ = dotted1.shape1708 basic_total = np.empty(n_lines)1709 basic_total.fill(basic_size)1710 print str(basic_size/1024.0) + " KB\n"1711 dotted_total = (dotted1[:,4] + dotted2[:,4] + dotted3[:,4])1712 plt.plot(dotted1[DS:DE,0]-DS*5, basic_total[DS:DE]/1024.0, linewidth=2, label='MT Theoretical Size', c='r', linestyle='--')1713 # plt.plot(basic[BS:BE,0]-BS*5, basic_total[DS:DE]/1024.0, linewidth=2, label='MT Theoretical Size', c='r', linestyle='--')1714 plt.plot(basic[BS:BE,0]-BS*5, basic[BS:BE,4]/1024.0, linewidth=3, label='MerkleDB', c='r', marker='s', markevery=5)1715 plt.plot(dotted1[DS:DE,0]-DS*5, dotted_total[DS:DE]/1024.0, linewidth=3, label='DottedDB', c='g', marker='o', markevery=5)1716 plt.xlabel('Time')1717 plt.ylabel('Size (KB)')1718 plt.legend(loc='center right')1719 ####### make the y axis logarithmic1720 # plt.yscale('log')1721 # ax = plt.gca()1722 # ax.yaxis.set_major_formatter(ticker.FuncFormatter(lambda y,pos: ('{{:.{:1d}f}}'.format(int(np.maximum(-np.log10(y),0)))).format(y)))1723 # for axis in [ax.xaxis, ax.yaxis]:1724 # axis.set_major_formatter(ScalarFormatter())1725 # plt.ylim(ymin=-150.0)1726 # plt.ylim((-1,101))1727 plt.xlim(xmin=-5.0)1728 plt.xlim(xmax=(DE-DS)*5)1729 plt.ylim(ymin=-5.0)1730 # plt.xlim(xmax=400)1731 # plt.xlim((0,700))1732 # save in PDF1733 pp = PdfPages(current_dotted_dir + '/node_metadata.pdf')1734 pp.savefig()1735 pp.close()1736def ops_plot(type):1737 basic = np.loadtxt((current_basic_dir +'/basho_bench/summary.csv'), delimiter=',', skiprows=1)1738 dotted = np.loadtxt((current_dotted_dir +'/basho_bench/summary.csv'), delimiter=',', skiprows=1)1739 plt.style.use('fivethirtyeight')1740 fig = plt.figure()1741 fig.add_axes([0.15, 0.10, 0.8, 0.8])1742 plt.title("Throughput")1743 il = -1 #ignore last n lines1744 plt.plot(basic[:il,0], basic[:il,3]/basic[:il,1], linewidth=3, label='Basic', c='r', marker='^')1745 plt.plot(dotted[:il,0], dotted[:il,3]/dotted[:il,1], linewidth=3, label='Dotted', c='g', marker='o')1746 plt.xlabel('Time (s)')1747 plt.ylabel('Ops/Sec')1748 plt.legend(loc='center right')1749 plt.xlim(xmin=-2.0)1750 # plt.xlim(xmax=(DE-DS)*5)1751 plt.ylim(ymin=-10)1752 # save in PDF1753 pp = PdfPages(current_dotted_dir + '/ops_sec.pdf')1754 pp.savefig()1755 pp.close()1756def latencies_plot(name):1757 bname = current_basic_dir +'/basho_bench/'+name+'_latencies.csv'1758 if os.path.isfile(bname):1759 basic = np.loadtxt(bname, delimiter=',', skiprows=1)1760 else:1761 return -11762 dname = current_dotted_dir +'/basho_bench/'+name+'_latencies.csv'1763 if os.path.isfile(dname):1764 dotted = np.loadtxt(dname, delimiter=',', skiprows=1)1765 else:1766 return -11767 plt.style.use('fivethirtyeight')1768 fig = plt.figure()1769 fig.add_axes([0.15, 0.10, 0.8, 0.8])1770 plt.title(name+" Latencies")1771 ill = -1 #ignore last n lines1772 ifl = 1 #ignore first n lines1773 lw = 11774 ## elapsed, window, n, min, mean, median, 95th, 99th, 99_9th, max, errors1775 plt.plot(basic[ifl:ill,0], basic[ifl:ill,4]/1000.0, linewidth=lw, label='Basic Mean', color='r', marker='o',linestyle='-')1776 plt.plot(basic[ifl:ill,0], basic[ifl:ill,6]/1000.0, linewidth=lw, label='Basic 95p', color='r', marker='s',linestyle='-')1777 # plt.plot(basic[ifl:ill,0], basic[ifl:ill,9], linewidth=lw, label='Basic Max', color='b', 
marker='.',linestyle='-')1778 plt.plot(dotted[ifl:ill,0], dotted[ifl:ill,4]/1000.0, linewidth=lw, label='Dotted Mean', color='g', marker='o',linestyle=':')1779 plt.plot(dotted[ifl:ill,0], dotted[ifl:ill,6]/1000.0, linewidth=lw, label='Dotted 95p', color='g', marker='s',linestyle=':')1780 # plt.plot(dotted[ifl:ill,0], dotted[ifl:ill,9], linewidth=lw, label='Dotted Max', color='b', marker='.',linestyle=':')1781 plt.xlabel('Time (s)')1782 plt.ylabel('Latency (ms)')1783 plt.legend(loc='upper left')1784 plt.xlim(xmin=-2.0)1785 # plt.xlim(xmax=(DE-DS)*5)1786 # plt.ylim(ymin=-10)1787 # save in PDF1788 pp = PdfPages(current_dotted_dir + '/'+name+'_latencies.pdf')1789 pp.savefig()1790 pp.close()1791columns_names = [1792 (0,'elapsed'),1793 (1,'window'),1794 (2,'n'),1795 (3,'min'),1796 (4,'mean'),1797 (5,'median'),1798 (6,'95p'),1799 (7,'99p'),1800 (8,'99.9p'),1801 (9,'max'),1802 (10,'total'),1803 (11,'std_dev')]1804def mean_matrix(d1):1805 d = normalize_time_to_multiple_of_5(d1)1806 return np.array([ (([xVal]+(np.mean([row[1:] for row in d if xVal==row[0]],axis=0)).tolist() )) for xVal in np.unique(d[:,0])])1807def join_matrix(a,b):1808 d = np.concatenate([a,b],axis=0)1809 return np.array([ (([xVal]+(np.concatenate([row[1:] for row in d if xVal==row[0]],axis=1)).tolist() )) for xVal in np.unique(d[:,0])])1810def filter_zero_n(m):1811 return np.array(filter(lambda x:x[2] != 0, m))1812def normalize_time_to_multiple_of_5(m):1813 return np.array(map(aux_map_5, m))1814def aux_map_5(x):1815 d = x[0] % 51816 if d == 0:1817 return x1818 elif d >= 3:1819 x[0] = x[0] + (5-d)1820 return x1821 elif d < 3:1822 x[0] = x[0] - d1823 return x1824def read_csv(name, do_filter, skip=4):1825 if do_filter == True:1826 return filter_zero_n( np.loadtxt( name, delimiter=',', skiprows=skip))1827 else:1828 return np.loadtxt( name, delimiter=',', skiprows=skip)1829def load_local_basic_csv(name, do_filter=True):1830 csv1 = read_csv((current_basic_dir +'/dev1/'+ name), do_filter)1831 csv2 = read_csv((current_basic_dir +'/dev2/'+ name), do_filter)1832 csv3 = read_csv((current_basic_dir +'/dev3/'+ name), do_filter)1833 csv4 = read_csv((current_basic_dir +'/dev4/'+ name), do_filter)1834 return np.concatenate([csv1,csv2,csv3,csv4], axis=0)1835def load_local_dotted_csv(name, do_filter=True):1836 csv1 = read_csv((current_dotted_dir +'/dev1/'+ name), do_filter)1837 csv2 = read_csv((current_dotted_dir +'/dev2/'+ name), do_filter)1838 csv3 = read_csv((current_dotted_dir +'/dev3/'+ name), do_filter)1839 csv4 = read_csv((current_dotted_dir +'/dev4/'+ name), do_filter)1840 return np.concatenate([csv1,csv2,csv3,csv4], axis=0)1841def load_cluster_basic_csv(name, do_filter=True, skip=4):1842 csv1 = read_csv((current_basic_dir +'/node1/'+ name), do_filter, skip)1843 csv2 = read_csv((current_basic_dir +'/node2/'+ name), do_filter, skip)1844 csv3 = read_csv((current_basic_dir +'/node3/'+ name), do_filter, skip)1845 csv4 = read_csv((current_basic_dir +'/node4/'+ name), do_filter, skip)1846 csv5 = read_csv((current_basic_dir +'/node5/'+ name), do_filter, skip)1847 return np.concatenate([csv1,csv2,csv3,csv4,csv5], axis=0)1848def load_cluster_dotted_csv(name, do_filter=True, skip=4):1849 csv1 = read_csv((current_dotted_dir +'/node1/'+ name), do_filter, skip)1850 csv2 = read_csv((current_dotted_dir +'/node2/'+ name), do_filter, skip)1851 csv3 = read_csv((current_dotted_dir +'/node3/'+ name), do_filter, skip)1852 csv4 = read_csv((current_dotted_dir +'/node4/'+ name), do_filter, skip)1853 csv5 = read_csv((current_dotted_dir +'/node5/'+ name), do_filter, 
skip)1854 return np.concatenate([csv1,csv2,csv3,csv4,csv5], axis=0)1855def load_dstat_csv(path):1856 # csv1 = pd.read_csv((path +'/node1/dstat.csv'), parse_dates=[0], header=7)1857 csv1 = (pd.read_csv((path +'/node1/dstat.csv'), parse_dates=[0], header=7)).as_matrix()1858 csv2 = (pd.read_csv((path +'/node2/dstat.csv'), parse_dates=[0], header=7)).as_matrix()1859 csv3 = (pd.read_csv((path +'/node3/dstat.csv'), parse_dates=[0], header=7)).as_matrix()1860 csv4 = (pd.read_csv((path +'/node4/dstat.csv'), parse_dates=[0], header=7)).as_matrix()1861 csv5 = (pd.read_csv((path +'/node5/dstat.csv'), parse_dates=[0], header=7)).as_matrix()1862 return np.concatenate([csv1,csv2,csv3,csv4,csv5], axis=0)1863################################1864## MAIN1865################################1866labels = {}1867labels['hhh'] = r'MerkleDB, $\Phi$=1000 $\Delta$=10\%, $\Omega$=100\%'1868labels['hhl'] = r'MerkleDB, $\Phi$=1000 $\Delta$=10\%, $\Omega$=10\%'1869labels['hlh'] = r'MerkleDB, $\Phi$=1000 $\Delta$=1\%, $\Omega$=100\%'1870labels['hll'] = r'MerkleDB, $\Phi$=1000 $\Delta$=1\%, $\Omega$=10\%'1871labels['lhh'] = r'MerkleDB, $\Phi$=1 $\Delta$=10\%, $\Omega$=100\%'1872labels['lhl'] = r'MerkleDB, $\Phi$=1 $\Delta$=10\%, $\Omega$=10\%'1873labels['llh'] = r'MerkleDB, $\Phi$=1 $\Delta$=1\%, $\Omega$=100\%'1874labels['lll'] = r'MerkleDB, $\Phi$=1 $\Delta$=1\%, $\Omega$=10\%'1875labels['hh'] = r'DottedDB, $\Delta$=10\%, $\Omega$=100\%'1876labels['hl'] = r'DottedDB, $\Delta$=10\%, $\Omega$=10\%'1877labels['lh'] = r'DottedDB, $\Delta$=1\%, $\Omega$=100\%'1878labels['ll'] = r'DottedDB, $\Delta$=1\%, $\Omega$=10\%'1879labels['hh_mat'] = r'DottedDB, $\Delta$=10\%, $\Omega$=100\%'1880labels['hl_mat'] = r'DottedDB, $\Delta$=10\%, $\Omega$=10\%'1881labels['lh_mat'] = r'DottedDB, $\Delta$=1\%, $\Omega$=100\%'1882labels['ll_mat'] = r'DottedDB, $\Delta$=1\%, $\Omega$=10\%'1883def main(argv):1884 #print 'Number of arguments:', len(sys.argv), 'arguments.'1885 #print 'Argument List:', str(sys.argv)1886 arg1 = ""1887 if len(sys.argv)>1:1888 arg1 = sys.argv[1]1889 print "EXECUTE " + arg11890 if arg1 == 'cluster_dotted' or arg1 == 'cluster_basic':1891 create_folder(arg1)1892 get_cluster_files(arg1)1893 # get_cluster_bb()1894 # do_bashobench()1895 get_ycsb(arg1)1896 elif arg1 == 'local_dotted' or arg1 == 'local_basic':1897 create_folder(arg1)1898 get_local_files(arg1)1899 get_local_bb()1900 do_bashobench()1901 elif arg1 == 'cluster_plot':1902 do_plot('cluster')1903 elif arg1 == 'local_plot':1904 do_plot('local')1905 elif arg1 == 'bb':1906 plot_bb()1907 elif arg1 == 'ycsb':1908 plot_ycsb()1909 elif arg1 == 'current':1910 if len(sys.argv) == 3:1911 fol = sys.argv[2]1912 print "Changing \'current\' to " + fol1913 change_current(fol)1914 else:1915 print "Missing name of the new \'current\' folder."1916 elif arg1 == 'entries':1917 clock_entries_paper()1918 elif arg1 == 'deletes':1919 deletes_paper()1920 elif arg1 == 'strip':1921 strip_paper()1922 elif arg1 == 'perf':1923 perf_paper()1924 elif arg1 == 'perfm':1925 perf_paper(1926 ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1927 ['hh_mat','hl_mat','lh_mat','ll_mat'],1928 '_matrix')1929 # for normal KL and VV (1st version of SWC)1930 elif arg1 == 'sync1':1931 sync_paper1() # hit ratio1932 elif arg1 == 'sync2':1933 sync_paper2() # metadata size1934 elif arg1 == 'sync3':1935 sync_paper3() # repair latency1936 elif arg1 == 'sync4':1937 sync_paper4() # sync size1938 # for matrix watermark (2nd version of SWC)1939 elif arg1 == 'sync1m':1940 sync_paper1( # hit ratio1941 
['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1942 ['hh_mat','hl_mat','lh_mat','ll_mat'],1943 '_matrix')1944 elif arg1 == 'sync2m':1945 sync_paper2( # metadata size1946 ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1947 ['hh_mat','hl_mat','lh_mat','ll_mat'],1948 '_matrix')1949 elif arg1 == 'sync3m':1950 sync_paper3( # repair latency1951 ['hhh','hhl','hlh','hll','lhh','lhl','llh','lll'],1952 ['hh_mat','hl_mat','lh_mat','ll_mat'],1953 '_matrix')1954 elif arg1 == 'sync4m':1955 sync_paper4( # sync size1956 ['hhh','lhh','hlh','llh','hhl','lhl','hll','lll'],1957 # ['hhh','hlh','lhh','llh','hhl','hll','lhl','lll'],1958 ['hh_mat','lh_mat','hl_mat','ll_mat'],1959 '_matrix')1960 else:1961 print "No args :("1962if __name__ == "__main__":...
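The plotting script above leans on one pattern over and over: build an empirical CDF of a latency column with sm.distributions.ECDF, evaluate it on a linspace, draw it with plt.step, and write the figure out through PdfPages. Below is a minimal, self-contained sketch of that pattern; the latency samples are synthetic and the output file name is only illustrative.

import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
from matplotlib.backends.backend_pdf import PdfPages

# Synthetic latency samples (stand-in for one column of a *_gauge.csv file).
latencies = np.random.lognormal(mean=7.0, sigma=0.8, size=1000)

ecdf = sm.distributions.ECDF(latencies)              # empirical CDF of the samples
xs = np.linspace(latencies.min(), latencies.max())   # 50 evaluation points by default
ys = ecdf(xs)

plt.figure()
plt.step(xs / 1000.0, ys, label="Writes")            # same /1000 rescaling the script applies
plt.xlabel('Time (Seconds)')
plt.legend(loc='lower right')

pp = PdfPages('ecdf_sketch.pdf')                     # illustrative output name
pp.savefig()
pp.close()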

test_oneD.py

Source:test_oneD.py Github

...
        # q[:] = 1.0
        # q[half] = .5
        # V[0][quarter:half] = 1.0 * units.m / units.s
        return V, q
    def basic_run(self, func):
        V, q = self.get_initial_conditions()
        plt.ion()
        plt.figure()
        plt.plot(q)
        plt.title('Initial state')
        plt.show()
        q_prev = q
        initial_variation = get_total_variation(q)
        logger.info("Initial Variation: %s" % (initial_variation,))
        for i in range(num_steps):
            print("iteration %s" % i)
            plt.clf()
            plt.plot(q_prev)
            plt.title('current_state...')
            plt.show()
            plt.pause(0.001)  # pause a bit so that plots are updated
            q_next = func(dt, spatial_change, V, q_prev)
            q_prev = q_next
        else:  # for-else clause: runs once the loop finishes without a break
            q_next = q
        final_variation = get_total_variation(q_next)
        logger.info("Initial Variation: %s Final Variation: %s" % (initial_variation, final_variation))
        plt.ioff()
        plt.show()
    def world_run(self, func, world):
        plt.ion()
        plt.figure()
        plt.plot(world.q)
        plt.title('Initial state')
        plt.show()
        initial_variation = get_total_variation(world.q)
        logger.info("Initial Variation: %s" % (initial_variation,))
        for i in range(num_steps):
            print("iteration %s" % i)
            plt.clf()
            plt.plot(world.q)
            plt.title('current_state...')
            plt.show()
            plt.pause(0.001)  # pause a bit so that plots are updated
            world = func(dt, world)
        final_variation = get_total_variation(world.q)
        logger.info("Initial Variation: %s Final Variation: %s" % (initial_variation, final_variation))
        plt.ioff()
        plt.show()
    def test_forward(self):
        self.world_run(advect_1d_fd, get_initial_world(world_shape, spatial_change))
    def test_upwind(self):
        self.basic_run(advect_1d_upwind)
    def test_upwind_second(self):
        self.basic_run(advect_1d_upwind_second)
    def test_upwind_third(self):
        self.basic_run(advect_1d_upwind_third)
    def test_upwind_spatial(self):
        self.basic_run(upwind_with_spatial)
    def test_ftcs_central(self):
        self.basic_run(ftcs_with_central)
    def test_ftcs_upwind(self):
        self.basic_run(ft_with_upwind)
    def test_lax_friedrichs(self):
        self.basic_run(lax_friedrichs)
    def test_upwind_axis(self):
        self.basic_run(upwind_axis)
    def test_fv_upwind(self):
        self.basic_run(fv_advect_axis_upwind)
    def test_fv_plain(self):
        self.basic_run(fv_advect_axis_plain)
    def test_leapfrog(self):
        V, q = self.get_initial_conditions()
        plt.ion()
        plt.figure()
        q_prev = q
        q_next = advect_1d_fd(dt, spatial_change, V, q_prev)
        q_current = q_next
        for i in range(num_steps):
            print("iteration %s" % i)
            plt.clf()
            plt.plot(q_prev)
            plt.title('current_state...')
            plt.show()
            plt.pause(0.001)  # pause a bit so that plots are updated
...
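Each test_* method above hands a different advection scheme to basic_run, which only assumes the callable takes (dt, spatial_change, V, q_prev) and returns the next state array. As a rough illustration of that contract (this is not the repository's own advect_1d_upwind; it assumes a scalar, positive velocity and periodic boundaries), a compatible step function could look like this:

import numpy as np

def my_upwind_step(dt, dx, V, q):
    # Hypothetical stand-in with the signature basic_run expects:
    # func(dt, spatial_change, V, q_prev) -> q_next.
    # Assumes a scalar, positive velocity V and periodic boundaries.
    c = V * dt / dx                       # Courant number
    return q - c * (q - np.roll(q, 1))    # first-order upwind difference for V > 0

# Used exactly like the schemes above:
#     def test_my_upwind(self):
#         self.basic_run(my_upwind_step)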

test_module.py

Source:test_module.py Github

...
from pypescript.utils import setup_logging, MemoryMonitor
from pypescript.libutils import generate_rst_doc_table
module_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def test_py(name='test'):
    def basic_run(name, options):
        module = BaseModule.from_filename(name=name, options=options)
        module.setup()
        module.execute()
        module.cleanup()
    options = {}
    options[syntax.module_file] = os.path.join(module_dir, 'template_lib', 'module_py', 'module.py')
    options[syntax.module_class] = 'PyModule'
    basic_run(name, options)
    options = {}
    options[syntax.module_file] = os.path.join(module_dir, 'template_lib', 'module_py', 'module.py')
    basic_run(name, options)
    sys.path.insert(0, os.path.dirname(module_dir))
    options = {}
    options[syntax.module_name] = 'template_lib.module_py.module'
    basic_run(name, options)
    options = {}
    options[syntax.module_name] = 'template_lib.module_py.module'
    options[syntax.module_class] = 'PyModule'
    basic_run(name, options)
    options = {}
    options[syntax.module_name] = 'template_lib.module_py.other_module'
    basic_run(name, options)
def test_extensions(name='test'):
    def basic_run(module):
        module.setup()
        for name in ['int', 'long', 'float', 'double']:
            assert (module.data_block['parameters', name] == 42)
            assert np.all(module.data_block['parameters', '{}_array'.format(name)] == 42)
        assert (module.data_block['parameters', 'string'] == 'string')
        module.data_block['external', 'int_array'] = np.ones(200, dtype='i4')[:36]
        module.data_block['external', 'float_array'] = np.ones(200, dtype='f4')[:36]
        # module.data_block['internal', 'long_array'] = np.ones(200, dtype='i4')[:36]
        module.execute()
        for name in ['int', 'long', 'float', 'double']:
            assert np.all(module.data_block['parameters', '{}_array'.format(name)] == 44)
        for name in ['int', 'float']:
            assert np.all(module.data_block['external', '{}_array'.format(name)] == 2)
        module.cleanup()
    for lang in ['c', 'cpp', 'f90']:
        options = {}
        options[syntax.module_name] = 'template_lib.module_{}.module'.format(lang)
        module = BaseModule.from_filename(name=name, options=options)
        # basic_run_dynamic(library)
        with MemoryMonitor() as mem:
            for i in range(100):
                basic_run(module)
def test_doc():
    with open(os.path.join(module_dir, 'template_lib', 'module_f90', 'module.yaml'), 'r') as file:
        description = yaml.load(file, Loader=yaml.SafeLoader)
    doc = generate_rst_doc_table(description)
    print(doc)
if __name__ == '__main__':
    setup_logging()
    test_py()
    test_extensions()
...
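In this test module, basic_run simply wraps the module lifecycle: build a BaseModule from an options dictionary, then call setup(), execute(), and cleanup(). The options can point at a module either by file path (syntax.module_file) or by dotted name (syntax.module_name), optionally pinning the class with syntax.module_class. The sketch below repeats that lifecycle on its own; note that the import line is an assumption, since the excerpt above is truncated before the file's imports.

# Assumed import path -- the excerpt above does not show where
# BaseModule and syntax actually come from.
from pypescript import BaseModule, syntax

def run_once(name, options):
    # Same lifecycle that basic_run wraps in test_py above.
    module = BaseModule.from_filename(name=name, options=options)
    module.setup()
    module.execute()
    module.cleanup()

options = {}
options[syntax.module_name] = 'template_lib.module_py.module'  # module referenced in the test above
options[syntax.module_class] = 'PyModule'
run_once('demo', options)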

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run SeleniumBase automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
