How to use on_aggregated_data method in yandex-tank

Best Python code snippet using yandex-tank
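In yandex-tank, on_aggregated_data(data, stats) is the callback the aggregator invokes on every registered AggregateResultListener once it has paired one second of aggregated response data with the matching stats item. To hook into it, subclass AggregateResultListener, implement on_aggregated_data, and register the instance through the aggregator's add_result_listener, exactly as the snippets below do with LoggingListener. A minimal sketch follows; the yandextank.common.interfaces import path is an assumption about the installed version, so adjust it to yours.

    import logging

    from yandextank.common.interfaces import AggregateResultListener

    logger = logging.getLogger(__name__)


    class SecondLoggerListener(AggregateResultListener):
        """Hypothetical listener: logs every aggregated second it receives."""

        def on_aggregated_data(self, data, stats):
            # data is one aggregated sample, stats the matching stats item;
            # both arrive together once per paired timestamp.
            logger.info("aggregated second %s, stats %s", data.get('ts'), stats)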

tank_aggregator.py

Source: tank_aggregator.py (GitHub)


...
from netort.data_processing import Drain, Chopper, get_nowait_from_queue

logger = logging.getLogger(__name__)


class LoggingListener(AggregateResultListener):
    """ Log aggregated results """

    def on_aggregated_data(self, data, stats):
        logger.info("Got aggregated sample:\n%s", json.dumps(data, indent=2))
        logger.info("Stats:\n%s", json.dumps(stats, indent=2))


class TankAggregator(object):
    """
    Plugin that manages aggregation and stats collection
    """
    SECTION = 'aggregator'

    @staticmethod
    def get_key():
        return __file__

    def __init__(self, generator):
        # AbstractPlugin.__init__(self, core, cfg)
        """
        :type generator: GeneratorPlugin
        """
        self.generator = generator
        self.listeners = []  # [LoggingListener()]
        self.results = q.Queue()
        self.stats_results = q.Queue()
        self.data_cache = {}
        self.stat_cache = {}
        self.reader = None
        self.stats_reader = None
        self.drain = None
        self.stats_drain = None

    @staticmethod
    def load_config():
        return json.loads(resource_string(__name__, 'config/phout.json').decode('utf8'))

    def start_test(self, poll_period=1):
        self.reader = self.generator.get_reader()
        self.stats_reader = self.generator.get_stats_reader()
        aggregator_config = self.load_config()
        verbose_histogram = True
        if verbose_histogram:
            logger.info("using verbose histogram")
        if self.reader and self.stats_reader:
            pipeline = Aggregator(
                TimeChopper(
                    DataPoller(source=self.reader, poll_period=poll_period),
                    cache_size=3),
                aggregator_config,
                verbose_histogram)
            self.drain = Drain(pipeline, self.results)
            self.drain.start()
            self.stats_drain = Drain(
                Chopper(DataPoller(
                    source=self.stats_reader, poll_period=poll_period)),
                self.stats_results)
            self.stats_drain.start()
        else:
            logger.warning("Generator not found. Generator must provide a reader and a stats_reader interface")

    def _collect_data(self, end=False):
        """
        Collect data, cache it and send to listeners
        """
        data = get_nowait_from_queue(self.results)
        stats = get_nowait_from_queue(self.stats_results)
        logger.debug("Data timestamps: %s" % [d.get('ts') for d in data])
        logger.debug("Stats timestamps: %s" % [d.get('ts') for d in stats])
        for item in data:
            ts = item['ts']
            if ts in self.stat_cache:
                # send items
                data_item = item
                stat_item = self.stat_cache.pop(ts)
                self.__notify_listeners(data_item, stat_item)
            else:
                self.data_cache[ts] = item
        for item in stats:
            ts = item['ts']
            if ts in self.data_cache:
                # send items
                data_item = self.data_cache.pop(ts)
                stat_item = item
                self.__notify_listeners(data_item, stat_item)
            else:
                self.stat_cache[ts] = item
        if end and len(self.data_cache) > 0:
            logger.info('Timestamps without stats:')
            for ts, data_item in sorted(self.data_cache.items(), key=lambda i: i[0]):
                logger.info(ts)
                self.__notify_listeners(data_item, StatsReader.stats_item(ts, 0, 0))

    def is_aggr_finished(self):
        return self.drain._finished.is_set() and self.stats_drain._finished.is_set()

    def is_test_finished(self):
        self._collect_data()
        return -1

    def end_test(self, retcode):
        retcode = self.generator.end_test(retcode)
        if self.stats_reader:
            logger.info('Closing stats reader')
            self.stats_reader.close()
        if self.drain:
            logger.info('Waiting for gun drain to finish')
            self.drain.join()
            logger.info('Waiting for stats drain to finish')
            self.stats_drain.join()
        logger.info('Collecting remaining data')
        self._collect_data(end=True)
        return retcode

    def add_result_listener(self, listener):
        self.listeners.append(listener)

    def __notify_listeners(self, data, stats):
        """ notify all listeners about aggregate data and stats """
        for listener in self.listeners:
...
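The snippet above is the listener's call site: TankAggregator runs two Drain pipelines (response data and stats), _collect_data pairs items that share the same ts, and __notify_listeners hands each pair to every listener registered through add_result_listener. A hedged usage sketch, where my_generator is a hypothetical GeneratorPlugin that provides get_reader() and get_stats_reader():

    aggregator = TankAggregator(my_generator)          # my_generator is an assumption
    aggregator.add_result_listener(LoggingListener())
    aggregator.start_test(poll_period=1)               # starts the data and stats drains
    aggregator.is_test_finished()                      # drains queues, pairs by ts, fires on_aggregated_data
    retcode = aggregator.end_test(0)                   # joins the drains and flushes cached samples

In the real tool, yandex-tank's core drives the is_test_finished() polling loop; the sketch only shows which calls trigger the callback.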


plugin.py

Source: plugin.py (GitHub)


...
from ...common.util import Drain, Chopper

logger = logging.getLogger(__name__)


class LoggingListener(AggregateResultListener):
    """ Log aggregated results """

    def on_aggregated_data(self, data, stats):
        logger.info("Got aggregated sample:\n%s", json.dumps(data, indent=2))
        logger.info("Stats:\n%s", json.dumps(stats, indent=2))


def get_from_queue(queue):
    data = []
    for _ in range(queue.qsize()):
        try:
            data.append(queue.get_nowait())
        except q.Empty:
            break
    return data


class Plugin(AbstractPlugin):
    """
    Plugin that manages aggregation and stats collection
    """
    SECTION = 'aggregator'

    @staticmethod
    def get_key():
        return __file__

    def __init__(self, core, config_section):
        AbstractPlugin.__init__(self, core, config_section)
        self.listeners = []  # [LoggingListener()]
        self.reader = None
        self.stats_reader = None
        self.results = q.Queue()
        self.stats = q.Queue()
        self.verbose_histogram = False
        self.data_cache = {}
        self.stat_cache = {}

    def get_available_options(self):
        return ["verbose_histogram"]

    def configure(self):
        self.aggregator_config = json.loads(
            resource_string(__name__, 'config/phout.json').decode('utf8'))
        verbose_histogram_option = self.get_option("verbose_histogram", "0")
        self.verbose_histogram = (
            verbose_histogram_option.lower() == "true") or (
            verbose_histogram_option.lower() == "1")
        if self.verbose_histogram:
            logger.info("using verbose histogram")

    def start_test(self):
        if self.reader and self.stats_reader:
            pipeline = Aggregator(
                TimeChopper(
                    DataPoller(
                        source=self.reader, poll_period=1), cache_size=3),
                self.aggregator_config,
                self.verbose_histogram)
            self.drain = Drain(pipeline, self.results)
            self.drain.start()
            self.stats_drain = Drain(
                Chopper(DataPoller(
                    source=self.stats_reader, poll_period=1)),
                self.stats)
            self.stats_drain.start()
        else:
            raise PluginImplementationError(
                "Generator must pass a Reader and a StatsReader"
                " to Aggregator before starting test")

    def _collect_data(self):
        """
        Collect data, cache it and send to listeners
        """
        data = get_from_queue(self.results)
        stats = get_from_queue(self.stats)
        logger.debug("Data timestamps:\n%s" % [d.get('ts') for d in data])
        logger.debug("Stats timestamps:\n%s" % [d.get('ts') for d in stats])
        for item in data:
            ts = item['ts']
            if ts in self.stat_cache:
                # send items
                data_item = item
                stat_item = self.stat_cache.pop(ts)
                self.__notify_listeners(data_item, stat_item)
            else:
                self.data_cache[ts] = item
        for item in stats:
            ts = item['ts']
            if ts in self.data_cache:
                # send items
                data_item = self.data_cache.pop(ts)
                stat_item = item
                self.__notify_listeners(data_item, stat_item)
            else:
                self.stat_cache[ts] = item

    def is_test_finished(self):
        self._collect_data()
        return -1

    def end_test(self, retcode):
        if self.reader:
            self.reader.close()
        if self.drain:
            self.drain.join()
        if self.stats_reader:
            self.stats_reader.close()
        if self.stats_drain:
            self.stats_drain.join()
        self._collect_data()
        return retcode

    def add_result_listener(self, listener):
        self.listeners.append(listener)

    def __notify_listeners(self, data, stats):
        """ notify all listeners about aggregate data and stats """
        for listener in self.listeners:
...
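This plugin variant does the same timestamp pairing, but reads the queues with get_from_queue and raises PluginImplementationError if no reader was attached before start_test. The pairing rule itself is easy to illustrate standalone; the sketch below is not tank code, just the caching behaviour _collect_data applies before on_aggregated_data fires.

    data_cache, stat_cache = {}, {}

    def feed(data_items, stat_items, notify):
        # A listener fires only when both halves of a timestamp are present.
        for item in data_items:
            ts = item['ts']
            if ts in stat_cache:
                notify(item, stat_cache.pop(ts))
            else:
                data_cache[ts] = item
        for item in stat_items:
            ts = item['ts']
            if ts in data_cache:
                notify(data_cache.pop(ts), item)
            else:
                stat_cache[ts] = item

    feed([{'ts': 1, 'rps': 100}], [], print)      # only data arrived: cached, nothing fires
    feed([], [{'ts': 1, 'instances': 5}], print)  # stats arrive: the pair fires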


scanner.py

Source: scanner.py (GitHub)


...
        if self._inflight_data and self._inflight_data[-1].index[0] != data.index[0]:
            # a new row is for a new time period
            aggregated_data = pd.concat(self._inflight_data, axis=1)
            self._inflight_data = []
            self.events.on_aggregated_data(aggregated_data)
        self._inflight_data.append(data)

    def _analyze_data(self, data):
        """
        Compute rolling statistics and select frequencies with the strongest signal.
        """
        def merge(mask, row):
            to_merge = []
            idxs = []
            for i, x in enumerate(mask.iloc[1:], 1):
                if mask.iloc[i-1] and mask.iloc[i]:
                    if idxs:
                        idxs.append(row.index[i])
                    else:
                        idxs.extend([row.index[i-1], row.index[i]])
...
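This scanner.py is unrelated to the tank listeners: here on_aggregated_data is an event callback on the scanner's events object, invoked with a single pandas object built by concatenating the samples buffered in _inflight_data once a new time period begins. A minimal sketch of an events sink that would satisfy this call, assuming only what the snippet shows (a one-argument on_aggregated_data receiving the pd.concat result):

    import pandas as pd

    class ScannerEvents:
        def on_aggregated_data(self, aggregated_data):
            # one column per buffered sample for the completed period (assumption)
            print(aggregated_data.shape)

    events = ScannerEvents()
    events.on_aggregated_data(pd.concat([pd.Series([1, 2]), pd.Series([3, 4])], axis=1))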


