How to use the manage_stderr method in autotest

Best Python code snippets using autotest_python

logging_manager.py

Source: logging_manager.py (GitHub)


...
    else:
        manager = LoggingManager()
    if manage_stdout_and_stderr:
        manager.manage_stdout()
        manager.manage_stderr()
    return manager


# implementation follows

logger = logging.getLogger()


def _current_handlers():
    return set(logger.handlers)


# Set of code objects for functions that should be skipped when logging looks
# for its caller.
_caller_code_to_skip_in_logging_stack = set()


def do_not_report_as_logging_caller(func):
    # Decorator: frames belonging to this function are not reported as the
    # logging caller.
    """Decorator to annotate functions we will tell logging not to log."""
    # These are not the droids you are looking for.
    # You may go about your business.
    _caller_code_to_skip_in_logging_stack.add(func.func_code)
    return func


# Copied from Python 2.4 logging/__init__.py Logger.findCaller and enhanced.
# The logging code remains the same and compatible with this monkey patching
# through at least Python version 2.6.2.
# Replacement for logging's findCaller: walks the call stack to obtain the
# caller's file name, line number and function name.
def _logging_manager_aware_logger__find_caller(unused):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = sys._getframe(2).f_back
    rv = "(unknown file)", 0, "(unknown function)"
    while hasattr(f, "f_code"):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        if filename == logging._srcfile:
            f = f.f_back
            continue
        # START additional code.
        # If the frame's code object is in the skip set, do not report this
        # frame as the logging caller; keep walking up the stack.
        if co in _caller_code_to_skip_in_logging_stack:
            f = f.f_back
            continue
        # END additional code.
        rv = (filename, f.f_lineno, co.co_name)
        break
    return rv


if sys.version_info[:2] > (2, 7):
    warnings.warn('This module has not been reviewed for Python %s' %
                  sys.version)


# Monkey patch our way around logging's design...
# (Monkey patching means replacing a module's methods or classes at runtime.)
# Save the original findCaller...
_original_logger__find_caller = logging.Logger.findCaller
# ...and replace it with autotest's own version, which can deliberately skip
# marked code when reporting the caller.
logging.Logger.findCaller = _logging_manager_aware_logger__find_caller


class LoggingFile(object):
    """
    File-like object that will receive messages and pass them to the logging
    infrastructure in an appropriate way.
    """
    # Pretends to be a file but actually logs: a string containing newlines is
    # split and emitted as one log record per line.

    def __init__(self, prefix='', level=logging.DEBUG,
                 logger=logging.getLogger()):
        """
        :param prefix - The prefix for each line logged by this object.
        """
        self._prefix = prefix
        self._level = level
        self._buffer = []
        self._logger = logger

    @do_not_report_as_logging_caller
    def write(self, data):
        """
        Writes data only if it constitutes a whole line. If it's not the case,
        store it in a buffer and wait until we have a complete line.

        :param data - Raw data (a string) that will be processed.
        """
        # Split the incoming string: the first piece is joined with whatever
        # is already buffered, complete middle lines are logged immediately,
        # and a trailing partial line stays in the buffer for later writes.
        # splitlines() discards a trailing blank line, so use split() instead
        data_lines = data.split('\n')
        if len(data_lines) > 1:
            self._buffer.append(data_lines[0])
            self._flush_buffer()
        for line in data_lines[1:-1]:
            self._log_line(line)
        if data_lines[-1]:
            self._buffer.append(data_lines[-1])

    @do_not_report_as_logging_caller
    def writelines(self, lines):
        """
        Writes an iterable of lines.

        :param lines: An iterable of strings that will be processed.
        """
        for data in lines:
            self.write(data)

    @do_not_report_as_logging_caller
    def _log_line(self, line):
        """
        Passes lines of output to the logging module.
        """
        self._logger.log(self._level, self._prefix + line)

    @do_not_report_as_logging_caller
    def _flush_buffer(self):
        if self._buffer:
            self._log_line(''.join(self._buffer))
            self._buffer = []

    @do_not_report_as_logging_caller
    def flush(self):
        self._flush_buffer()

    def isatty(self):
        return False


class SortingLoggingFile(LoggingFile):
    """
    File-like object that will receive messages and pass them to the logging
    infrastructure. It decides where to pass each line by applying a regex
    to it and seeing which level it matched.
    """

    def __init__(self, prefix='', level_list=[('ERROR', logging.ERROR),
                                              ('WARN', logging.WARN),
                                              ('INFO', logging.INFO),
                                              ('DEBUG', logging.DEBUG)],
                 logger=logging.getLogger()):
        super(SortingLoggingFile, self).__init__(prefix=prefix, logger=logger)
        # Build a list of (compiled regex, level) pairs,
        # e.g. [(<re object>, 40), (<re object>, 30), ...].
        self._level_list = [(re.compile(x), y) for x, y in level_list]

    @do_not_report_as_logging_caller
    def _log_line(self, line):
        # If the line matches one of the level patterns above, log it at the
        # matching level.
        for pattern, level in self._level_list:
            if pattern.search(line):
                self._logger.log(level, self._prefix + line)
                break
        # Otherwise flag it as UNMATCHED_LOG_LEVEL and log it at ERROR.
        else:
            self._logger.log(logging.ERROR, 'UNMATCHED_LOG_LEVEL: ' +
                             self._prefix + line)


class _StreamManager(object):
    """
    Redirects all output for some output stream (normally stdout or stderr) to
    the logging module by replacing the file objects with a new LoggingFile
    that calls logging.log().
    """
    # Redirects everything written to stdout/stderr into the logging module.

    def __init__(self, stream, level, stream_setter):
        """
        :param stream: stream object to manage
        :param level: level at which data written to the stream will be logged
        :param stream_setter: function accepting a stream object that will
                replace the given stream in its original location.
        """
        self._stream = stream                # the stream to manage
        self._level = level
        self._stream_setter = stream_setter  # callable that installs a stream object
        self._logging_stream = None

    def _replace_with_logger(self):
        # Install a LoggingFile in place of the stream via _stream_setter.
        self._logging_stream = LoggingFile(level=self._level)
        self._stream_setter(self._logging_stream)

    def _restore_stream(self):
        self._stream_setter(self._stream)

    def flush(self):
        self._logging_stream.flush()

    def start_logging(self):
        """Start directing the stream to the logging module."""
        self._replace_with_logger()

    def stop_logging(self):
        """Restore the stream to its original settings."""
        self._restore_stream()

    def on_push_context(self, context):
        """
        Called when the logging manager is about to push a new context onto
        the stack and has changed logging settings. The StreamHandler can
        modify the context to be saved before returning.
        """
        pass

    def on_restore_context(self, context):
        """
        Called when the logging manager is restoring a previous context.
        """
        pass


class LoggingManager(object):
    """
    Manages a stack of logging configurations, allowing clients to conveniently
    add and remove logging destinations. Also keeps a list of StreamManagers
    to easily direct streams into the logging module.
    """
    # Keeps a stack of logging configurations so clients can easily add and
    # remove logging destinations, plus a list of StreamManagers used to
    # redirect streams into the logging module.

    STREAM_MANAGER_CLASS = _StreamManager

    logging_config_object = None

    def __init__(self):
        """
        This class should not ordinarily be constructed directly (other than in
        tests). Use the module-global factory method get_logging_manager()
        instead.
        """
        # Use the module-level get_logging_manager() rather than constructing
        # this class repeatedly.
        if self.logging_config_object is None:
            raise RuntimeError('You must call configure_logging() before this')

        # _context_stack holds a stack of context dicts. Each context dict
        # contains:
        # * old_handlers: list of registered logging Handlers
        # contexts may also be extended by _StreamHandlers
        self._context_stack = []
        self._streams = []
        self._started = False

    def manage_stream(self, stream, level, stream_setter):
        """
        Tells this manager to manage the given stream. All data written to the
        stream will be directed to the logging module instead. Must be called
        before start_logging().

        :param stream: stream to manage
        :param level: level to log data written to this stream
        :param stream_setter: function to set the stream to a new object
        """
        if self._started:
            raise RuntimeError('You must call this before start_logging()')
        self._streams.append(self.STREAM_MANAGER_CLASS(stream, level,
                                                       stream_setter))

    def _sys_stream_setter(self, stream_name):
        # Important: returns a setter that installs a file-like object as
        # sys.stdout or sys.stderr.
        assert stream_name in ('stdout', 'stderr'), stream_name

        def set_stream(file_object):
            setattr(sys, stream_name, file_object)
        return set_stream

    def manage_stdout(self):
        self.manage_stream(sys.stdout, logging.INFO,
                           self._sys_stream_setter('stdout'))

    def manage_stderr(self):
        self.manage_stream(sys.stderr, self.logging_config_object.stderr_level,
                           self._sys_stream_setter('stderr'))

    def start_logging(self):
        """
        Begin capturing output to the logging module.
        """
        for stream_manager in self._streams:
            stream_manager.start_logging()
        self._started = True

    def stop_logging(self):
        """
        Restore output to its original state.
        """
        while self._context_stack:
...
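Putting the excerpt together: manage_stderr() tells the LoggingManager to replace sys.stderr with a LoggingFile, so anything written to stderr becomes a log record at logging_config_object.stderr_level. Below is a minimal usage sketch; the import path and the MinimalLoggingConfig class are illustrative assumptions, and real autotest code installs its config via configure_logging(), whose body is not shown in the excerpt above.

import logging
import sys

import logging_manager  # the module excerpted above; the import path is an assumption


class MinimalLoggingConfig(object):
    # Hypothetical stand-in for autotest's logging config object:
    # manage_stderr() only reads stderr_level, and LoggingManager.__init__()
    # only checks that logging_config_object is not None.
    stderr_level = logging.ERROR


logging.basicConfig(level=logging.DEBUG)

# Real autotest code installs the config via configure_logging(); assigning
# the class attribute directly is a shortcut for this sketch.
logging_manager.LoggingManager.logging_config_object = MinimalLoggingConfig()

# get_logging_manager(manage_stdout_and_stderr=True) performs exactly these
# calls, as the tail of that function shows in the excerpt above.
manager = logging_manager.LoggingManager()
manager.manage_stdout()   # stdout -> logging.INFO records
manager.manage_stderr()   # stderr -> records at stderr_level
manager.start_logging()

sys.stderr.write('something went wrong\n')   # logged at ERROR
manager.stop_logging()                       # restores sys.stdout / sys.stderr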


ReaderManager.py

Source: ReaderManager.py (GitHub)


#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import re
import sys
import time
import json
import socket
import datetime
import ProcReader.util as util_utils
from collections import OrderedDict
from DaemonClass import Daemon
from ProcReader.settings import (MANAGE_PIDFILE, MANAGE_STDIN,
                                 MANAGE_STDOUT, MANAGE_STDERR,
                                 MANAGE_WR_URL)


class ReaderManager(Daemon):
    _intvl = None
    _wr_url = None
    _readersters = OrderedDict()

    def __init__(self,
                 pidfile=MANAGE_PIDFILE,
                 stdin=MANAGE_STDIN,
                 stdout=MANAGE_STDOUT,
                 stderr=MANAGE_STDERR,
                 intvl=10,
                 wr_url=MANAGE_WR_URL):
        super(ReaderManager, self).__init__(pidfile=pidfile,
                                            stdin=stdin, stdout=stdout,
                                            stderr=stderr)
        self._wr_url = wr_url
        # Restore the poll interval and the reader list from the web resource,
        # falling back to the constructor defaults.
        tmp_str = util_utils.rd_data('%s%s' % (self._wr_url, 'getintvl'))
        if tmp_str:
            self._intvl = int(tmp_str)
        else:
            self._intvl = intvl
        tmp_list = None
        tmp_str = util_utils.rd_data('%s%s' % (self._wr_url, 'getreadersters'))
        if tmp_str:
            tmp_list = eval(tmp_str)
            if type(tmp_list) == type(''):
                tmp_list = eval(tmp_list)
            for reader in tmp_list:
                p_name, cls = util_utils.load_class(reader)
                if p_name and cls:
                    self._readersters[p_name] = cls()
        else:
            self._readersters = OrderedDict()

    def set_intvl(self, intvl):
        if intvl > 1:
            self._intvl = intvl
            util_utils.wr_data('%s%s' % (self._wr_url, 'setintvl'), intvl)
            self.restart()

    def set_readersters(self, readersters):
        reader_list = eval(readersters)
        self._readersters = OrderedDict()
        for reader in reader_list:
            p_name, cls = util_utils.load_class(reader)
            if p_name and cls:
                self._readersters[p_name] = cls()
        util_utils.wr_data('%s%s' %
                           (self._wr_url, 'setreadersters'), readersters)
        self.restart()

    def _reader(self):
        # Collect a timestamped snapshot from every configured reader.
        reader_data = OrderedDict()
        if self._readersters:
            for readerster in self._readersters:
                reader_data[readerster] = {}
                reader_data[readerster]['timestamp'] = time.asctime(
                    time.localtime())
                reader_data[readerster]['data'] = \
                    self._readersters[readerster].get_data()
        return reader_data

    def run(self):
        cnt = 0
        while True:
            wr_obj = {}
            try:
                wr_obj['data'] = self._reader()
                wr_obj['timestamp'] = time.asctime(time.localtime())
                wr_obj['hostname'] = socket.gethostname()
                wr_obj['ip_address'] = socket.gethostbyname(wr_obj['hostname'])
            except socket.gaierror as e:
                wr_obj['ip_address'] = ''
            finally:
                util_utils.wr_data('%s%s' % (self._wr_url, 'setdata'), wr_obj)
            time.sleep(self._intvl)
            cnt += 1


if __name__ == '__main__':
    daemon = ReaderManager()
    if len(sys.argv) == 2:
        if sys.argv[1] == 'start':
            daemon.start()
        elif sys.argv[1] == 'stop':
            daemon.stop()
        elif sys.argv[1] == 'restart':
            daemon.restart()
        else:
            print('Unknown command')
            sys.exit(2)
    elif len(sys.argv) == 3:
        if sys.argv[1] == 'setintvl':
            if re.match(r'^-?\d+$', sys.argv[2]) or \
                    re.match(r'^-?(\.\d+|\d+(\.\d+)?)', sys.argv[2]):
                daemon.set_intvl(int(sys.argv[2]))
                print('Set interval: %s' % sys.argv[2])
        elif sys.argv[1] == 'setreader':
            reader_list = None
            try:
                reader_list = eval(sys.argv[2])
            except:
                print('%s is not a list.' % sys.argv[2])
            if reader_list:
                daemon.set_readersters(sys.argv[2])
    else:
        print('USAGE: %s start/stop/restart' % sys.argv[0])
...
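ReaderManager hands MANAGE_STDIN, MANAGE_STDOUT and MANAGE_STDERR (all '/dev/null' in the settings.py below) to its Daemon base class. DaemonClass.Daemon itself is not part of this excerpt, but daemons conventionally detach by pointing the standard streams at those paths, roughly as in the sketch below; the helper name and the /tmp path are illustrative only.

import os
import sys


def redirect_std_streams(stdin='/dev/null', stdout='/dev/null',
                         stderr='/dev/null'):
    # Illustrative sketch of what a Daemon base class typically does with its
    # stdin/stdout/stderr arguments; the real DaemonClass.Daemon is not shown
    # in this article, so treat this as an assumption about its behaviour.
    sys.stdout.flush()
    sys.stderr.flush()
    si = open(stdin, 'r')
    so = open(stdout, 'a')
    se = open(stderr, 'a')
    # Duplicate the new descriptors over fds 0, 1 and 2 so that even code
    # writing to the raw file descriptors is redirected.
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())


if __name__ == '__main__':
    redirect_std_streams(stderr='/tmp/reader_manager.err')  # hypothetical path
    print('discarded: stdout now points at /dev/null')
    sys.stderr.write('kept: stderr now points at /tmp/reader_manager.err\n')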


settings.py

Source: settings.py (GitHub)


#!/usr/bin/env python
# -*- encoding: utf-8 -*-
BASE_RATE = 1024

# cpu.py
PROC_CPU_INFO = '/proc/cpuinfo'
PROC_CPU_STAT = '/proc/stat'

# disk.py
PROC_DISK_STAT = '/proc/diskstats'
PROC_FILE_SYST = '/proc/filesystems'
DISK_UNIT_LIST = ('B', 'KB', 'MB', 'GB', 'TB', 'PB')
DISK_RATE_LIST = (
    pow(BASE_RATE, 0), pow(BASE_RATE, 1),
    pow(BASE_RATE, 2), pow(BASE_RATE, 3),
    pow(BASE_RATE, 4), pow(BASE_RATE, 5),
)
DISK_ETC_MTAB = '/etc/mtab'

# load.py
PROC_LOAD_STAT = '/proc/loadavg'

# mem.py
PROC_MEM_INFO = '/proc/meminfo'
MEM_UNIT_LIST = ('KB', 'MB', 'GB')
MEM_RATE_LIST = (
    pow(BASE_RATE, 0), pow(BASE_RATE, 1),
    pow(BASE_RATE, 2),
)

# net.py
PROC_NET_DEV = '/proc/net/dev'

# uptime.py
PROC_UPTIME_INFO = '/proc/uptime'

# daemon.py
DEV_STD_IN = '/dev/stdin'
DEV_STD_OUT = '/dev/stdout'
DEV_STD_ERR = '/dev/stderr'

# httpserver.py
READER_PATH = '/usr/local/procagent'
READERS_LIST = [
    'ProcReader.cpu.CPUUsageReader',
    'ProcReader.mem.MemInfoReader',
    'ProcReader.load.LoadStatReader',
    'ProcReader.disk.DiskUsageReader',
    'ProcReader.net.NetStatReader',
    'ProcReader.uptime.UptimeReader',
]

# readermanager.py
MANAGE_PIDFILE = '/tmp/readercls.pid'
MANAGE_STDIN = '/dev/null'
MANAGE_STDOUT = '/dev/null'
MANAGE_STDERR = '/dev/null'
...
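The DISK_UNIT_LIST/DISK_RATE_LIST and MEM_UNIT_LIST/MEM_RATE_LIST pairs line up by index: unit n corresponds to 1024**n bytes (or kilobytes, for the memory pair). A reader might consume them roughly as below; humanize_bytes is a hypothetical helper written for illustration, not part of ProcReader.

from ProcReader.settings import DISK_UNIT_LIST, DISK_RATE_LIST


def humanize_bytes(value, units=DISK_UNIT_LIST, rates=DISK_RATE_LIST):
    # Hypothetical helper: walk the paired (unit, rate) tuples from the
    # largest rate down and format the value with the first unit that fits.
    for unit, rate in reversed(list(zip(units, rates))):
        if value >= rate:
            return '%.1f %s' % (value / float(rate), unit)
    return '0 %s' % units[0]


print(humanize_bytes(3 * 1024 ** 3))   # -> '3.0 GB'
print(humanize_bytes(512))             # -> '512.0 B'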


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, covering everything from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run autotest automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

