Example usage of a `stats_report` variable in a Python MySQL monitoring script (checkmysql.py)

Best Python code snippet using locust

checkmysql.py

Source:checkmysql.py Github

copy

Full Screen

# -*- coding: iso-8859-1 -*-
"""
 dbAlerter MySQL module
 @copyright: 2008-2011 Wave2 Limited. All rights reserved.
 @license: BSD License
"""
import math, MySQLdb, re, time, sys
import checkos, config, notify
from string import lower
__author__ = "Alan Snelson"
__copyright__ = "Copyright (c) 2008-2011 Wave2 Limited"
__revision__ = "$Id$"
__version__ = "0.2.0"

#Timestamps of the newest error / slow query already reported, so repeated
#log scans only notify about entries newer than these.
last_error = time.localtime()
last_slow_query = time.localtime()
#Module-wide database connection, created by initialise()/connect().
db = None
#State dictionaries consumed by notify.stateful_notify to suppress
#duplicate alerts between check() runs.
auto_increment_state = {}
process_list_state = {}
security_state = {'ANONACCOUNT' : 0, 'EMPTYPASS' : 0}
warning_state = {'CONNECTIONS' : 0, 'BASEDIR_USAGE' : 0, 'BASEDIR_IUSAGE' : 0, \
'CPU_USAGE' : 0, 'DATADIR_USAGE' : 0, 'DATADIR_IUSAGE' : 0, 'OPEN_FILES' : 0, \
'PLUGIN_DIR_USAGE' : 0, 'PLUGIN_DIR_IUSAGE' : 0, 'SLAVEIO' : 0, 'SLAVESQL' : 0, \
'SLAVEPOS' : 0, 'TMPDIR_USAGE' : 0, 'TMPDIR_IUSAGE' : 0}
#Counters snapshotted from SHOW GLOBAL STATUS.
global_status = {'SLOW_QUERIES' : 0, 'MAX_USED_CONNECTIONS' : 0, 'UPTIME' : 0}
#Per-run statistics accumulated by the various check_* functions.
statistics = {'ERROR' : 0, 'WARNING' : 0, 'INFO' : 0, 'SLOWQ' : 0, 'MAXCONN' : 0, 'MAX_OPEN_FILES' : 0}
variables = {'BASEDIR' : '', 'DATADIR' : '', 'HOSTNAME' : '', 'LOG_ERROR' : '', 'PID' : 0, 'PID_FILE' : '', \
'PLUGIN_DIR' : '', 'OPEN_FILES_LIMIT' : 0, 'SERVERID' : 0, 'SLOW_QUERY_LOG' : '','SLOW_QUERY_LOG_FILE' : '', \
'TMPDIR' : '', 'VERSION' : '', 'LOG_OUTPUT' : '', 'VERSION_COMMENT' : '', 'MAX_CONNECTIONS' : 0}

#Server variables copied verbatim from INFORMATION_SCHEMA.GLOBAL_VARIABLES
#into the module-level 'variables' dictionary (replaces ten identical ifs).
_MIRRORED_VARIABLES = ('BASEDIR', 'DATADIR', 'HOSTNAME', 'LOG_ERROR',
                       'OPEN_FILES_LIMIT', 'PID_FILE', 'PLUGIN_DIR',
                       'TMPDIR', 'VERSION', 'VERSION_COMMENT')


def _read_pid():
    """Refresh variables['PID'] from the server's pid file."""
    #open() replaces the Python 2-only file() builtin; the context manager
    #guarantees the handle is closed.
    with open(variables['PID_FILE'], 'r') as pid_file:
        variables['PID'] = int(pid_file.read().strip())


def initialise():
    """Initialise MySQL module.

    Connects to the server, snapshots the tracked server variables,
    the server pid and the status counters, and returns the connection.
    """
    global db, global_status, statistics, variables
    db = connect()
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    #Obtain list of MySQL server variables
    cursor.execute("""SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES""")
    for row in cursor.fetchall():
        #Table-driven copy of every variable this module mirrors.
        if row['VARIABLE_NAME'] in _MIRRORED_VARIABLES:
            variables[row['VARIABLE_NAME']] = row['VARIABLE_VALUE']
    #Update MySQL pid
    _read_pid()
    #initialise Statistics
    cursor.execute("""SHOW GLOBAL STATUS""")
    for row in cursor.fetchall():
        if row['Variable_name'] == 'Threads_connected':
            #Current Connection Usage
            statistics['MAXCONN'] = int(row['Value'])
        elif row['Variable_name'] == 'Open_files':
            #Current Open Files Usage
            statistics['MAX_OPEN_FILES'] = int(row['Value'])
        elif row['Variable_name'] == 'Max_used_connections':
            #Max Used Connections (Since server start)
            global_status['MAX_USED_CONNECTIONS'] = int(row['Value'])
        elif row['Variable_name'] == 'Slow_queries':
            #Slow Queries (Since server start)
            global_status['SLOW_QUERIES'] = int(row['Value'])
    cursor.close()
    return db


def cleanup():
    """Close connections and perform any cleanup"""
    global db
    db.close()


def check(stats=False):
    """Perform MySQL checks.

    Keyword arguments:
    stats -- produce statistics report post check
    """
    global db, statistics
    try:
        db.ping()
        #OS Checks
        if (checkos.supported):
            check_cpu_usage()
            check_disk_usage()
        #MySQL Checks
        update_variables()
        check_status()
        check_auto_increment()
        check_anonymous_accounts()
        check_empty_passwords()
        check_functionality()
        check_error_log()
        check_process_list()
        check_slow_query_log()
        check_slave_status()
        #Produce statistics report?
        if (stats):
            statistics_report()
    except MySQLdb.Error as exc:
        #'as exc' plus args unpacking replaces the Python 2-only
        #"except MySQLdb.Error, (error, description)" syntax.
        error, description = exc.args
        if error == 2006:
            #2006: "MySQL server has gone away" - poll until it returns.
            notify.notify('Error', '(' + str(error) + ') ' + description, 'Error (' + str(error) + ') - ' + description)
            retry_count = 0
            while True:
                try:
                    #Create connection to database
                    db = MySQLdb.connect(host=config.get('dbAlerter', 'mysql_hostname'), port=int(config.get('dbAlerter', 'mysql_port')), user=config.get('dbAlerter', 'mysql_username'), passwd=config.get('dbAlerter', 'mysql_password'))
                    #Update MySQL pid
                    _read_pid()
                    notify.notify('Info', 'MySQL Server Back Online', 'MySQL Server Back Online')
                    statistics['INFO'] += 1
                    break
                except MySQLdb.Error as retry_exc:
                    error, description = retry_exc.args
                    if (error == 2003 or error == 2002):
                        #2002/2003: server still unreachable - keep retrying,
                        #logging every fifth attempt.
                        if (retry_count == 5):
                            notify.log_notify('Error', '(' + str(error) + ') ' + description)
                            retry_count = 0
                        else:
                            retry_count += 1
                        time.sleep(int(config.get('dbAlerter', 'check_interval')))
                    else:
                        #NOTE(review): reconstructed pairing - any other error
                        #while reconnecting is treated as fatal.
                        notify.notify('Error', '(' + str(error) + ') ' + description, 'Error (' + str(error) + ') - ' + description + "\n\ndbAlerter Shutdown")
                        notify.log_notify('Shutdown')
                        time.sleep(5)
                        sys.exit(1)
    except:
        #Unexpected failure: record the shutdown then re-raise.
        #(Original had unreachable sleep/exit statements after 'raise';
        #they have been removed.)
        notify.log_notify('Shutdown')
        raise
def connect():
    """Connect to MySQL server and return the new connection.

    Exits the process (after notifying) on any connection error other
    than 1046 ('no database selected'), which is ignored.
    """
    #Fix: pre-bind the connection so the ignored 1046 path cannot raise
    #UnboundLocalError on the final return (the original returned a local
    #'db' that was never assigned when connect() failed).
    conn = None
    try:
        #Create connection to database
        conn = MySQLdb.connect(host=config.get('dbAlerter', 'mysql_hostname'), port=int(config.get('dbAlerter', 'mysql_port')), user=config.get('dbAlerter', 'mysql_username'), passwd=config.get('dbAlerter', 'mysql_password'))
    except MySQLdb.Error as exc:
        #Python 3-compatible replacement for "except MySQLdb.Error, (e, d)".
        error, description = exc.args
        #Access denied error
        if (error == 1045):
            notify.notify('Error', '(' + str(error) + ') - ' + description, 'Error (' + str(error) + ') - ' + description + "\n\n\nDid you remember to grant the correct privileges?\n\nGRANT PROCESS, SELECT, REPLICATION CLIENT, SHOW DATABASES, SUPER ON *.* TO 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluser_password';\n\nGRANT CREATE, INSERT, DELETE, DROP ON dbAlerter.* TO 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluser_password';")
            notify.log_notify('Shutdown')
            sys.exit(1)
        #No database selected error
        elif (error == 1046):
            pass
        else:
            notify.notify('Error', '(' + str(error) + ') - ' + description, 'Error (' + str(error) + ') - ' + description + "\n\ndbAlerter Shutdown")
            notify.log_notify('Shutdown')
            sys.exit(1)
    return conn


def check_anonymous_accounts():
    """Check for anonymous accounts.

    Raises a stateful 'Security' notification while any mysql.user row
    with an empty User exists, and clears it once none remain.
    """
    global db, security_state, statistics
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SELECT User, Host FROM mysql.user WHERE User=''""")
    rows = cursor.fetchall()
    if (cursor.rowcount > 0):
        anonymous_accounts = "The following anonymous accounts were detected:\n\n"
        for row in rows:
            anonymous_accounts += row['User'] + "'@'" + row['Host'] + "\n"
        notify.stateful_notify(True, security_state, 'ANONACCOUNT', 'Security', 'Anonymous accounts detected', anonymous_accounts + "\nPlease see: http://dev.mysql.com/doc/refman/5.1/en/default-privileges.html (Securing the initial MySQL Accounts) for details on how to secure these accounts.")
    else:
        notify.stateful_notify(False, security_state, 'ANONACCOUNT', 'Info', 'No anonymous accounts detected', "No anonymous accounts detected.")
    cursor.close()


def check_cpu_usage():
    """Check MySQL CPU usage.

    Compares the server process's CPU utilisation against the configured
    threshold and raises/clears a stateful warning accordingly.
    """
    global variables
    cpu_usage = float(checkos.get_cpu_usage(variables['PID']))
    threshold = float(config.get('dbAlerter', 'mysql_cpu_usage_threshold'))
    if (cpu_usage > threshold):
        notify.stateful_notify(True, warning_state, 'CPU_USAGE', 'Warning', 'CPU utilisation threshold crossed', 'CPU utilisation for MySQL process (' + str(variables['PID']) + ') is currently ' + str(cpu_usage) + '% (Threshold currently set to ' + str(threshold) + '%)')
        statistics['WARNING'] += 1
    else:
        notify.stateful_notify(False, warning_state, 'CPU_USAGE', 'Info', 'CPU utilisation returned below threshold', 'CPU utilisation for MySQL process (' + str(variables['PID']) + ') is currently ' + str(cpu_usage) + '% (Threshold currently set to ' + str(threshold) + '%)')
def check_disk_usage():
    """Check MySQL disk usage.

    Compares block and inode usage of the mounts backing the base, data,
    plugin and temporary directories against their configured thresholds.
    """
    global variables
    mount_usage = checkos.get_mount_usage([variables['BASEDIR'], variables['DATADIR'], variables['PLUGIN_DIR'], variables['TMPDIR']])
    for mount in mount_usage.keys():
        mount_capacity = int(mount_usage[mount]['capacity'].replace('%', ''))
        inode_capacity = int(mount_usage[mount]['icapacity'].replace('%', ''))
        if (mount == variables['BASEDIR']):
            threshold = int(config.get('dbAlerter', 'mysql_basedir_threshold'))
            params = ['BASEDIR_', 'Installation directory']
        elif (mount == variables['DATADIR']):
            threshold = int(config.get('dbAlerter', 'mysql_datadir_threshold'))
            params = ['DATADIR_', 'Data directory']
        elif (mount == variables['PLUGIN_DIR']):
            threshold = int(config.get('dbAlerter', 'mysql_plugindir_threshold'))
            params = ['PLUGIN_DIR_', 'Plugin directory']
        elif (mount == variables['TMPDIR']):
            threshold = int(config.get('dbAlerter', 'mysql_tmpdir_threshold'))
            params = ['TMPDIR_', 'Temporary directory']
        else:
            #Fix: skip unrecognised mounts instead of silently reusing the
            #previous iteration's threshold/params (unbound on first pass).
            continue
        #Check mount capacity
        if (mount_capacity > threshold):
            notify.stateful_notify(True, warning_state, params[0] + 'USAGE', 'Warning', params[1] + ' usage threshold crossed', params[1] + ' (' + mount + ') usage is currently ' + str(mount_capacity) + '% (Threshold currently set to ' + str(threshold) + '%)')
            statistics['WARNING'] += 1
        else:
            notify.stateful_notify(False, warning_state, params[0] + 'USAGE', 'Info', params[1] + ' usage returned below threshold', params[1] + ' (' + mount + ') usage is currently ' + str(mount_capacity) + '% (Threshold currently set to ' + str(threshold) + '%)')
        #Check inode capacity
        if (inode_capacity > threshold):
            #Fix: report inode_capacity here - the original embedded the
            #block-usage figure in the inode messages.
            notify.stateful_notify(True, warning_state, params[0] + 'IUSAGE', 'Warning', params[1] + ' inode usage threshold crossed', params[1] + ' (' + mount + ') inode usage is currently ' + str(inode_capacity) + '% (Threshold currently set to ' + str(threshold) + '%)')
            statistics['WARNING'] += 1
        else:
            notify.stateful_notify(False, warning_state, params[0] + 'IUSAGE', 'Info', params[1] + ' inode usage returned below threshold', params[1] + ' (' + mount + ') inode usage is currently ' + str(inode_capacity) + '% (Threshold currently set to ' + str(threshold) + '%)')


def check_empty_passwords():
    """Check for accounts with empty passwords.

    Raises a stateful 'Security' notification while any mysql.user row
    has an empty Password, and clears it once none remain.
    """
    global db, security_state, statistics
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SELECT User, Host FROM mysql.user WHERE Password=''""")
    rows = cursor.fetchall()
    if (cursor.rowcount > 0):
        emptyPasswords = "The following accounts do not have passwords configured:\n\n"
        for row in rows:
            emptyPasswords += row['User'] + "'@'" + row['Host'] + "\n"
        notify.stateful_notify(True, security_state, 'EMPTYPASS', 'Security', 'Empty passwords detected', emptyPasswords + "\nPlease see: http://dev.mysql.com/doc/refman/5.1/en/default-privileges.html (Securing the initial MySQL Accounts) for details on how to secure these accounts.")
    else:
        notify.stateful_notify(False, security_state, 'EMPTYPASS', 'Info', 'No Empty Passwords Detected', "No more empty passwords detected.")
    cursor.close()


def check_functionality():
    """Check basic functionality.

    Exercises CREATE/INSERT/SELECT/DROP on a scratch dbAlerter schema and
    notifies about the first error SHOW ERRORS reports, if any.
    """
    global db, statistics
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""CREATE DATABASE IF NOT EXISTS dbAlerter""")
    cursor.execute("""CREATE TABLE IF NOT EXISTS dbAlerter.insertcheck (ID int)""")
    cursor.execute("""INSERT INTO dbAlerter.insertcheck VALUES (112233)""")
    cursor.execute("""SELECT * FROM dbAlerter.insertcheck""")
    cursor.execute("""DROP TABLE dbAlerter.insertcheck""")
    cursor.execute("""DROP DATABASE dbAlerter""")
    cursor.execute("""SHOW ERRORS""")
    row = cursor.fetchone()
    if row:
        notify.notify(row["Level"], ' (' + str(row["Code"]) + ') - ' + row["Message"], 'Error (' + str(row["Code"]) + ') - ' + row["Message"])
        statistics['ERROR'] += 1
    cursor.close()
def check_error_log():
    """Check MySQL Error Log.

    Scans the error log for '[ERROR]' lines newer than the last entry
    already reported and raises a notification for each new one.
    """
    global last_error, statistics, variables
    #Compiled once (the original recompiled per line); raw string keeps the
    #regex escapes literal.
    error_line = re.compile(r'^(\d\d\d\d\d\d \d\d:\d\d:\d\d) (\[ERROR\]) (.*)$')
    try:
        log = open(variables['LOG_ERROR'], 'r')
        try:
            for line in log:
                match = error_line.match(line)
                if match:
                    stamp = time.strptime(match.group(1), "%y%m%d %H:%M:%S")
                    if (stamp > last_error):
                        last_error = stamp
                        notify.notify('Error', match.group(3), line)
                        statistics['ERROR'] += 1
        finally:
            #Close even if iteration fails part-way.
            log.close()
    except IOError as ioe:
        #Fix: IOError has no '.str' attribute (AttributeError in the
        #original) - use strerror.
        notify.notify("Error", ioe.strerror + ' : ' + ioe.filename + "\n")
        statistics['ERROR'] += 1


def check_slow_query_log():
    """Check slow query log - if enabled.

    Supports both file-based and table-based (mysql.slow_log) logging and
    notifies about queries newer than the last one already reported.
    """
    global db, last_slow_query, statistics, variables
    slow_header = re.compile(r'^(# Time:) (\d\d\d\d\d\d \d\d:\d\d:\d\d)$')
    if (variables['SLOW_QUERY_LOG'] == "ON"):
        slowquery = ''
        if (variables['LOG_OUTPUT'] == "FILE"):
            #File based logging
            try:
                slowquerylog = open(variables['SLOW_QUERY_LOG_FILE'], 'r')
                try:
                    for line in slowquerylog:
                        header = slow_header.match(line)
                        if header:
                            #Start accumulating at the first header newer
                            #than the last reported slow query.
                            stamp = time.strptime(header.group(2), "%y%m%d %H:%M:%S")
                            if (stamp > last_slow_query):
                                last_slow_query = stamp
                                slowquery += line
                            continue
                        if (slowquery != ''):
                            slowquery += line
                    if (slowquery != ''):
                        notify.notify('Info', 'Slow Query Encountered', slowquery)
                        statistics['INFO'] += 1
                finally:
                    slowquerylog.close()
            except IOError as ioe:
                #Fix: IOError has no '.str' attribute - use strerror.
                notify.notify("Error", ioe.strerror + ' : ' + ioe.filename, ioe.strerror + ' : ' + ioe.filename)
                statistics['ERROR'] += 1
        #Table based logging
        if (variables['LOG_OUTPUT'] == "TABLE"):
            cursor = db.cursor(MySQLdb.cursors.DictCursor)
            #Parameterised query replaces string concatenation of the
            #timestamp into the SQL text.
            cursor.execute("""SELECT * FROM mysql.slow_log WHERE start_time > %s ORDER BY start_time ASC LIMIT 100""", (time.strftime("%Y-%m-%d %H:%M:%S", last_slow_query),))
            for row in cursor.fetchall():
                slowquery += '# Time: ' + str(row['start_time']) + "\n# User@Host: " + row['user_host'] + "\n# Query_time: " + str(row['query_time']) + "\n" + row['sql_text'] + "\n\n"
                stamp = time.strptime(str(row['start_time']), "%Y-%m-%d %H:%M:%S")
                if (stamp > last_slow_query):
                    last_slow_query = stamp
            if (slowquery != ''):
                notify.notify('Info', 'Slow Query Encountered', slowquery)
                statistics['INFO'] += 1
            cursor.close()


def check_process_list():
    """Check process list for long running commands.

    Flags any non-sleeping, non-system command running for more than two
    minutes, and clears the stateful warning once it completes.
    """
    global db, process_list_state, statistics
    processes = []
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SHOW FULL PROCESSLIST""")
    for row in cursor.fetchall():
        #Notify on commands taking longer than 2 minutes
        if (row['Command'] != 'Sleep' and row['User'] != 'system user' and row['User'] != 'event_scheduler' and row['Time'] > 120):
            processes.append(row['Id'])
            #Fix: State and Info may be NULL (None) in the process list -
            #guard them like 'db' to avoid a TypeError on concatenation.
            notify.stateful_notify(True, process_list_state, row['Id'], 'Warning', 'Long running process with ID (' + str(row['Id']) + ') detected ', "The following command has been running for over 2 minutes:\n\nId: " + str(row['Id']) + "\nUser: " + row['User'] + "\nHost: " + row['Host'] + "\nSchema: " + (row['db'] or 'NULL') + "\nCommand: " + row['Command'] + "\nTime: " + str(row['Time']) + "\nState: " + (row['State'] or '') + "\nInfo: " + (row['Info'] or 'NULL'))
            statistics['WARNING'] += 1
    cursor.close()
    #Cleanup state variable
    #list() allows deleting keys while iterating (also Python 3 safe).
    for key in list(process_list_state.keys()):
        if key not in processes:
            notify.stateful_notify(False, process_list_state, key, 'Info', 'Long running process with ID (' + str(key) + ') has completed', 'Long running process with ID (' + str(key) + ') has completed.')
            del process_list_state[key]
def check_slave_status():
    """Check replication slave status.

    Raises/clears stateful warnings for the IO thread, the SQL thread and
    replication lag (more than 60 seconds behind the master).
    """
    global db, statistics, warning_state
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SHOW SLAVE STATUS""")
    row = cursor.fetchone()
    if row:
        #Check Slave IO
        if row["Slave_IO_Running"] == "No":
            notify.stateful_notify(True, warning_state, 'SLAVEIO', "Warning", "Slave IO has stopped", "Warning - Slave IO has stopped")
            statistics['WARNING'] += 1
        elif row["Slave_IO_Running"] == "Yes":
            notify.stateful_notify(False, warning_state, 'SLAVEIO', "Info", "Slave IO has started", "Info - Slave IO has started")
        #Check Slave SQL
        if row["Slave_SQL_Running"] == "No":
            notify.stateful_notify(True, warning_state, 'SLAVESQL', "Warning", "Slave SQL has stopped", "Warning - Slave SQL has stopped")
            statistics['WARNING'] += 1
        elif row["Slave_SQL_Running"] == "Yes":
            notify.stateful_notify(False, warning_state, 'SLAVESQL', "Info", "Slave SQL has started", "Info - Slave SQL has started")
        #Check Slave Position
        #Seconds_Behind_Master is NULL (None) while replication is broken;
        #treat that as neither behind nor caught up (matches the original's
        #Python 2 comparison behaviour, and avoids a TypeError on Python 3).
        seconds_behind = row["Seconds_Behind_Master"]
        if seconds_behind is not None and seconds_behind > 60:
            notify.stateful_notify(True, warning_state, 'SLAVEPOS', "Warning", "Slave is currently " + str(seconds_behind) + " seconds behind the Master", "Warning - Slave is currently " + str(seconds_behind) + " seconds behind the Master")
            statistics['WARNING'] += 1
        elif seconds_behind == 0:
            notify.stateful_notify(False, warning_state, 'SLAVEPOS', "Info", "Slave has caught up with Master", "Info - Slave has caught up with Master")
    cursor.close()


#Maximum value an AUTO_INCREMENT column of each integer type can reach,
#keyed by signedness (replaces ten near-identical branches below).
_AUTO_INC_LIMITS = {
    'signed': {'tinyint': 127.0, 'smallint': 32767.0, 'mediumint': 8388607.0,
               'int': 2147483647.0, 'bigint': 9223372036854775807.0},
    'unsigned': {'tinyint': 255.0, 'smallint': 65535.0, 'mediumint': 16777215.0,
                 'int': 4294967295.0, 'bigint': 18446744073709551615.0},
}


def check_auto_increment():
    '''Check all auto_increment counters.

    Warns when any auto_increment column has consumed more than 50% of its
    type's value range, and clears the warning once it drops back below.
    '''
    global auto_increment_state, db, statistics
    threshold = 50
    aitables = []
    cursor1 = db.cursor(MySQLdb.cursors.DictCursor)
    cursor2 = db.cursor(MySQLdb.cursors.DictCursor)
    cursor1.execute("""SELECT TABLE_SCHEMA, TABLE_NAME, AUTO_INCREMENT FROM INFORMATION_SCHEMA.TABLES WHERE AUTO_INCREMENT > 0""")
    for table in cursor1.fetchall():
        #Parameterised query replaces string-concatenated schema/table names.
        cursor2.execute("""SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = %s AND TABLE_NAME = %s AND EXTRA = 'auto_increment'""", (table['TABLE_SCHEMA'], table['TABLE_NAME']))
        for column in cursor2.fetchall():
            #.lower() replaces the Python 2-only string.lower() function.
            signedness = 'signed' if column['COLUMN_TYPE'].lower().find('unsigned') == -1 else 'unsigned'
            limit = _AUTO_INC_LIMITS[signedness].get(column['DATA_TYPE'].lower())
            if limit is None:
                #Non-integer auto_increment types were ignored originally too.
                continue
            used_percent = int((float(100) / limit) * float(table['AUTO_INCREMENT']))
            if (used_percent > threshold):
                fullname = table['TABLE_SCHEMA'] + '.' + table['TABLE_NAME'] + '.' + column['COLUMN_NAME']
                aitables.append(fullname)
                notify.stateful_notify(True, auto_increment_state, fullname, 'Warning', 'Auto increment threshold crossed on column [' + fullname + ']', 'The column [' + column['COLUMN_NAME'] + '] within the table [' + table['TABLE_SCHEMA'] + '.' + table['TABLE_NAME'] + '] crossed the ' + str(threshold) + '% threshold.')
                statistics['WARNING'] += 1
    cursor1.close()
    cursor2.close()
    #Cleanup state variable
    #list() allows deleting keys while iterating (also Python 3 safe).
    for key in list(auto_increment_state.keys()):
        if key not in aitables:
            notify.stateful_notify(False, auto_increment_state, key, 'Info', 'Auto increment returned below threshold for column [' + key + ']', 'The column [' + key.split('.')[2] + '] within the table [' + key.split('.')[0] + '.' + key.split('.')[1] + '] returned below the ' + str(threshold) + '% threshold.')
            del auto_increment_state[key]


def get_fragmented_tables():
    """Return fragmented tables as {schema.table: free-space percentage}."""
    global db
    fragmented_tables = {}
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SELECT TABLE_SCHEMA, TABLE_NAME, DATA_FREE, DATA_LENGTH FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA NOT IN ('information_schema','mysql') AND DATA_LENGTH > 0 AND DATA_FREE > 0;""")
    for row in cursor.fetchall():
        #Free space as a (rounded-up) percentage of the table's data length.
        fragmented_tables[row['TABLE_SCHEMA'] + '.' + row['TABLE_NAME']] = int(math.ceil(row['DATA_FREE'] * (100 / float(row['DATA_LENGTH']))))
    #Fix: the original leaked this cursor.
    cursor.close()
    return fragmented_tables


def get_unused_engines():
    """Return unused (active but referenced by no table) storage engines."""
    global db
    plugins = {}
    unused_engines = []
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    cursor.execute("""SELECT PLUGIN_NAME, PLUGIN_STATUS FROM INFORMATION_SCHEMA.PLUGINS WHERE PLUGIN_TYPE='STORAGE ENGINE';""")
    for row in cursor.fetchall():
        plugins[row['PLUGIN_NAME']] = row['PLUGIN_STATUS']
    #Remove every engine that at least one table actually uses.
    cursor.execute("""SELECT DISTINCT(ENGINE) FROM INFORMATION_SCHEMA.TABLES WHERE ENGINE IS NOT NULL;""")
    for row in cursor.fetchall():
        #Fix: pop() tolerates an engine missing from the plugin list
        #(the original 'del' raised KeyError in that case).
        plugins.pop(row['ENGINE'], None)
    #Fix: the original leaked this cursor.
    cursor.close()
    #.items() replaces Python 2-only iteritems(); .lower() replaces
    #the Python 2-only string.lower() function.
    for plugin, status in plugins.items():
        if (status == 'ACTIVE'):
            unused_engines.append(plugin.lower())
    return unused_engines


def format_uptime(uptime):
    """Format an uptime in seconds as 'D Days, H Hours, M Minutes'.

    (The original docstring promised years/months/seconds, which were
    never produced.)
    """
    days = int(math.floor(uptime / 86400))
    hours = int(math.floor(uptime / 3600) % 24)
    minutes = int(math.floor(uptime / 60) % 60)
    return str(days) + ' Days, ' + str(hours) + ' Hours, ' + str(minutes) + ' Minutes'


def reset_statistics():
    """Reset MySQL statistics"""
    global statistics
    for key in ('MAXCONN', 'MAX_OPEN_FILES', 'ERROR', 'WARNING', 'INFO', 'SLOWQ'):
        statistics[key] = 0
statistics_report():470 """MySQL server statistics"""471 global auto_increment_state, db, global_status, security_state, statistics, variables472 stats_report = ''473 cursor = db.cursor(MySQLdb.cursors.DictCursor)474 #Auto increment threshold475 if (len(auto_increment_state) > 0):476 stats_report += "=== Auto Incrememnt Threshold Crossed ===\n"477 for key in auto_increment_state.keys():478 stats_report += ' ' + key + "\n"479 #Anonymous Accounts480 if (security_state['ANONACCOUNT'] == 1):481 stats_report += "=== Anonymous Accounts Detected ===\n"482 cursor.execute("""SELECT User, Host FROM mysql.user WHERE User=''""")483 rows = cursor.fetchall ()484 if (cursor.rowcount > 0):485 for row in rows:486 stats_report += ' ' + row['User'] + "'@'" + row['Host'] + "\n"487 #Empty passwords488 if (security_state['EMPTYPASS'] == 1):489 stats_report += "=== Accounts Without Passwords Detected ===\n"490 cursor.execute("""SELECT User, Host FROM mysql.user WHERE Password=''""")491 rows = cursor.fetchall ()492 if (cursor.rowcount > 0):493 for row in rows:494 stats_report += ' ' + row['User'] + "'@'" + row['Host'] + "\n"495 if (stats_report != ''):496 stats_report = "== Outstanding Issues ==\n" + stats_report + "\n"497 #Server Information498 stats_report += "== Server Information ==\n"499 #Uptime500 stats_report += 'Uptime: ' + format_uptime(global_status['UPTIME']) + "\n"501 #MySQL Version502 stats_report += 'MySQL Version: ' + variables['VERSION'] + ' (' + variables['VERSION_COMMENT'] + ")\n"503 #MySQL Server ID504 stats_report += 'Server ID: ' + str(variables['SERVERID']) + "\n"505 #Base Directory506 stats_report += 'Basedir: ' + variables['BASEDIR'] + "\n"507 #Data Directory508 stats_report += 'Datadir: ' + variables['DATADIR'] + "\n"509 #Plugin Directory510 stats_report += 'Plugindir: ' + variables['PLUGIN_DIR'] + "\n"511 #Tmp Directory512 stats_report += 'Tmpdir: ' + variables['TMPDIR'] + "\n"513 #Error Log514 stats_report += 'Error Log: ' + variables['LOG_ERROR'] + "\n"515 
#Slow Query Log516 stats_report += 'Slow Query Log: ' + variables['SLOW_QUERY_LOG_FILE']517 if (variables['SLOW_QUERY_LOG'] == "OFF"):518 stats_report += " (Slow Query Logging Disabled)\n"519 else:520 stats_report += "\n"521 stats_report += "\n"522 #Server Statistics523 stats_report += "== Server Statistics ==\n"524 #Max Connections525 stats_report += 'Max Connections Encountered: ' + str(statistics['MAXCONN']) + "\n"526 #Max Connections (Since server start)527 stats_report += 'Max Connections (Since server start): ' + str(global_status['MAX_USED_CONNECTIONS']) + "\n"528 #Max Open Files529 stats_report += 'Max Open Files Encountered: ' + str(statistics['MAX_OPEN_FILES']) + "\n"530 #Total Info531 stats_report += 'Info Encountered: ' + str(statistics['INFO']) + "\n"532 #Total Warnings533 stats_report += 'Warnings Encountered: ' + str(statistics['WARNING']) + "\n"534 #Total Errors535 stats_report += 'Errors Encountered: ' + str(statistics['ERROR']) + "\n"536 #Slow Queries537 stats_report += 'Slow Queries Encountered: ' + str(statistics['SLOWQ'])538 if (variables['SLOW_QUERY_LOG'] == "OFF"):539 stats_report += " (Slow Query Logging Disabled)\n"540 else:541 stats_report += "\n"542 #Slow Queries (Since server start)543 stats_report += 'Slow Queries (Since server start): ' + str(global_status['SLOW_QUERIES'])544 if (variables['SLOW_QUERY_LOG'] == "OFF"):545 stats_report += " (Slow Query Logging Disabled)\n"546 else:547 stats_report += "\n"548 stats_report += "\n"549 stats_report += "== Metadata Information ==\n"550 #Schemata Count551 cursor.execute("""SELECT count(*) AS SCOUNT FROM information_schema.SCHEMATA""")552 schemas = cursor.fetchall ()553 for schema in schemas:554 stats_report += 'Schemata: ' + str(schema['SCOUNT']) + "\n"555 #Table Count556 cursor.execute("""SELECT count(*) AS TCOUNT FROM information_schema.TABLES""")557 tables = cursor.fetchall ()558 for table in tables:559 stats_report += 'Tables: ' + str(table['TCOUNT']) + "\n"560 #Column Count561 
cursor.execute("""SELECT COUNT(*) AS CCOUNT FROM information_schema.COLUMNS""")562 columns = cursor.fetchall ()563 for column in columns:564 stats_report += 'Columns: ' + str(column['CCOUNT']) + "\n"565 #Event Count566 cursor.execute("""SELECT COUNT(*) AS ECOUNT FROM information_schema.EVENTS""")567 events = cursor.fetchall ()568 for event in events:569 stats_report += 'Events: ' + str(event['ECOUNT']) + "\n"570 #Routine Count571 cursor.execute("""SELECT COUNT(*) AS RCOUNT FROM information_schema.ROUTINES""")572 routines = cursor.fetchall ()573 for routine in routines:574 stats_report += 'Routines: ' + str(routine['RCOUNT']) + "\n"575 #Trigger Count576 cursor.execute("""SELECT COUNT(*) AS TCOUNT FROM information_schema.TRIGGERS""")577 triggers = cursor.fetchall ()578 for trigger in triggers:579 stats_report += 'Triggers: ' + str(trigger['TCOUNT']) + "\n"580 #View Count581 cursor.execute("""SELECT COUNT(*) AS VCOUNT FROM information_schema.VIEWS""")582 views = cursor.fetchall ()583 for view in views:584 stats_report += 'Views: ' + str(view['VCOUNT']) + "\n"585 #View Count586 cursor.execute("""SELECT COUNT(*) AS UCOUNT FROM mysql.user""")587 views = cursor.fetchall ()588 for view in views:589 stats_report += 'Users: ' + str(view['UCOUNT']) + "\n"590 #Recommendations591 recommendations=''592 #Fragmented Tables593 fragmented_tables=get_fragmented_tables()594 if (fragmented_tables):595 recommendations += "=== Fragmented Tables ===\nThe following tables have been identified as being fragmented:\n"596 for table, percent in sorted(fragmented_tables.iteritems()):597 recommendations += ' ' + table + ' (' + str(percent) + "%)\n"598 recommendations += "Consider running OTIMIZE TABLE to reclaim unused space. 
See http://dev.mysql.com/doc/refman/5.1/en/optimize-table.html for details.\n"599 #Unused Engines600 unused_engines=get_unused_engines()601 if (unused_engines):602 recommendations += "=== Unused Storage Engines ===\nThe following storage engines are not in use and could be disabled:\n"603 for engine in unused_engines:604 recommendations += ' ' + engine + "\n"605 recommendations += "See http://dev.mysql.com/doc/refman/5.1/en/server-plugin-options.html for details on how to disable plugins.\n"606 if (recommendations != ''):607 stats_report += "\n== Recommendations ==\n" + recommendations + "\n"608 cursor.close()609 notify.notify('Stats', 'dbAlerter Statistics for ' + variables['HOSTNAME'], stats_report)610def update_variables():611 """Update server variables"""612 global db, variables613 cursor = db.cursor(MySQLdb.cursors.DictCursor)614 #Obtain list of dynamic MySQL server variables615 cursor.execute("""SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES""")616 rows = cursor.fetchall ()617 for row in rows:618 #Store Log Output619 if (row['VARIABLE_NAME'] == 'LOG_OUTPUT'):620 variables['LOG_OUTPUT'] = row['VARIABLE_VALUE']621 #Store Max Connections622 if (row['VARIABLE_NAME'] == 'MAX_CONNECTIONS'):623 variables['MAX_CONNECTIONS'] = row['VARIABLE_VALUE']624 #Store Slow Query Log Status625 if (row['VARIABLE_NAME'] == 'SLOW_QUERY_LOG'):626 variables['SLOW_QUERY_LOG'] = row['VARIABLE_VALUE']627 #Store Slow Query Log Path628 if (row['VARIABLE_NAME'] == 'SLOW_QUERY_LOG_FILE'):629 variables['SLOW_QUERY_LOG_FILE'] = row['VARIABLE_VALUE']630 cursor.close()631def check_status():632 """Check server statistics"""633 global db, global_status, statistics, warning_state634 cursor = db.cursor(MySQLdb.cursors.DictCursor)635 cursor.execute("""SHOW GLOBAL STATUS""")636 rows = cursor.fetchall ()637 for row in rows:638 #Check Open File Usage639 if (row['Variable_name'] == 'Open_files'):640 if (int(row['Value']) > statistics['MAX_OPEN_FILES']):641 statistics['MAX_OPEN_FILES'] = 
int(row['Value'])642 connpct = int(((100 / float(variables['OPEN_FILES_LIMIT'])) * float(row['Value'])))643 if (connpct > int(config.get('dbAlerter','mysql_open_files_threshold'))):644 notify.stateful_notify(True, warning_state, 'OPEN_FILES', 'Warning', 'Open file usage crossed ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold', 'Open file crossed ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold and is currently ' + str(connpct) + '%')645 statistics['WARNING'] += 1646 else:647 notify.stateful_notify(False, warning_state, 'OPEN_FILES', 'Info', 'Open file usage fell below ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold', 'Open file usage fell below ' + config.get('dbAlerter','mysql_open_files_threshold') + '% threshold and is currently ' + str(connpct) + '%')648 #Check Current Connection Usage649 if (row['Variable_name'] == 'Threads_connected'):650 if (int(row['Value']) > statistics['MAXCONN']):651 statistics['MAXCONN'] = int(row['Value'])652 connpct = int(((100 / float(variables['MAX_CONNECTIONS'])) * float(row['Value'])))653 if (connpct > int(config.get('dbAlerter','mysql_connection_usage_threshold'))):654 notify.stateful_notify(True, warning_state, 'CONNECTIONS', 'Warning', 'Connection usage crossed ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold', 'Connection usage crossed ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold and is currently ' + str(connpct) + "%")655 statistics['WARNING'] += 1656 else:657 notify.stateful_notify(False, warning_state, 'CONNECTIONS', 'Info', 'Connection usage fell below ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold', 'Connection usage fell below ' + config.get('dbAlerter','mysql_connection_usage_threshold') + '% threshold and is currently ' + str(connpct) + '%')658 #Check Slow Queries659 if (row['Variable_name'] == 'Slow_queries'):660 slowqs = (int(row['Value']) - 
global_status['SLOW_QUERIES'])661 if (slowqs > 5):662 notify.notify('Warning', str(slowqs) + " Slow Queries during last " + config.get('dbAlerter','check_interval') + " seconds.", str(slowqs) + " Slow Queries during last " + config.get('dbAlerter','check_interval') + " seconds.")663 statistics['WARNING'] += 1664 global_status['SLOW_QUERIES'] = int(row['Value'])665 statistics['SLOWQ'] += slowqs666 #Server uptime667 if (row['Variable_name'] == 'Uptime'):668 global_status['UPTIME'] = int(row['Value'])...

Full Screen

Full Screen

stats_report.py

Source:stats_report.py Github

copy

Full Screen

1#!/usr/bin/python2import os3import sys4import simplejson as json5from datetime import datetime as dt6import zlib7import base648import argparse9from read_titles import readFilesTitles10#import backports.lzma as lzma11#backports.lzma12# 20K test07_21.json13#244K test07_21_kmer_stats_report.html14# 20K test07_31.json15#244K test07_31_kmer_stats_report.html16# 19M test10_21.json17# 11M test10_21_kmer_stats_report.html18# 18M test10_31.json19#9.6M test10_31_kmer_stats_report.html20# 20K test07_21.json21#236K test07_21_kmer_stats_report.html22# 20K test07_31.json23#236K test07_31_kmer_stats_report.html24# 19M test10_21.json25#8.4M test10_21_kmer_stats_report.html26# 18M test10_31.json27#7.4M test10_31_kmer_stats_report.html28REPORT_NAME = "kmer_stats_report"29template_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )30templates = {31 'css' : 'stats_report.template.css',32 'js' : 'stats_report.template.js',33 'html' : 'stats_report.template.html',34 #'b64' : 'stats_report.template.b64.js',35 'b64' : 'stats_report.template.b64.min.js',36 'inflate' : 'stats_report.template.inflate.js',37 'deflate' : 'stats_report.template.deflate.js',38 #'jsphylo' : 'stats_report.template.jsphylosvg.js',39 'jsphylo' : 'stats_report.template.jsphylosvg.min.js',40 'raphael' : 'stats_report.template.raphael.js',41 'math' : 'stats_report.template.math.js',42 'd3' : 'stats_report.template.d3.js',43 'd3layout' : 'stats_report.template.d3.layout.js',44 #'d3phylogram' : 'stats_report.template.d3.phylogram.js',45 'd3phylogram' : 'stats_report.template.d3.phylogram.min.js',46 #'d3phylonator': 'stats_report.template.d3.phylonator.js',47 'd3phylonator': 'stats_report.template.d3.phylonator.min.js',48 'newick' : 'stats_report.template.newick.js',49 #'lzma' : 'stats_report.template.lzma-d-min.js'50}51def templater( tpl ):52 print "reading template ::", tpl53 return open(os.path.join( template_dir, templates[tpl]), 'r').read()54def getjson(data):55 print "getting json"56 #return base64.b64encode( 
lzma.compress( json.dumps(data, sort_keys=True, indent=''), preset=lzma.PRESET_EXTREME, format=lzma.FORMAT_ALONE ) )57 #return base64.b64encode( zlib.compress( json.dumps(data, sort_keys=True, indent=''), zlib.Z_BEST_COMPRESSION ) )58 data = json.dumps(data, sort_keys=True, indent='')59 dl = len(data)60 cobj = zlib.compressobj( zlib.Z_BEST_COMPRESSION, zlib.DEFLATED, 15, 9 )61 cout = cobj.compress( data ) + cobj.flush()62 cl = len(cout)63 b64 = base64.b64encode( cout )64 bl = len(b64)65 print "size %d compressed %d ( %6.2f %%) b64 %d ( %6.2f %% / %6.2f %% )" % (dl, cl, (cl*1.0/dl*100), bl, (bl*1.0/dl*100), (bl*1.0/cl*100))66 return b6467 #return json.dumps(data, sort_keys=True, indent='')68def fixTitles( titles, data ):69 print "fixing titles"70 #print titles.keys()71 for p in xrange(len(data['in_filenames'])):72 name = data['in_filenames'][p]73 print " name",name74 if name in titles:75 data['in_filenames'][p] = titles[name]76 print " renaming",name,"to",titles[name]77def addtrees( data, trees ):78 print "adding trees"79 data["trees"] = {}80 81 bn = os.path.commonprefix( trees )82 for treefile in trees:83 if len(trees) > 1:84 treename = treefile.replace( bn, '' )85 else:86 treename = treefile87 88 fn, ext = os.path.splitext(treename)89 #ext = ext[1:]90 grps = fn.split('.')91 scale = grps[-2]92 distance = grps[-1]93 algorithm = ext[1:]94 print "adding trees", treefile, "to", treename, 'scale', scale, 'distance', distance, 'algorithm', algorithm95 #data["trees"][treename] = trees[treefile]96 if algorithm not in data["trees"]:97 data["trees"][algorithm] = {}98 if distance not in data["trees"][algorithm]:99 data["trees"][algorithm][distance] = {}100 data["trees"][algorithm][distance][scale] = trees[treefile]101 102def makehtml(nname, gdata):103 #https://google-developers.appspot.com/chart/interactive/docs/gallery/candlestickchart104 html = templater('html' )105 tpl = {}106 tpl["data"] = getjson( gdata );107 tpl["now" ] = str(dt.now().isoformat());108 for k in 
templates:109 tpl[k] = templater(k)110 html = html % tpl111 #html = html % {112 #"css" : templater('css' ),113 #"js" : templater('js' ),114 #"b64" : templater('b64m' ),115 #"inflate" : templater('inflate'),116 #"raphael" : templater('raphael'),117 #"jsphylo" : templater('jsphylo'),118 #"math" : templater('math' ),119 #"data" : getjson( gdata ),120 #"now" : str(dt.now().isoformat())121 #css=open( sys.argv[0] + '.css', 'r' ).read()122 #<!-- <link rel="stylesheet" type="text/css" href="%(stylefile)s"> -->123 #"stylefile": sys.argv[0] + '.css'124 #}125 #print html126 print "saving html"127 open(nname + '_' + REPORT_NAME + '.html', 'w').write( html )128def main(in_json , in_csv, in_trees):129 titles = None130 trees = None131 132 if in_csv is not None:133 print "loading titles", in_csv134 if not os.path.exists(in_csv):135 print "title CSV %s does not exists" % in_csv136 sys.exit(1)137 titles = readFilesTitles(in_csv)138 if in_trees is not None:139 trees = {}140 print "loading trees", in_trees141 for tree in in_trees:142 if not os.path.exists(tree):143 print "tree %s does not exists" % tree144 sys.exit(1)145 trees[ tree ] = open(tree, 'r').read()146 gdata = {}147 print "reading", in_json148 if not in_json.endswith('.json'):149 print "not a json file"150 sys.exit(0)151 nname = os.path.basename( os.path.abspath( in_json ) )152 nname = nname.replace( '.json', '')153 gdata[ nname ] = json.load( open(in_json, 'r') )154 if titles is not None:155 fixTitles( titles, gdata[ nname ] )156 if trees is not None:157 addtrees( gdata[ nname ], trees )158 print "generating html"159 makehtml(nname, gdata)160 print "finished"161if __name__ == '__main__':162 parser = argparse.ArgumentParser(description='Create HTML report')163 164 parser.add_argument('infile', type=str, 165 help='input json file')166 parser.add_argument('--title', dest='title',167 default=None,168 help='CSV containing row titles')169 parser.add_argument('--trees', dest='trees',170 action='append',171 default=None,172 
help='Tree files')173 174 args = parser.parse_args()175 ...

Full Screen

Full Screen

urls.py

Source:urls.py Github

copy

Full Screen

from django.urls import re_path

from . import views

# Regex fragments shared by the time-series URL patterns below. Each is a
# named capture group built from the constants declared on the views module.
group_re = r'(?P<group>' + '|'.join(views.SERIES_GROUPS) + ')'
group_date_re = r'(?P<group>' + '|'.join(views.SERIES_GROUPS_DATE) + ')'
range_re = r'(?P<start>\d{8})-(?P<end>\d{8})'
format_re = r'(?P<format>' + '|'.join(views.SERIES_FORMATS) + ')'
series_re = r'%s-%s\.%s$' % (group_re, range_re, format_re)
# Map every series type to its full anchored pattern, e.g.
#   '^overview-{group}-{start}-{end}.{format}$'
series = dict(
    (series_type, r'^%s-%s' % (series_type, series_re))
    for series_type in views.SERIES
)

# Addon specific stats.
stats_patterns = [
    # page URLs
    re_path(
        r'^$', views.stats_report, name='stats.overview', kwargs={'report': 'overview'}
    ),
    re_path(
        r'^downloads/$',
        views.stats_report,
        name='stats.downloads',
        kwargs={'report': 'downloads'},
    ),
    re_path(
        r'^downloads/sources/$',
        views.stats_report,
        name='stats.sources',
        kwargs={'report': 'sources'},
    ),
    re_path(
        r'^downloads/mediums/$',
        views.stats_report,
        name='stats.mediums',
        kwargs={'report': 'mediums'},
    ),
    re_path(
        r'^downloads/contents/$',
        views.stats_report,
        name='stats.contents',
        kwargs={'report': 'contents'},
    ),
    re_path(
        r'^downloads/campaigns/$',
        views.stats_report,
        name='stats.campaigns',
        kwargs={'report': 'campaigns'},
    ),
    re_path(
        r'^usage/$', views.stats_report, name='stats.usage', kwargs={'report': 'usage'}
    ),
    re_path(
        r'^usage/languages/$',
        views.stats_report,
        name='stats.locales',
        kwargs={'report': 'locales'},
    ),
    re_path(
        r'^usage/versions/$',
        views.stats_report,
        name='stats.versions',
        kwargs={'report': 'versions'},
    ),
    re_path(
        r'^usage/applications/$',
        views.stats_report,
        name='stats.apps',
        kwargs={'report': 'apps'},
    ),
    re_path(
        r'^usage/os/$', views.stats_report, name='stats.os', kwargs={'report': 'os'}
    ),
    re_path(
        r'^usage/countries/$',
        views.stats_report,
        name='stats.countries',
        kwargs={'report': 'countries'},
    ),
    # time series URLs following this pattern:
    # /addon/{addon_id}/statistics/{series}-{group}-{start}-{end}.{format}
    re_path(series['overview'], views.overview_series, name='stats.overview_series'),
    re_path(series['downloads'], views.downloads_series, name='stats.downloads_series'),
    re_path(series['usage'], views.usage_series, name='stats.usage_series'),
    re_path(
        series['sources'],
        views.download_breakdown_series,
        name='stats.sources_series',
        kwargs={'source': 'sources'},
    ),
    re_path(
        series['mediums'],
        views.download_breakdown_series,
        name='stats.mediums_series',
        kwargs={'source': 'mediums'},
    ),
    re_path(
        series['contents'],
        views.download_breakdown_series,
        name='stats.contents_series',
        kwargs={'source': 'contents'},
    ),
    re_path(
        series['campaigns'],
        views.download_breakdown_series,
        name='stats.campaigns_series',
        kwargs={'source': 'campaigns'},
    ),
    re_path(
        series['os'],
        views.usage_breakdown_series,
        name='stats.os_series',
        kwargs={'field': 'oses'},
    ),
    re_path(
        series['locales'],
        views.usage_breakdown_series,
        name='stats.locales_series',
        kwargs={'field': 'locales'},
    ),
    re_path(
        series['versions'],
        views.usage_breakdown_series,
        name='stats.versions_series',
        kwargs={'field': 'versions'},
    ),
    re_path(
        series['apps'],
        views.usage_breakdown_series,
        name='stats.apps_series',
        kwargs={'field': 'applications'},
    ),
    re_path(
        series['countries'],
        views.usage_breakdown_series,
        name='stats.countries_series',
        kwargs={'field': 'countries'},
    ),
]

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, through following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run locust automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful