How to use logHeader method in fMBT

Best Python code snippet using fMBT_python
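The two snippets below use the identifier in different senses. The first, an Autopsy ingest module written in Jython, builds a logHeader string ("Volatility Processor -- ") and prepends it to every message it logs so the module's output can be picked out of the shared ingest log. The second, a DDPG reinforcement-learning agent, maintains a logheader list naming the columns of its statistics log. As a minimal sketch of the string-prefix idiom in plain Python (the module and logger names here are illustrative, not taken from either project):

import logging

# Hypothetical module tag; in the Autopsy snippet below this role is played by
# logHeader = "Volatility Processor -- " combined with self.log(Level.INFO, ...).
logHeader = "My Module -- "
logger = logging.getLogger("MyModule")
logging.basicConfig(level=logging.INFO)

def log(message):
    # Prefix every entry so interleaved output can be attributed to this module.
    logger.info(logHeader + message)

log("started")             # INFO:MyModule:My Module -- started
log("processing file X")   # INFO:MyModule:My Module -- processing file X

Routing every message through one helper keeps the tag consistent and makes grep-style filtering of a shared log trivial.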

VolatilityProcessor.py

Source: VolatilityProcessor.py (GitHub)


import inspect
import os
import hashlib
import json
from shutil import copyfile
from VolatilityService import VolatilityServiceClass
from java.awt import GridBagLayout
from java.awt import GridBagConstraints
from javax.swing import JPanel
from javax.swing import JLabel
from javax.swing import JTextField
from javax.swing import JButton
from javax.swing import JFileChooser
from javax.swing import JComboBox
from javax.swing.filechooser import FileNameExtensionFilter
from java.util.logging import Level
from java.sql import DriverManager, SQLException
from java.lang import Class, IllegalArgumentException
from org.sleuthkit.autopsy.ingest import IngestModuleFactoryAdapter
from org.sleuthkit.autopsy.ingest import IngestModuleIngestJobSettingsPanel
from org.sleuthkit.autopsy.ingest import IngestModuleIngestJobSettings
from org.sleuthkit.autopsy.ingest import DataSourceIngestModule
from org.sleuthkit.autopsy.coreutils import Logger
from org.sleuthkit.autopsy.ingest import IngestMessage
from org.sleuthkit.autopsy.ingest import IngestServices
from org.sleuthkit.autopsy.ingest import IngestModule
from org.sleuthkit.autopsy.ingest.IngestModule import IngestModuleException
from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.datamodel import BlackboardArtifact
from org.sleuthkit.datamodel import BlackboardAttribute
from org.sleuthkit.autopsy.ingest import ModuleDataEvent


class VolatilityIngestModuleFactory(IngestModuleFactoryAdapter):
    def __init__(self):
        self.settings = None

    moduleName = "Volatility Processor"

    def getModuleDisplayName(self):
        return self.moduleName

    def getModuleDescription(self):
        return "Run Volatility against a Memory Image"

    def getModuleVersionNumber(self):
        return "1.0"

    def getDefaultIngestJobSettings(self):
        return VolatilityIngestModuleSettings()

    def hasIngestJobSettingsPanel(self):
        return True

    def getIngestJobSettingsPanel(self, settings):
        if not isinstance(settings, VolatilityIngestModuleSettings):
            raise IllegalArgumentException("Settings expected to be instance of SampleIngestModuleSettings")
        self.settings = settings
        return VolatilityIngestModuleUISettingsPanel(self.settings)

    def isDataSourceIngestModuleFactory(self):
        return True

    def createDataSourceIngestModule(self, ingestOptions):
        return VolatilityIngestModule(self.settings)


class VolatilityIngestModuleUISettingsPanel(IngestModuleIngestJobSettingsPanel):
    def __init__(self, settings):
        head, tail = os.path.split(os.path.abspath(__file__))
        self.absolutePath = head
        self.database = head + "\\VolatilitySettings.db"
        self.localSettings = settings
        self.initLayout()

    def checkDatabase(self):
        runInsertStatements = False
        if not os.path.isfile(self.database):
            runInsertStatements = True
        connection = None
        statement = None
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            connection = DriverManager.getConnection("jdbc:sqlite:" + self.database)
            if runInsertStatements:
                with open(self.absolutePath + "\\InsertStatements.sql", "r") as file:
                    count = 0
                    for query in file:
                        # Exclude any lines that are empty or contain a comment
                        if query != "" and "--" not in query:
                            count += 1
                            try:
                                preparedStatement = connection.prepareStatement(query)
                                preparedStatement.executeUpdate()
                            except SQLException as ex:
                                self.messageLabel.setText("Error at: " + query + "<br />" + ex.message)
                self.messageLabel.setText("Database created successfully")
            try:
                statement = connection.createStatement()
                query = 'SELECT name, value FROM settings'
                results = statement.executeQuery(query)
                while results.next():
                    if results.getString("name") == "VolatilityExecutableDirectory":
                        self.volatilityDirTextField.setText(results.getString("value"))
                        self.localSettings.setVolatilityDir(results.getString("value"))
                    if results.getString("name") == "VolatilityVersion":
                        self.versionComboBox.setSelectedItem(results.getString("value"))
                    if results.getString("name") == "VolatilityProfile":
                        self.profileComboBox.setSelectedItem(results.getString("value"))
                self.messageLabel.setText("Saved settings loaded successfully")
            except SQLException as ex:
                self.messageLabel.setText("Error reading settings database: " + ex.message)
            finally:
                if statement:
                    statement.close()
        except SQLException as ex:
            self.messageLabel.setText("Error opening settings DB: " + ex.message)
        finally:
            if connection:
                connection.close()

    def findDir(self, event):
        fileChooser = JFileChooser()
        fileExtentionFilter = FileNameExtensionFilter("Executable Files (*.exe)", ["exe"])
        fileChooser.addChoosableFileFilter(fileExtentionFilter)
        result = fileChooser.showDialog(self.mainPanel, "Select File")
        if result == JFileChooser.APPROVE_OPTION:
            file = fileChooser.getSelectedFile()
            canonicalPath = file.getCanonicalPath()
            self.localSettings.setVolatilityDir(canonicalPath)
            self.volatilityDirTextField.setText(canonicalPath)

    def saveSettings(self, event):
        connection = None
        statement = None
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            connection = DriverManager.getConnection("jdbc:sqlite:" + self.database)
            try:
                statement = connection.createStatement()
                query = 'SELECT count(*) as RowCount FROM settings'
                results = statement.executeQuery(query)
                settingsCount = int(results.getString("RowCount"))
                if settingsCount > 3:
                    directoryStatement = connection.prepareStatement(
                        "UPDATE settings SET value = ? WHERE name = 'VolatilityExecutableDirectory';"
                    )
                    versionStatement = connection.prepareStatement(
                        "UPDATE settings SET value = ? WHERE name = 'VolatilityVersion';"
                    )
                    profileStatement = connection.prepareStatement(
                        "UPDATE settings SET value = ? WHERE name = 'VolatilityProfile';"
                    )
                else:
                    directoryStatement = connection.prepareStatement(
                        "INSERT INTO settings (name, value) VALUES ('VolatilityExecutableDirectory', ?);")
                    versionStatement = connection.prepareStatement(
                        "INSERT INTO settings (name, value) VALUES ('VolatilityVersion', ?);")
                    profileStatement = connection.prepareStatement(
                        "INSERT INTO settings (name, value) VALUES ('VolatilityProfile', ?);"
                    )
                directoryStatement.setString(1, self.volatilityDirTextField.getText())
                versionStatement.setString(1, self.versionComboBox.getSelectedItem())
                profileStatement.setString(1, self.profileComboBox.getSelectedItem())
                directoryStatement.executeUpdate()
                versionStatement.executeUpdate()
                profileStatement.executeUpdate()
                self.messageLabel.setText("Settings saved successfully")
                self.localSettings.setVolatilityDir(self.volatilityDirTextField.getText())
            except SQLException as ex:
                self.messageLabel.setText("Error reading settings database: " + ex.message)
        except SQLException as ex:
            self.messageLabel.setText("Error opening settings DB: " + ex.message)
        finally:
            if statement:
                statement.close()
            if connection:
                connection.close()

    def getProfiles(self):
        connection = None
        statement = None
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            connection = DriverManager.getConnection("jdbc:sqlite:" + self.database)
            version = self.versionComboBox.getSelectedItem()
            statement = connection.createStatement()
            query = "SELECT name FROM profiles WHERE version = '" + version + "';"
            results = statement.executeQuery(query)
            profiles = []
            while results.next():
                profiles.append(results.getString("name"))
            # statement.close()
            # connection.close()
            return profiles
        except SQLException as ex:
            self.messageLabel.setText("Error opening settings DB:\n" + ex.message)
        finally:
            if statement:
                statement.close()
            if connection:
                connection.close()

    def changeVersion(self, event):
        self.localSettings.setVersion(event.item)
        profileList = self.getProfiles()
        self.profileComboBox.removeAllItems()
        for profile in profileList:
            self.profileComboBox.addItem(profile)

    def changeProfile(self, event):
        self.localSettings.setProfile(event.item)

    def getSettings(self):
        return self.localSettings

    def initLayout(self):
        self.mainPanel = JPanel()
        self.gridBagPanel = GridBagLayout()
        self.gridBagConstraints = GridBagConstraints()
        self.mainPanel.setLayout(self.gridBagPanel)
        # Volatility Executable Path
        self.dirLabel = JLabel("Volatility Executable Directory")
        self.dirLabel.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 1
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 1
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.dirLabel, self.gridBagConstraints)
        self.mainPanel.add(self.dirLabel)
        self.volatilityDirTextField = JTextField(10)
        self.volatilityDirTextField.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 3
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.volatilityDirTextField, self.gridBagConstraints)
        self.mainPanel.add(self.volatilityDirTextField)
        self.findVolatilityPathButton = JButton("Find Dir", actionPerformed=self.findDir)
        self.findVolatilityPathButton.setEnabled(True)
        self.gridBagConstraints.gridx = 7
        self.gridBagConstraints.gridy = 3
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.findVolatilityPathButton, self.gridBagConstraints)
        self.mainPanel.add(self.findVolatilityPathButton)
        self.Blank3 = JLabel(" ")
        self.Blank3.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 9
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Blank3, self.gridBagConstraints)
        self.mainPanel.add(self.Blank3)
        # Version selector
        self.versionLabel = JLabel("Version:")
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 11
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.versionLabel, self.gridBagConstraints)
        self.mainPanel.add(self.versionLabel)
        self.versionList = ("2.5", "2.6")
        self.versionComboBox = JComboBox(self.versionList)
        self.versionComboBox.itemStateChanged = self.changeVersion
        self.gridBagConstraints.gridx = 7
        self.gridBagConstraints.gridy = 11
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.versionComboBox, self.gridBagConstraints)
        self.mainPanel.add(self.versionComboBox)
        self.Blank4 = JLabel(" ")
        self.Blank4.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 13
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Blank4, self.gridBagConstraints)
        self.mainPanel.add(self.Blank4)
        # Profile selector
        self.profileLabel = JLabel("Profile:")
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 19
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.profileLabel, self.gridBagConstraints)
        self.mainPanel.add(self.profileLabel)
        self.profileList = self.getProfiles()
        self.profileComboBox = JComboBox(self.profileList)
        self.profileComboBox.itemStateChanged = self.changeProfile
        self.gridBagConstraints.gridx = 7
        self.gridBagConstraints.gridy = 19
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 1
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.profileComboBox, self.gridBagConstraints)
        self.mainPanel.add(self.profileComboBox)
        self.Blank5 = JLabel(" ")
        self.Blank5.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 13
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Blank5, self.gridBagConstraints)
        self.mainPanel.add(self.Blank5)
        self.Blank2 = JLabel(" ")
        self.Blank2.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 22
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Blank2, self.gridBagConstraints)
        self.mainPanel.add(self.Blank2)
        # Save button
        self.saveButton = JButton("Save Settings", actionPerformed=self.saveSettings)
        self.saveButton.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 24
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.saveButton, self.gridBagConstraints)
        self.mainPanel.add(self.saveButton)
        self.Blank6 = JLabel(" ")
        self.Blank6.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 26
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Blank6, self.gridBagConstraints)
        self.mainPanel.add(self.Blank6)
        # Message
        self.Label3 = JLabel("Message:")
        self.Label3.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 27
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 1
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.Label3, self.gridBagConstraints)
        self.mainPanel.add(self.Label3)
        self.messageLabel = JLabel("")
        self.messageLabel.setEnabled(True)
        self.gridBagConstraints.gridx = 2
        self.gridBagConstraints.gridy = 31
        self.gridBagConstraints.gridwidth = 1
        self.gridBagConstraints.gridheight = 1
        self.gridBagConstraints.fill = GridBagConstraints.BOTH
        self.gridBagConstraints.weightx = 2
        self.gridBagConstraints.weighty = 0
        self.gridBagConstraints.anchor = GridBagConstraints.NORTH
        self.gridBagPanel.setConstraints(self.messageLabel, self.gridBagConstraints)
        self.mainPanel.add(self.messageLabel)
        self.checkDatabase()
        self.add(self.mainPanel)


class VolatilityIngestModule(DataSourceIngestModule):
    def __init__(self, settings):
        self.context = None
        self.localSettings = settings
        self.databaseFile = ""
        self.isAutodetect = False
        self.logger = Logger.getLogger(VolatilityIngestModuleFactory.moduleName)

    # Log via java.util.logging, tagging each message with the calling method's name
    def log(self, level, message):
        self.logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], message)

    def startUp(self, context):
        self.context = context
        self.VolatilityDir = self.localSettings.getVolatilityDir()
        self.Profile = self.localSettings.getProfile()
        if self.Profile == 'Autodetect':
            self.isAutodetect = True
        else:
            self.isAutodetect = False
        message = "<ul>" + \
                  "<li>Volatility executable at: " + self.VolatilityDir + "</li>" + \
                  "<li>Selected profile: " + self.Profile + "</li>" + "</ul>"
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Volatility Settings Loaded", message)
        IngestServices.getInstance().postMessage(inbox)
        if not os.path.exists(self.VolatilityDir):
            raise IngestModuleException("Volatility executable does not exist")
        self.log(Level.INFO, "Volatility Processor Loaded")

    # Overall progress index for process number processNum within the fileNum-th file
    def progressCount(self, processNum, fileNum):
        return processNum + (self.processCount * (fileNum - 1))

    def process(self, dataSource, progressBar):
        logHeader = "Volatility Processor -- "
        progressBar.switchToIndeterminate()
        BLOCKSIZE = 200 * 1024 * 1024  # about 200 megabytes
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Volatility Process Started")
        IngestServices.getInstance().postMessage(inbox)
        case = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%", "/")
        caseDir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        tempDir = Case.getCurrentCase().getTempDirectory()
        connection = None
        numFiles = 0
        dllDumpDir = caseDir + "\\VolatilityProcessor\\Dump_Files\\DLLDump"
        dumpRegistryDir = caseDir + "\\VolatilityProcessor\\Dump_Files\\RegistryDump"
        procDumpDir = caseDir + "\\VolatilityProcessor\\Dump_Files\\ProcDump"
        hiveDumpDir = caseDir + "\\VolatilityProcessor\\HiveDump"
        try:
            os.mkdir(caseDir + "\\VolatilityProcessor")
            os.mkdir(caseDir + "\\VolatilityProcessor\\Dump_Files")
            os.mkdir(dllDumpDir)
            os.mkdir(dumpRegistryDir)
            os.mkdir(procDumpDir)
            os.mkdir(hiveDumpDir)
        except OSError as e:
            self.log(Level.WARNING, logHeader + "Trying to create directory that already exists: " + e.message)
        self.log(Level.INFO, logHeader + "Case directory: " + caseDir)
        # Verifying
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Verifying files")
        IngestServices.getInstance().postMessage(inbox)
        progressBar.progress("Verifying files")
        validMessage = "<p>Valid Files</p><ul>"
        validFiles = ""
        invalidFiles = ""
        invalidList = []
        cannotValidateMessage = ""
        for file in files:
            imageFilePath = file.getLocalAbsPath()
            if imageFilePath is not None:
                fileName = os.path.basename(imageFilePath)
                containingFolder = os.path.dirname(imageFilePath)
                self.log(Level.INFO, logHeader + "Containing directory of file: " + containingFolder)
                self.log(Level.INFO, logHeader + "Copying file to temp dir")
                inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                                    "Making copy of " + fileName)
                IngestServices.getInstance().postMessage(inbox)
                copyfile(imageFilePath, tempDir + "\\" + fileName)
                copiedFilePath = tempDir + "\\" + fileName
                self.log(Level.INFO, logHeader + "Verifying " + fileName)
                hashFile = fileName[:-4] + ".Hash.txt"
                hashFilePath = containingFolder + "\\" + hashFile
                self.log(Level.INFO, logHeader + "Filename containing verification hash: " + hashFile)
                if os.path.exists(copiedFilePath):
                    fileHash = ""
                    md5 = hashlib.md5()
                    # Hash the copied image in 200 MB chunks to bound memory usage
                    with open(copiedFilePath, "rb") as fileToValidate:
                        fileChunk = fileToValidate.read(BLOCKSIZE)
                        while len(fileChunk) > 0:
                            md5.update(fileChunk)
                            fileChunk = fileToValidate.read(BLOCKSIZE)
                        fileHash = md5.hexdigest()
                    self.log(Level.INFO, logHeader + "File hash for " + fileName + ": " + fileHash)
                    with open(hashFilePath, "r") as verificationFile:
                        verificationHash = verificationFile.readline().decode("ascii", "ignore")
                        if verificationHash == fileHash:
                            self.log(Level.INFO, logHeader + fileName + " has been verified")
                            validFiles += "<li>" + fileName + "</li>"
                            numFiles += 1
                        else:
                            self.log(Level.WARNING, logHeader + fileName + " is invalid")
                            self.log(Level.INFO, logHeader + "verification file hash: " + verificationHash)
                            invalidFiles += "<li>" + fileName + "</li>"
                            invalidFiles += "<ul><li>Computed hash: " + fileHash + "</li><li>Hash in verification file: " + \
                                            verificationHash + "</li></ul>"
                            invalidList.append(fileName)
                else:
                    self.log(Level.WARNING, logHeader + "Verification file does not exist")
                    cannotValidateMessage += "<li>" + fileName + "</li>"
                    invalidList.append(fileName)
        validMessage += validFiles + "</ul><p>Invalid files</p><ul>" + invalidFiles + \
                        "</ul><p>Cannot validate due to missing validation file</p><ul>" + cannotValidateMessage + "</ul>"
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Verifying files result", validMessage)
        IngestServices.getInstance().postMessage(inbox)
        # Processing
        self.processCount = 11
        progressBar.switchToDeterminate(self.progressCount(self.processCount, numFiles))
        VolatilityService = VolatilityServiceClass(self.VolatilityDir, self.Profile)
        currentFile = 1
        for file in files:
            currentProcess = 1
            imageFilePath = file.getLocalAbsPath()
            if imageFilePath is not None:
                containingFolder = os.path.dirname(imageFilePath)
                fileName = os.path.basename(imageFilePath)
                if fileName not in invalidFiles:
                    dbName = caseDir + "\\VolatilityProcessor\\" + fileName[:-4] + ".db3"
                    passwordFile = caseDir + "\\VolatilityProcessor\\" + fileName[:-4] + "-PASSWORD.txt"
                    if not os.path.isfile(dbName):
                        self.log(Level.WARNING, logHeader + "Database file " + dbName + " does not exist")
                    VolatilityService.setDbName(dbName)
                    inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                                        "Analysing memory for " + fileName)
                    IngestServices.getInstance().postMessage(inbox)
                    self.log(Level.INFO, logHeader + "Database: " + dbName)
                    filePathToProcess = tempDir + "\\" + fileName
                    # Hivelist
                    progressBar.progress("Running hivelist", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "File to process: " + filePathToProcess)
                    self.log(Level.INFO, logHeader + "Running hivelist...")
                    pipe = VolatilityService.hivelist(filePathToProcess)
                    result = pipe.communicate()
                    self.log(Level.INFO, logHeader + "Hivelist result: " + str(result))
                    # Psscan
                    progressBar.progress("Running psscan", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running psscan...")
                    pipe = VolatilityService.psscan(filePathToProcess)
                    self.log(Level.INFO, logHeader + "Psscan result: " + str(pipe.communicate()))
                    # Pslist
                    progressBar.progress("Running pslist", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running pslist...")
                    pipe = VolatilityService.pslist(filePathToProcess)
                    self.log(Level.INFO, logHeader + "Pslist result: " + str(pipe.communicate()))
                    # Filescan
                    progressBar.progress("Running filescan", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running filescan...")
                    pipe = VolatilityService.filescan(filePathToProcess)
                    self.log(Level.INFO, logHeader + "Filescan results: " + str(pipe.communicate()))
                    # Netscan
                    progressBar.progress("Running netscan", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running netscan...")
                    pipe = VolatilityService.netscan(filePathToProcess)
                    self.log(Level.INFO, logHeader + "Netscan results: " + str(pipe.communicate()))
                    # Hashdump
                    try:
                        Class.forName("org.sqlite.JDBC").newInstance()
                        connection = DriverManager.getConnection("jdbc:sqlite:/%s" % dbName)
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + dbName + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.ERROR
                    systemVirtualAddress = None
                    samVirtualAddress = None
                    try:
                        statement1 = connection.createStatement()
                        statement2 = connection.createStatement()
                        resultSet1 = statement1.executeQuery("SELECT Virtual FROM HiveList WHERE Name LIKE '%SYSTEM'")
                        resultSet2 = statement2.executeQuery("SELECT Virtual FROM HiveList WHERE Name LIKE '%SAM'")
                        if resultSet1.next():
                            systemVirtualAddress = resultSet1.getString("Virtual")
                        if resultSet2.next():
                            samVirtualAddress = resultSet2.getString("Virtual")
                        resultSet1.close()
                        resultSet2.close()
                        statement1.close()
                        statement2.close()
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot continue scan due to database errors: " + ex.getMessage())
                        # return IngestModule.ProcessResult.ERROR
                    progressBar.progress("Running hashdump", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running hashdump...")
                    pipe = VolatilityService.getPasswords(filePathToProcess, systemVirtualAddress, samVirtualAddress, passwordFile)
                    result = pipe.communicate()
                    self.log(Level.INFO, logHeader + "Hashdump result: " + str(result))
                    # Hivedump
                    try:
                        statement = connection.createStatement()
                        resultset = statement.executeQuery("SELECT Virtual FROM HiveList")
                        virtualAddresses = []
                        while resultset.next():
                            virtualAddresses.append(resultset.getString("Virtual"))
                        resultset.close()
                        statement.close()
                        connection.close()
                        progressBar.progress("Running hivedump", self.progressCount(currentProcess, currentFile))
                        currentProcess += 1
                        self.log(Level.INFO, logHeader + "Running hivedump for registries")
                        self.log(Level.INFO, logHeader + "Number of addresses to dump: " + str(len(virtualAddresses)))
                        addressNum = 1
                        for address in virtualAddresses:
                            self.log(Level.INFO, logHeader + "Running address number: " + str(addressNum))
                            pipe = VolatilityService.hivedump(filePathToProcess, address, hiveDumpDir + "\\" + str(address) + ".json")
                            self.log(Level.INFO, logHeader + "Hivedump result: " + str(pipe.communicate()))
                            addressNum += 1
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot continue scan due to database errors: " + ex.getMessage())
                        # return IngestModule.ProcessResult.ERROR
                    try:
                        Class.forName("org.sqlite.JDBC").newInstance()
                        connection = DriverManager.getConnection("jdbc:sqlite:/%s" % dbName)
                        statement = connection.createStatement()
                        result = statement.executeQuery("SELECT COUNT(name) AS NumTables FROM sqlite_master WHERE name LIKE 'HiveDump'")
                        numTables = result.getInt("NumTables")
                        if numTables == 0:
                            try:
                                preparedStatement = connection.prepareStatement("CREATE TABLE HiveDump ([Offset(V)] TEXT, LastWritten TEXT, Key TEXT)")
                                preparedStatement.executeUpdate()
                            except SQLException as ex:
                                self.log(Level.WARNING, logHeader + "Error creating HiveDump: " + ex.getMessage())
                        # Import each dumped hive's JSON rows into the HiveDump table
                        for hiveDumpFile in os.listdir(hiveDumpDir):
                            if hiveDumpFile.endswith(".json"):
                                hiveDumpFileName = os.path.basename(hiveDumpFile)
                                offset = os.path.splitext(hiveDumpFileName)[0]
                                with open(hiveDumpDir + "\\" + hiveDumpFileName, "r") as hiveDump:
                                    for line in hiveDump:
                                        result = json.loads(line)
                                        for item in result["rows"]:
                                            lastWritten = item[0]
                                            key = item[1]
                                            preparedStatement = connection.prepareStatement("INSERT INTO HiveDump ([Offset(V)], LastWritten, Key) "
                                                                                            "VALUES (?, ?, ?)")
                                            preparedStatement.setString(1, offset)
                                            preparedStatement.setString(2, lastWritten)
                                            preparedStatement.setString(3, key)
                                            preparedStatement.executeUpdate()
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot insert into HiveDump table: " + ex.getMessage())
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot continue scan due to database errors: " + ex.getMessage())
                        # return IngestModule.ProcessResult.ERROR
                    # Dlldump
                    progressBar.progress("Running dlldump", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running dlldump...")
                    pipe = VolatilityService.dlldump(filePathToProcess, dllDumpDir)
                    self.log(Level.INFO, logHeader + "Dlldump results: " + str(pipe.communicate()))
                    # Dumpregistry
                    progressBar.progress("Running dumpregistry", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    self.log(Level.INFO, logHeader + "Running dumpregistry...")
                    pipe = VolatilityService.dumpregistry(filePathToProcess, dumpRegistryDir)
                    self.log(Level.INFO, logHeader + "Dumpregistry results: " + str(pipe.communicate()))
                    # Procdump
                    try:
                        progressBar.progress("Running procdump", self.progressCount(currentProcess, currentFile))
                        currentProcess += 1
                        Class.forName("org.sqlite.JDBC").newInstance()
                        connection = DriverManager.getConnection("jdbc:sqlite:/%s" % dbName)
                        statement = connection.createStatement()
                        resultset1 = statement.executeQuery("SELECT DISTINCT PID FROM PSList")
                        pids = ""
                        while resultset1.next():
                            pids += resultset1.getString("PID") + ", "
                        resultset1.close()
                        statement.close()
                        connection.close()
                        self.log(Level.INFO, logHeader + "Number of unique processes to dump: " + str(len(pids)))
                        pipe = VolatilityService.procdump(filePathToProcess, pids[:-2], procDumpDir)
                        self.log(Level.INFO, logHeader + "Procdump result: " + str(pipe.communicate()))
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot continue scan due to database errors: " + ex.getMessage())
                        # return IngestModule.ProcessResult.ERROR
                    # Analyse
                    inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                                        "Analysing results for " + fileName)
                    IngestServices.getInstance().postMessage(inbox)
                    progressBar.progress("Analysing results", self.progressCount(currentProcess, currentFile))
                    currentProcess += 1
                    try:
                        Class.forName("org.sqlite.JDBC").newInstance()
                        connection = DriverManager.getConnection("jdbc:sqlite:/%s" % dbName)
                        processArtifactName = ""
                        registryArtifactName = ""
                        accountArtifactName = ""
                        fileArtifactName = ""
                        try:
                            processArtifactName = "VolatilityProcessor_Processes_" + fileName
                            registryArtifactName = "VolatilityProcessor_Registries_" + fileName
                            accountArtifactName = "VolatilityProcessor_Accounts_" + fileName
                            fileArtifactName = "VolatilityProcessor_Files_" + fileName
                            case.addArtifactType(processArtifactName, processArtifactName)
                            case.addArtifactType(registryArtifactName, registryArtifactName)
                            case.addArtifactType(accountArtifactName, accountArtifactName)
                            case.addArtifactType(fileArtifactName, fileArtifactName)
                        except:
                            self.log(Level.WARNING, logHeader + "Error creating artifacts, some artifacts might not exist")
                        processArtifact = case.getArtifactTypeID(processArtifactName)
                        processArtifactType = case.getArtifactType(processArtifactName)
                        registryArtifact = case.getArtifactTypeID(registryArtifactName)
                        registryArtifactType = case.getArtifactType(registryArtifactName)
                        accountArtifact = case.getArtifactTypeID(accountArtifactName)
                        fileArtifact = case.getArtifactTypeID(fileArtifactName)
                        fileArtifactType = case.getArtifactType(fileArtifactName)
                        # Account
                        try:
                            case.addArtifactAttributeType(accountArtifactName,
                                                          BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                          "Account Hash")
                        except:
                            self.log(Level.WARNING, logHeader + "Attribute already added: " + accountArtifactName)
                        art = file.newArtifact(accountArtifact)
                        accountAttribute = case.getAttributeType(accountArtifactName)
                        with open(passwordFile, "r") as accountFile:
                            for line in accountFile:
                                art.addAttribute(BlackboardAttribute(accountAttribute, VolatilityIngestModuleFactory.moduleName, line))
                        # Process
                        try:
                            statement = connection.createStatement()
                            resultSet = statement.executeQuery("SELECT DISTINCT "
                                                               "p.PID, "
                                                               "p.Name, "
                                                               "p.PPID, "
                                                               "p.[Offset(V)], "
                                                               "n.LocalAddr, "
                                                               "n.ForeignAddr, "
                                                               "n.State, "
                                                               "n.Created, "
                                                               "p.[Time Created] AS [Process Time Created], "
                                                               "p.[Time Exited] AS [Process Time Exited] "
                                                               "FROM PSScan p "
                                                               "LEFT JOIN Netscan n ON n.[PID] = p.[PID]")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_PID",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "PID")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_PID")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_Name",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Name")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_Name")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_PPID",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "PPID")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_PPID")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_Offset",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Offset Virtual")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_Offset")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_LocalAddr",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Local Address")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_LocalAddr")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_ForeignAddr",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Foreign Address")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_ForeignAddr")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_State",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "State")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_State")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_Created",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Created")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_Created")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_ProcessTimeCreated",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Process Time Created")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_ProcessTimeCreated")
                            try:
                                case.addArtifactAttributeType(processArtifactName + "_ProcessTimeExited",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Process Time Exited")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + processArtifactName + "_ProcessTimeExited")
                            pid = case.getAttributeType(processArtifactName + "_PID")
                            name = case.getAttributeType(processArtifactName + "_Name")
                            ppid = case.getAttributeType(processArtifactName + "_PPID")
                            offset = case.getAttributeType(processArtifactName + "_Offset")
                            local = case.getAttributeType(processArtifactName + "_LocalAddr")
                            foreign = case.getAttributeType(processArtifactName + "_ForeignAddr")
                            state = case.getAttributeType(processArtifactName + "_State")
                            created = case.getAttributeType(processArtifactName + "_Created")
                            pcreated = case.getAttributeType(processArtifactName + "_ProcessTimeCreated")
                            pexited = case.getAttributeType(processArtifactName + "_ProcessTimeExited")
                            while resultSet.next():
                                proc = file.newArtifact(processArtifact)
                                proc.addAttribute(BlackboardAttribute(pid,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("PID")))
                                proc.addAttribute(BlackboardAttribute(name,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("Name")))
                                proc.addAttribute(BlackboardAttribute(ppid,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("PPID")))
                                proc.addAttribute(BlackboardAttribute(offset,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("Offset(V)")))
                                proc.addAttribute(BlackboardAttribute(local,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("LocalAddr")))
                                proc.addAttribute(BlackboardAttribute(foreign,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("ForeignAddr")))
                                proc.addAttribute(BlackboardAttribute(state,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("State")))
                                proc.addAttribute(BlackboardAttribute(created,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("Created")))
                                proc.addAttribute(BlackboardAttribute(pcreated,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("Process Time Created")))
                                proc.addAttribute(BlackboardAttribute(pexited,
                                                                      VolatilityIngestModuleFactory.moduleName,
                                                                      resultSet.getString("Process Time Exited")))
                            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName,
                                                                                             processArtifactType, None))
                            resultSet.close()
                            statement.close()
                        except SQLException as ex:
                            self.log(Level.SEVERE, logHeader + "Cannot continue analysis due to database errors: " + ex.getMessage())
                        # Registry
                        try:
                            statement = connection.createStatement()
                            resultSet = statement.executeQuery("SELECT "
                                                               "l.Virtual, "
                                                               "l.Name AS RegistryName, "
                                                               "l.Physical, "
                                                               "k.Key, "
                                                               "k.LastWritten "
                                                               "FROM HiveList l "
                                                               "INNER JOIN HiveDump k ON k.[Offset(V)] = l.Virtual")
                            try:
                                case.addArtifactAttributeType(registryArtifactName + "_Virtual",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Virtual Address")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + registryArtifactName + "_Virtual")
                            try:
                                case.addArtifactAttributeType(registryArtifactName + "_Physical",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Physical Address")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + registryArtifactName + "_Physical")
                            try:
                                case.addArtifactAttributeType(registryArtifactName + "_Name",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Registry Name")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + registryArtifactName + "_Name")
                            try:
                                case.addArtifactAttributeType(registryArtifactName + "_Key",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Key")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + registryArtifactName + "_Key")
                            try:
                                case.addArtifactAttributeType(registryArtifactName + "_LastWritten",
                                                              BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                              "Last Written")
                            except:
                                self.log(Level.WARNING, logHeader + "Attribute already added: " + registryArtifactName + "_LastWritten")
                            virtual = case.getAttributeType(registryArtifactName + "_Virtual")
                            physical = case.getAttributeType(registryArtifactName + "_Physical")
                            name = case.getAttributeType(registryArtifactName + "_Name")
                            key = case.getAttributeType(registryArtifactName + "_Key")
                            last = case.getAttributeType(registryArtifactName + "_LastWritten")
                            while resultSet.next():
                                reg = file.newArtifact(registryArtifact)
                                reg.addAttribute(BlackboardAttribute(virtual,
                                                                     VolatilityIngestModuleFactory.moduleName,
                                                                     resultSet.getString("Virtual")))
                                reg.addAttribute(BlackboardAttribute(physical,
                                                                     VolatilityIngestModuleFactory.moduleName,
                                                                     resultSet.getString("Physical")))
                                reg.addAttribute(BlackboardAttribute(name,
                                                                     VolatilityIngestModuleFactory.moduleName,
                                                                     resultSet.getString("RegistryName")))
                                reg.addAttribute(BlackboardAttribute(key,
                                                                     VolatilityIngestModuleFactory.moduleName,
                                                                     resultSet.getString("Key")))
                                reg.addAttribute(BlackboardAttribute(last,
                                                                     VolatilityIngestModuleFactory.moduleName,
                                                                     resultSet.getString("LastWritten")))
                            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName,
                                                                                             registryArtifactType, None))
                        except SQLException as ex:
                            self.log(Level.SEVERE, logHeader + "Cannot continue analysis due to database errors: " + ex.getMessage())
                        # File
                        try:
                            statement1 = connection.createStatement()
                            resultSet1 = statement1.executeQuery("SELECT COUNT(Name) AS RowCount FROM FileScan")
                            rowCount = resultSet1.getInt("RowCount")
                            self.log(Level.INFO, logHeader + "Filescan row count: " + str(rowCount))
                            if rowCount > 0:
                                statement2 = connection.createStatement()
                                resultSet2 = statement2.executeQuery("SELECT "
                                                                     "Name, "
                                                                     "Access, "
                                                                     "[Offset(V)], "
                                                                     "Pointers, "
                                                                     "Handles "
                                                                     "FROM FileScan")
                                try:
                                    case.addArtifactAttributeType(fileArtifactName + "_Name",
                                                                  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                                  "File Name")
                                except:
                                    self.log(Level.WARNING, logHeader + "Attribute already added: " + fileArtifactName + "_Name")
                                try:
                                    case.addArtifactAttributeType(fileArtifactName + "_Access",
                                                                  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                                  "File Access Rights")
                                except:
                                    self.log(Level.WARNING, logHeader + "Attribute already added: " + fileArtifactName + "_Access")
                                try:
                                    case.addArtifactAttributeType(fileArtifactName + "_Virtual",
                                                                  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                                  "Virtual Address")
                                except:
                                    self.log(Level.WARNING, logHeader + "Attribute already added: " + fileArtifactName + "_Virtual")
                                try:
                                    case.addArtifactAttributeType(fileArtifactName + "_Pointers",
                                                                  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                                  "Pointers")
                                except:
                                    self.log(Level.WARNING, logHeader + "Attribute already added: " + fileArtifactName + "_Pointers")
                                try:
                                    case.addArtifactAttributeType(fileArtifactName + "_Handles",
                                                                  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                                  "Handles")
                                except:
                                    self.log(Level.WARNING, logHeader + "Attribute already added: " + fileArtifactName + "_Handles")
                                name = case.getAttributeType(fileArtifactName + "_Name")
                                access = case.getAttributeType(fileArtifactName + "_Access")
                                virtual = case.getAttributeType(fileArtifactName + "_Virtual")
                                pointers = case.getAttributeType(fileArtifactName + "_Pointers")
                                handles = case.getAttributeType(fileArtifactName + "_Handles")
                                while resultSet2.next():
                                    f = file.newArtifact(fileArtifact)
                                    f.addAttribute(BlackboardAttribute(virtual,
                                                                       VolatilityIngestModuleFactory.moduleName,
                                                                       resultSet2.getString("Offset(V)")))
                                    f.addAttribute(BlackboardAttribute(access,
                                                                       VolatilityIngestModuleFactory.moduleName,
                                                                       resultSet2.getString("Access")))
                                    f.addAttribute(BlackboardAttribute(name,
                                                                       VolatilityIngestModuleFactory.moduleName,
                                                                       resultSet2.getString("Name")))
                                    f.addAttribute(BlackboardAttribute(pointers,
                                                                       VolatilityIngestModuleFactory.moduleName,
                                                                       resultSet2.getString("Pointers")))
                                    f.addAttribute(BlackboardAttribute(handles,
                                                                       VolatilityIngestModuleFactory.moduleName,
                                                                       resultSet2.getString("Handles")))
                                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName,
                                                                                                 fileArtifactType, None))
                            else:
                                self.log(Level.INFO, logHeader + "No results found for filescan, not posting results")
                                inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                                                    "No filescan results found for " + fileName)
                                IngestServices.getInstance().postMessage(inbox)
                        except SQLException as ex:
                            self.log(Level.SEVERE, logHeader + "Cannot continue analysis due to database errors: " + ex.getMessage())
                    except SQLException as ex:
                        self.log(Level.SEVERE, logHeader + "Cannot open database due to database errors: " + ex.getMessage())
                    if connection is not None:
                        try:
                            connection.close()
                        except SQLException as e:
                            self.log(Level.WARNING, logHeader + "Could not close database: " + e.getMessage())
            currentFile += 1
        message = "<p>The dumped files can be found at: " + caseDir + "\\Dump_Files</p>"
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Finished analysing the results", message)
        IngestServices.getInstance().postMessage(inbox)
        return IngestModule.ProcessResult.OK

    def shutDown(self):
        inbox = IngestMessage.createMessage(IngestMessage.MessageType.INFO, "Volatility Processor",
                                            "Volatility Process Stopped")
        IngestServices.getInstance().postMessage(inbox)
        self.log(Level.INFO, "Volatility Processor Finished")


class VolatilityIngestModuleSettings(IngestModuleIngestJobSettings):
    def __init__(self):
        self.versionUID = 1L
        self.VolatilityDir = ""
        self.Version = "2.5"
        self.Profile = "Autodetect"

    # Getters and setters
    def getVersionUID(self):
        return self.versionUID

    def getVolatilityDir(self):
        return self.VolatilityDir

    def getVersion(self):
        return self.Version

    def getProfile(self):
        return self.Profile

    def setVolatilityDir(self, dir):
        self.VolatilityDir = dir

    def setVersion(self, version):
        self.Version = version

    def setProfile(self, profile):
        ...
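One detail of the module above worth calling out: its log helper passes inspect.stack()[1][3], the name of the calling method one frame up the interpreter stack, to Logger.logp, so every logHeader-prefixed message also records which method emitted it. A standalone sketch of the same idea using only the standard library (class and method names here are illustrative):

import inspect
import logging

logging.basicConfig(level=logging.INFO, format="%(message)s")

class Module(object):
    def __init__(self):
        self.logger = logging.getLogger("Module")

    def log(self, message):
        # inspect.stack()[1] is the caller's frame record; index 3 is its function name.
        caller = inspect.stack()[1][3]
        self.logger.info("%s.%s: %s" % (self.__class__.__name__, caller, message))

    def process(self):
        self.log("running")  # logs "Module.process: running"

Module().process()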


ddpg_rce.py

Source: ddpg_rce.py (GitHub)


1import numpy as np2from .ddpg import DDPG3def _get_statistics(activation):4 if 'min' in activation:5 return np.min6 elif 'max' in activation:7 return np.max8 elif 'mean' in activation:9 return np.mean10 else:11 raise ValueError('Undefined activation parameter')12def np_c2p(x):13 '''14 f: C(s,a) -> p(e|s,a)15 '''16 clip_x = np.clip(x, 0, 0.5)17 return clip_x/(1 - clip_x)18def np_c2l(x, gamma=1.0):19 '''20 f: C(s,a) -> label (gamma*C/(1 + (gamma-1)*C) or gamma*omega/(1 + gamma*omega))21 '''22 return gamma*x/(1 + (gamma-1)*x)23def np_l2c(x, gamma=1.0):24 '''25 f: label -> C(s,a) 26 '''27 return x/(x + gamma*(1-x))28class DDPGRecursiveClassifier(DDPG):29 '''30 Deep Deterministic Policy Gradient agent with RCE31 '''32 def __init__(self, *args, goal_examples=None, goal_examples_validation=None, **kwargs):33 # initialize actor(s), critic(s), replay buffer, log header34 super(DDPGRecursiveClassifier, self).__init__(*args, **kwargs)35 if self.name == 'forward':36 # replace orginal env reward/done functions with learned counterparts37 self.env.override_r_for_step = self._reward_fn # RCE does not explicitly learn rewards, however, implicit reward function can be derived by inverting the Bellman equation38 elif self.name == 'reset':39 # env must maintain original (forward) reward/done functions and should NOT be replaced by the (learned) reset reward/done functions40 pass41 else:42 raise ValueError('Undefined name')43 # input_args44 self.classifier_params = self._params.classifier45 if goal_examples is None:46 self.goal_examples = []47 else:48 self.goal_examples = goal_examples49 if goal_examples_validation is None:50 self.goal_examples_validation = []51 else:52 self.goal_examples_validation = goal_examples_validation53 # additional log header54 self.logheader.insert(len(self.logheader) - 2, 'critic rce loss')55 self.logheader.insert(len(self.logheader) - 2, 'critic rce loss (goal)')56 self.logheader.insert(len(self.logheader) - 2, 'critic rce loss (TD)')57 self.logheader.insert(len(self.logheader) - 2, '# of goal examples')58 self.logheader.insert(len(self.logheader) - 2, 'mean: goal p(et+|..) ensemble')59 self.logheader.insert(len(self.logheader) - 2, 'std: goal p(et+|..) ensemble')60 self.logheader.insert(len(self.logheader) - 2, 'min: goal p(et+|..) ensemble')61 self.logheader.insert(len(self.logheader) - 2, 'max: goal p(et+|..) ensemble')62 self.logheader.insert(len(self.logheader) - 2, '# of goal validation examples')63 self.logheader.insert(len(self.logheader) - 2, 'mean: goal validation p(et+|..) ensemble')64 self.logheader.insert(len(self.logheader) - 2, 'std: goal validation p(et+|..) ensemble')65 self.logheader.insert(len(self.logheader) - 2, 'min: goal validation p(et+|..) ensemble')66 self.logheader.insert(len(self.logheader) - 2, 'max: goal validation p(et+|..) ensemble')67 self.logheader.insert(len(self.logheader) - 2, '# of samples')68 self.logheader.insert(len(self.logheader) - 2, 'mean: sample p(et+|..) ensemble')69 self.logheader.insert(len(self.logheader) - 2, 'std: sample p(et+|..) ensemble')70 self.logheader.insert(len(self.logheader) - 2, 'min: sample p(et+|..) ensemble')71 self.logheader.insert(len(self.logheader) - 2, 'max: sample p(et+|..) 
ensemble')72 self.logheader.insert(len(self.logheader) - 2, 'mean: p(et+|s0,a0) ensemble')73 self.logheader.insert(len(self.logheader) - 2, 'std: p(et+|s0,a0) ensemble')74 self.logheader.insert(len(self.logheader) - 2, 'mean: p(et+|sT,aT) ensemble')75 self.logheader.insert(len(self.logheader) - 2, 'std: p(et+|sT,aT) ensemble')76 # redefine critic? no, but do an assertion check77 assert self.params.critic.loss == 'rce', 'loss type must be rce for %s'%(type(self).__name__)78 79 def get_env_fns(self):80 '''81 Return implicit (reconstructed) reward function and done functions82 '''83 reward_fn = self._reward_fn84 if self.name == 'forward':85 done_env_fn = lambda s: (self.env._get_done(s), {}) # empty done_info86 elif self.name == 'reset':87 done_env_fn = lambda s: (self.env._get_reset_done(s), {}) # empty done_info88 else:89 raise ValueError('Undefined name')90 done_fn = done_env_fn91 return {'%s_reward_fn' % (self.name): reward_fn, '%s_done_fn' % (self.name): done_fn, '%s_done_env_fn' % (self.name): done_env_fn}92 93 def _reward_fn(self, s, a, reward_type=None, statistics=None):94 '''95 Reward value(s) for observation(s)/action(s)96 args:97 :arg s: observation(s)98 :type s: numpy array (obs dim,) or (N, obs dim)99 :arg a: action(s)100 :type a: numpy array (action dim,) or (N, action dim)101 :arg reward_type: reward type (default: setting from config file)102 :type reward_type: string103 :arg statistics: statistics for ensembles104 :type statistics: numpy function with 'axis' keyword105 returns:106 :return *: reward(s)107 :type *: numpy array () or (N,)108 '''109 observation_batch = [np.atleast_2d(s)]110 action_batch = [np.atleast_2d(a)]111 reward_batch = \112 self._evaluate_rewards_at_once(observation_batch, action_batch, reward_type=reward_type, statistics=statistics)113 return np.squeeze(reward_batch)114 115 def _evaluate_rewards_at_once(self, observation_batches, action_batches, reward_type=None, statistics=None):116 '''117 Corresponding reward values for observation/action batches118 args:119 :arg observation_batches: batch(es) of observation(s)120 :type observation_batches: list of numpy arrays [num batch * (batch size, obs dim)]121 :arg action_batches: batch(es) of actions(s)122 :type action_batches: list of numpy arrays [num batch * (batch size, action dim)]123 :arg reward_type: reward type (default: setting from config file)124 :type reward_type: string125 :arg statistics: statistics for ensembles126 :type statistics: numpy function with 'axis' keyword127 returns:128 :return *: batch(es) of implicit reward(s)129 :type *: list of numpy arrays [num batch * (batch size, 1)]130 '''131 assert reward_type is None, 'RCE does not explicitly learn rewards, however, implicit reward function can be derived by inverting the Bellman equation'132 if statistics is None: statistics = _get_statistics(getattr(self.params.critic, 'reward_statistics', 'mean'))133 recursive_classifier_ensemble_batches = self._evaluate_ensembles_at_once(observation_batches, action_batches) # [num classifier * (batch size, 1)]134 event_prob_batches = [np.expand_dims(statistics(np_c2p(batch), axis=0), axis=-1) \135 for batch in recursive_classifier_ensemble_batches] # [num classifier * (batch size, 1)]136 # should be r(st,at,stplus1) = Ct/(1-Ct) - gamma*Ctplus1/(1-Ctplus1) but using (1-gamma)*Ct/(1-Ct) instead...137 return [(1-self.params.discount)*prob for prob in event_prob_batches]138 # required for compatibility reasons139 def evaluate_target_probability_ensembles(self, obs, action):140 '''141 return probabilities142 
'''143 return np_c2p(self.evaluate_target_ensembles(obs, action))144 145 # required for compatibility reasons146 def evaluate_probability_ensembles(self, obs, action):147 '''148 return probabilities149 '''150 return np_c2p(self.evaluate_ensembles(obs, action))151 def run_episodes(self, actor_idx=0, num_episodes=1, episode_stats=None, training_stats=None, eval_mode=False):152 if self.logging:153 super(DDPGRecursiveClassifier, self)._configure_tf_filewriter()154 if self.record: super(DDPGRecursiveClassifier, self)._configure_video_recorder(eval_mode=eval_mode)155 156 episodes_stats = []157 # run episode158 for n in range(num_episodes):159 episode_stats, training_stats = super(DDPGRecursiveClassifier, self)._run_episode(actor_idx=actor_idx, eval_mode=eval_mode)160 latest_transition = self.buffer[(self.buffer.index - 1)%self.buffer.capacity]161 if latest_transition['done'] and not eval_mode:162 self.goal_examples.append(latest_transition['next observation'])163 else:164 # classifier-based done (use the goal examples that has been provided in the beginning)165 pass166 # modify stats167 rewards = self._reward_fn(np.array(episode_stats['observations']), np.array(episode_stats['actions']))168 rewards = np.atleast_1d(rewards)169 episode_stats['returns'] = np.sum([self.params.discount**i*reward for i, reward in enumerate(rewards)])170 episode_stats['average step rewards'] = np.mean(rewards)171 episodes_stats.append(episode_stats)172 173 # additional stats174 additional_stats = {}175 # evaluate goal (positives), goal validation (positives_validation), sample (negatives), s0, sT176 sampled_positive_indices = [np.random.randint(len(self.goal_examples), size=self.params.batch_size) \177 for _ in range(len(self.critics))]178 positives_batches = [[self.goal_examples[ind] for ind in sampled_positive_indice] \179 for sampled_positive_indice in sampled_positive_indices]180 positives_validation_batches = [self.goal_examples_validation for _ in range(len(self.critics))]181 negatives_batches = self.buffer.sample(self.params.batch_size, num_batches=len(self.critics))182 s0 = np.expand_dims(episode_stats['observations'][0], axis=0)183 sT = np.expand_dims(episode_stats['observations'][-1], axis=0)184 # combine into single batch185 ind1 = self.params.batch_size186 ind2 = ind1 + len(self.goal_examples_validation)187 ind3 = ind2 + self.params.batch_size188 ind4 = ind3 + 1189 ind5 = ind4 + 1190 observation_batches = [np.concatenate([neg['observations'], pos_val + pos, s0, sT], axis=0) \191 for neg, pos_val, pos in zip(negatives_batches, positives_validation_batches, positives_batches)]192 action_batches = self._get_actions_at_once(observation_batches) # [num batch * (batch size, action dim)]193 recursive_classifier_ensemble_batches = self._evaluate_at_once(observation_batches, action_batches) # [num classifier * (batch size, 1)]194 event_prob_batches = [np_c2p(batch) for batch in recursive_classifier_ensemble_batches]195 # save stats196 additional_stats['p(et+|..) sample'] = [batch[:ind1] for batch in event_prob_batches]197 additional_stats['p(et+|..) goal validation'] = [batch[ind1:ind2] for batch in event_prob_batches]198 additional_stats['p(et+|..) 
    def run_episodes(self, actor_idx=0, num_episodes=1, episode_stats=None, training_stats=None, eval_mode=False):
        if self.logging:
            super(DDPGRecursiveClassifier, self)._configure_tf_filewriter()
        if self.record: super(DDPGRecursiveClassifier, self)._configure_video_recorder(eval_mode=eval_mode)

        episodes_stats = []
        # run episodes
        for n in range(num_episodes):
            episode_stats, training_stats = super(DDPGRecursiveClassifier, self)._run_episode(actor_idx=actor_idx, eval_mode=eval_mode)
            latest_transition = self.buffer[(self.buffer.index - 1) % self.buffer.capacity]
            if latest_transition['done'] and not eval_mode:
                self.goal_examples.append(latest_transition['next observation'])
            else:
                # classifier-based done (use the goal examples that were provided at the beginning)
                pass
            # modify stats
            rewards = self._reward_fn(np.array(episode_stats['observations']), np.array(episode_stats['actions']))
            rewards = np.atleast_1d(rewards)
            episode_stats['returns'] = np.sum([self.params.discount**i * reward for i, reward in enumerate(rewards)])
            episode_stats['average step rewards'] = np.mean(rewards)
            episodes_stats.append(episode_stats)

        # additional stats
        additional_stats = {}
        # evaluate goal (positives), goal validation (positives_validation), sample (negatives), s0, sT
        sampled_positive_indices = [np.random.randint(len(self.goal_examples), size=self.params.batch_size)
                                    for _ in range(len(self.critics))]
        positives_batches = [[self.goal_examples[ind] for ind in sampled_positive_indice]
                             for sampled_positive_indice in sampled_positive_indices]
        positives_validation_batches = [self.goal_examples_validation for _ in range(len(self.critics))]
        negatives_batches = self.buffer.sample(self.params.batch_size, num_batches=len(self.critics))
        s0 = np.expand_dims(episode_stats['observations'][0], axis=0)
        sT = np.expand_dims(episode_stats['observations'][-1], axis=0)
        # combine into a single batch
        ind1 = self.params.batch_size
        ind2 = ind1 + len(self.goal_examples_validation)
        ind3 = ind2 + self.params.batch_size
        ind4 = ind3 + 1
        ind5 = ind4 + 1
        observation_batches = [np.concatenate([neg['observations'], pos_val + pos, s0, sT], axis=0)
                               for neg, pos_val, pos in zip(negatives_batches, positives_validation_batches, positives_batches)]
        action_batches = self._get_actions_at_once(observation_batches)  # [num batch * (batch size, action dim)]
        recursive_classifier_ensemble_batches = self._evaluate_at_once(observation_batches, action_batches)  # [num classifier * (batch size, 1)]
        event_prob_batches = [np_c2p(batch) for batch in recursive_classifier_ensemble_batches]
        # save stats
        additional_stats['p(et+|..) sample'] = [batch[:ind1] for batch in event_prob_batches]
        additional_stats['p(et+|..) goal validation'] = [batch[ind1:ind2] for batch in event_prob_batches]
        additional_stats['p(et+|..) goal'] = [batch[ind2:ind3] for batch in event_prob_batches]
        additional_stats['p(et+|s0,a0)'] = [batch[ind3:ind4] for batch in event_prob_batches]
        additional_stats['p(et+|sT,aT)'] = [batch[ind4:ind5] for batch in event_prob_batches]

        self.stats = {**episode_stats, **training_stats, **additional_stats}
        if self.logging:
            self._log_entry(eval_mode=eval_mode)
            super(DDPGRecursiveClassifier, self)._print_entry(eval_mode=eval_mode)
        return episodes_stats
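    # Worked example for the 'returns' statistic above: with discount
    # gamma = 0.9 and step rewards [1.0, 0.5, 0.25],
    #     returns = 1.0*0.9**0 + 0.5*0.9**1 + 0.25*0.9**2 = 1.6525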
    def _log_entry(self, eval_mode):
        super(DDPGRecursiveClassifier, self)._log_entry(eval_mode=eval_mode)
        if self.stats is not None:
            logentry = self.logentry_evaluation if eval_mode else self.logentry
            try: logentry['critic rce loss'] = np.mean(self.stats['critic losses'])
            except: pass
            try: logentry['critic rce loss (goal)'] = np.mean(self.stats['critic losses (goal)'])
            except: pass
            try: logentry['critic rce loss (TD)'] = np.mean(self.stats['critic losses (TD)'])
            except: pass
            logentry['# of goal examples'] = len(self.goal_examples)
            goal_ensemble = [np.mean(ens) for ens in self.stats['p(et+|..) goal']]
            logentry['mean: goal p(et+|..) ensemble'] = np.mean(goal_ensemble)
            logentry['min: goal p(et+|..) ensemble'] = np.min(goal_ensemble)
            logentry['max: goal p(et+|..) ensemble'] = np.max(goal_ensemble)
            logentry['std: goal p(et+|..) ensemble'] = np.mean([np.std(ens) for ens in self.stats['p(et+|..) goal']])
            logentry['# of goal validation examples'] = len(self.goal_examples_validation)
            goal_validation_ensemble = [np.mean(ens) for ens in self.stats['p(et+|..) goal validation']]
            logentry['mean: goal validation p(et+|..) ensemble'] = np.mean(goal_validation_ensemble)
            logentry['min: goal validation p(et+|..) ensemble'] = np.min(goal_validation_ensemble)
            logentry['max: goal validation p(et+|..) ensemble'] = np.max(goal_validation_ensemble)
            logentry['std: goal validation p(et+|..) ensemble'] = np.mean([np.std(ens) for ens in self.stats['p(et+|..) goal validation']])
            logentry['# of samples'] = self.buffer.num_transitions
            sample_ensemble = [np.mean(ens) for ens in self.stats['p(et+|..) sample']]
            logentry['mean: sample p(et+|..) ensemble'] = np.mean(sample_ensemble)
            logentry['min: sample p(et+|..) ensemble'] = np.min(sample_ensemble)
            logentry['max: sample p(et+|..) ensemble'] = np.max(sample_ensemble)
            logentry['std: sample p(et+|..) ensemble'] = np.mean([np.std(ens) for ens in self.stats['p(et+|..) sample']])
            logentry['mean: p(et+|s0,a0) ensemble'] = np.mean(self.stats['p(et+|s0,a0)'])
            logentry['std: p(et+|s0,a0) ensemble'] = np.std(self.stats['p(et+|s0,a0)'])
            logentry['mean: p(et+|sT,aT) ensemble'] = np.mean(self.stats['p(et+|sT,aT)'])
            logentry['std: p(et+|sT,aT) ensemble'] = np.std(self.stats['p(et+|sT,aT)'])

            assert len(self.logheader) == len(logentry)  # no additional keys!
            for key in logentry.keys():
                try:
                    if not np.isfinite(logentry[key]): logentry[key] = None
                except:
                    logentry[key] = None
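    # Note: the assertion above keeps self.logheader (the ordered list of column
    # names) aligned with logentry (the dict of values). A hedged sketch of how
    # such a pair is typically flushed to a CSV-style log (the actual writer is
    # not part of this snippet):
    #
    #     row = [logentry[key] for key in self.logheader]  # KeyError -> header drift
    #     csv_writer.writerow(row)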
    def _improve(self):
        critic_losses, critic_losses_goal, critic_losses_TD = None, None, None
        if self.num_steps <= self.params.heatup_steps:
            try:
                print('[%s] HEATUP (%d/%d)' % (self.name, self.num_steps, self.params.heatup_steps))
            except:
                pass
        elif self.buffer.num_transitions == 0 or len(self.goal_examples) == 0:
            print('No transitions in buffer or no goal examples!')
        else:
            for _ in range(getattr(self.classifier_params, 'train_steps', 1)):
                # 1. s(1), a(1), s(2), a(2), s_(2), a_(2)
                n_step = self.classifier_params.n_step
                self.classifier_params.q_combinator = getattr(self.classifier_params, 'q_combinator', 'independent')
                self.classifier_params.action_grad_q_combinator = getattr(self.classifier_params, 'action_grad_q_combinator', 'independent')
                if self.classifier_params.q_combinator == 'independent':
                    negatives_batches = self.buffer.sample(self.params.batch_size, num_batches=self.params.critic.number_of_critics, n_step=n_step)
                    sampled_positive_indices = [np.random.randint(len(self.goal_examples), size=self.params.batch_size)
                                                for _ in range(self.params.critic.number_of_critics)]
                else:
                    negatives_batches = self.buffer.sample(self.params.batch_size, num_batches=self.params.critic.number_of_critics, n_step=n_step, identical_batches=True)
                    sampled_positive_indices = [np.random.randint(len(self.goal_examples), size=self.params.batch_size)] * self.params.critic.number_of_critics
                # get s(1) from S* and a(1) <- pi(s(1))
                observation1_batches = [[self.goal_examples[ind] for ind in sampled_positive_indice] for sampled_positive_indice in sampled_positive_indices]
                action1_batches = self._get_actions_at_once(observation1_batches)
                # get s(2), a(2), s_(2) from buffer and a_(2) <- pi(s_(2))
                observation2_batches, action2_batches, next_observation2_batches, future_observation2_batches, n_steps = \
                    self._transitions_to_batches(negatives_batches)
                n_step = n_steps[0] if len(set(n_steps)) == 1 else 1
                next_action2_batches = self._get_actions_at_once(next_observation2_batches)
                if n_step > 1:
                    future_action2_batches = self._get_actions_at_once(future_observation2_batches)

                # 2. inputs and labels
                observation_batches = [np.concatenate([s1, s2]) for s1, s2 in zip(observation1_batches, observation2_batches)]
                action_batches = [np.concatenate([a1, a2]) for a1, a2 in zip(action1_batches, action2_batches)]
                label_ones_batches = [np.ones([self.params.batch_size, 1]) for _ in range(self.params.critic.number_of_critics)]  # list of (N, 1)
                if n_step > 1:
                    next_future_observation2_batches = [np.concatenate([s1, sn]) for s1, sn in zip(next_observation2_batches, future_observation2_batches)]
                    next_future_action2_batches = [np.concatenate([a0, an_1]) for a0, an_1 in zip(next_action2_batches, future_action2_batches)]
                    next_future_target_c_batches = self._evaluate_targets_at_once(next_future_observation2_batches, next_future_action2_batches)  # list of (N, 1)
                    next_target_c_batches = [np.split(target_c, 2, axis=0)[0] for target_c in next_future_target_c_batches]
                    future_target_c_batches = [np.split(target_c, 2, axis=0)[1] for target_c in next_future_target_c_batches]
                    gamma, gamma_n = self.params.discount, self.params.discount**n_step
                    label_cs_batches = [np_l2c((np_c2l(next_c, gamma) + np_c2l(future_c, gamma_n)) / 2.0, gamma)
                                        for next_c, future_c in zip(next_target_c_batches, future_target_c_batches)]  # list of (N, 1)
                else:
                    next_target_c_batches = self._evaluate_targets_at_once(next_observation2_batches, next_action2_batches)  # list of (N, 1)
                    label_cs_batches = next_target_c_batches
                if self.classifier_params.q_combinator == 'independent':  # ensemble
                    pass
                elif self.classifier_params.q_combinator == 'mean':  # consensus
                    label_cs_mean = np.mean(label_cs_batches, axis=0)
                    label_cs_batches = np.array([label_cs_mean] * self.params.critic.number_of_critics)
                elif self.classifier_params.q_combinator == 'min':  # pessimistic
                    label_cs_min = np.min(label_cs_batches, axis=0)
                    label_cs_batches = np.array([label_cs_min] * self.params.critic.number_of_critics)
                elif self.classifier_params.q_combinator == 'max':  # optimistic
                    label_cs_max = np.max(label_cs_batches, axis=0)
                    label_cs_batches = np.array([label_cs_max] * self.params.critic.number_of_critics)
                else:
                    raise ValueError('Invalid q_combinator')
                label_batches = [np.concatenate([ones, omegas]) for ones, omegas in zip(label_ones_batches, label_cs_batches)]
                # 3. train critic network(s)
                critic_losses, critic_losses_goal, critic_losses_TD = self._train_critics_at_once(observation_batches, action_batches, label_batches)  # lists of floats
                # 4. train actor network
                observation2_batch = observation2_batches[np.random.randint(len(observation2_batches))]  # pick one batch
                action2_for_grad_batch = self._get_actions_at_once([observation2_batch])[0]
                observation2_batches_actor = [observation2_batch] * self.params.critic.number_of_critics
                action2_for_grad_batches_actor = [action2_for_grad_batch] * self.params.critic.number_of_critics
                action2_grad_batches = self._action_gradients_at_once(observation2_batches_actor, action2_for_grad_batches_actor)  # list of (N, action_dim)
                self._train_actors_at_once(observation2_batches_actor, action2_grad_batches)

                # 5. update actor and critic target networks
                self._update_targets_at_once()
        return {'critic losses': critic_losses, 'critic losses (goal)': critic_losses_goal, 'critic losses (TD)': critic_losses_TD}

    def _action_gradients_at_once(self, observation_batches, action_for_grad_batches):
        '''
        (internal function for self._improve)
        args:
            :arg observation_batches:
            :type observation_batches: list of numpy arrays
            :arg action_for_grad_batches:
            :type action_for_grad_batches: list of numpy arrays
        returns:
            :return sess.run(...):
            :type sess.run(...): list of arrays
        '''
        fetches = []
        feed_dict = {}
        for i, critic in enumerate(self.critics):
            obs = observation_batches[i]
            action = action_for_grad_batches[i]
            fetches.append([critic.action_grad, critic.output])
            feed_dict[critic.obs] = obs
            feed_dict[critic.action] = action
        fetch_results = self.sess.run(fetches, feed_dict=feed_dict)
        q_batches = [result[1] for result in fetch_results]
        action_grad_batches = [result[0][0] for result in fetch_results]  # action_grad is a list (as from tf.gradients); take its first element
        if self.classifier_params.action_grad_q_combinator == 'independent':
            return action_grad_batches
        elif self.classifier_params.action_grad_q_combinator == 'mean':
            action_grad_mean = np.mean(action_grad_batches, axis=0)
            return [action_grad_mean] * len(action_grad_batches)
        elif self.classifier_params.action_grad_q_combinator == 'min':
            argmin = np.argmin(q_batches, axis=0)
            action_grad_min = np.take_along_axis(np.array(action_grad_batches), np.expand_dims(argmin, axis=0), axis=0)
            action_grad_min = np.squeeze(action_grad_min)
            return [action_grad_min] * len(action_for_grad_batches)
        elif self.classifier_params.action_grad_q_combinator == 'max':
            argmax = np.argmax(q_batches, axis=0)
            action_grad_max = np.take_along_axis(np.array(action_grad_batches), np.expand_dims(argmax, axis=0), axis=0)
            action_grad_max = np.squeeze(action_grad_max)
            return [action_grad_max] * len(action_for_grad_batches)
        else:
            raise ValueError('Invalid action_grad_q_combinator')
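    # How the pessimistic ('min') branch above picks gradients: np.argmin over the
    # critic axis gives, per sample, the index of the lowest-Q critic, and
    # np.take_along_axis gathers that critic's gradient row. Tiny illustration:
    #
    #     q = np.array([[[0.9], [0.2]],          # critic 0, N=2
    #                   [[0.1], [0.8]]])         # critic 1
    #     g = np.arange(8.).reshape(2, 2, 2)     # (critics, N, action_dim)
    #     idx = np.argmin(q, axis=0)             # [[1], [0]] -> per-sample critic
    #     sel = np.take_along_axis(g, np.expand_dims(idx, axis=0), axis=0)
    #     # sel.squeeze() == [[4., 5.], [2., 3.]]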
    def _train_critics_at_once(self, observation_batches, action_batches, target_q_batches):
        '''
        (internal function for self._improve)
        args:
            :arg observation_batches:
            :type observation_batches: list of numpy arrays
            :arg action_batches:
            :type action_batches: list of numpy arrays
            :arg target_q_batches:
            :type target_q_batches: (critics, batches, 1) numpy array
        returns:
            :return losses:
            :type losses: list of floats
        '''
        fetches = []
        feed_dict = {}
        for i, critic in enumerate(self.critics):
            fetches.append([critic.optimize_ops, critic.weight_decay_ops, critic.mse_loss, critic.rce_loss_goal, critic.rce_loss_TD])
            feed_dict[critic.obs] = observation_batches[i]
            feed_dict[critic.action] = action_batches[i]
            feed_dict[critic.label] = target_q_batches[i]
        fetch_results = self.sess.run(fetches, feed_dict=feed_dict)
        losses = [result[2] for result in fetch_results]
        losses_goal = [result[3] for result in fetch_results]
        losses_TD = [result[4] for result in fetch_results]
        return losses, losses_goal, losses_TD

    def _transitions_to_batches(self, transitions):
        batches = ()
        batches = (*batches, [tr['observations'] for tr in transitions])  # list of (N, obs_dim)
        batches = (*batches, [tr['actions'] for tr in transitions])  # list of (N, action_dim)
        batches = (*batches, [tr['next observations'] for tr in transitions])  # list of (N, obs_dim)
        n_steps = [tr['n-step'] for tr in transitions]
        future_observations = [tr['next observations_%d' % (n_step - 1)] if n_step > 1 else None for n_step, tr in zip(n_steps, transitions)]
        batches = (*batches, future_observations)
        batches = (*batches, n_steps)
        return batches  # return assumed; the snippet is truncated here in the source
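The n-step label computation in _improve depends on two helpers, np_c2l and np_l2c, that fall outside this snippet. A minimal sketch of plausible definitions, assuming they implement the usual RCE mapping between classifier outputs and discounted-odds labels (the project's real helpers may differ):

    import numpy as np

    def np_c2l(c, gamma, eps=1e-8):
        # classifier output C -> discounted odds w = gamma * C / (1 - C)  (assumed)
        return gamma * c / (1.0 - c + eps)

    def np_l2c(w, gamma):
        # discounted odds w -> classifier-space label w / (1 + w)  (assumed;
        # gamma is accepted only to mirror np_c2l's signature in this sketch)
        return w / (1.0 + w)

Under this reading, the averaged call in _improve blends the 1-step and n-step bootstrapped odds before mapping the result back to a classifier target.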


gamelogger.py

Source:gamelogger.py Github


import asyncio

import globvars
from loguru import logger as log
from modules.asyncdb import DB as db
from modules.players import asyncisplayeradmin
from modules.redis import redis
from modules.servertools import removerichtext
from modules.timehelper import Now
from modules.tribes import (
    asyncgettribeinfo,
    asyncputplayerintribe,
    asyncremoveplayerintribe,
)

# Record levels that belong in the game log
GAMELOG_LEVELS = {"TRAP", "ADMIN", "DEATH", "TAME", "DECAY", "DEMO", "TRIBE", "CLAIM", "RELEASE"}


def checkgamelog(record):
    return record["level"] in GAMELOG_LEVELS


@log.catch
async def asyncprocessgamelog():
    globvars.gamelogger = True
    count = await redis.zcard("gamelog")
    for each in range(count):
        sline = await redis.zpopmin("gamelog", 1)
        if sline:
            line = sline[0].decode().split("||")
            await _processgameline(line[0], line[1], line[2])
        await asyncio.sleep(0.1)
    globvars.gamelogger = False


@log.catch
async def _processgameline(inst, ptype, line):
    clog = log.patch(lambda record: record["extra"].update(instance=inst))
    logheader = (
        f'{Now(fmt="dt").strftime("%Y-%m-%d %H:%M:%S")}|{inst.upper():>8}|{ptype:<7}| '
    )
    linesplit = removerichtext(line[21:]).split(", ")
    if ptype == "TRAP":
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        msgsplit = linesplit[2][10:].split("trapped:")
        playername = msgsplit[0].strip()
        await asyncputplayerintribe(tribeid, playername)
        dino = msgsplit[1].strip().replace(")", "").replace("(", "")
        line = (
            f"{logheader}[{playername.title()}] of ({tribename}) has trapped [{dino}]"
        )
        clog.log(ptype, line)
    elif ptype == "RELEASE":
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        msgsplit = linesplit[2][10:].split("released:")
        playername = msgsplit[0].strip()
        await asyncputplayerintribe(tribeid, playername)
        dino = msgsplit[1].strip().replace(")", "").replace("(", "")
        line = (
            f"{logheader}[{playername.title()}] of ({tribename}) has released [{dino}]"
        )
        clog.log(ptype, line)
    elif ptype == "DEATH":
        # clog.debug(f'{ptype} - {linesplit}')
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        if tribename is None:
            deathsplit = removerichtext(line[21:]).split(" - ", 1)
            playername = deathsplit[0].strip()
            if deathsplit[1].find("was killed by") != -1:
                killedby = (
                    deathsplit[1]
                    .split("was killed by")[1]
                    .strip()[:-1]
                    .replace("()", "")
                    .strip()
                )
                playerlevel = (
                    deathsplit[1].split("was killed by")[0].strip().replace("()", "")
                )
                line = f"{logheader}[{playername.title()}] {playerlevel} was killed by [{killedby}]"
                clog.log(ptype, line)
            elif deathsplit[1].find("killed!") != -1:
                level = deathsplit[1].split(" was killed!")[0].strip("()")
                line = f"{logheader}[{playername.title()}] {level} has been killed"
                clog.log(ptype, line)
            else:
                log.warning(f"not found gameparse death: {deathsplit}")
        else:
            pass
            # log.debug(f'deathskip: {linesplit}')
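    # Illustrative note: given inst="island1" and ptype="TAME", the logheader
    # prefix built above renders as, e.g.,
    #     "2024-01-01 12:00:00| ISLAND1|TAME   | "
    # i.e. timestamp | instance right-aligned to 8 chars | type left-aligned to 7.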
    elif ptype == "TAME":
        # clog.debug(f'{ptype} - {linesplit}')
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        if tribename is None:
            tamed = linesplit[0].split(" Tamed ")[1].strip(")").strip("!")
            line = f"{logheader}A tribe has tamed [{tamed}]"
            clog.log(ptype, line)
        else:
            # log.debug(f'TRIBETAME: {inst}, {linesplit}')
            playername = linesplit[2][10:].split(" Tamed")[0].strip()
            await asyncputplayerintribe(tribeid, playername)
            tamed = linesplit[2].split(" Tamed")[1].strip(")").strip("!").strip()
            if playername.title() == "Your Tribe":
                line = f"{logheader}[{tribename}] tamed [{tamed}]"
                clog.log(ptype, line)
            else:
                line = f"{logheader}[{playername.title()}] of ({tribename}) tamed [{tamed}]"
                clog.log(ptype, line)
    elif ptype == "DEMO":
        # clog.debug(f'{ptype} - {linesplit}')
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        if tribename is None:
            pass
            # clog.log(ptype, f'{logheader}SINGLDEMO: [{linesplit}]')
        else:
            # log.debug(f'TRIBEDEMO: {inst}, {linesplit}')
            playername = linesplit[2][10:].split(" demolished a ")[0].strip()
            await asyncputplayerintribe(tribeid, playername)
            if (
                len(linesplit[2].split(" demolished a ")) > 1  # was "> 0", which is always true
                and linesplit[2].find(" demolished a ") != -1
            ):
                demoitem = (
                    linesplit[2]
                    .split(" demolished a ")[1]
                    .replace("'", "")
                    .strip(")")
                    .strip("!")
                    .strip()
                )
                line = f"{logheader}[{playername.title()}] of ({tribename}) demolished a [{demoitem}]"
                clog.log(ptype, line)
    elif ptype == "ADMIN":
        # clog.debug(f'{ptype} - {linesplit}')
        steamid = linesplit[2].strip()[9:].strip(")")
        pname = linesplit[0].split("PlayerName: ")[1]
        cmd = linesplit[0].split("AdminCmd: ")[1].split(" (PlayerName:")[0].upper()
        if not await asyncisplayeradmin(steamid):
            clog.warning(
                f"{logheader}Admin command [{cmd}] executed by NON-ADMIN [{pname.title()}] !"
            )
            await db.update(
                "INSERT INTO kicklist (instance,steamid) VALUES ('%s','%s')"
                % (inst, steamid)
            )
            await db.update(
                "UPDATE players SET banned = 'true' WHERE steamid = '%s'" % (steamid,)  # stray ")" removed from the SQL
            )
        else:
            line = f"{logheader}[{pname.title()}] executed admin command [{cmd}] "
            clog.log(ptype, line)
    elif ptype == "DECAY":
        # clog.debug(f'{ptype} - {linesplit}')
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        decayitem = linesplit[2].split("'", 1)[1].split("'")[0]
        # decayitem = re.search('\(([^)]+)', linesplit[2]).group(1)
        line = f"{logheader}Tribe ({tribename}) auto-decayed [{decayitem}]"
        clog.log(ptype, line)
        # wglog(inst, removerichtext(line[21:]))
    elif ptype == "CLAIM":
        # log.debug(f'{ptype} : {linesplit}')
        tribename, tribeid = await asyncgettribeinfo(linesplit, inst, ptype)
        if tribename:
            if linesplit[2].find(" claimed '") != -1:
                playername = linesplit[2][10:].split(" claimed ")[0].strip()
                await asyncputplayerintribe(tribeid, playername)
                claimitem = linesplit[2].split("'", 1)[1].split("'")[0]
                line = f"{logheader}[{playername}] of ({tribename}) has claimed [{claimitem}]"
                clog.log(ptype, line)
            elif linesplit[2].find(" unclaimed '") != -1:
                playername = linesplit[2][10:].split(" unclaimed ")[0].strip()  # was split(" claimed "), which never matches here
                await asyncputplayerintribe(tribeid, playername)
                claimitem = linesplit[2].split("'", 1)[1].split("'")[0]
                line = f"{logheader}[{playername}] of ({tribename}) has un-claimed [{claimitem}]"
                clog.log(ptype, line)
        else:
            pass
            # clog.log(ptype, f'{logheader} SINGLECLAIM: {linesplit}')
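    # Illustrative note: linesplit comes from splitting the rich-text-stripped
    # tail of the raw line on ", ". A hypothetical TRIBE line might reduce to
    #     ["Tribe Alpha", "ID 123", "<Day/Time>: Bob was added to the Tribe!"]
    # so linesplit[2][10:] skips a fixed-width prefix before the event text
    # (the exact raw format is an assumption, not shown in this snippet).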
.strip(")")196 .strip("!")197 )198 await asyncputplayerintribe(tribeid, playername)199 await asyncputplayerintribe(tribeid, playername2)200 line = f"[{playername.title()}] was added to Tribe ({tribename}) by [{playername2.title()}]"201 clog.log(ptype, line)202 elif linesplit[2].find(" was removed from the Tribe!") != -1:203 playername = (204 linesplit[2][10:].split(" was removed from the Tribe!")[0].strip()205 )206 await asyncremoveplayerintribe(tribeid, playername)207 line = f"[{playername.title()}] was removed from Tribe ({tribename})"208 clog.log(ptype, line)209 elif linesplit[2].find(" was added to the Tribe!") != -1:210 playername = (211 linesplit[2][10:].split(" was added to the Tribe!")[0].strip()212 )213 await asyncputplayerintribe(tribeid, playername)214 line = f"[{playername.title()}] was added to the Tribe ({tribename})"215 clog.log(ptype, line)216 elif linesplit[2].find(" set to Rank Group ") != -1:217 playername = linesplit[2][10:].split(" set to Rank Group ")[0].strip()218 await asyncputplayerintribe(tribeid, playername)219 rankgroup = (220 linesplit[2][10:].split(" set to Rank Group ")[1].strip().strip("!")221 )222 line = f"[{playername.title()}] set to rank group [{rankgroup}] in Tribe ({tribename})"223 clog.log(ptype, line)224 else:225 clog.log(ptype, f"{logheader}{linesplit}")226 else:227 log.debug(f"UNKNOWN {ptype} - {linesplit}")228 line = f"{linesplit}"...


