How to use the verifyBitmap method in fMBT

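Before the full source below, here is a minimal usage sketch. It is not part of fmbtgti.py: it assumes an Android target driven through fmbtandroid (as in the module docstring excerpt), and the bitmap path, file names and match parameters are illustrative.

import fmbtandroid

d = fmbtandroid.Device()
d.enableVisualLog("verify.html")                # optional: log which bitmap/alternative matched
d.setBitmapPath("bitmaps/android-4.2/device-B")

d.refreshScreenshot()                           # bitmap methods search the latest screenshot

# True if bitmap.png, or any alternative bitmap.png.alt*.png in the same
# directory, is found in the screenshot with the given OIR parameters.
if d.verifyBitmap("bitmap.png", colorMatch=0.9):
    print "bitmap.png (or an alternative) found"
else:
    print "bitmap.png not found"

# Preprocess both the screenshot and the reference bitmap before matching,
# as documented in _Eye4GraphicsOirEngine below (black-and-white comparison).
d.verifyBitmap("ref.png", preprocess="-threshold 60%")

If a bitmap that should be visible is not found, d.oirEngine().adjustParameters(screenshot, "bitmap.png"), documented in the source, searches for scale, colorMatch and pixel size values with which the bitmap matches.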

fmbtgti.py

Source: fmbtgti.py (GitHub)



...
After that, device B can be tested without taking new screenshot.
  d = fmbtandroid.Device()
  d.setBitmapPath("bitmaps/android-4.2/device-B")
All bitmap methods support alternative bitmaps. For example, if
  verifyBitmap("bitmap.png")
finds bitmap.png file in directory bitmapdir in the bitmap path, but
bitmap.png cannot be found in the screenshot with any given match
parameters, then it automatically tries the same for all
  bitmap.png.alt*.png
files in the same bitmapdir directory. If any of those match, then
verifyBitmap("bitmap.png") returns True. Visual log shows the
information about which of the alternatives actually matched.
"""
import cgi
import ctypes
import datetime
import distutils.sysconfig
import gc
import glob
import inspect
import math
import os
import re
import shlex
import shutil
import subprocess
import sys
import time
import traceback
import types
import fmbt
import fmbt_config
import eyenfinger
# See imagemagick convert parameters.
_OCRPREPROCESS = [
    ''
]
# See tesseract -pagesegmode.
_OCRPAGESEGMODES = [3]
_g_defaultOcrEngine = None # optical character recognition engine
_g_defaultOirEngine = None # optical image recognition engine
_g_ocrEngines = []
_g_oirEngines = []
_g_forcedLocExt = ".fmbtoir.loc"
class _USE_DEFAULTS:
    pass
def _fmbtLog(msg):
    fmbt.fmbtlog("fmbtgti: %s" % (msg,))
def _filenameTimestamp(t=None):
    return fmbt.formatTime("%Y%m%d-%H%M%S-%f", t)
def _takeDragArgs(d):
    return _takeArgs(("startOffset", "startPos",
                      "delayBeforeMoves", "delayBetweenMoves",
                      "delayAfterMoves", "movePoints"), d)
def _takeTapArgs(d):
    return _takeArgs(("tapOffset", "tapPos", "long", "hold", "count", "delayBetweenTaps", "button"), d)
def _takeWaitArgs(d):
    return _takeArgs(("waitTime", "pollDelay",
                      "beforeRefresh", "afterRefresh"), d)
def _takeOirArgs(screenshotOrOirEngine, d, thatsAll=False):
    if isinstance(screenshotOrOirEngine, Screenshot):
        oirEngine = screenshotOrOirEngine.oirEngine()
    else:
        oirEngine = screenshotOrOirEngine
    return _takeArgs(oirEngine._findBitmapArgNames(), d, thatsAll)
def _takeOcrArgs(screenshotOrOcrEngine, d, thatsAll=False):
    if isinstance(screenshotOrOcrEngine, Screenshot):
        ocrEngine = screenshotOrOcrEngine.ocrEngine()
    else:
        ocrEngine = screenshotOrOcrEngine
    return _takeArgs(ocrEngine._findTextArgNames(), d, thatsAll)
def _takeArgs(argNames, d, thatsAll=False):
    """
    Returns pair:
        (dict of items where key in argNames,
         dict of items that were left in d)
    If thatsAll is True, require that all arguments have been
    consumed.
    """
    retval = {}
    for a in argNames:
        if a in d:
            retval[a] = d.pop(a, None)
    if thatsAll and len(d) > 0:
        raise TypeError('Unknown argument(s): "%s"' %
                        ('", "'.join(sorted(d.keys()))))
    return retval, d
def _convert(srcFile, convertArgs, dstFile):
    if isinstance(convertArgs, basestring):
        convertArgs = shlex.split(convertArgs)
    if (os.access(dstFile, os.R_OK) and
        os.access(srcFile, os.R_OK) and
        os.stat(srcFile).st_mtime < os.stat(dstFile).st_mtime):
        return # cached file is up-to-date
    subprocess.call([fmbt_config.imagemagick_convert, srcFile] + convertArgs + [dstFile])
def _ppFilename(origFilename, preprocess):
    return origFilename + ".fmbtoir.cache."
+ re.sub("[^a-zA-Z0-9.]", "", preprocess) + ".png"160def _intCoords((x, y), (width, height)):161 if 0 <= x <= 1 and type(x) == float: x = x * width162 if 0 <= y <= 1 and type(y) == float: y = y * height163 return (int(round(x)), int(round(y)))164def _boxOnRegion((x1, y1, x2, y2), (minX, minY, maxX, maxY), partial=True):165 if partial:166 return (x1 < x2 and ((minX <= x1 <= maxX) or (minX <= x2 <= maxX)) and167 y1 < y2 and ((minY <= y1 <= maxY) or (minY <= y2 <= maxY)))168 else:169 return (x1 < x2 and ((minX <= x1 <= maxX) and (minX <= x2 <= maxX)) and170 y1 < y2 and ((minY <= y1 <= maxY) and (minY <= y2 <= maxY)))171def _edgeDistanceInDirection((x, y), (width, height), direction):172 x, y = _intCoords((x, y), (width, height))173 direction = direction % 360 # -90 => 270, 765 => 45174 dirRad = math.radians(direction)175 if 0 < direction < 180:176 distTopBottom = y / math.sin(dirRad)177 elif 180 < direction < 360:178 distTopBottom = -(height - y) / math.sin(dirRad)179 else:180 distTopBottom = float('inf')181 if 90 < direction < 270:182 distLeftRight = -x / math.cos(dirRad)183 elif 270 < direction <= 360 or 0 <= direction < 90:184 distLeftRight = (width - x) / math.cos(dirRad)185 else:186 distLeftRight = float('inf')187 return min(distTopBottom, distLeftRight)188### Binding to eye4graphics C-library189class _Bbox(ctypes.Structure):190 _fields_ = [("left", ctypes.c_int32),191 ("top", ctypes.c_int32),192 ("right", ctypes.c_int32),193 ("bottom", ctypes.c_int32),194 ("error", ctypes.c_int32)]195class _Rgb888(ctypes.Structure):196 _fields_ = [("red", ctypes.c_uint8),197 ("green", ctypes.c_uint8),198 ("blue", ctypes.c_uint8)]199_libpath = ["", ".",200 os.path.dirname(os.path.abspath(__file__)),201 distutils.sysconfig.get_python_lib(plat_specific=1)]202_suffix = ".so"203if os.name == "nt":204 _suffix = ".dll"205for _dirname in _libpath:206 try:207 eye4graphics = ctypes.CDLL(os.path.join(_dirname , "eye4graphics"+_suffix))208 struct_bbox = _Bbox(0, 0, 0, 0, 0)209 eye4graphics.findNextColor.restype = ctypes.c_int210 eye4graphics.findNextHighErrorBlock.argtypes = [211 ctypes.c_void_p,212 ctypes.c_void_p,213 ctypes.c_int,214 ctypes.c_int,215 ctypes.c_double,216 ctypes.c_void_p]217 eye4graphics.findNextDiff.restype = ctypes.c_int218 eye4graphics.findNextDiff.argtypes = [219 ctypes.c_void_p,220 ctypes.c_void_p,221 ctypes.c_void_p,222 ctypes.c_double,223 ctypes.c_double,224 ctypes.c_void_p,225 ctypes.c_void_p,226 ctypes.c_int]227 eye4graphics.openImage.argtypes = [ctypes.c_char_p]228 eye4graphics.openImage.restype = ctypes.c_void_p229 eye4graphics.openedImageDimensions.argtypes = [ctypes.c_void_p, ctypes.c_void_p]230 eye4graphics.closeImage.argtypes = [ctypes.c_void_p]231 eye4graphics.rgb5652rgb.restype = ctypes.c_int232 eye4graphics.rgb5652rgb.argtypes = [233 ctypes.c_void_p,234 ctypes.c_int,235 ctypes.c_int,236 ctypes.c_void_p]237 break238 except: pass239else:240 raise ImportError("%s cannot load eye4graphics%s" % (__file__, _suffix))241def _e4gOpenImage(filename):242 image = eye4graphics.openImage(filename)243 if not image:244 raise IOError('Cannot open image "%s"' % (filename,))245 else:246 return image247def _e4gImageDimensions(e4gImage):248 struct_bbox = _Bbox(0, 0, 0, 0, 0)249 eye4graphics.openedImageDimensions(ctypes.byref(struct_bbox), e4gImage)250 return (struct_bbox.right, struct_bbox.bottom)251def _e4gImageIsBlank(filename):252 e4gImage = _e4gOpenImage(filename)253 rv = (eye4graphics.openedImageIsBlank(e4gImage) == 1)254 eye4graphics.closeImage(e4gImage)255 return rv256### end of binding to 
eye4graphics.so257def sortItems(items, criteria):258 """259 Returns GUIItems sorted according to given criteria260 Parameters:261 items (list of GUIItems):262 items to be sorted263 criteria (string):264 Supported values:265 "topleft" - sort by top left coordinates of items266 "area" - sort by areas of items267 """268 if criteria == "topleft":269 top_left_items = [(i.bbox()[1], i.bbox()[0], i) for i in items]270 top_left_items.sort()271 return [tli[2] for tli in top_left_items]272 elif criteria == "area":273 area_items = [274 ((i.bbox()[2] - i.bbox()[0]) * (i.bbox()[3] - i.bbox()[1]), i)275 for i in items]276 area_items.sort()277 return [ai[1] for ai in area_items]278 else:279 raise ValueError('invalid sort criteria "%s"' % (criteria,))280class GUITestConnection(object):281 """282 Implements GUI testing primitives needed by GUITestInterface.283 All send* and recv* methods return284 - True on success285 - False on user error (unknown keyName, coordinates out of range)286 - raise Exception on framework error (connection lost, missing287 dependencies).288 """289 def sendPress(self, keyName):290 return self.sendKeyDown(keyName) and self.sendKeyUp(keyName)291 def sendKeyDown(self, keyName):292 raise NotImplementedError('sendKeyDown("%s") needed but not implemented.' % (keyName,))293 def sendKeyUp(self, keyName):294 raise NotImplementedError('sendKeyUp("%s") needed but not implemented.' % (keyName,))295 def sendTap(self, x, y):296 return self.sendTouchDown(x, y) and self.sendTouchUp(x, y)297 def sendTouchDown(self, x, y):298 raise NotImplementedError('sendTouchDown(%d, %d) needed but not implemented.' % (x, y))299 def sendTouchMove(self, x, y):300 raise NotImplementedError('sendTouchMove(%d, %d) needed but not implemented.' % (x, y))301 def sendTouchUp(self, x, y):302 raise NotImplementedError('sendTouchUp(%d, %d) needed but not implemented.' % (x, y))303 def sendType(self, text):304 raise NotImplementedError('sendType("%s") needed but not implemented.' % (text,))305 def recvScreenshot(self, filename):306 """307 Saves screenshot from the GUI under test to given filename.308 """309 raise NotImplementedError('recvScreenshot("%s") needed but not implemented.' % (filename,))310 def recvScreenUpdated(self, waitTime, pollDelay):311 """312 Wait until the screen has been updated, but no longer than the313 timeout (waitTime). Return True if the screen was updated314 before the timeout, otherwise False.315 Implementing this method is optional. If not implemented, the316 method returns None, and an inefficient recvScreenshot-based317 implementation is used instead from fmbtgti. pollDelay can be318 ignored if more efficient solutions are available319 (update-event based triggering, for instance).320 """321 return None322 def target(self):323 """324 Returns a string that is unique to each test target. For325 instance, Android device serial number.326 """327 return "GUITestConnectionTarget"328class SimulatedGUITestConnection(GUITestConnection):329 """330 Simulates GUITestConnection: records method calls and fakes screenshots331 All send* methods return True. recvScreenshot returns always True332 if non-empty list of fake screenshot filenames is given (see333 constructor and setScreenshotFilenames). 
Otherwise it returns334 False.335 """336 def __init__(self, screenshotFilenames=()):337 """338 Parameters:339 screenshotFilenames (tuple of filenames):340 calling recvScreenshot uses next item in this tuple as341 the observed screenshot.342 """343 GUITestConnection.__init__(self)344 self.setScreenshotFilenames(screenshotFilenames)345 self._calls = []346 for recordedMethod in ("sendPress", "sendKeyDown", "sendKeyUp",347 "sendTap", "sendTouchDown", "sendTouchMove",348 "sendTouchUp", "sendType"):349 self.__dict__[recordedMethod] = self._recorder(recordedMethod)350 def _recorder(self, method):351 return lambda *args, **kwargs: self._calls.append(352 (time.time(), method, args, kwargs)) or True353 def history(self):354 return self._calls355 def clearHistory(self):356 self._calls = []357 def setScreenshotFilenames(self, screenshotFilenames):358 self._screenshotFilenames = screenshotFilenames359 self._nextScreenshotFilename = 0360 def recvScreenshot(self, filename):361 self._calls.append((time.time(), "recvScreenshot", (filename,), {}))362 if self._screenshotFilenames:363 if self._nextScreenshotFilename >= len(self._screenshotFilenames):364 self._nextScreenshotFilename = 0365 fakeFilename = self._screenshotFilenames[self._nextScreenshotFilename]366 self._nextScreenshotFilename += 1367 if not os.access(fakeFilename, os.R_OK):368 raise IOError('screenshot file not found: "%s"' % (fakeFilename,))369 shutil.copy(fakeFilename, filename)370 return True371 else:372 return False373 def target(self):374 return "SimulatedGUITestConnection"375class OrEngine(object):376 """377 Optical recognition engine. Base class for OCR and OIR engines,378 enables registering engine instances.379 """380 def __init__(self, *args, **kwargs):381 pass382 def register(self, defaultOcr=False, defaultOir=False):383 """384 Register this engine instance to the list of OCR and/or OIR385 engines.386 Parameters:387 defaultOcr (optional, boolean):388 if True, use this OCR engine by default in all new389 GUITestInterface instances. The default is False.390 defaultOir (optional, boolean):391 if True, use this OIR engine by default in all new392 GUITestInterface instances. The default is False.393 Returns the index with which the engine was registered to the394 list of OCR or OIR engines. If this instance implements both395 OCR and OIR engines, returns pair (OCR index, OIR index).396 """397 # Allow a single engine implement both OCR and OIR engine398 # interfaces. Therefore, it must be possible to call399 # e.register(defaultOcr=True, defaultOir=True).400 #401 global _g_defaultOcrEngine, _g_defaultOirEngine402 global _g_ocrEngines, _g_oirEngines403 engineIndexes = []404 if isinstance(self, OcrEngine):405 if not self in _g_ocrEngines:406 _g_ocrEngines.append(self)407 engineIndexes.append(_g_ocrEngines.index(self))408 if defaultOcr:409 _g_defaultOcrEngine = self410 if isinstance(self, OirEngine):411 if not self in _g_oirEngines:412 _g_oirEngines.append(self)413 engineIndexes.append(_g_oirEngines.index(self))414 if defaultOir:415 _g_defaultOirEngine = self416 if len(engineIndexes) == 1:417 return engineIndexes[0]418 else:419 return engineIndexes420class OcrEngine(OrEngine):421 """422 This is an abstract interface for OCR engines that can be plugged423 into fmbtgti.GUITestInterface instances and Screenshots.424 To implement an OCR engine, you need to override _findText() at425 minimum. 
See _findText documentation in this class for426 requirements.427 If possible in your OCR engine, you can provide _dumpOcr() to428 reveal what is recognized in screenshots.429 For efficient caching of results and freeing cached results, you430 can override _addScreenshot() and _removeScreenshot(). Every431 screenshot is added before findText() or dumpOcr().432 A typical usage of OcrEngine instance:433 - oe.addScreenshot(ss)434 - oe.findText(ss, text1, <engine/screenshot/find-specific-args>)435 - oe.findText(ss, text2, <engine/screenshot/find-specific-args>)436 - oe.removeScreenshot(ss)437 Note that there may be several screenshots added before they are438 removed.439 """440 def __init__(self, *args, **kwargs):441 super(OcrEngine, self).__init__(*args, **kwargs)442 self._ssFindTextDefaults = {}443 self._findTextDefaults = {}444 ocrFindArgs, _ = _takeOcrArgs(self, kwargs)445 self._setFindTextDefaults(ocrFindArgs)446 def dumpOcr(self, screenshot, **kwargs):447 """448 Returns what is recognized in the screenshot. For debugging449 purposes.450 """451 ocrArgs = self.__ocrArgs(screenshot, **kwargs)452 return self._dumpOcr(screenshot, **ocrArgs)453 def _dumpOcr(self, screenshot, **kwargs):454 return None455 def addScreenshot(self, screenshot, **findTextDefaults):456 """457 Prepare for finding text from the screenshot.458 Parameters:459 screenshot (fmbtgti.Screenshot)460 screenshot object to be searched from.461 other parameters (optional)462 findText defaults for this screenshot.463 Notice that there may be many screenshots simultaneously.464 Do not keep reference to the screenshot object.465 """466 self.setScreenshotFindTextDefaults(screenshot, **findTextDefaults)467 return self._addScreenshot(screenshot, **findTextDefaults)468 def _addScreenshot(self, screenshot, **findTextDefaults):469 pass470 def removeScreenshot(self, screenshot):471 """472 OCR queries on the screenshot will not be made anymore.473 """474 self._removeScreenshot(screenshot)475 try:476 del self._ssFindTextDefaults[id(screenshot)]477 except KeyError:478 raise KeyError('screenshot "%s" does not have findTextDefaults. '479 'If OcrEngine.addScreenshot() is overridden, it '480 '*must* call parent\'s addScreenshot.' % (screenshot.filename(),))481 def _removeScreenshot(self, screenshot):482 pass483 def setFindTextDefaults(self, **findTextDefaults):484 return self._setFindTextDefaults(findTextDefaults, screenshot=None)485 def setScreenshotFindTextDefaults(self, screenshot, **findTextDefaults):486 return self._setFindTextDefaults(findTextDefaults, screenshot=screenshot)487 def _setFindTextDefaults(self, defaults, screenshot=None):488 """489 Set default values for optional arguments for findText().490 Parameters:491 defaults (dictionary)492 Default keyword arguments and their values.493 screenshot (optional, fmbtgti.Screenshot instance)494 Use the defaults for findText on this screenshot. If495 the defaults are None, make them default for all496 screenshots. 
Screenshot-specific defaults override497 engine default.498 """499 if screenshot == None:500 self._findTextDefaults.update(defaults)501 else:502 ssid = id(screenshot)503 if not ssid in self._ssFindTextDefaults:504 self._ssFindTextDefaults[ssid] = self._findTextDefaults.copy()505 self._ssFindTextDefaults[ssid].update(defaults)506 def findTextDefaults(self, screenshot=None):507 if screenshot == None:508 return self._findTextDefaults509 elif id(screenshot) in self._ssFindTextDefaults:510 return self._ssFindTextDefaults[id(screenshot)]511 else:512 return None513 def _findTextArgNames(self):514 """515 Returns names of optional findText arguments.516 """517 return inspect.getargspec(self._findText).args[3:]518 def __ocrArgs(self, screenshot, **priorityArgs):519 ocrArgs = {}520 ocrArgs.update(self._findTextDefaults)521 ssId = id(screenshot)522 if ssId in self._ssFindTextDefaults:523 ocrArgs.update(self._ssFindTextDefaults[ssId])524 ocrArgs.update(priorityArgs)525 return ocrArgs526 def findText(self, screenshot, text, **kwargs):527 """528 Return list of fmbtgti.GUIItems that match to text.529 """530 ocrArgs = self.__ocrArgs(screenshot, **kwargs)531 return self._findText(screenshot, text, **ocrArgs)532 def _findText(self, screenshot, text, **kwargs):533 """534 Find appearances of text from the screenshot.535 Parameters:536 screenshot (fmbtgti.Screenshot)537 Screenshot from which text is to be searched538 for. Use Screenshot.filename() to get the filename.539 text (string)540 text to be searched for.541 other arguments (engine specific)542 kwargs contain keyword arguments given to543 findText(screenshot, text, ...), already extended544 first with screenshot-specific findTextDefaults, then545 with engine-specific findTextDefaults.546 _findText *must* define all engine parameters as547 explicit keyword arguments:548 def _findText(self, screenshot, text, engArg1=42):549 ...550 Return list of fmbtgti.GUIItems.551 """552 raise NotImplementedError("_findText needed but not implemented.")553class _EyenfingerOcrEngine(OcrEngine):554 """555 OCR engine parameters that can be used in all556 ...OcrText() methods (swipeOcrText, tapOcrText, findItemsByOcrText, ...):557 match (float, optional):558 minimum match score in range [0.0, 1.0]. The default is559 1.0 (exact match).560 area ((left, top, right, bottom), optional):561 search from the given area only. Left, top, right and562 bottom are either absolute coordinates (integers) or563 floats in range [0.0, 1.0]. In the latter case they are564 scaled to screenshot dimensions. The default is (0.0,565 0.0, 1.0, 1.0), that is, search everywhere in the566 screenshot.567 lang (string, optional):568 pass given language option to Tesseract. See supported569 LANGUAGES (-l) in Tesseract documentation. The default570 is "eng" (English).571 pagesegmodes (list of integers, optional):572 try all integers as tesseract -pagesegmode573 arguments. The default is [3], another good option could574 be [3, 6].575 preprocess (string, optional):576 preprocess filter to be used in OCR for better577 result. 
Refer to eyenfinger.autoconfigure to search for578 a good one, or try with ImageMagick's convert:579 $ convert screenshot.png <preprocess> screenshot-pp.png580 $ tesseract screenshot-pp.png stdout581 configfile (string, optional):582 Tesseract configuration file.583 Example: limit recognized characters to hexadecimals by creating file584 "hexchars" with content585 tessedit_char_whitelist 0123456789abcdefABCDEF586 To use this file in a single run, pass it to any Ocr method:587 dut.verifyOcrText("DEADBEEF", configfile="hexchars")588 or to use it on every Ocr method, set it as a default:589 dut.ocrEngine().setFindTextDefaults(configfile="hexchars")590 """591 class _OcrResults(object):592 __slots__ = ("filename", "screenSize", "pagesegmodes", "preprocess", "area", "words", "lang", "configfile")593 def __init__(self, filename, screenSize):594 self.filename = filename595 self.screenSize = screenSize596 self.pagesegmodes = None597 self.preprocess = None598 self.area = None599 self.words = None600 self.lang = None601 self.configfile = None602 def __init__(self, *args, **engineDefaults):603 engineDefaults["area"] = engineDefaults.get("area", (0.0, 0.0, 1.0, 1.0))604 engineDefaults["lang"] = engineDefaults.get("lang", "eng")605 engineDefaults["match"] = engineDefaults.get("match", 1.0)606 engineDefaults["pagesegmodes"] = engineDefaults.get("pagesegmodes", _OCRPAGESEGMODES)607 engineDefaults["preprocess"] = engineDefaults.get("preprocess", _OCRPREPROCESS)608 engineDefaults["configfile"] = engineDefaults.get("configfile", None)609 super(_EyenfingerOcrEngine, self).__init__(*args, **engineDefaults)610 self._ss = {} # OCR results for screenshots611 def _addScreenshot(self, screenshot, **findTextDefaults):612 ssId = id(screenshot)613 self._ss[ssId] = _EyenfingerOcrEngine._OcrResults(screenshot.filename(), screenshot.size())614 def _removeScreenshot(self, screenshot):615 ssId = id(screenshot)616 if ssId in self._ss:617 del self._ss[ssId]618 def _findText(self, screenshot, text, match=None, preprocess=None, area=None, pagesegmodes=None, lang=None, configfile=None):619 ssId = id(screenshot)620 self._assumeOcrResults(screenshot, preprocess, area, pagesegmodes, lang, configfile)621 for ppfilter in self._ss[ssId].words.keys():622 try:623 score_text_bbox_list = eyenfinger.findText(624 text, self._ss[ssId].words[ppfilter], match=match)625 if not score_text_bbox_list:626 continue627 else:628 break629 except eyenfinger.BadMatch:630 continue631 else:632 return []633 retval = [GUIItem("OCR text (match %.2f)" % (score,),634 bbox, self._ss[ssId].filename,635 ocrFind=text, ocrFound=matching_text)636 for score, matching_text, bbox in score_text_bbox_list]637 return retval638 def _dumpOcr(self, screenshot, match=None, preprocess=None, area=None, pagesegmodes=None, lang=None, configfile=None):639 ssId = id(screenshot)640 self._assumeOcrResults(screenshot, preprocess, area, pagesegmodes, lang, configfile)641 w = []642 for ppfilter in self._ss[ssId].preprocess:643 for word in self._ss[ssId].words[ppfilter]:644 for appearance, (wid, middle, bbox) in enumerate(self._ss[ssId].words[ppfilter][word]):645 (x1, y1, x2, y2) = bbox646 w.append((word, (x1, y1, x2, y2)))647 return sorted(set(w), key=lambda i:(i[1][1]/8, i[1][0]))648 def _assumeOcrResults(self, screenshot, preprocess, area, pagesegmodes, lang, configfile):649 ssId = id(screenshot)650 if not type(preprocess) in (list, tuple):651 preprocess = [preprocess]652 if (self._ss[ssId].words == None653 or self._ss[ssId].preprocess != preprocess654 or self._ss[ssId].area != 
area655 or self._ss[ssId].lang != lang656 or self._ss[ssId].configfile != configfile):657 self._ss[ssId].words = {}658 self._ss[ssId].preprocess = preprocess659 self._ss[ssId].area = area660 self._ss[ssId].lang = lang661 self._ss[ssId].configfile = configfile662 for ppfilter in preprocess:663 pp = ppfilter % { "zoom": "-resize %sx" % (self._ss[ssId].screenSize[0] * 2) }664 try:665 eyenfinger.iRead(source=self._ss[ssId].filename, ocr=True, preprocess=pp, ocrArea=area, ocrPageSegModes=pagesegmodes, lang=lang, configfile=configfile)666 except Exception:667 self._ss[ssId].words = None668 raise669 self._ss[ssId].words[ppfilter] = eyenfinger._g_words670def _defaultOcrEngine():671 if _g_defaultOcrEngine:672 return _g_defaultOcrEngine673 else:674 _EyenfingerOcrEngine().register(defaultOcr=True)675 return _g_defaultOcrEngine676class OirEngine(OrEngine):677 """678 This is an abstract interface for OIR (optical image recognition)679 engines that can be plugged into fmbtgti.GUITestInterface680 instances and Screenshots.681 To implement an OIR engine, you need to override _findBitmap() at682 minimum. See _findBitmap documentation in this class for683 requirements.684 This base class provides full set of OIR parameters to685 _findBitmap. The parameters are combined from686 - OirEngine find defaults, specified when OirEngine is687 instantiated.688 - Screenshot instance find defaults.689 - bitmap / bitmap directory find defaults (read from the690 .fmbtoirrc that is in the same directory as the bitmap).691 - ...Bitmap() method parameters.692 The latter in the list override the former.693 For efficient caching of results and freeing cached results, you694 can override _addScreenshot() and _removeScreenshot(). Every695 screenshot is added before findBitmap().696 A typical usage of OirEngine instance:697 - oe.addScreenshot(ss)698 - oe.findBitmap(ss, bmpFilename1, <engine/screenshot/find-specific-args>)699 - oe.findBitmap(ss, bmpFilename2, <engine/screenshot/find-specific-args>)700 - oe.removeScreenshot(ss)701 Note that there may be several screenshots added before they are702 removed. ss is a Screenshot instance. Do not keep references to703 Screenshot intances, otherwise garbage collector will not remove704 them.705 """706 def __init__(self, *args, **kwargs):707 super(OirEngine, self).__init__(*args, **kwargs)708 self._ssFindBitmapDefaults = {}709 self._findBitmapDefaults = {}710 oirArgs, _ = _takeOirArgs(self, kwargs)711 self._setFindBitmapDefaults(oirArgs)712 def addScreenshot(self, screenshot, **findBitmapDefaults):713 """714 Prepare for finding bitmap from the screenshot.715 Parameters:716 screenshot (fmbtgti.Screenshot)717 screenshot object to be searched from.718 other parameters (optional)719 findBitmap defaults for this screenshot.720 Notice that there may be many screenshots simultaneously.721 Do not keep reference to the screenshot object.722 """723 self.setScreenshotFindBitmapDefaults(screenshot, **findBitmapDefaults)724 return self._addScreenshot(screenshot, **findBitmapDefaults)725 def _addScreenshot(self, screenshot, **findBitmapDefaults):726 pass727 def removeScreenshot(self, screenshot):728 """729 OIR queries on the screenshot will not be made anymore.730 """731 self._removeScreenshot(screenshot)732 try:733 del self._ssFindBitmapDefaults[id(screenshot)]734 except KeyError:735 raise KeyError('screenshot "%s" does not have findBitmapDefaults. '736 'If OirEngine.addScreenshot() is overridden, it '737 '*must* call parent\'s addScreenshot.' 
% (screenshot.filename(),))738 def _removeScreenshot(self, screenshot):739 pass740 def setFindBitmapDefaults(self, **findBitmapDefaults):741 return self._setFindBitmapDefaults(findBitmapDefaults, screenshot=None)742 def setScreenshotFindBitmapDefaults(self, screenshot, **findBitmapDefaults):743 return self._setFindBitmapDefaults(findBitmapDefaults, screenshot=screenshot)744 def _setFindBitmapDefaults(self, defaults, screenshot=None):745 """746 Set default values for optional arguments for findBitmap().747 Parameters:748 defaults (dictionary)749 Default keyword arguments and their values.750 screenshot (optional, fmbtgti.Screenshot instance)751 Use the defaults for findBitmap on this screenshot. If752 the defaults are None, make them default for all753 screenshots. Screenshot-specific defaults override754 engine default.755 """756 if screenshot == None:757 self._findBitmapDefaults.update(defaults)758 else:759 ssid = id(screenshot)760 if not ssid in self._ssFindBitmapDefaults:761 self._ssFindBitmapDefaults[ssid] = self._findBitmapDefaults.copy()762 self._ssFindBitmapDefaults[ssid].update(defaults)763 def findBitmapDefaults(self, screenshot=None):764 if screenshot == None:765 return self._findBitmapDefaults766 elif id(screenshot) in self._ssFindBitmapDefaults:767 return self._ssFindBitmapDefaults[id(screenshot)]768 else:769 return None770 def _findBitmapArgNames(self):771 """772 Returns names of optional findBitmap arguments.773 """774 return inspect.getargspec(self._findBitmap).args[3:]775 def __oirArgs(self, screenshot, bitmap, **priorityArgs):776 oirArgs = {}777 oirArgs.update(self._findBitmapDefaults)778 ssId = id(screenshot)779 if ssId in self._ssFindBitmapDefaults:780 oirArgs.update(self._ssFindBitmapDefaults[ssId])781 oirArgs.update(priorityArgs)782 return oirArgs783 def findBitmap(self, screenshot, bitmap, **kwargs):784 """785 Return list of fmbtgti.GUIItems that match to bitmap.786 """787 oirArgs = self.__oirArgs(screenshot, bitmap, **kwargs)788 bitmapLocsFilename = bitmap + _g_forcedLocExt789 if os.access(bitmapLocsFilename, os.R_OK):790 # Use hardcoded bitmap locations file instead of real OIR791 # bitmap.png.locs file format:792 # [(x11, y11, x12, y12), ..., (xn1, yn1, xn2, yn2)]793 try:794 bboxList = eval(file(bitmapLocsFilename).read().strip())795 foundItems = []796 for (left, top, right, bottom) in bboxList:797 x1, y1 = _intCoords((left, top), screenshot.size())798 x2, y2 = _intCoords((right, bottom), screenshot.size())799 foundItems.append(800 GUIItem("bitmap location", (x1, y1, x2, y2),801 screenshot.filename(), bitmap=bitmapLocsFilename))802 return foundItems803 except Exception, e:804 raise ValueError('Error reading bounding box list from %s: %s' %805 repr(bitmapLocsFilename), e)806 return self._findBitmap(screenshot, bitmap, **oirArgs)807 def _findBitmap(self, screenshot, bitmap, **kwargs):808 """809 Find appearances of bitmap from the screenshot.810 Parameters:811 screenshot (fmbtgti.Screenshot)812 Screenshot from which bitmap is to be searched813 for. 
Use Screenshot.filename() to get the filename.814 bitmap (string)815 bitmap to be searched for.816 other arguments (engine specific)817 kwargs contain keyword arguments given to818 findBitmap(screenshot, bitmap, ...), already extended819 first with screenshot-specific findBitmapDefaults, then820 with engine-specific findBitmapDefaults.821 _findBitmap *must* define all engine parameters as822 explicit keyword arguments:823 def _findBitmap(self, screenshot, bitmap, engArg1=42):824 ...825 Returns list of fmbtgti.GUIItems.826 """827 raise NotImplementedError("_findBitmap needed but not implemented.")828class _Eye4GraphicsOirEngine(OirEngine):829 """OIR engine parameters that can be used in all830 ...Bitmap() methods (swipeBitmap, tapBitmap, findItemsByBitmap, ...):831 colorMatch (float, optional):832 required color matching accuracy. The default is 1.0833 (exact match). For instance, 0.75 requires that every834 pixel's every RGB component value on the bitmap is at835 least 75 % match with the value of corresponding pixel's836 RGB component in the screenshot.837 opacityLimit (float, optional):838 threshold for comparing pixels with non-zero alpha839 channel. Pixels less opaque than the given threshold are840 skipped in match comparison. The default is 0, that is,841 alpha channel is ignored.842 area ((left, top, right, bottom), optional):843 search bitmap from the given area only. Left, top right844 and bottom are either absolute coordinates (integers) or845 floats in range [0.0, 1.0]. In the latter case they are846 scaled to screenshot dimensions. The default is (0.0,847 0.0, 1.0, 1.0), that is, search everywhere in the848 screenshot.849 limit (integer, optional):850 number of returned matches is limited to the limit. The851 default is -1: all matches are returned. Applicable in852 findItemsByBitmap.853 allowOverlap (boolean, optional):854 allow returned icons to overlap. If False, returned list855 contains only non-overlapping bounding boxes. The856 default is False.857 scale (float or pair of floats, optional):858 scale to be applied to the bitmap before859 matching. Single float is a factor for both X and Y860 axis, pair of floats is (xScale, yScale). The default is861 1.0.862 bitmapPixelSize (integer, optional):863 size of pixel rectangle on bitmap for which there must864 be same color on corresponding screenshot rectangle. If865 scale is 1.0, default is 1 (rectangle is 1x1). If scale866 != 1.0, the default is 2 (rectangle is 2x2).867 screenshotPixelSize (integer, optional):868 size of pixel rectangle on screenshot in which there869 must be a same color pixel as in the corresponding870 rectangle on bitmap. The default is scale *871 bitmapPixelSize.872 preprocess (string, optional):873 preprocess parameters that are executed to both screenshot874 and reference bitmap before running findBitmap. By default875 there is no preprocessing.876 Example: d.verifyBitmap("ref.png", preprocess="-threshold 60%")877 will execute two imagemagick commands:878 1. convert screenshot.png -threshold 60% screenshot-pp.png879 2. convert ref.png -threshold 60% ref-pp.png880 and then search for ref-pp.png in screenshot-pp.png. 
This results881 in black-and-white comparison (immune to slight color changes).882 If unsure about parameters, but you have a bitmap that should be883 detected in a screenshot, try obj.oirEngine().adjustParameters().884 Example:885 d.enableVisualLog("params.html")886 screenshot = d.refreshScreenshot()887 results = d.oirEngine().adjustParameters(screenshot, "mybitmap.png")888 if results:889 item, params = results[0]890 print "found %s with parameters:" % (item,)891 print "\n".join(sorted([" %s = %s" % (k, params[k]) for k in params]))892 print "verify:", d.verifyBitmap("mybitmap.png", **params)893 Notice, that you can force refreshScreenshot to load old screenshot:894 d.refreshScreenshot("old.png")895 """896 def __init__(self, *args, **engineDefaults):897 engineDefaults["colorMatch"] = engineDefaults.get("colorMatch", 1.0)898 engineDefaults["opacityLimit"] = engineDefaults.get("opacityLimit", 0.0)899 engineDefaults["area"] = engineDefaults.get("area", (0.0, 0.0, 1.0, 1.0))900 engineDefaults["limit"] = engineDefaults.get("limit", -1)901 engineDefaults["allowOverlap"] = engineDefaults.get("allowOverlap", False)902 engineDefaults["scale"] = engineDefaults.get("scale", 1.0)903 engineDefaults["bitmapPixelSize"] = engineDefaults.get("bitmapPixelSize", 0)904 engineDefaults["screenshotPixelSize"] = engineDefaults.get("screenshotPixelSize", 0)905 engineDefaults["preprocess"] = engineDefaults.get("preprocess", "")906 OirEngine.__init__(self, *args, **engineDefaults)907 self._openedImages = {}908 # openedRelatedScreenshots maps a screenshot filename to909 # a list of preprocessed screenshot objects. All those objects910 # must be closed when the screenshot is removed.911 self._openedRelatedScreenshots = {}912 self._findBitmapCache = {}913 def _addScreenshot(self, screenshot, **findBitmapDefaults):914 filename = screenshot.filename()915 self._openedImages[filename] = _e4gOpenImage(filename)916 # make sure size() is available, this can save an extra917 # opening of the screenshot file.918 if screenshot.size(allowReadingFile=False) == None:919 screenshot.setSize(_e4gImageDimensions(self._openedImages[filename]))920 self._findBitmapCache[filename] = {}921 def _removeScreenshot(self, screenshot):922 filename = screenshot.filename()923 if filename in self._openedRelatedScreenshots:924 for screenshotPP in self._openedRelatedScreenshots[filename]:925 self._removeScreenshot(screenshotPP)926 del self._openedRelatedScreenshots[filename]927 eye4graphics.closeImage(self._openedImages[filename])928 del self._openedImages[filename]929 del self._findBitmapCache[filename]930 def adjustParameters(self, screenshot, bitmap,931 scaleRange = [p/100.0 for p in range(110,210,10)],932 colorMatchRange = [p/100.0 for p in range(100,60,-10)],933 pixelSizeRange = range(2,5),934 resultCount = 1,935 **oirArgs):936 """937 Search for scale, colorMatch, bitmapPixelSize and938 screenshotPixelSize parameters that find the bitmap in the939 screenshot.940 Parameters:941 screenshot (Screenshot instance):942 screenshot that contains the bitmap.943 bitmap (string):944 bitmap filename.945 scaleRange (list of floats, optional):946 scales to go through.947 The default is: 1.1, 1.2, ... 2.0.948 colorMatchRange (list of floats, optional):949 colorMatches to try out.950 The default is: 1.0, 0.9, ... 0.7.951 pixelSizeRange (list of integers, optional):952 values for bitmapPixelSize and screenshotPixelSize.953 The default is: [2, 3]954 resultCount (integer, optional):955 number of parameter combinations to be found.956 The default is 1. 
0 is unlimited.957 other OIR parameters: as usual, refer to engine documentation.958 Returns list of pairs: (GUIItem, findParams), where959 GUIItem is the detected item (GUIItem.bbox() is the box around it),960 and findParams is a dictionary containing the parameters.961 """962 if not screenshot.filename() in self._findBitmapCache:963 self.addScreenshot(screenshot)964 ssAdded = True965 else:966 ssAdded = False967 retval = []968 for colorMatch in colorMatchRange:969 for pixelSize in pixelSizeRange:970 for scale in scaleRange:971 findParams = oirArgs.copy()972 findParams.update({"colorMatch": colorMatch,973 "limit": 1,974 "scale": scale,975 "bitmapPixelSize": pixelSize,976 "screenshotPixelSize": pixelSize})977 results = self.findBitmap(screenshot, bitmap,978 **findParams)979 if results:980 retval.append((results[0], findParams))981 if len(retval) == resultCount:982 return retval983 if ssAdded:984 self.removeScreenshot(screenshot)985 return retval986 def _findBitmap(self, screenshot, bitmap, colorMatch=None,987 opacityLimit=None, area=None, limit=None,988 allowOverlap=None, scale=None,989 bitmapPixelSize=None, screenshotPixelSize=None,990 preprocess=None):991 """992 Find items on the screenshot that match to bitmap.993 """994 ssFilename = screenshot.filename()995 ssSize = screenshot.size()996 cacheKey = (bitmap, colorMatch, opacityLimit, area, limit,997 scale, bitmapPixelSize, screenshotPixelSize, preprocess)998 if cacheKey in self._findBitmapCache[ssFilename]:999 return self._findBitmapCache[ssFilename][cacheKey]1000 self._findBitmapCache[ssFilename][cacheKey] = []1001 if preprocess:1002 ssFilenamePP = _ppFilename(ssFilename, preprocess)1003 bitmapPP = _ppFilename(bitmap, preprocess)1004 if not ssFilenamePP in self._openedImages:1005 _convert(ssFilename, preprocess, ssFilenamePP)1006 screenshotPP = Screenshot(ssFilenamePP)1007 self.addScreenshot(screenshotPP)1008 if not ssFilename in self._openedRelatedScreenshots:1009 self._openedRelatedScreenshots[ssFilename] = []1010 self._openedRelatedScreenshots[ssFilename].append(screenshotPP)1011 _convert(bitmap, preprocess, bitmapPP)1012 ssFilename = ssFilenamePP1013 bitmap = bitmapPP1014 self._findBitmapCache[ssFilename][cacheKey] = []1015 e4gIcon = _e4gOpenImage(bitmap)1016 matchCount = 01017 leftTopRightBottomZero = (_intCoords((area[0], area[1]), ssSize) +1018 _intCoords((area[2], area[3]), ssSize) +1019 (0,))1020 struct_area_bbox = _Bbox(*leftTopRightBottomZero)1021 struct_bbox = _Bbox(0, 0, 0, 0, 0)1022 contOpts = 0 # search for the first hit1023 try:1024 xscale, yscale = scale1025 except TypeError:1026 xscale = yscale = float(scale)1027 while True:1028 if matchCount == limit: break1029 result = eye4graphics.findNextIcon(1030 ctypes.byref(struct_bbox),1031 ctypes.c_void_p(self._openedImages[ssFilename]),1032 ctypes.c_void_p(e4gIcon),1033 0, # no fuzzy matching1034 ctypes.c_double(colorMatch),1035 ctypes.c_double(opacityLimit),1036 ctypes.byref(struct_area_bbox),1037 ctypes.c_int(contOpts),1038 ctypes.c_float(xscale),1039 ctypes.c_float(yscale),1040 ctypes.c_int(bitmapPixelSize),1041 ctypes.c_int(screenshotPixelSize))1042 contOpts = 1 # search for the next hit1043 if result < 0: break1044 bbox = (int(struct_bbox.left), int(struct_bbox.top),1045 int(struct_bbox.right), int(struct_bbox.bottom))1046 addToFoundItems = True1047 if allowOverlap == False:1048 for guiItem in self._findBitmapCache[ssFilename][cacheKey]:1049 itemLeft, itemTop, itemRight, itemBottom = guiItem.bbox()1050 if ((itemLeft <= bbox[0] <= itemRight or itemLeft <= bbox[2] <= 
itemRight) and1051 (itemTop <= bbox[1] <= itemBottom or itemTop <= bbox[3] <= itemBottom)):1052 if ((itemLeft < bbox[0] < itemRight or itemLeft < bbox[2] < itemRight) or1053 (itemTop < bbox[1] < itemBottom or itemTop < bbox[3] < itemBottom)):1054 addToFoundItems = False1055 break1056 if addToFoundItems:1057 self._findBitmapCache[ssFilename][cacheKey].append(1058 GUIItem("bitmap", bbox, ssFilename, bitmap=bitmap))1059 matchCount += 11060 eye4graphics.closeImage(e4gIcon)1061 return self._findBitmapCache[ssFilename][cacheKey]1062def _defaultOirEngine():1063 if _g_defaultOirEngine:1064 return _g_defaultOirEngine1065 else:1066 _Eye4GraphicsOirEngine().register(defaultOir=True)1067 return _g_defaultOirEngine1068class _OirRc(object):1069 """Optical image recognition settings for a directory.1070 Currently loaded from file .fmbtoirc in the directory.1071 There is once _OirRc instance per directory.1072 """1073 _filename = ".fmbtoirrc"1074 _cache = {}1075 @classmethod1076 def load(cls, directory):1077 if directory in cls._cache:1078 pass1079 elif os.access(os.path.join(directory, cls._filename), os.R_OK):1080 cls._cache[directory] = cls(directory)1081 else:1082 cls._cache[directory] = None1083 return cls._cache[directory]1084 def __init__(self, directory):1085 self._key2value = {}1086 curdir = "."1087 self._dir2oirArgsList = {curdir: [{}]}1088 oirRcFilepath = os.path.join(directory, _OirRc._filename)1089 for line in file(oirRcFilepath):1090 line = line.strip()1091 if line == "" or line[0] in "#;":1092 continue1093 elif line == "alternative":1094 self._dir2oirArgsList[curdir].append({}) # new set of args1095 self._key2value = {}1096 elif "=" in line:1097 key, value_str = line.split("=", 1)1098 value_str = value_str.strip()1099 if key.strip().lower() == "includedir":1100 curdir = value_str1101 self._dir2oirArgsList[curdir] = [{}]1102 if not os.access(curdir, os.X_OK):1103 _fmbtLog("warning: %s: inaccessible includedir %s" %1104 (repr(oirRcFilepath), curdir))1105 else:1106 try: value = int(value_str)1107 except ValueError:1108 try: value = float(value_str)1109 except ValueError:1110 if value_str[0] in "([\"'": # tuple, list, string1111 value = eval(value_str)1112 else:1113 value = value_str1114 self._dir2oirArgsList[curdir][-1][key.strip()] = value1115 def searchDirs(self):1116 return self._dir2oirArgsList.keys()1117 def oirArgsList(self, searchDir):1118 return self._dir2oirArgsList[searchDir]1119class _Paths(object):1120 def __init__(self, bitmapPath, relativeRoot):1121 self.bitmapPath = bitmapPath1122 self.relativeRoot = relativeRoot1123 self._oirAL = {} # OIR parameters for bitmaps1124 self._abspaths = {} # bitmap to abspaths1125 def abspaths(self, bitmap, checkReadable=True):1126 if bitmap in self._abspaths:1127 return self._abspaths[bitmap]1128 if bitmap.startswith("/"):1129 path = [os.path.dirname(bitmap)]1130 bitmap = os.path.basename(bitmap)1131 else:1132 path = []1133 for singleDir in self.bitmapPath.split(":"):1134 if singleDir and not singleDir.startswith("/"):1135 path.append(os.path.join(self.relativeRoot, singleDir))1136 else:1137 path.append(singleDir)1138 for singleDir in path:1139 candidate = os.path.join(singleDir, bitmap)1140 if not checkReadable or os.access(candidate, os.R_OK):1141 oirRc = _OirRc.load(os.path.dirname(candidate))1142 if oirRc:1143 self._oirAL[candidate] = oirRc.oirArgsList(".")1144 else:1145 self._oirAL[candidate] = [{}]1146 self._oirAL[bitmap] = self._oirAL[candidate]1147 break1148 else:1149 # bitmap is not in singleDir, but there might be .fmbtoirrc1150 oirRc = 
_OirRc.load(os.path.dirname(candidate))1151 if oirRc:1152 for d in oirRc.searchDirs():1153 if d.startswith("/"):1154 candidate = os.path.join(d, os.path.basename(bitmap))1155 else:1156 candidate = os.path.join(os.path.dirname(candidate), d, os.path.basename(bitmap))1157 if os.access(candidate, os.R_OK):1158 self._oirAL[candidate] = oirRc.oirArgsList(d)1159 self._oirAL[bitmap] = self._oirAL[candidate]1160 break1161 if checkReadable and not os.access(candidate, os.R_OK):1162 raise ValueError('Bitmap "%s" not readable in bitmapPath %s' % (bitmap, ':'.join(path)))1163 self._abspaths[bitmap] = [candidate]1164 # check for alternative bitmaps1165 try:1166 candidate_ext = "." + candidate.rsplit(".", 1)[1]1167 except IndexError:1168 candidate_ext = ""1169 alt_candidates = glob.glob(candidate + ".alt*" + candidate_ext)1170 self._abspaths[bitmap].extend(alt_candidates)1171 return self._abspaths[bitmap]1172 def oirArgsList(self, bitmap):1173 """Returns list of alternative OIR parameters associated to the bitmap1174 by appropriate .fmbtoirrc file1175 """1176 if bitmap in self._oirAL:1177 return self._oirAL[bitmap]1178 else:1179 absBitmap = self.abspaths(bitmap)[0]1180 if absBitmap in self._oirAL:1181 return self._oirAL[absBitmap]1182 else:1183 return None1184class GUITestInterface(object):1185 def __init__(self, ocrEngine=None, oirEngine=None, rotateScreenshot=None):1186 self._paths = _Paths("", "")1187 self._conn = None1188 self._lastScreenshot = None1189 self._longPressHoldTime = 2.01190 self._longTapHoldTime = 2.01191 self._ocrEngine = None1192 self._oirEngine = None1193 self._rotateScreenshot = rotateScreenshot1194 self._screenshotLimit = None1195 self._screenshotRefCount = {} # filename -> Screenshot object ref count1196 self._screenshotArchiveMethod = "resize"1197 if ocrEngine == None:1198 self.setOcrEngine(_defaultOcrEngine())1199 else:1200 if type(ocrEngine) == int:1201 self.setOcrEngine(_g_ocrEngines[ocrEngine])1202 else:1203 self.setOcrEngine(ocrEngine)1204 if oirEngine == None:1205 self.setOirEngine(_defaultOirEngine())1206 else:1207 if type(oirEngine) == int:1208 self.setOirEngine(_g_oirEngines[oirEngine])1209 else:1210 self.setOirEngine(oirEngine)1211 self._screenshotDir = None1212 self._screenshotDirDefault = "screenshots"1213 self._screenshotSubdir = None1214 self._screenshotSubdirDefault = ""1215 self._screenSize = None1216 self._tapDefaults = {}1217 self._visualLog = None1218 self._visualLogFileObj = None1219 self._visualLogFilenames = set()1220 def bitmapPath(self):1221 """1222 Returns bitmapPath from which bitmaps are searched for.1223 """1224 return self._paths.bitmapPath1225 def bitmapPathRoot(self):1226 """1227 Returns the path that prefixes all relative directories in1228 bitmapPath.1229 """1230 return self._paths.relativeRoot1231 def close(self):1232 self._lastScreenshot = None1233 if self._visualLog:1234 if (hasattr(self._visualLog._outFileObj, "name") and1235 self._visualLog._outFileObj.name in self._visualLogFilenames):1236 self._visualLogFilenames.remove(self._visualLog._outFileObj.name)1237 self._visualLog.close()1238 if self._visualLogFileObj:1239 self._visualLogFileObj.close()1240 self._visualLog = None1241 def connection(self):1242 """1243 Returns GUITestConnection instance or None if not available.1244 See also existingConnection().1245 """1246 return self._conn1247 def drag(self, (x1, y1), (x2, y2), delayBetweenMoves=0.01,1248 delayBeforeMoves=0, delayAfterMoves=0, movePoints=20,1249 button=_USE_DEFAULTS):1250 """1251 Touch the screen on coordinates (x1, y1), drag 
along straight1252 line to coordinates (x2, y2), and raise fingertip.1253 Parameters:1254 coordinates (floats in range [0.0, 1.0] or integers):1255 floating point coordinates in range [0.0, 1.0] are1256 scaled to full screen width and height, others are1257 handled as absolute coordinate values.1258 delayBeforeMoves (float, optional):1259 seconds to wait after touching and before dragging.1260 If negative, starting touch event is not sent.1261 delayBetweenMoves (float, optional):1262 seconds to wait when moving between points when1263 dragging.1264 delayAfterMoves (float, optional):1265 seconds to wait after dragging, before raising1266 fingertip.1267 If negative, fingertip is not raised.1268 movePoints (integer, optional):1269 the number of intermediate move points between end1270 points of the line.1271 button (integer, optional):1272 send drag using given mouse button. The default is None.1273 Returns True on success, False if sending input failed.1274 """1275 x1, y1 = self.intCoords((x1, y1))1276 x2, y2 = self.intCoords((x2, y2))1277 extraArgs = {}1278 if button != _USE_DEFAULTS:1279 extraArgs["button"] = button1280 if delayBeforeMoves >= 0:1281 if not self.existingConnection().sendTouchDown(x1, y1, **extraArgs):1282 return False1283 if delayBeforeMoves > 0:1284 time.sleep(delayBeforeMoves)1285 else:1286 time.sleep(delayBetweenMoves)1287 for i in xrange(0, movePoints):1288 nx = x1 + int(round(((x2 - x1) / float(movePoints+1)) * (i+1)))1289 ny = y1 + int(round(((y2 - y1) / float(movePoints+1)) * (i+1)))1290 if not self.existingConnection().sendTouchMove(nx, ny, **extraArgs):1291 return False1292 time.sleep(delayBetweenMoves)1293 if delayAfterMoves > 0:1294 self.existingConnection().sendTouchMove(x2, y2, **extraArgs)1295 time.sleep(delayAfterMoves)1296 if delayAfterMoves >= 0:1297 if self.existingConnection().sendTouchUp(x2, y2, **extraArgs):1298 return True1299 else:1300 return False1301 else:1302 return True1303 def enableVisualLog(self, filenameOrObj,1304 screenshotWidth="240", thumbnailWidth="",1305 timeFormat="%s.%f", delayedDrawing=False,1306 copyBitmapsToScreenshotDir=False):1307 """1308 Start writing visual HTML log on this device object.1309 Parameters:1310 filenameOrObj (string or a file object)1311 The file to which the log is written. Log can be1312 split into multiple html files by using strftime1313 conversion specifications in filenameOrObj. For1314 instance, "%a-%H.html" will log to "Thu-16.html" on1315 Thurday from 4 pm to 5 pm.1316 screenshotWidth (string, optional)1317 Width of screenshot images in HTML.1318 The default is "240".1319 thumbnailWidth (string, optional)1320 Width of thumbnail images in HTML.1321 The default is "", that is, original size.1322 timeFormat (string, optional)1323 Timestamp format. The default is "%s.%f".1324 Refer to strftime documentation.1325 delayedDrawing (boolean, optional)1326 If True, screenshots with highlighted icons, words1327 and gestures are not created during the1328 test. Instead, only shell commands are stored for1329 later execution. The value True can significantly1330 save test execution time and disk space. The default1331 is False.1332 copyBitmapsToScreenshotDir (boolean, optional)1333 If True, every logged bitmap file will be copied to1334 bitmaps directory in screenshotDir. The default is1335 False.1336 """1337 if type(filenameOrObj) == str:1338 try:1339 outFileObj = file(filenameOrObj, "w")1340 self._visualLogFileObj = outFileObj1341 except Exception, e:1342 _fmbtLog('Failed to open file "%s" for logging.' 
% (filenameOrObj,))1343 raise1344 else:1345 outFileObj = filenameOrObj1346 # someone else opened the file => someone else will close it1347 self._visualLogFileObj = None1348 if hasattr(outFileObj, "name"):1349 if outFileObj.name in self._visualLogFilenames:1350 raise ValueError('Visual logging on file "%s" is already enabled' % (outFileObj.name,))1351 else:1352 self._visualLogFilenames.add(outFileObj.name)1353 self._visualLog = _VisualLog(self, outFileObj, screenshotWidth,1354 thumbnailWidth, timeFormat, delayedDrawing,1355 copyBitmapsToScreenshotDir)1356 def existingConnection(self):1357 """1358 Returns GUITestConnection, raises ConnectionError if not available.1359 See also connection()1360 """1361 if self._conn:1362 return self._conn1363 else:1364 raise ConnectionError("not connected")1365 def visualLog(self, *args):1366 """1367 Writes parameters to the visual log, given that visual logging is1368 enabled.1369 """1370 pass1371 def intCoords(self, (x, y)):1372 """1373 Convert floating point coordinate values in range [0.0, 1.0] to1374 screen coordinates.1375 """1376 width, height = self.screenSize()1377 return _intCoords((x, y), (width, height))1378 def relCoords(self, (x, y)):1379 """1380 Convert coordinates (x, y) to relative coordinates in range [0.0, 1.0].1381 """1382 width, height = self.screenSize()1383 ix, iy = _intCoords((x, y), (width, height))1384 return (float(ix)/width, float(iy)/height)1385 def itemOnScreen(self, guiItem):1386 """1387 Returns True if bbox of guiItem is non-empty and at least1388 partially on screen, otherwise False.1389 """1390 maxX, maxY = self.screenSize()1391 return _boxOnRegion(guiItem.bbox(), (0, 0, maxX, maxY))1392 def ocrEngine(self):1393 """1394 Returns the OCR engine that is used by default for new1395 screenshots.1396 """1397 return self._ocrEngine1398 def oirEngine(self):1399 """1400 Returns the OIR engine that is used by default for new1401 screenshots.1402 """1403 return self._oirEngine1404 def pressKey(self, keyName, long=False, hold=0.0, modifiers=None):1405 """1406 Press a key.1407 Parameters:1408 keyName (string):1409 the name of the key, like KEYCODE_HOME.1410 long (boolean, optional):1411 if True, press the key for long time.1412 hold (float, optional):1413 time in seconds to hold the key down.1414 modifiers (list of strings, optional)1415 modifier key(s) to be pressed at the same time.1416 """1417 extraParams = {}1418 if modifiers != None:1419 extraParams['modifiers'] = modifiers1420 if long and hold == 0.0:1421 hold = self._longPressHoldTime1422 if hold > 0.0:1423 try:1424 assert self.existingConnection().sendKeyDown(keyName, **extraParams)1425 time.sleep(hold)1426 assert self.existingConnection().sendKeyUp(keyName, **extraParams)1427 except AssertionError:1428 return False1429 return True1430 return self.existingConnection().sendPress(keyName, **extraParams)1431 def _newScreenshotFilepath(self):1432 """1433 Returns path and filename for next screenshot file.1434 Makes sure the file can be written (necessary directory1435 structure exists).1436 """1437 t = datetime.datetime.now()1438 if not self._conn:1439 target = ""1440 else:1441 target = self._conn.target()1442 filename = _filenameTimestamp(t) + "-" + target + ".png"1443 screenshotPath = self.screenshotDir()1444 if self.screenshotSubdir():1445 screenshotPath = os.path.join(screenshotPath,1446 self.screenshotSubdir())1447 screenshotPath = fmbt.formatTime(screenshotPath, t)1448 filepath = os.path.join(screenshotPath, filename)1449 necessaryDirs = os.path.dirname(filepath)1450 if 
necessaryDirs and not os.path.isdir(necessaryDirs):1451 try:1452 os.makedirs(necessaryDirs)1453 except Exception, e:1454 _fmbtLog('creating directory "%s" for screenshots failed: %s' %1455 (necessaryDirs, e))1456 raise1457 return filepath1458 def _archiveScreenshot(self, filepath):1459 if self._screenshotArchiveMethod == "remove":1460 try:1461 os.remove(filepath)1462 except IOError:1463 pass1464 elif self._screenshotArchiveMethod.startswith("resize"):1465 if self._screenshotArchiveMethod == "resize":1466 convertArgs = ["-resize",1467 "%sx" % (int(self.screenSize()[0]) / 4,)]1468 else:1469 widthHeight = self._screenshotArchiveMethod.split()[1]1470 convertArgs = ["-resize", widthHeight]1471 subprocess.call([fmbt_config.imagemagick_convert, filepath] + convertArgs + [filepath])1472 def _archiveScreenshots(self):1473 """1474 Archive screenshot files if screenshotLimit has been exceeded.1475 """1476 freeScreenshots = [filename1477 for (filename, refCount) in self._screenshotRefCount.iteritems()1478 if refCount == 0]1479 archiveCount = len(freeScreenshots) - self._screenshotLimit1480 if archiveCount > 0:1481 freeScreenshots.sort(reverse=True) # archive oldest1482 while archiveCount > 0:1483 toBeArchived = freeScreenshots.pop()1484 try:1485 self._archiveScreenshot(toBeArchived)1486 except IOError:1487 pass1488 del self._screenshotRefCount[toBeArchived]1489 archiveCount -= 11490 def refreshScreenshot(self, forcedScreenshot=None, rotate=None):1491 """1492 Takes new screenshot and updates the latest screenshot object.1493 Parameters:1494 forcedScreenshot (Screenshot or string, optional):1495 use given screenshot object or image file, do not1496 take new screenshot.1497 rotate (integer, optional):1498 rotate screenshot by given number of degrees. This1499 overrides constructor rotateScreenshot parameter1500 value. The default is None (no override).1501 Returns Screenshot object, and makes the same object "the1502 latest screenshot" that is used by all *Bitmap and *OcrText1503 methods. 
Returns None if screenshot cannot be taken.1504 """1505 if forcedScreenshot != None:1506 if type(forcedScreenshot) == str:1507 self._lastScreenshot = Screenshot(1508 screenshotFile=forcedScreenshot,1509 paths = self._paths,1510 ocrEngine=self._ocrEngine,1511 oirEngine=self._oirEngine,1512 screenshotRefCount=self._screenshotRefCount)1513 else:1514 self._lastScreenshot = forcedScreenshot1515 elif self._conn: # There is a connection, get new screenshot1516 if self.screenshotDir() == None:1517 self.setScreenshotDir(self._screenshotDirDefault)1518 if self.screenshotSubdir() == None:1519 self.setScreenshotSubdir(self._screenshotSubdirDefault)1520 screenshotFile = self._newScreenshotFilepath()1521 if self.existingConnection().recvScreenshot(screenshotFile):1522 # New screenshot successfully received from device1523 if rotate == None:1524 rotate = self._rotateScreenshot1525 if rotate != None and rotate != 0:1526 subprocess.call([fmbt_config.imagemagick_convert, screenshotFile, "-rotate", str(rotate), screenshotFile])1527 self._lastScreenshot = Screenshot(1528 screenshotFile=screenshotFile,1529 paths = self._paths,1530 ocrEngine=self._ocrEngine,1531 oirEngine=self._oirEngine,1532 screenshotRefCount=self._screenshotRefCount)1533 else:1534 self._lastScreenshot = None1535 else: # No connection, cannot get a screenshot1536 self._lastScreenshot = None1537 # Make sure unreachable Screenshot instances are released from1538 # memory.1539 gc.collect()1540 for obj in gc.garbage:1541 if isinstance(obj, Screenshot):1542 if hasattr(obj, "_logCallReturnValue"):1543 # Some methods have been wrapped by visual1544 # log. Break reference cycles to let gc collect1545 # them.1546 try:1547 del obj.findItemsByBitmap1548 except:1549 pass1550 try:1551 del obj.findItemsByOcr1552 except:1553 pass1554 del gc.garbage[:]1555 gc.collect()1556 # If screenshotLimit has been set, archive old screenshot1557 # stored on the disk.1558 if self._screenshotLimit != None and self._screenshotLimit >= 0:1559 self._archiveScreenshots()1560 return self._lastScreenshot1561 def screenshot(self):1562 """1563 Returns the latest Screenshot object.1564 Use refreshScreenshot() to get a new screenshot.1565 """1566 return self._lastScreenshot1567 def screenshotArchiveMethod(self):1568 """1569 Returns how screenshots exceeding screenshotLimit are archived.1570 """1571 return self._screenshotArchiveMethod1572 def screenshotDir(self):1573 """1574 Returns the directory under which new screenshots are saved.1575 """1576 return self._screenshotDir1577 def screenshotLimit(self):1578 """1579 Returns the limit after which unused screenshots are archived.1580 """1581 return self._screenshotLimit1582 def screenshotSubdir(self):1583 """1584 Returns the subdirectory in screenshotDir under which new1585 screenshots are stored.1586 """1587 return self._screenshotSubdir1588 def screenSize(self):1589 """1590 Returns screen size in pixels in tuple (width, height).1591 """1592 if self._lastScreenshot != None:1593 self._screenSize = self._lastScreenshot.size()1594 if self._screenSize == None:1595 if self._lastScreenshot == None:1596 try:1597 if self.refreshScreenshot():1598 self._screenSize = self._lastScreenshot.size()1599 self._lastScreenshot = None1600 except Exception:1601 pass1602 if (self._screenSize == None and1603 hasattr(self.existingConnection(), "recvScreenSize")):1604 self._screenSize = self.existingConnection().recvScreenSize()1605 else:1606 self._screenSize = self._lastScreenshot.size()1607 return self._screenSize1608 def setBitmapPath(self, bitmapPath, 
rootForRelativePaths=None):1609 """1610 Set new path for finding bitmaps.1611 Parameters:1612 bitmapPath (string)1613 colon-separated list of directories from which1614 bitmap methods look for bitmap files.1615 rootForRelativePaths (string, optional)1616 path that will prefix all relative paths in1617 bitmapPath.1618 Example:1619 gui.setBitmapPath("bitmaps:icons:/tmp", "/home/X")1620 gui.tapBitmap("start.png")1621 will look for /home/X/bitmaps/start.png,1622 /home/X/icons/start.png and /tmp/start.png, in this order.1623 """1624 self._paths.bitmapPath = bitmapPath1625 if rootForRelativePaths != None:1626 self._paths.relativeRoot = rootForRelativePaths1627 def setConnection(self, conn):1628 """1629 Set the connection object that performs actions on real target.1630 Parameters:1631 conn (GUITestConnection instance):1632 The connection to be used.1633 """1634 self._conn = conn1635 def setOcrEngine(self, ocrEngine):1636 """1637 Set OCR (optical character recognition) engine that will be1638 used by default in new screenshots.1639 Returns previous default.1640 """1641 prevDefault = self._ocrEngine1642 self._ocrEngine = ocrEngine1643 return prevDefault1644 def setOirEngine(self, oirEngine):1645 """1646 Set OIR (optical image recognition) engine that will be used1647 by default in new screenshots.1648 Returns previous default.1649 """1650 prevDefault = self._oirEngine1651 self._oirEngine = oirEngine1652 return prevDefault1653 def setScreenshotArchiveMethod(self, screenshotArchiveMethod):1654 """1655 Set method for archiving screenshots when screenshotLimit is exceeded.1656 Parameters:1657 screenshotArchiveMethod (string)1658 Supported methods are "resize [WxH]" and "remove"1659 where W and H are integers that define maximum width and1660 height for an archived screenshot.1661 The default method is "resize".1662 """1663 if screenshotArchiveMethod == "remove":1664 pass1665 elif screenshotArchiveMethod == "resize":1666 pass1667 elif screenshotArchiveMethod.startswith("resize"):1668 try:1669 w, h = screenshotArchiveMethod.split(" ")[1].split("x")1670 except:1671 raise ValueError("Invalid resize syntax")1672 try:1673 w, h = int(w), int(h)1674 except:1675 raise ValueError(1676 "Invalid resize width or height, integer expected")1677 else:1678 raise ValueError('Unknown archive method "%s"' %1679 (screenshotArchiveMethod,))1680 self._screenshotArchiveMethod = screenshotArchiveMethod1681 def setScreenshotDir(self, screenshotDir):1682 self._screenshotDir = screenshotDir1683 self._newScreenshotFilepath() # make directories1684 def setScreenshotLimit(self, screenshotLimit):1685 """1686 Set maximum number for unarchived screenshots.1687 Parameters:1688 screenshotLimit (integer)1689 Maximum number of unarchived screenshots that are1690 free for archiving (that is, not referenced by test code).1691 The default is None, that is, there is no limit and1692 screenshots are never archived.1693 See also:1694 setScreenshotArchiveMethod()1695 """1696 self._screenshotLimit = screenshotLimit1697 def setScreenshotSubdir(self, screenshotSubdir):1698 """1699 Define a subdirectory under screenshotDir() for screenshot files.1700 Parameters:1701 screenshotSubdir (string)1702 Name of a subdirectory. The name should contain1703 conversion specifiers supported by strftime.1704 Example:1705 sut.setScreenshotSubdir("%m-%d-%H")1706 A screenshot taken on June 20th at 4.30pm will1707 be stored to screenshotDir/06-20-16. 
That is,1708 screenshots taken on different hours will be1709 stored to different subdirectories.1710 By default, all screenshots are stored directly to screenshotDir().1711 """1712 self._screenshotSubdir = screenshotSubdir1713 def setTapDefaults(self, **tapDefaults):1714 """1715 Define default parameters for tap methods.1716 Parameters:1717 **tapDefaults (keyword arguments):1718 default arguments to be used in sendTap call unless1719 explicitely overridden by user.1720 Example:1721 sut.setTapDefaults(button=1)1722 after this sut.tapBitmap("ref.png") does the same as1723 sut.tapBitmap("ref.png", button=1) did before.1724 """1725 self._tapDefaults.update(tapDefaults)1726 def swipe(self, (x, y), direction, distance=1.0, **dragArgs):1727 """1728 swipe starting from coordinates (x, y) to given direction.1729 Parameters:1730 coordinates (floats in range [0.0, 1.0] or integers):1731 floating point coordinates in range [0.0, 1.0] are1732 scaled to full screen width and height, others are1733 handled as absolute coordinate values.1734 direction (string or integer):1735 Angle (0..360 degrees), or "north", "south", "east"1736 and "west" (corresponding to angles 90, 270, 0 and1737 180).1738 distance (float, optional):1739 Swipe distance. Values in range [0.0, 1.0] are1740 scaled to the distance from the coordinates to the1741 edge of the screen. The default is 1.0: swipe to the1742 edge of the screen.1743 rest of the parameters: refer to drag documentation.1744 Returns True on success, False if sending input failed.1745 """1746 if type(direction) == str:1747 d = direction.lower()1748 if d in ["n", "north"]: direction = 901749 elif d in ["s", "south"]: direction = 2701750 elif d in ["e", "east"]: direction = 01751 elif d in ["w", "west"]: direction = 1801752 else: raise ValueError('Illegal direction "%s"' % (direction,))1753 direction = direction % 3601754 x, y = self.intCoords((x, y))1755 dirRad = math.radians(direction)1756 distToEdge = _edgeDistanceInDirection((x, y), self.screenSize(), direction)1757 if distance > 1.0: distance = float(distance) / distToEdge1758 x2 = int(x + math.cos(dirRad) * distToEdge * distance)1759 y2 = int(y - math.sin(dirRad) * distToEdge * distance)1760 return self.drag((x, y), (x2, y2), **dragArgs)1761 def swipeBitmap(self, bitmap, direction, distance=1.0, **dragAndOirArgs):1762 """1763 swipe starting from bitmap to given direction.1764 Parameters:1765 bitmap (string)1766 bitmap from which swipe starts1767 direction, distance1768 refer to swipe documentation.1769 startPos, startOffset1770 refer to swipeItem documentation.1771 optical image recognition arguments (optional)1772 refer to help(obj.oirEngine()).1773 delayBeforeMoves, delayBetweenMoves, delayAfterMoves,1774 movePoints1775 refer to drag documentation.1776 Returns True on success, False if sending input failed.1777 """1778 assert self._lastScreenshot != None, "Screenshot required."1779 dragArgs, rest = _takeDragArgs(dragAndOirArgs)1780 oirArgs, _ = _takeOirArgs(self._lastScreenshot, rest, thatsAll=True)1781 oirArgs["limit"] = 11782 items = self._lastScreenshot.findItemsByBitmap(bitmap, **oirArgs)1783 if len(items) == 0:1784 return False1785 return self.swipeItem(items[0], direction, distance, **dragArgs)1786 def swipeItem(self, viewItem, direction, distance=1.0, **dragArgs):1787 """1788 swipe starting from viewItem to given direction.1789 Parameters:1790 viewItem (ViewItem)1791 item from which swipe starts1792 direction, distance1793 refer to swipe documentation.1794 startPos (pair of floats (x, y)):1795 position 
of starting swipe, relative to the item.1796 (0.0, 0.0) is the top-left corner,1797 (1.0, 0.0) is the top-right corner,1798 (1.0, 1.0) is the lower-right corner.1799 Values < 0.0 and > 1.0 start swiping from coordinates1800 outside the item.1801 The default is (0.5, 0.5), in the middle of the item.1802 startOffset (pair of integers or floats (x, y)):1803 offset of swipe start coordinates. Integers are1804 pixels, floats are relative to screensize.1805 Example:1806 startOffset=(0, 0.1) will keep the X coordinate1807 unchagned and add 10 % of screensize to Y.1808 delayBeforeMoves, delayBetweenMoves, delayAfterMoves,1809 movePoints1810 refer to drag documentation.1811 Returns True on success, False if sending input failed.1812 """1813 if "startPos" in dragArgs:1814 posX, posY = dragArgs["startPos"]1815 del dragArgs["startPos"]1816 x1, y1, x2, y2 = viewItem.bbox()1817 swipeCoords = (x1 + (x2-x1) * posX,1818 y1 + (y2-y1) * posY)1819 else:1820 swipeCoords = viewItem.coords()1821 if "startOffset" in dragArgs:1822 offX, offY = dragArgs["startOffset"]1823 del dragArgs["startOffset"]1824 x, y = swipeCoords1825 if isinstance(offX, int):1826 x += offX1827 elif isinstance(offX, float):1828 width, _ = self.screenSize()1829 x += offX * width1830 else:1831 raise TypeError('invalid offset %s (int or float expected)' %1832 (repr(offX),))1833 if isinstance(offY, int):1834 y += offY1835 elif isinstance(offY, float):1836 _, height = self.screenSize()1837 y += offY * height1838 else:1839 raise TypeError('invalid offset %s (int or float expected)' %1840 (repr(offY),))1841 swipeCoords = (x, y)1842 return self.swipe(swipeCoords, direction, distance, **dragArgs)1843 def swipeOcrText(self, text, direction, distance=1.0, **dragAndOcrArgs):1844 """1845 Find text from the latest screenshot using OCR, and swipe it.1846 Parameters:1847 text (string):1848 the text to be swiped.1849 direction, distance1850 refer to swipe documentation.1851 startPos1852 refer to swipeItem documentation.1853 delayBeforeMoves, delayBetweenMoves, delayAfterMoves,1854 movePoints1855 refer to drag documentation.1856 OCR engine specific arguments1857 refer to help(obj.ocrEngine())1858 Returns True on success, False otherwise.1859 """1860 assert self._lastScreenshot != None, "Screenshot required."1861 dragArgs, rest = _takeDragArgs(dragAndOcrArgs)1862 ocrArgs, _ = _takeOcrArgs(self._lastScreenshot, rest, thatsAll=True)1863 items = self._lastScreenshot.findItemsByOcr(text, **ocrArgs)1864 if len(items) == 0:1865 return False1866 return self.swipeItem(items[0], direction, distance, **dragArgs)1867 def tap(self, (x, y), long=_USE_DEFAULTS, hold=_USE_DEFAULTS,1868 count=_USE_DEFAULTS, delayBetweenTaps=_USE_DEFAULTS,1869 button=_USE_DEFAULTS):1870 """1871 Tap screen on coordinates (x, y).1872 Parameters:1873 coordinates (floats in range [0.0, 1.0] or integers):1874 floating point coordinates in range [0.0, 1.0] are1875 scaled to full screen width and height, others are1876 handled as absolute coordinate values.1877 count (integer, optional):1878 number of taps to the coordinates. The default is 1.1879 delayBetweenTaps (float, optional):1880 time (seconds) between taps when count > 1.1881 The default is 0.175 (175 ms).1882 long (boolean, optional):1883 if True, touch the screen for a long time.1884 hold (float, optional):1885 time in seconds to touch the screen.1886 button (integer, optional):1887 send tap using given mouse button. 
The default is1888 None: button parameter is not passed to the1889 underlying connection layer (sendTouchDown etc.),1890 the default in the underlying layer will be used.1891 Note that all connection layers may not support1892 this parameter.1893 Returns True if successful, otherwise False.1894 """1895 if long == _USE_DEFAULTS:1896 long = self._tapDefaults.get("long", False)1897 if hold == _USE_DEFAULTS:1898 hold = self._tapDefaults.get("hold", 0.0)1899 if count == _USE_DEFAULTS:1900 count = self._tapDefaults.get("count", 1)1901 if delayBetweenTaps == _USE_DEFAULTS:1902 delayBetweenTaps = self._tapDefaults.get("delayBetweenTaps", 0.175)1903 if button == _USE_DEFAULTS:1904 button = self._tapDefaults.get("button", None)1905 x, y = self.intCoords((x, y))1906 count = int(count)1907 if long and hold == 0.0:1908 hold = self._longTapHoldTime1909 extraParams = {}1910 if button != None:1911 extraParams['button'] = button1912 if count == 0:1913 self.existingConnection().sendTouchMove(x, y)1914 while count > 0:1915 if hold > 0.0:1916 try:1917 assert self.existingConnection().sendTouchDown(x, y, **extraParams)1918 time.sleep(hold)1919 assert self.existingConnection().sendTouchUp(x, y, **extraParams)1920 except AssertionError:1921 return False1922 else:1923 if not self.existingConnection().sendTap(x, y, **extraParams):1924 return False1925 count = int(count) - 11926 return True1927 def tapBitmap(self, bitmap, **tapAndOirArgs):1928 """1929 Find a bitmap from the latest screenshot, and tap it.1930 Parameters:1931 bitmap (string):1932 filename of the bitmap to be tapped.1933 optical image recognition arguments (optional)1934 refer to help(obj.oirEngine()).1935 tapPos (pair of floats (x,y)):1936 refer to tapItem documentation.1937 tapOffset (pair of floats or integers (x, y)):1938 refer to tapItem documentation.1939 long, hold, count, delayBetweenTaps, button (optional):1940 refer to tap documentation.1941 Returns True if successful, otherwise False.1942 """1943 assert self._lastScreenshot != None, "Screenshot required."1944 tapArgs, rest = _takeTapArgs(tapAndOirArgs)1945 oirArgs, _ = _takeOirArgs(self._lastScreenshot, rest, thatsAll=True)1946 oirArgs["limit"] = 11947 items = self._lastScreenshot.findItemsByBitmap(bitmap, **oirArgs)1948 if len(items) == 0:1949 return False1950 return self.tapItem(items[0], **tapArgs)1951 def tapDefaults(self):1952 """1953 Returns default parameters for sendTap method.1954 See also setTapDefaults.1955 """1956 return self._tapDefaults1957 def tapItem(self, viewItem, **tapArgs):1958 """1959 Tap the center point of viewItem.1960 Parameters:1961 viewItem (GUIItem object):1962 item to be tapped, possibly returned by1963 findItemsBy... methods in Screenshot or View.1964 tapPos (pair of floats (x, y)):1965 position to tap, relative to the item.1966 (0.0, 0.0) is the top-left corner,1967 (1.0, 0.0) is the top-right corner,1968 (1.0, 1.0) is the lower-right corner.1969 Values < 0 and > 1 tap coordinates outside the item.1970 The default is (0.5, 0.5), in the middle of the item.1971 tapOffset (pair of floats or integers (x, y)):1972 offset of tap coordinates. 
Integers are1973 pixels, floats are relative to screensize.1974 Example:1975 tapOffset=(0, 0.1) will keep the X coordinate1976 unchagned and add 10 % of screensize to Y.1977 long, hold, count, delayBetweenTaps, button (optional):1978 refer to tap documentation.1979 """1980 if "tapPos" in tapArgs:1981 posX, posY = tapArgs["tapPos"]1982 del tapArgs["tapPos"]1983 x1, y1, x2, y2 = viewItem.bbox()1984 tapCoords = (x1 + (x2-x1) * posX,1985 y1 + (y2-y1) * posY)1986 else:1987 tapCoords = viewItem.coords()1988 if "tapOffset" in tapArgs:1989 offX, offY = tapArgs["tapOffset"]1990 del tapArgs["tapOffset"]1991 x, y = tapCoords1992 if isinstance(offX, int):1993 x += offX1994 elif isinstance(offX, float):1995 width, _ = self.screenSize()1996 x += offX * width1997 else:1998 raise TypeError('invalid offset %s (int or float expected)' %1999 (repr(offX),))2000 if isinstance(offY, int):2001 y += offY2002 elif isinstance(offY, float):2003 _, height = self.screenSize()2004 y += offY * height2005 else:2006 raise TypeError('invalid offset %s (int or float expected)' %2007 (repr(offY),))2008 tapCoords = (x, y)2009 return self.tap(tapCoords, **tapArgs)2010 def tapOcrText(self, text, appearance=0, **tapAndOcrArgs):2011 """2012 Find text from the latest screenshot using OCR, and tap it.2013 Parameters:2014 text (string):2015 the text to be tapped.2016 long, hold, count, delayBetweenTaps, button (optional):2017 refer to tap documentation.2018 OCR engine specific arguments2019 refer to help(obj.ocrEngine())2020 Returns True if successful, otherwise False.2021 """2022 assert self._lastScreenshot != None, "Screenshot required."2023 tapArgs, rest = _takeTapArgs(tapAndOcrArgs)2024 ocrArgs, _ = _takeOcrArgs(self._lastScreenshot, rest, thatsAll=True)2025 items = self._lastScreenshot.findItemsByOcr(text, **ocrArgs)2026 if len(items) <= appearance:2027 return False2028 return self.tapItem(items[appearance], **tapArgs)2029 def type(self, text):2030 """2031 Type text.2032 """2033 return self.existingConnection().sendType(text)2034 def verifyOcrText(self, text, **ocrArgs):2035 """2036 Verify using OCR that the last screenshot contains the text.2037 Parameters:2038 text (string):2039 text to be verified.2040 OCR engine specific arguments2041 refer to help(obj.ocrEngine())2042 Returns True if successful, otherwise False.2043 """2044 assert self._lastScreenshot != None, "Screenshot required."2045 ocrArgs, _ = _takeOcrArgs(self._lastScreenshot, ocrArgs, thatsAll=True)2046 return self._lastScreenshot.findItemsByOcr(text, **ocrArgs) != []2047 def verifyBitmap(self, bitmap, **oirArgs):2048 """2049 Verify that bitmap is present in the last screenshot.2050 Parameters:2051 bitmap (string):2052 filename of the bitmap file to be searched for.2053 optical image recognition arguments (optional)2054 refer to help(obj.oirEngine()).2055 """2056 assert self._lastScreenshot != None, "Screenshot required."2057 oirArgs, _ = _takeOirArgs(self._lastScreenshot, oirArgs, thatsAll=True)2058 oirArgs["limit"] = 12059 return self._lastScreenshot.findItemsByBitmap(bitmap, **oirArgs) != []2060 def wait(self, refreshFunc, waitFunc, waitFuncArgs=(), waitFuncKwargs={},2061 waitTime = 5.0, pollDelay = 1.0,2062 beforeRefresh = lambda: None, afterRefresh = lambda: None):2063 """2064 Wait until waitFunc returns True or waitTime has expired.2065 Parameters:2066 refreshFunc (function):2067 this function is called before re-evaluating2068 waitFunc. 
For instance, refreshScreenshot.2069 waitFunc, waitFuncArgs, waitFuncKwargs (function, tuple,2070 dictionary):2071 wait for waitFunc(waitFuncArgs, waitFuncKwargs) to2072 return True2073 waitTime (float, optional):2074 max. time in seconds to wait for. The default is2075 5.0.2076 pollDelay (float, optional):2077 time in seconds to sleep between refreshs. The2078 default is 1.0.2079 beforeRefresh (function, optional):2080 this function will be called before every refreshFunc call.2081 The default is no operation.2082 afterRefresh (function, optional):2083 this function will be called after every refreshFunc call.2084 The default is no operation.2085 Returns True if waitFunc returns True - either immediately or2086 before waitTime has expired - otherwise False.2087 refreshFunc will not be called if waitFunc returns immediately2088 True.2089 """2090 if waitFunc(*waitFuncArgs, **waitFuncKwargs):2091 return True2092 startTime = time.time()2093 endTime = startTime + waitTime2094 now = startTime2095 while now < endTime:2096 time.sleep(min(pollDelay, (endTime - now)))2097 now = time.time()2098 beforeRefresh()2099 refreshFunc()2100 afterRefresh()2101 if waitFunc(*waitFuncArgs, **waitFuncKwargs):2102 return True2103 return False2104 def waitAny(self, listOfFuncs, waitTime=5.0, pollDelay=1.0):2105 """2106 Wait until any function returns True (or equivalent)2107 Parameters:2108 listOfFuncs (enumerable set of functions):2109 functions that will be called without parameters.2110 waitTime (float, optional):2111 max. time to wait in seconds. If None, the call2112 is blocked until a function returns True or2113 equivalent. The default is 5.0.2114 pollDelay (float, optional):2115 time in seconds to sleep before calling2116 functions again, if no function returned True.2117 The default is 1.0.2118 Returns tuple [(function index, function, return value), ...]2119 of functions in the list that returned True or equivalent.2120 Returns empty list in case of timeout.2121 Exceptions raised by functions are not catched.2122 Example: run an async cmd on Windows, wait for it to finish2123 or dialog X or Y to appear:2124 sut.shellSOE(cmd, asyncStatus="c:/temp/cmd.async.status")2125 detected = sut.waitAny(2126 [lambda: sut.topWindowProperties()["title"] == "Dialog X",2127 lambda: sut.topWindowProperties()["title"] == "Dialog Y",2128 lambda: sut.pycosh("cat c:/temp/cmd.async.status")],2129 waitTime=60)2130 if not detected:2131 ...waiting was timed out...2132 else:2133 index, func, retval = detected[0]2134 if index == 2:2135 ...c:/temp/cmd.async.status contents are in retval...2136 """2137 startTime = time.time()2138 if waitTime is None:2139 endTime = float("inf")2140 else:2141 endTime = startTime + waitTime2142 now = startTime2143 rv = []2144 while now <= endTime and not rv:2145 for index, func in enumerate(listOfFuncs):2146 retval = func()2147 if retval:2148 rv.append((index, func, retval))2149 time.sleep(min(pollDelay, (endTime - now)))2150 now = time.time()2151 return rv2152 def waitAnyBitmap(self, listOfBitmaps, **waitAndOirArgs):2153 """2154 Wait until any of given bitmaps appears on screen.2155 Parameters:2156 listOfBitmaps (list of strings):2157 list of bitmaps (filenames) to be waited for.2158 optical image recognition arguments (optional)2159 refer to help(obj.oirEngine()).2160 waitTime, pollDelay, beforeRefresh, afterRefresh (optional):2161 refer to wait documentation.2162 Returns list of bitmaps appearing in the first screenshot that2163 contains at least one of the bitmaps. 
If none of the bitmaps2164 appear within the time limit, returns empty list.2165 If the bitmap is not found from most recently refreshed2166 screenshot, waitAnyBitmap updates the screenshot.2167 """2168 if listOfBitmaps == []: return []2169 if not self._lastScreenshot: self.refreshScreenshot()2170 waitArgs, rest = _takeWaitArgs(waitAndOirArgs)2171 oirArgs, _ = _takeOirArgs(self._lastScreenshot, rest, thatsAll=True)2172 foundBitmaps = []2173 def observe():2174 for bitmap in listOfBitmaps:2175 if self._lastScreenshot.findItemsByBitmap(bitmap, **oirArgs):2176 foundBitmaps.append(bitmap)2177 return foundBitmaps != []2178 self.wait(self.refreshScreenshot, observe, **waitArgs)2179 return foundBitmaps2180 def waitAnyOcrText(self, listOfTexts, **waitAndOcrArgs):2181 """2182 Wait until OCR recognizes any of texts on the screen.2183 Parameters:2184 listOfTexts (list of string):2185 texts to be waited for.2186 waitTime, pollDelay, beforeRefresh, afterRefresh (optional):2187 refer to wait documentation.2188 OCR engine specific arguments2189 refer to help(obj.ocrEngine())2190 Returns list of texts that appeared in the first screenshot2191 that contains at least one of the texts. If none of the texts2192 appear within the time limit, returns empty list.2193 If any of texts is not found from most recently refreshed2194 screenshot, waitAnyOcrText updates the screenshot.2195 """2196 if listOfTexts == []: return []2197 if not self._lastScreenshot: self.refreshScreenshot()2198 waitArgs, rest = _takeWaitArgs(waitAndOcrArgs)2199 ocrArgs, _ = _takeOcrArgs(self._lastScreenshot, rest, thatsAll=True)2200 foundTexts = []2201 def observe():2202 for text in listOfTexts:2203 if self.verifyOcrText(text, **ocrArgs):2204 foundTexts.append(text)2205 return foundTexts != []2206 self.wait(self.refreshScreenshot, observe, **waitArgs)2207 return foundTexts2208 def waitBitmap(self, bitmap, **waitAndOirArgs):2209 """2210 Wait until bitmap appears on screen.2211 Parameters:2212 bitmap (string):2213 filename of the bitmap to be waited for.2214 optical image recognition arguments (optional)2215 refer to help(obj.oirEngine()).2216 waitTime, pollDelay, beforeRefresh, afterRefresh (optional):2217 refer to wait documentation.2218 Returns True if bitmap appeared within given time limit,2219 otherwise False.2220 If the bitmap is not found from most recently refreshed2221 screenshot, waitBitmap updates the screenshot.2222 """2223 return self.waitAnyBitmap([bitmap], **waitAndOirArgs) != []2224 def waitOcrText(self, text, **waitAndOcrArgs):2225 """2226 Wait until OCR detects text on the screen.2227 Parameters:2228 text (string):2229 text to be waited for.2230 waitTime, pollDelay, beforeRefresh, afterRefresh (optional):2231 refer to wait documentation.2232 OCR engine specific arguments2233 refer to help(obj.ocrEngine())2234 Returns True if the text appeared within given time limit,2235 otherwise False.2236 If the text is not found from most recently refreshed2237 screenshot, waitOcrText updates the screenshot.2238 """2239 return self.waitAnyOcrText([text], **waitAndOcrArgs) != []2240 def waitScreenUpdated(self, **waitArgs):2241 """2242 Wait until screenshot has been updated or waitTime expired.2243 Parameters:2244 waitTime, pollDelay, beforeRefresh, afterRefresh (optional):2245 refer to wait documentation.2246 Returns True if screenshot was updated before waitTime expired,2247 otherwise False. 
If waitTime is 0, screenshot is refreshed once.2248 Returns True if the screenshot differs from the previous.2249 waitScreenUpdated refreshes the screenshot.2250 """2251 waitTime = waitArgs.get("waitTime", 5.0)2252 pollDelay = waitArgs.get("pollDelay", 1.0)2253 beforeRefresh = waitArgs.get("beforeRefresh", lambda: None)2254 afterRefresh = waitArgs.get("afterRefresh", lambda: None)2255 updated = self.existingConnection().recvScreenUpdated(waitTime, pollDelay)2256 if updated == None:2257 # optimised version is not available, this is a fallback2258 previousScreenshot = self.screenshot()2259 if previousScreenshot == None:2260 beforeRefresh()2261 self.refreshScreenshot()2262 afterRefresh()2263 return True2264 # return True if screen changed from previous even with2265 # waitTime == 0, therefore refresh before calling wait.2266 beforeRefresh()2267 self.refreshScreenshot()2268 afterRefresh()2269 return self.wait(2270 self.refreshScreenshot,2271 lambda: not self.verifyBitmap(previousScreenshot.filename()),2272 **waitArgs)2273 elif updated == True:2274 self.refreshScreenshot()2275 elif updated == False:2276 pass # no need to fetch the same screen2277 else:2278 raise ValueError("recvScreenUpdated returned illegal value: %s" % (repr(updated),))2279 return updated2280class Screenshot(object):2281 """2282 Screenshot class takes and holds a screenshot (bitmap) of device2283 display, or a forced bitmap file if device connection is not given.2284 """2285 def __init__(self, screenshotFile=None, paths=None,...
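A minimal usage sketch of the methods listed above (not part of the fmbtgti.py source): it assumes a connected Android device through fmbtandroid and hypothetical reference images ("ok-button.png", "error-dialog.png") stored under a "bitmaps" directory on the bitmap path.

import fmbtandroid

sut = fmbtandroid.Device()
sut.setBitmapPath("bitmaps")          # directories searched by the *Bitmap methods
sut.refreshScreenshot()               # *Bitmap and *OcrText methods use this screenshot

# verifyBitmap only checks the latest screenshot; tapBitmap also sends the tap
if sut.verifyBitmap("ok-button.png", colorMatch=0.9):
    sut.tapBitmap("ok-button.png", colorMatch=0.9)

# waitBitmap keeps refreshing the screenshot while polling, up to waitTime seconds
if sut.waitBitmap("error-dialog.png", waitTime=10.0, pollDelay=1.0):
    print("error dialog appeared")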


attrverify_cc.py

Source:attrverify_cc.py Github


1from pydicom.datadict import(2 # FUNCTIONS3 dictionary_VM,4 dictionary_VR,)5from pydicom.dataelem import(6 # CLASSES7 DataElement,)8from pydicom.multival import(9 # CLASSES10 MultiValue,)11from pydicom.valuerep import(12 # CLASSES13 DSdecimal,14 DSfloat,15 IS,)16from rightdicom.dcmvfy.mesgtext_cc import(17 # FUNCTIONS18 EMsgDC,19 MMsgDC,20 WMsgDC,)21from numpy import uint16, uint32222324def isLongValueLengthInExplicitValueRepresentation(vr: str):25 # Check for known short form VRs, rather than known long form VRs,26 # since all new VRs will be long form, in case we encounter unrecognized27 # one but be sure and check that there really is a VR present28 return vr and vr[0] != 0 and vr[1] != 0 and(vr != "AE"29 and vr != "AS"30 and vr != "AT"31 and vr != "CS"32 and vr != "DA"33 and vr != "DS"34 and vr != "DT"35 and vr != "FD"36 and vr != "FL"37 and vr != "IS"38 and vr != "LO"39 and vr != "LT"40 and vr != "PN"41 and vr != "SH"42 and vr != "SL"43 and vr != "SS"44 and vr != "ST"45 and vr != "TM"46 and vr != "UI"47 and vr != "UL"48 and vr != "US"49 )505152def isKnownExplicitValueRepresentation(vr: str):53 return vr and(54 vr == "AE"55 or vr == "AS"56 or vr == "AT"57 or vr == "CS"58 or vr == "DA"59 or vr == "DS"60 or vr == "DT"61 or vr == "FL"62 or vr == "FD"63 or vr == "IS"64 or vr == "LO"65 or vr == "LT"66 or vr == "OB"67 or vr == "OD"68 or vr == "OF"69 or vr == "OL"70 or vr == "OW"71 or vr == "PN"72 or vr == "SH"73 or vr == "SL"74 or vr == "SQ"75 or vr == "SS"76 or vr == "ST"77 or vr == "TM"78 or vr == "UI"79 or vr == "UL"80 or vr == "UN"81 or vr == "UR"82 or vr == "US"83 or vr == "UT"84 )858687def isStringVR(vr: str) -> bool:88 return vr and(vr == "AE"89 or vr == "AS"90 or vr == "CS"91 or vr == "DA"92 or vr == "DT"93 or vr == "DS"94 or vr == "IS"95 or vr == "LO"96 or vr == "LT"97 or vr == "PN"98 or vr == "SH"99 or vr == "ST"100 or vr == "TM"101 or vr == "UC"102 or vr == "UI"103 or vr == "UR"104 or vr == "UT")105106107def isNonOtherNumericOrDateOrTimeOrUIStringVR(vr: str) -> bool:108 return vr and(vr == "DA"109 or vr == "DT"110 or vr == "DS"111 or vr == "IS"112 or vr == "TM"113 or vr == "UI")114115116def isNumericVR(vr: str) -> bool:117 return vr and(vr == "OB"118 or vr == "OL"119 or vr == "OW"120 or vr == "OX"121 or vr == "SL"122 or vr == "SS"123 or vr == "UL"124 or vr == "US"125 or vr == "XL"126 or vr == "XS")127128129def isFloatVR(vr: str) -> bool:130 return vr and(vr == "FL"131 or vr == "FD"132 or vr == "OD"133 or vr == "OF")134135136def isPydicomNumeric(value):137 return(type(value) == int or type(value) == DSfloat or138 type(value) == DSdecimal or type(value) == IS or139 type(value) == float)140141142def verifyDefinedTerms(elem: DataElement, str_val: dict, verbose: bool,143 log: list,144 which: int) -> bool:145 val = elem.value146 vm = elem.VM147 if type(val) == MultiValue:148 if which == -1:149 candidate = val150 else:151 if which >= vm:152 return False153 else:154 candidate = [val[which]]155 else:156 if which >= 1:157 return False158 else:159 candidate = [val]160 for i, count in zip(candidate, range(0, len(candidate))):161 if type(i) != str:162 log.append(163 EMsgDC("TriedToVerifyDefinedTermsForNonStringAttribute") + \164 MMsgDC("ForAttribute") + " <" + elem.description() + ">")165 return False166 if i in str_val:167 if verbose:168 log.append(169 MMsgDC("RecognizedDefinedTerms") \170 + " <" + i + "> " + MMsgDC("Is") + " <" + str_val[i] \171 + "> " + MMsgDC("ForValue") + " {}".format(count + 1) \172 + " " + MMsgDC("OfAttribute") + " <" \173 + elem.description() + ">")174 else:175 msg = 
"{} <{}> {} {} {} {}".format(176 WMsgDC("UnrecognizedDefinedTerm"),177 i, MMsgDC("ForValue"), count + 1, MMsgDC("OfAttribute"),178 elem.description())179 log.append(msg)180 return True181 return False182183184def verifyEnumValues(elem: DataElement, str_val: dict, verbose: bool, log: list,185 which: int) -> bool:186 success = True187 val = elem.value188 vm = elem.VM189 if type(val) == MultiValue:190 if which == -1:191 candidate = val192 else:193 if which >= vm:194 return False195 else:196 candidate = [val[which]]197 else:198 if which >= 1:199 return False200 else:201 candidate = [val]202 for i, count in zip(candidate, range(0, len(candidate))):203 converted_i = i204 if type(i) == DSfloat or \205 type(i) == DSdecimal or type(i) == IS:206 converted_i = str(i)207 elif type(i) == str:208 converted_i = i209 else:210 log.append(211 EMsgDC("TriedToVerifyEnumeratedValueForNonStringAttribute") + \212 MMsgDC("ForAttribute") + " <" + elem.description() + ">")213 return False214 if converted_i in str_val:215 if verbose:216 log.append(217 MMsgDC("RecognizedEnumeratedValue") \218 + " <" + i + "> " + MMsgDC("Is") + " <" + str_val[i] \219 + "> " + MMsgDC("ForValue") + " {}".format(count + 1) \220 + " " + MMsgDC("OfAttribute") + " <" \221 + elem.description() + ">")222 else:223 msg = "{} <{}> {} {} {} {}".format(224 EMsgDC("UnrecognizedEnumeratedValue"),225 i, MMsgDC("ForValue"), count + 1, MMsgDC("OfAttribute"),226 elem.description())227 log.append(msg)228 success = False229 return success230231232def verifyEnumValues_uint16(elem: DataElement, bin_method, verbose: bool,233 log: list,234 which: int) -> bool:235 success = True236 val = elem.value237 vm = elem.VM238 if type(val) == MultiValue:239 if which == -1:240 candidate = val241 else:242 if which >= vm:243 return False244 else:245 candidate = [val[which]]246 else:247 if which >= 1:248 return False249 else:250 candidate = [val]251 for i, count in zip(candidate, range(0, len(candidate))):252 if not isPydicomNumeric(i):253 log.append(254 EMsgDC("TriedToVerifyEnumeratedValueForNonNumericAttribute") + \255 MMsgDC("ForAttribute") + " <" + elem.description() + ">")256 try:257 int_input = uint16(i)258 output = bin_method(int_input)259 except:260 output = []261 if len(output) == 0:262 msg ="{} <{}> {} {} {} {} {}".format(263 EMsgDC("UnrecognizedEnumeratedValue"),264 i, MMsgDC("ForValue"), count + 1, MMsgDC("OfAttribute"),265 elem.description(), bin_method.__name__)266 log.append(msg)267 success = False268 else:269 if verbose:270 log.append(271 MMsgDC("RecognizedEnumeratedValue") \272 + " <{}> ".format(i) + MMsgDC("Is") + " <" + output \273 + "> " + MMsgDC("ForValue") + " {}".format(count + 1) \274 + " " + MMsgDC("OfAttribute") + " <" \275 + elem.description() + ">")276 return success277278279def verifyBitMap(elem: DataElement, bin_method, verbose: bool, log: list,280 which: int) -> bool:281 success = True282 val = elem.value283 vm = elem.VM284 if type(val) == MultiValue:285 if which == -1:286 candidate = val287 else:288 if which >= vm:289 return False290 else:291 candidate = [val[which]]292 else:293 if which >= 1:294 return False295 else:296 candidate = [val]297 for i, count in zip(candidate, range(0, len(candidate))):298 if not isPydicomNumeric(i):299 log.append(300 EMsgDC("TriedToVerifyBitMapForNonNumericAttribute") + \301 MMsgDC("ForAttribute") + " <" + elem.description() + ">")302 return False303 output = bin_method(uint16(i))304 if len(output) == 0:305 msg = "{} <{}> {} {} {} {}".format(306 EMsgDC("UnrecognizedBitMap"),307 i, MMsgDC("ForValue"), count + 1, 
MMsgDC("OfAttribute"),308 elem.description())309 log.append(msg)310 success = False311 else:312 if verbose:313 log.append(314 MMsgDC("RecognizedBitMap") \315 + " <" + i + "> " + MMsgDC("Is") + " <" + output \316 + "> " + MMsgDC("ForValue") + " {}".format(count + 1) \317 + " " + MMsgDC("OfAttribute") + " <" \318 + elem.description() + ">")319 return success320321322def verifyEnumValues_tag(elem: DataElement, tag_method, verbose: bool,323 log: list,324 which: int) -> bool:325 # I think group and element for all values of multivalue attribs is the same. I should ask this?326 success = True327 val = elem.value328 vm = elem.VM329 g = uint16(elem.tag.group)330 e = uint16(elem.tag.elemnt)331 output = tag_method(g, e)332 if len(output) == 0:333 msg = "{} <({},{})> {} {} {}".format(334 EMsgDC("UnrecognizedEnumeratedValue"),335 g, e, MMsgDC("ForValue"), MMsgDC("OfAttribute"),336 elem.description())337 log.append(msg)338 success = False339 else:340 if verbose:341 log.append(342 MMsgDC("RecognizedEnumeratedValue") \343 + " <" + "({},{})".format(g, e) + "> " + MMsgDC(344 "Is") + " <" + output \345 + "> " + MMsgDC("ForValue") + val \346 + " " + MMsgDC("OfAttribute") + " <" \347 + elem.description() + ">")348 # if which == -1:349 # candidate = val350 # else:351 # if which >= vm:352 # return False353 # else:354 # candidate = val[which]355 # for i in candidate:356 # if type(i) != int:357 # return False358 # output = tag_method(uint16(elem.tag.group), uint16(elem.tag.elemnt))359 # if len(output) == 0:360 # print("Unrecognized defined term for {}".format(elem.keyword))361 # success = False362 # else:363 # print("print sth if verbose")364 return success365366367def verifyNotZero(elem: DataElement, verbose: bool, log: list,368 which: int, warningNotError: bool) -> bool:369 success = True370 if elem.is_empty:371 return True372 val = elem.value373 vm = elem.VM374 if type(val) == MultiValue:375 if which == -1:376 candidate = val377 else:378 if which >= vm:379 return False380 else:381 candidate = [val[which]]382 else:383 if which >= 1:384 return False385 else:386 candidate = [val]387 for i, count in zip(candidate, range(0, len(candidate))):388 if not isPydicomNumeric(i):389 log.append("{} {} <{}>".format(390 EMsgDC("TriedToVerifyNotZeroForNonNumericAttribute"),391 MMsgDC("ForAttribute"), elem.description()))392 return False393 if i == 0:394 log.append("{} {} {} {} <{}>".format(395 (WMsgDC("ZeroValue") if warningNotError else EMsgDC(396 "ZeroValue")),397 MMsgDC("ForValue"),(count + 1),398 MMsgDC("OfAttribute"), elem.description()))399 success = False400 return success401402403def verifyVR(elem: DataElement, module: str, element: str, verbose: bool,404 log: list, fix_trivial=False):405 # tag = getTag();406407 # if (tag.isPrivateTag()) :408 # return True409 v = elem.value410 try:411 vrd = dictionary_VR(elem.tag)412 except BaseException as err:413 print(err)414 mssg = EMsgDC("NoSuchElementInDictionary") + " "415 if len(element) != 0:416 mssg += MMsgDC("Element") + "=<" + element + ">"417 if len(module) != 0:418 mssg += MMsgDC("Module") + "=<" + module + ">"419 log.append(mssg)420 return False421 vre = elem.VR422 vrds = []423 vre_equlas_vrd = False424 if len(vrd) > 2 and len(vre) == 2:425 vrds = vrd.split(' or ')426 for dic_vr in vrds:427 if dic_vr == vre:428 vre_equlas_vrd = True429 break430 else:431 vre_equlas_vrd = (vre == vrd)432 if not vre_equlas_vrd and not(vrd == "OX" and vre == "OB" or vre == "OW") \433 and not(vrd == "XS" and vre == "US" or vre == "SS") \434 and not(vrd == "XO" and vre == "US" or vre == "SS" 
or vre == "OW") \435 and not(vrd == "XL" and vre == "UL" or vre == "SL"):436 mssg = EMsgDC("BadValueRepresentation") \437 + " " + vre + " (" + vrd + " " + MMsgDC("Required") + ")"438 # print(vrds,'<-->' ,vrd,'<-->', vre, '<-->', elem)439 if len(element) != 0:440 mssg += MMsgDC("Element") + "=<" + element + ">"441 if len(module) != 0:442 mssg += MMsgDC("Module") + "=<" + module + ">"443 log.append(mssg)444 if fix_trivial:445 elem.VR = vrd446 mssg += " :fixed: by changing the vr"447 return False448 else:449 return True450451452def vmpart2num(vmpart: str):453 if vmpart.isnumeric():454 return [uint32(vmpart), False]455 elif vmpart == 'n':456 return [1, True]457 elif vmpart[-1] == 'n':458 return [vmpart[:len(vmpart) - 1], True]459 else:460 return [-1, False]461462463def getVM_min_max(vm: str):464 has_min_factor = False465 has_max_factor = False466 if vm.isnumeric():467 mmin = uint32(vm)468 mmax = uint32(vm)469 else:470 minmax = vm.split('-')471 [mmin, has_min_factor] = vmpart2num(minmax[0])472 [mmax, has_max_factor] = vmpart2num(minmax[1])473 return [mmin, has_min_factor, mmax, has_max_factor]474475476def verifyVM(elem: DataElement, module: str, element: str, verbose: bool,477 log: list,478 multiplicityMin: uint32, multiplicityMax: uint32,479 specifiedSource=""):480 ttag = elem.tag481 current_vm = elem.VM482 vm = dictionary_VM(ttag)483 if multiplicityMax == 0 and multiplicityMin == 0:484 [dictmin, has_min_factor, dictmax,485 has_max_factor] = getVM_min_max(vm)486 source = MMsgDC("Dictionary")487 else:488 dictmin = multiplicityMin489 dictmax = multiplicityMax490 has_min_factor = False491 has_max_factor = False492 source = specifiedSource if len(specifiedSource) > 0 else MMsgDC(493 "ModuleDefinition")494 min_err = False495 max_err = False496 if has_min_factor:497 min_err = (current_vm % dictmin == 0)498 else:499 min_err = current_vm < dictmin500 if has_max_factor:501 max_err = (current_vm % dictmax == 0)502 else:503 max_err = current_vm > dictmin504 message = ""505 err = min_err and max_err506 if err:507 mssg = "{} {} vm is {} ({}".format(508 EMsgDC("BadAttributeValueMultiplicity"), vm, current_vm,509 dictmin)510 if(dictmin != dictmax or not has_max_factor511 or not has_min_factor):512 if not has_max_factor and dictmax == 0xFFFFFFFF:513 mssg += "-n"514 elif has_max_factor and dictmax > 1:515 mssg += "-{}n".format(dictmax)516 elif has_max_factor and dictmax == 1:517 mssg += "-n".format(dictmax)518 else:519 mssg += "-{}".format(dictmax)520 mssg += " {} {})".format( MMsgDC("RequiredBy"), source)521 if len(element) != 0:522 mssg += MMsgDC("Element") + "=<" + element + ">"523 if len(module) != 0:524 mssg += MMsgDC("Module") + "=<" + module + ">"525 log.append(mssg)526 return not err527528529def isEmptyOrHasAnyEmptyValue(elem: DataElement) -> bool:530 if elem.is_empty:531 return True532 v = elem.value533 if type(v) != MultiValue:534 return v.is_empty535 else:536 for i in v:537 try:538 length = len(i)539 except:540 return False541 if length == 0:542 return True543 return False544545546def isEmptyOrHasAllEmptyValues(elem: DataElement) -> bool:547 if elem.is_empty:548 return True549 v = elem.value550 if type(v) != MultiValue:551 return elem.is_empty552 else:553 for i in v:554 try:555 length = len(i)556 except:557 return False558 if length > 0:559 return False ...


pages.py

Source:pages.py Github


...
    def test_moveToPage2(self):
        s.tapBitmap("input-username.png", colorMatch=0.9)
        sleep(DELAY)
        s.refreshScreenshot()
        found = s.verifyBitmap("kuvas6.png", colorMatch=0.9)
        self.assertTrue(found, "Header2 not found")

if __name__ == '__main__':
...
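The sleep/refresh/verify sequence above can also be written with waitBitmap, which polls and refreshes the screenshot by itself; this sketch assumes the same module-level device object s and reference images as in pages.py.

    def test_moveToPage2_with_wait(self):
        s.tapBitmap("input-username.png", colorMatch=0.9)
        # waitBitmap refreshes the screenshot while polling, up to waitTime seconds
        found = s.waitBitmap("kuvas6.png", colorMatch=0.9, waitTime=5.0)
        self.assertTrue(found, "Header2 not found")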


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides that help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run fMBT automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

