How to use the touch_position method in robotframework-ioslibrary

Best Python code snippets using robotframework-ioslibrary

The three snippets below illustrate the same underlying idea — reading a touch position and hit-testing it against screen regions — in three different Python settings: a Pepper tablet kiosk built on NAOqi, a pygame touchscreen menu on a piTFT, and a raylib (pyray) gesture demo.

main_engversion.py

Source: main_engversion.py (GitHub)

# -*- encoding: UTF-8 -*-
import qi
import argparse
import sys
import math
import subprocess
import time
import touch
from naoqi import ALProxy
from threading import Thread

sys.path.insert(0, './motion')
import entertain
# import saju

FRAME_WIDTH = 1280
FRAME_HEIGHT = 800
DEFAULT_VOLUME = 60

# jesnk touch
# Named touch regions on the tablet; each is an x-range and a y-range in pixels.
TOUCH_LIST = {}
TOUCH_LIST['RIGHT_SIDE'] = {"x": [FRAME_WIDTH / 2, FRAME_WIDTH], "y": [0, FRAME_HEIGHT], 'name': "RIGHT_SIDE"}
TOUCH_LIST['LEFT_SIDE'] = {"x": [0, FRAME_WIDTH], "y": [0, FRAME_HEIGHT], 'name': "LEFT_SIDE"}
TOUCH_LIST['JESNK_SIDE'] = {"x": [0, 200], "y": [0, 200], 'name': "JESNK_SIDE"}
TOUCH_LIST['BUTTON_LEFT'] = {"x": [75, 600], "y": [233, 593], 'name': "BUTTON_LEFT"}
TOUCH_LIST['BUTTON_RIGHT'] = {"x": [669, 1192], "y": [227, 598], 'name': "BUTTON_RIGHT"}
TOUCH_LIST['BUTTON_MIDDLE_DOWN'] = {"x": [485, 800], "y": [632, 705], 'name': "BUTTON_MIDDLE_DOWN"}
TOUCH_LIST['BUTTON_RIGHT_DOWN'] = {"x": [930, 1156], "y": [641, 707], 'name': "BUTTON_RIGHT_DOWN"}
TOUCH_LIST['BUTTON_LEFT_DOWN'] = {"x": [150, 390], "y": [621, 707], 'name': "BUTTON_LEFT_DOWN"}

# Each scene entry is [scene name, valid touch regions, valid spoken words].
scene_data = {}
scene_data['init'] = ['init', ['RIGHT_SIDE', 'LEFT_SIDE'], ['bye', 'next', 'first']]
scene_data['1'] = ['1', ['RIGHT_SIDE', 'LEFT_SIDE'], ['bye', 'next', 'first']]
scene_data['exit'] = ['exit', [], []]
scene_data['home'] = ['home', ['BUTTON_MIDDLE_DOWN', 'JESNK_SIDE'], ['start', 'next', 'pepper']]
scene_data['first_menu'] = ['first_menu',
                            ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                             'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                            ['bye', 'next', 'first']]
scene_data['tour'] = ['tour',
                      ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                       'BUTTON_LEFT_DOWN', 'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                      ['bye', 'next', 'first']]
scene_data['entertain'] = ['entertain',
                           ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                            'BUTTON_LEFT_DOWN', 'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                           ['bye', 'next', 'first']]
scene_data['entertain2'] = ['entertain2',
                            ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                             'BUTTON_LEFT_DOWN', 'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                            ['bye', 'next', 'first']]
scene_data['tour_hsr1'] = ['tour_hsr1',
                           ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                            'BUTTON_LEFT_DOWN', 'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                           ['bye', 'next', 'first']]
scene_data['tour_hsr2'] = ['tour_hsr2',
                           ['JESNK_SIDE', 'BUTTON_RIGHT', 'BUTTON_LEFT',
                            'BUTTON_LEFT_DOWN', 'BUTTON_MIDDLE_DOWN', 'BUTTON_RIGHT_DOWN'],
                           ['bye', 'next', 'first']]

signalID = 0


def touch_callback(x, y):
    print(" coordinate x : ", x, " y : ", y)
    print(signalID)


class Monitor_input:
    # Blocks until the user either touches one of the target regions on the
    # tablet or says one of the target words, then returns what happened.
    def __init__(self, srv, touch_list=[], word_list=[]):
        self.target_touch_list = touch_list
        self.target_word_list = word_list
        self.srv = srv
        self.tabletService = srv['tablet']
        self.signalID = srv['tablet'].onTouchDown.connect(self.touch_callback)
        self.touched_position = None
        self.exit_flag = False
        self.ret = {}
        self.memory = srv['memory']
        self.asr = srv['asr']
        self.asr.pause(True)
        # self.asr.setLanguage("Korean")
        self.asr.setLanguage("English")
        self.debug_mode = False
        self.debug_touch_count = 0
        self.debug_touch_coordinate = []
        try:
            self.asr.unsubscribe("asr")
        except:
            pass
        self.asr.pause(True)

    def check_valid_touch(self):
        # Record which target region the last touch fell into, if any.
        for i in self.target_touch_list:
            if self.touch_x > TOUCH_LIST[i]['x'][0] and self.touch_x < TOUCH_LIST[i]['x'][1]:
                if self.touch_y > TOUCH_LIST[i]['y'][0] and self.touch_y < TOUCH_LIST[i]['y'][1]:
                    self.ret['touch_position'] = i
                    return True
        return False

    def touch_callback(self, x, y):
        print(self.debug_mode)
        if self.debug_mode:
            # Debug mode: collect four touches, then print the x/y ranges they span.
            self.debug_touch_count += 1
            self.debug_touch_coordinate.append([x, y])
            print("x : ", x, " y : ", y)
            if self.debug_touch_count == 4:
                self.debug_mode = False
                self.debug_touch_count = 0
                print("test")
                xs = [x[0] for x in self.debug_touch_coordinate]
                xs.sort()
                ys = [x[1] for x in self.debug_touch_coordinate]
                ys.sort()
                print("X range : ", xs[0], "-", xs[-1])
                print("Y range : ", ys[0], "-", ys[-1])
                print("Touch_debug_mode Finished")
                self.debug_touch_coordinate = []
                return
            return
        self.touch_x = x
        self.touch_y = y
        if self.check_valid_touch():
            self.ret['type'] = 'touch'
            self.ret['x'] = x
            self.ret['y'] = y
            self.exit_flag = True
        print("class_ x ", x, " y ", y)

    def asr_callback(self, msg):
        # Threshold
        print(msg[0], ' is recognized. ', msg[1])
        if msg[1] > 0.5:
            print(msg[0], msg[1], " is returned")
            self.ret['type'] = 'speech'
            self.ret['word'] = msg[0]
            self.exit_flag = True

    def wait_for_get_input(self):
        self.asr.setVocabulary(self.target_word_list, False)
        print("Starting wait")
        self.srv['audio_device'].setOutputVolume(3)
        self.asr.subscribe('asr')
        asr_mem_sub = self.memory.subscriber("WordRecognized")
        asr_mem_sub.signal.connect(self.asr_callback)
        while not self.exit_flag:
            time.sleep(0.01)
        self.asr.unsubscribe('asr')
        self.srv['audio_device'].setOutputVolume(100)
        self.exit_flag = False
        return self.ret

    def set_target_touch_list(self, touch_list):
        self.target_touch_list = touch_list

    def set_target_word_list(self, word_list):
        self.target_word_list = word_list

    def __del__(self):
        self.tabletService.onTouchDown.disconnect(self.touch_callback)
        self.asr.unsubscribe("ASR")


def get_html_address(file_name):
    name = file_name
    if len(name) > 5 and name[-5:] == '.html':
        name = name[:-5]
    return "http://198.18.0.1/apps/bi-html/" + name + '.html'


def transition(srv, scene, input_ret):
    global monitor_input
    # return value : scene name, available touch, avail word
    print("Transition mode")
    print(scene, input_ret)
    if scene == 'home':
        if input_ret['type'] == 'touch':
            if input_ret['touch_position'] == 'BUTTON_MIDDLE_DOWN':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("next")
                return scene_data[next_scene]
            # jesnk : test
            if input_ret['touch_position'] == 'JESNK_SIDE':
                file_path = "/opt/aldebaran/www/apps/bi-sound/background.mp3"
                # srv['tts'].post.say('yes')
                player = ALProxy("ALAudioPlayer")
                player.post.playFileFromPosition(file_path, 120)
                # file_id = srv['audio_player'].loadFile("/opt/aldebaran/www/apps/bi-sound/background.mp3")
                # srv['audio_player'].playFileFromPosition(file_path,120)
                # srv['audio_player'].setVolume(file_id,0.3)
        elif input_ret['type'] == 'speech':
            if input_ret['word'] == 'start':
                next_scene = 'first_menu'
                srv['aas'].say("Hello, Nice to meet you!", aas_configuration)
                srv['tablet'].showWebview(get_html_address(next_scene))
                return scene_data[next_scene]
            if input_ret['word'] == 'Hello':
                next_scene = 'home'
                srv['aas'].say("Hello Sir!", aas_configuration)
                return scene_data[next_scene]
            if input_ret['word'] == 'pepper':
                next_scene = 'home'
                srv['aas'].say("Yep! Hello?!", aas_configuration)
                return scene_data[next_scene]
    if scene == 'first_menu':
        if input_ret['type'] == 'touch':
            if input_ret['touch_position'] == 'JESNK_SIDE':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("Debug")
                monitor_input.debug_mode = True
                while monitor_input.debug_mode:
                    time.sleep(0.01)
                srv['tts'].say("Debug finished")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT':
                next_scene = 'tour'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("next")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_RIGHT':
                next_scene = 'entertain'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("next")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_MIDDLE_DOWN':
                next_scene = 'home'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say("next")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_RIGHT_DOWN':
                next_scene = scene
                srv['tts'].setParameter("defaultVoiceSpeed", 100)
                srv['aas'].say(
                    "Are you curious about me? I am Pepper. It is a humanoid robot made by Softbank, and can use artificial intelligence. It is characterized by a cute appearance, and is introduced in various fields such as finance, bookstore, medical care, and distribution fields in Korea")
                srv['tts'].setParameter("defaultVoiceSpeed", 70)
                return scene_data[next_scene]
        elif input_ret['type'] == 'speech':
            if input_ret['word'] == 'bye':
                return scene_data['exit']
            if input_ret['word'] == 'first':
                next_scene = 'home'
                srv['tablet'].showWebview(get_html_address(next_scene))
                return scene_data[next_scene]
            if input_ret['word'] == 'who':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                return scene_data[next_scene]
    if scene == 'tour':
        if input_ret['type'] == 'touch':
            if input_ret['touch_position'] == 'BUTTON_RIGHT':
                next_scene = 'tour_hsr1'
                srv['tts'].setParameter("defaultVoiceSpeed", 100)
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say("Let me explain the robots in our lab. First, HSR, a human helper robot, is a mobile operation robot.", aas_configuration)
                next_scene = 'tour_hsr2'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say("It is about 1 meter tall and is a versatile robot that can recognize objects through various cameras and pick them up with a gripper. But isn't it uglier than me?",
                               aas_configuration)
                next_scene = 'tour_blitz'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say(
                    "The next robot, Blitz. It is a robot made by combining a base robot, which is specialized in moving objects, and a UR5 robot that picks up objects. In addition, it is a mobile operation robot that is equipped with sound and camera sensors, capable of recognizing objects and gripping them with a gripper.",
                    aas_configuration)
                next_scene = 'tour_pepper1'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say(
                    "The last robot to be introduced is me, Pepper. I am a humanoid robot made by Softbank, and I can use artificial intelligence.",
                    aas_configuration)
                next_scene = 'tour_pepper2'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say(
                    "I have a cute appearance, and have been introduced in various fields such as finance, bookstores, medical care, and distribution fields in Korea. In addition, it is used as a standard robot in S, S, P, L, among the world robot competitions, Robo Cup League.",
                    aas_configuration)
                srv['tts'].setParameter("defaultVoiceSpeed", 70)
                next_scene = 'tour'
                srv['tablet'].showWebview(get_html_address(next_scene))
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT':
                next_scene = 'tour_ourlab1'
                srv['tts'].setParameter("defaultVoiceSpeed", 100)
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say(
                    "Let me introduce our lab. Our bio-intelligence lab is conducting the following studies. First, we are conducting interdisciplinary research in various fields such as artificial intelligence, psychology, and cognitive science to develop human-level artificial intelligence such as Baby Mind and VTT. We are also actively conducting research on robots on various platforms, such as home robots that work with humans and Robocup, a world robot competition.",
                    aas_configuration)
                next_scene = 'tour_ourlab2'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say("If you have any other questions or inquiries, please refer to the following website or contact us.", aas_configuration)
                srv['tts'].setParameter("defaultVoiceSpeed", 70)
                next_scene = 'tour'
                srv['tablet'].showWebview(get_html_address(next_scene))
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_MIDDLE_DOWN':
                next_scene = 'home'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['aas'].say("To the initial screen", aas_configuration)
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT_DOWN':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("previous")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_RIGHT_DOWN':
                next_scene = scene
                srv['tts'].setParameter("defaultVoiceSpeed", 110)
                srv['aas'].say(
                    "Are you curious about me? I am Pepper. It is a humanoid robot made by Softbank, and can use artificial intelligence. It is characterized by a cute appearance, and is introduced in various fields such as finance, bookstore, medical care, and distribution fields in Korea",
                    aas_configuration)
                srv['tts'].setParameter("defaultVoiceSpeed", 70)
                return scene_data[next_scene]
    if scene == 'entertain':
        if input_ret['type'] == 'touch':
            if input_ret['touch_position'] == 'BUTTON_MIDDLE_DOWN':
                next_scene = 'home'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("To the initial screen")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT_DOWN':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("previous")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT':
                file_path = "/opt/aldebaran/www/apps/bi-sound/elephant.ogg"
                # srv['tts'].post.say('yes')
                player = ALProxy("ALAudioPlayer", PEPPER_IP, 9559)
                player.post.playFileFromPosition(file_path, 0)
                entertain.elephant(srv)
                player.post.stopAll()
                pass
            if input_ret['touch_position'] == 'BUTTON_RIGHT':
                file_path = "/opt/aldebaran/www/apps/bi-sound/UrbanStreet.mp3"
                player = ALProxy("ALAudioPlayer", PEPPER_IP, 9559)
                player.post.playFileFromPosition(file_path, 0)
                entertain.disco(srv)
                player.post.stopAll()
                pass
            if input_ret['touch_position'] == 'BUTTON_RIGHT_DOWN':
                next_scene = 'entertain2'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("next")
                return scene_data[next_scene]
    if scene == 'entertain2':
        if input_ret['type'] == 'touch':
            if input_ret['touch_position'] == 'BUTTON_MIDDLE_DOWN':
                next_scene = 'home'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("To the initial screen")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT_DOWN':
                next_scene = 'first_menu'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("previous")
                return scene_data[next_scene]
            if input_ret['touch_position'] == 'BUTTON_LEFT':
                srv['aas'].say("Please tell me your date of birth in English.", aas_configuration)
                text = saju.main(srv)  # requires the saju module (its import is commented out at the top)
                srv['aas'].say(text, aas_configuration)
            if input_ret['touch_position'] == 'BUTTON_RIGHT':
                pass
            if input_ret['touch_position'] == 'BUTTON_RIGHT_DOWN':
                next_scene = 'entertain'
                srv['tablet'].showWebview(get_html_address(next_scene))
                srv['tts'].say("next")
                return scene_data[next_scene]


# jesnk 1
monitor_input = None
aas_configuration = {"bodyLanguageMode": "contextual"}


def main(session):
    # jesnk main
    print("Hello")
    srv = {}
    srv['tablet'] = session.service("ALTabletService")
    srv['memory'] = session.service("ALMemory")
    srv['motion'] = session.service("ALMotion")
    srv['asr'] = session.service("ALSpeechRecognition")
    srv['tts'] = session.service("ALTextToSpeech")
    srv['aas'] = session.service("ALAnimatedSpeech")
    srv['audio_device'] = session.service("ALAudioDevice")
    srv['tts'].setVolume(0.1)
    srv['tts'].setParameter("defaultVoiceSpeed", 70)
    srv['audio_player'] = session.service("ALAudioPlayer")
    # Present initial page
    srv['tablet'].enableWifi()
    srv['tablet'].setOnTouchWebviewScaleFactor(1)
    srv['tablet'].showWebview('http://198.18.0.1/apps/bi-html/home.html')
    # Valid input condition setting
    global monitor_input
    monitor_input = Monitor_input(srv)
    init_scene = 'home'
    scene_name, valid_touch_list, valid_word_list = \
        scene_data[init_scene][0], scene_data[init_scene][1], scene_data[init_scene][2]
    print(scene_name, valid_touch_list, valid_word_list)
    monitor_input.set_target_touch_list(valid_touch_list)
    monitor_input.set_target_word_list(valid_word_list)
    while True:
        input_ret = monitor_input.wait_for_get_input()
        ret = transition(srv, scene_name, input_ret)
        if ret is None:
            continue
        print(ret)
        scene_name, valid_touch_list, valid_word_list = ret[0], ret[1], ret[2]
        monitor_input.set_target_touch_list(valid_touch_list)
        monitor_input.set_target_word_list(valid_word_list)
        if scene_name == 'exit':
            break
        print("passed 2")
    # global signalID
    # signalID = tabletService.onTouchDown.connect(touch_callback)
    srv['tablet'].hideWebview()
    print("Finished")


PEPPER_IP = '192.168.1.212'

if __name__ == "__main__":
    print("Hello")
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", type=str, default=PEPPER_IP,
                        help="Robot IP address. On robot or Local Naoqi: use '192.168.1.188'.")
    parser.add_argument("--port", type=int, default=9559,
                        help="Naoqi port number")
    print("Hello")
    args = parser.parse_args()
    session = qi.Session()
    print("Hello")
    try:
        session.connect("tcp://" + args.ip + ":" + str(args.port))
        print("connection complete")
    except RuntimeError:
        print("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) + ".\n"
              "Please check your script arguments. Run with -h option for help.")
        sys.exit(1)
    # ... (remainder of the snippet is truncated in the source)
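The value stored in ret['touch_position'] above comes from a plain rectangle hit-test: every named region in TOUCH_LIST is an x-range plus a y-range in tablet pixels, and check_valid_touch records the first target region that contains the (x, y) reported by the onTouchDown signal. Below is a minimal, standalone sketch of that pattern; the region names and pixel ranges are illustrative placeholders rather than a real tablet layout.

# Minimal sketch of the TOUCH_LIST hit-test pattern from the snippet above.
# Region names and pixel ranges are illustrative placeholders.
TOUCH_REGIONS = {
    "BUTTON_LEFT": {"x": (75, 600), "y": (233, 593)},
    "BUTTON_RIGHT": {"x": (669, 1192), "y": (227, 598)},
}

def touch_position(x, y, regions=TOUCH_REGIONS):
    """Return the name of the first region containing (x, y), else None."""
    for name, r in regions.items():
        if r["x"][0] < x < r["x"][1] and r["y"][0] < y < r["y"][1]:
            return name
    return None

if __name__ == "__main__":
    print(touch_position(100, 300))  # BUTTON_LEFT
    print(touch_position(700, 300))  # BUTTON_RIGHT
    print(touch_position(0, 0))      # None

Like the loop over self.target_touch_list in the snippet, regions are tested in insertion order, so the first matching region wins when regions overlap.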

main_frame.py

Source: main_frame.py (GitHub)

# This pygame menu is based on code by garthvh from
# https://github.com/garthvh/pitftmenu/blob/master/menu_8button.py
# The original code was for a screen of size 480*320 with 8 buttons;
# this project uses a piTFT of size 320*240 and 4 buttons.
# NOTE: legacy Python 2 code (print statements, cv2.createLBPHFaceRecognizer
# from OpenCV 2.4).
import cv2
import sys, pygame
from pygame.locals import *
import time
import subprocess
import os
import argparse
import pickle
import numpy
from qr_package import *
from parcel_tools import *

parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", default=90.0, help="Threshold of the recognizer, default 90.0")
parser.add_argument("-T", "--test_mode", action="store_true", help="Flag for testing mode")
parser.add_argument("-F", "--check_courier_face", action="store_true", help="Flag for checking the courier's face")
args = vars(parser.parse_args())
threshold = float(args['threshold'])
testing_mode = args['test_mode']
check_courier_face = args['check_courier_face']

# Route SDL to the piTFT framebuffer and its resistive touchscreen.
os.putenv('SDL_FBDEV', '/dev/fb1')
os.putenv('SDL_MOUSEDEV', '/dev/input/touchscreen')
os.putenv('SDL_MOUSEDRV', 'TSLIB')
os.putenv('SDL_VIDEODRIVER', 'fbcon')

# Initialize pygame and hide mouse
pygame.init()
pygame.mouse.set_visible(0)


# Draw button text at (xpo, ypo) with a border of the given width, height, and colour.
def make_button(text, xpo, ypo, height, width, colour):
    font = pygame.font.Font(None, 30)
    label = font.render(str(text), 1, (colour))
    screen.blit(label, (xpo, ypo))
    pygame.draw.rect(screen, blue, (xpo - 10, ypo - 10, width, height), 3)


# Define each button press action
def button(number):
    print "You pressed button ", number
    if number == 1:
        check = unlock_with_face()
        if not check:
            warning = 'Warning, an unauthorised person is trying to open your door with face recognition'
            print "Send message: ", warning
            qr_tools.send_text_message(warning)
        else:
            print 'Door opened'
    if number == 2:
        if testing_mode:
            print 'Using testing mode of the parcel functions'
        res_data = qr_tools.camstream_QR_nested(pygame, screen)
        check_code = Parcel_tools.check_tracking_number(res_data, testing_mode)
        if check_code == False:
            warning = 'Warning, someone is trying to scan an invalid tracking number to open your door.'
            print "Send message: ", warning
            qr_tools.send_text_message(warning)
        else:
            number, courier = check_code
            if check_courier_face:
                check_face = check_deliver_face()
                if check_face == False:
                    warning = 'Warning, an unauthorised courier is trying to deliver a package to your home.'
                    print "Send message: ", warning
                    qr_tools.send_text_message(warning)
                else:
                    print 'Door open'
                    message = 'Your package No. %s has been delivered by %s' % (number, courier)
                    print "Send message: ", message
                    qr_tools.send_text_message(message)
            else:
                print 'Door open'
                message = 'Your package No. %s has been delivered by %s' % (number, courier)
                print "Send message: ", message
                qr_tools.send_text_message(message)

    if number == 3:
        res_data = qr_tools.camstream_QR_nested(pygame, screen)
        check = qr_tools.check_qr_code(res_data)
        if not check:
            warning = 'Warning, someone is trying to scan an invalid QR pass code to open your door.'
            print "Send message: ", warning
            qr_tools.send_text_message(warning)
        else:
            message = 'Your guest with phone number %s has opened your door' % (check)
            print "Send message: ", message
            qr_tools.send_text_message(message)
    if number == 4:
        pass


# colors       R    G    B
blue = (0, 0, 255)
black = (0, 0, 0)
cyan = (50, 255, 255)

# Set up the base menu; you can customize it with the colors above.
# Set size of the screen
size = width, height = 320, 240
screen = pygame.display.set_mode(size)


def recognize_face(timeout=30):
    camera = cv2.VideoCapture(0)
    camera.set(3, 320)  # frame width
    camera.set(4, 240)  # frame height
    cascade_path = 'cascades/haarcascade_frontalface_default.xml'
    facedetector = cv2.CascadeClassifier(cascade_path)
    face_reconizer = cv2.createLBPHFaceRecognizer(threshold=threshold)
    face_reconizer.load('recognizers/default_recognizer.model')
    with open('recognizers/default_recognizer.pickle', 'rb') as f:
        name_dict = pickle.load(f)
    result_count = 0
    name = None
    start_time = time.time()
    while time.time() - start_time <= timeout and name == None:
        (grabbed, frame) = camera.read()
        if not grabbed:
            break
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faceRects = facedetector.detectMultiScale(gray)

        if len(faceRects):
            # Use the largest detected face.
            (x, y, w, h) = max(faceRects, key=lambda b: (b[2] * b[3]))
            face = gray[y:y + h, x:x + w]
            index, confidence = face_reconizer.predict(face)
            print index, name_dict[index], confidence
            if index != -1:
                name = name_dict[index]
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
            result_count += 1
            if result_count >= 10:
                name = 'Unknown'
        pyimage = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        pyimage = numpy.rot90(pyimage)
        pyimage = pygame.surfarray.make_surface(pyimage)
        screen.blit(pyimage, (0, 0))
        pygame.display.flip()
        key = cv2.waitKey(1)
    camera.release()
    cv2.destroyAllWindows()
    return name


def unlock_with_face():
    name = recognize_face()
    if not name:
        return False
    if 'master' in name:
        return True
    else:
        return False


def check_deliver_face():
    name = recognize_face()
    if not name:
        return False
    if 'courier' in name:
        return True
    else:
        return False


qr_tools = QR_tools()
go = True
while go:
    try:
        screen.fill(black)

        make_button("Recognize face", 10, 10, 110, 150, cyan)
        make_button("Parcel", 170, 10, 110, 150, cyan)
        make_button("QR code", 10, 130, 110, 150, cyan)
        make_button("Others", 170, 130, 110, 150, cyan)
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:

                touch_position = pygame.mouse.get_pos()

                # Map the touch position to one of the four button areas.
                if 10 <= touch_position[0] <= 160 and 10 <= touch_position[1] <= 120:
                    button(1)
                if 170 <= touch_position[0] <= 310 and 10 <= touch_position[1] <= 120:
                    button(2)
                if 10 <= touch_position[0] <= 160 and 130 <= touch_position[1] <= 230:
                    button(3)
                if 170 <= touch_position[0] <= 310 and 130 <= touch_position[1] <= 230:
                    button(4)
        pygame.display.update()
    except KeyboardInterrupt:
        go = False
pygame.quit()
sys.exit()
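Here touch_position is simply pygame.mouse.get_pos() read during a MOUSEBUTTONDOWN event: the piTFT's resistive touchscreen is routed through TSLIB (see the SDL_MOUSEDRV setting above), so a tap arrives as an ordinary mouse click at the touch coordinates. A minimal self-contained sketch of that event loop follows, runnable on a desktop as well; the single button rectangle is a hypothetical placeholder.

import sys
import pygame

# Minimal sketch of the MOUSEBUTTONDOWN hit-test from the menu above.
# 320x240 matches the piTFT; the button rectangle is a placeholder.
pygame.init()
screen = pygame.display.set_mode((320, 240))
button_rect = pygame.Rect(10, 10, 150, 110)

while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
            sys.exit()
        if event.type == pygame.MOUSEBUTTONDOWN:
            # On a TSLIB-mapped touchscreen this is the touch position.
            touch_position = pygame.mouse.get_pos()
            if button_rect.collidepoint(touch_position):
                print("Button pressed at", touch_position)
    screen.fill((0, 0, 0))
    pygame.draw.rect(screen, (50, 255, 255), button_rect, 3)
    pygame.display.flip()

pygame.Rect.collidepoint performs the same bounds check as the chained comparisons in the original loop, just in one call per button.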


core_input_gestures.py

Source: core_input_gestures.py (GitHub)

# ... (fragment: the source truncates lines before and after this loop)
while not pyray.window_should_close():  # Detect window close button or ESC key
    # Update
    last_gesture = current_gesture
    current_gesture = pyray.get_gesture_detected()
    touch_position = pyray.get_touch_position(0)
    if (
        pyray.check_collision_point_rec(touch_position, touch_area)
        and current_gesture != pyray.GESTURE_NONE
    ):
        if current_gesture != last_gesture:
            gesture_strings.append(GESTURE_LABELS[current_gesture])
            # Reset gestures strings
            if len(gesture_strings) >= MAX_GESTURE_STRINGS:
                gesture_strings = []
    # Draw
    pyray.begin_drawing()
    pyray.clear_background(RAYWHITE)
    pyray.draw_rectangle_rec(touch_area, GRAY)
    pyray.draw_rectangle(225, 15, SCREEN_WIDTH - 240, SCREEN_HEIGHT - 30, ...
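pyray.get_touch_position(0) returns the position of the first active touch point (falling back to the mouse position on desktop), and check_collision_point_rec tests it against a Rectangle. Since the fragment above omits its setup lines, here is a minimal, self-contained variant — a sketch assuming the standard raylib-python-cffi (pyray) API, with illustrative window and rectangle dimensions.

import pyray

# Minimal touch-position demo; window size and the touch-area
# rectangle are illustrative values, not taken from the fragment.
SCREEN_WIDTH, SCREEN_HEIGHT = 800, 450

pyray.init_window(SCREEN_WIDTH, SCREEN_HEIGHT, "touch position demo")
pyray.set_target_fps(60)
touch_area = pyray.Rectangle(220, 10, SCREEN_WIDTH - 230, SCREEN_HEIGHT - 20)

while not pyray.window_should_close():
    touch_position = pyray.get_touch_position(0)  # first touch point, or mouse
    inside = pyray.check_collision_point_rec(touch_position, touch_area)

    pyray.begin_drawing()
    pyray.clear_background(pyray.RAYWHITE)
    pyray.draw_rectangle_rec(touch_area, pyray.GRAY)
    if inside:
        # Mark the current touch/mouse position inside the touch area.
        pyray.draw_circle_v(touch_position, 10, pyray.MAROON)
    pyray.end_drawing()

pyray.close_window()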

