How to use running_pidfile_ids method in autotest

Best Python code snippet using autotest_python

monitor_db_functional_test.py

Source:monitor_db_functional_test.py Github

copy

Full Screen

...114 def nonfinished_pidfile_ids(self):115 return [pidfile_id for pidfile_id, pidfile_contents116 in self._pidfiles.iteritems()117 if pidfile_contents.exit_status is None]118 def running_pidfile_ids(self):119 return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()120 if self._pidfiles[pidfile_id].process is not None]121 def pidfile_from_path(self, working_directory, pidfile_name):122 return self._pidfile_index[(working_directory, pidfile_name)]123 def attached_files(self, working_directory):124 """125 Return dict mapping path to contents for attached files with specified126 paths.127 """128 return dict((path, contents) for path, contents129 in self._attached_files.get(working_directory, [])130 if path is not None)131 # DroneManager emulation APIs for use by monitor_db132 def get_orphaned_autoserv_processes(self):133 return set()134 def total_running_processes(self):135 return sum(pidfile_id._num_processes136 for pidfile_id in self.nonfinished_pidfile_ids())137 def max_runnable_processes(self, username, drone_hostnames_allowed):138 return self.process_capacity - self.total_running_processes()139 def refresh(self):140 for pidfile_id in self._unregistered_pidfiles:141 # intentionally handle non-registered pidfiles silently142 self._pidfiles.pop(pidfile_id, None)143 self._unregistered_pidfiles = set()144 def execute_actions(self):145 # executing an "execute_command" causes a pidfile to be created146 for pidfile_id in self._future_pidfiles:147 # Process objects are opaque to monitor_db148 process = object()149 self._pidfiles[pidfile_id].process = process150 self._process_index[process] = pidfile_id151 self._future_pidfiles = []152 for pidfile_id in self._set_pidfile_exit_status_queue:153 self._set_pidfile_exit_status(pidfile_id, 271)154 self._set_pidfile_exit_status_queue = []155 def attach_file_to_execution(self, result_dir, file_contents,156 file_path=None):157 self._attached_files.setdefault(result_dir, set()).add((file_path,158 file_contents))159 return 'attach_path'160 def _initialize_pidfile(self, pidfile_id):161 if pidfile_id not in self._pidfiles:162 assert pidfile_id.key() not in self._pidfile_index163 self._pidfiles[pidfile_id] = drone_manager.PidfileContents()164 self._pidfile_index[pidfile_id.key()] = pidfile_id165 def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):166 if working_directory.startswith('hosts/'):167 # such paths look like hosts/host1/1-verify, we'll grab the end168 type_string = working_directory.rsplit('-', 1)[1]169 pidfile_type = _PidfileType.get_value(type_string)170 else:171 pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]172 self._last_pidfile_id[pidfile_type] = pidfile_id173 def execute_command(self, command, working_directory, pidfile_name,174 num_processes, log_file=None, paired_with_pidfile=None,175 username=None, drone_hostnames_allowed=None):176 logging.debug('Executing %s in %s', command, working_directory)177 pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)178 if pidfile_id.key() in self._pidfile_index:179 pidfile_id = self._pidfile_index[pidfile_id.key()]180 pidfile_id._num_processes = num_processes181 pidfile_id._paired_with_pidfile = paired_with_pidfile182 self._future_pidfiles.append(pidfile_id)183 self._initialize_pidfile(pidfile_id)184 self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id185 self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)186 return pidfile_id187 def get_pidfile_contents(self, pidfile_id, use_second_read=False):188 if pidfile_id not in self._pidfiles:189 logging.debug('Request for nonexistent pidfile %s', pidfile_id)190 return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())191 def is_process_running(self, process):192 return True193 def register_pidfile(self, pidfile_id):194 self._initialize_pidfile(pidfile_id)195 def unregister_pidfile(self, pidfile_id):196 self._unregistered_pidfiles.add(pidfile_id)197 def declare_process_count(self, pidfile_id, num_processes):198 pidfile_id.num_processes = num_processes199 def absolute_path(self, path):200 return 'absolute/' + path201 def write_lines_to_file(self, file_path, lines, paired_with_process=None):202 # TODO: record this203 pass204 def get_pidfile_id_from(self, execution_tag, pidfile_name):205 default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,206 num_processes=0)207 return self._pidfile_index.get((execution_tag, pidfile_name),208 default_pidfile)209 def kill_process(self, process, sig=signal.SIGKILL):210 pidfile_id = self._process_index[process]211 if pidfile_id not in self._pids_to_signals_received:212 self._pids_to_signals_received[pidfile_id] = set()213 self._pids_to_signals_received[pidfile_id].add(sig)214 if signal.SIGKILL == sig:215 self._set_pidfile_exit_status_queue.append(pidfile_id)216class MockEmailManager(NullMethodObject):217 _NULL_METHODS = ('send_queued_emails', 'send_email')218 def enqueue_notify_email(self, subject, message):219 logging.warning('enqueue_notify_email: %s', subject)220 logging.warning(message)221class SchedulerFunctionalTest(unittest.TestCase,222 frontend_test_utils.FrontendTestMixin):223 # some number of ticks after which the scheduler is presumed to have224 # stabilized, given no external changes225 _A_LOT_OF_TICKS = 10226 def setUp(self):227 self._frontend_common_setup()228 self._set_stubs()229 self._set_global_config_values()230 self._create_dispatcher()231 logging.basicConfig(level=logging.DEBUG)232 def _create_dispatcher(self):233 self.dispatcher = monitor_db.Dispatcher()234 def tearDown(self):235 self._database.disconnect()236 self._frontend_common_teardown()237 def _set_stubs(self):238 self.mock_config = global_config.FakeGlobalConfig()239 self.god.stub_with(global_config, 'global_config', self.mock_config)240 self.mock_drone_manager = MockDroneManager()241 drone_manager._set_instance(self.mock_drone_manager)242 self.mock_email_manager = MockEmailManager()243 self.god.stub_with(email_manager, 'manager', self.mock_email_manager)244 self._database = (245 database_connection.TranslatingDatabase.get_test_database(246 translators=scheduler_lib._DB_TRANSLATORS))247 self._database.connect(db_type='django')248 self.god.stub_with(monitor_db, '_db', self._database)249 self.god.stub_with(scheduler_models, '_db', self._database)250 MockConnectionManager.db = self._database251 scheduler_lib.ConnectionManager = MockConnectionManager252 monitor_db.initialize_globals()253 scheduler_models.initialize_globals()254 patcher = mock.patch(255 'autotest_lib.scheduler.luciferlib.is_lucifer_enabled',256 lambda: False)257 patcher.start()258 self.addCleanup(patcher.stop)259 def _set_global_config_values(self):260 self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',261 1)262 self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',263 999999)264 self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True)265 self.mock_config.set_config_value('SCHEDULER',266 'clean_interval_minutes', 60)267 self.mock_config.set_config_value('SCHEDULER',268 'max_parse_processes', 50)269 self.mock_config.set_config_value('SCHEDULER',270 'max_transfer_processes', 50)271 self.mock_config.set_config_value('SCHEDULER',272 'clean_interval_minutes', 50)273 self.mock_config.set_config_value('SCHEDULER',274 'max_provision_retries', 1)275 self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1)276 self.mock_config.set_config_value(277 'SCHEDULER', 'secs_to_wait_for_atomic_group_hosts', 600)278 self.mock_config.set_config_value(279 'SCHEDULER', 'inline_host_acquisition', True)280 scheduler_config.config.read_config()281 def _initialize_test(self):282 self.dispatcher.initialize()283 def _run_dispatcher(self):284 for _ in xrange(self._A_LOT_OF_TICKS):285 self.dispatcher.tick()286 def test_idle(self):287 self._initialize_test()288 self._run_dispatcher()289 def _assert_process_executed(self, working_directory, pidfile_name):290 process_was_executed = self.mock_drone_manager.was_process_executed(291 'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)292 self.assert_(process_was_executed,293 '%s/%s not executed' % (working_directory, pidfile_name))294 def _update_instance(self, model_instance):295 return type(model_instance).objects.get(pk=model_instance.pk)296 def _check_statuses(self, queue_entry, queue_entry_status,297 host_status=None):298 self._check_entry_status(queue_entry, queue_entry_status)299 if host_status:300 self._check_host_status(queue_entry.host, host_status)301 def _check_entry_status(self, queue_entry, status):302 # update from DB303 queue_entry = self._update_instance(queue_entry)304 self.assertEquals(queue_entry.status, status)305 def _check_host_status(self, host, status):306 # update from DB307 host = self._update_instance(host)308 self.assertEquals(host.status, status)309 def _run_pre_job_verify(self, queue_entry):310 self._run_dispatcher() # launches verify311 self._check_statuses(queue_entry, HqeStatus.VERIFYING,312 HostStatus.VERIFYING)313 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)314 def test_simple_job(self):315 self._initialize_test()316 job, queue_entry = self._make_job_and_queue_entry()317 self._run_pre_job_verify(queue_entry)318 self._run_dispatcher() # launches job319 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)320 self._finish_job(queue_entry)321 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)322 self._assert_nothing_is_running()323 def _setup_for_pre_job_reset(self):324 self._initialize_test()325 job, queue_entry = self._make_job_and_queue_entry()326 job.reboot_before = model_attributes.RebootBefore.ALWAYS327 job.save()328 return queue_entry329 def _run_pre_job_reset_job(self, queue_entry):330 self._run_dispatcher() # reset331 self._check_statuses(queue_entry, HqeStatus.RESETTING,332 HostStatus.RESETTING)333 self.mock_drone_manager.finish_process(_PidfileType.RESET)334 self._run_dispatcher() # job335 self._finish_job(queue_entry)336 def test_pre_job_reset(self):337 queue_entry = self._setup_for_pre_job_reset()338 self._run_pre_job_reset_job(queue_entry)339 def _run_pre_job_reset_one_failure(self):340 queue_entry = self._setup_for_pre_job_reset()341 self._run_dispatcher() # reset342 self.mock_drone_manager.finish_process(_PidfileType.RESET,343 exit_status=256)344 self._run_dispatcher() # repair345 self._check_statuses(queue_entry, HqeStatus.QUEUED,346 HostStatus.REPAIRING)347 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)348 return queue_entry349 def test_pre_job_reset_failure(self):350 queue_entry = self._run_pre_job_reset_one_failure()351 # from here the job should run as normal352 self._run_pre_job_reset_job(queue_entry)353 def test_pre_job_reset_double_failure(self):354 # TODO (showard): this test isn't perfect. in reality, when the second355 # reset fails, it copies its results over to the job directory using356 # copy_results_on_drone() and then parses them. since we don't handle357 # that, there appear to be no results at the job directory. the358 # scheduler handles this gracefully, parsing gets effectively skipped,359 # and this test passes as is. but we ought to properly test that360 # behavior.361 queue_entry = self._run_pre_job_reset_one_failure()362 self._run_dispatcher() # second reset363 self.mock_drone_manager.finish_process(_PidfileType.RESET,364 exit_status=256)365 self._run_dispatcher()366 self._check_statuses(queue_entry, HqeStatus.FAILED,367 HostStatus.REPAIR_FAILED)368 # nothing else should run369 self._assert_nothing_is_running()370 def _assert_nothing_is_running(self):371 self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])372 def _setup_for_post_job_cleanup(self):373 self._initialize_test()374 job, queue_entry = self._make_job_and_queue_entry()375 job.reboot_after = model_attributes.RebootAfter.ALWAYS376 job.save()377 return queue_entry378 def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,379 include_verify=True):380 if include_verify:381 self._run_pre_job_verify(queue_entry)382 self._run_dispatcher() # job383 self.mock_drone_manager.finish_process(_PidfileType.JOB)384 self._run_dispatcher() # parsing + cleanup385 self.mock_drone_manager.finish_process(_PidfileType.PARSE)386 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,387 exit_status=256)388 self._run_dispatcher() # repair, HQE unaffected389 return queue_entry390 def test_post_job_cleanup_failure(self):391 queue_entry = self._setup_for_post_job_cleanup()392 self._run_post_job_cleanup_failure_up_to_repair(queue_entry)393 self._check_statuses(queue_entry, HqeStatus.COMPLETED,394 HostStatus.REPAIRING)395 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)396 self._run_dispatcher()397 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)398 def test_post_job_cleanup_failure_repair_failure(self):399 queue_entry = self._setup_for_post_job_cleanup()400 self._run_post_job_cleanup_failure_up_to_repair(queue_entry)401 self.mock_drone_manager.finish_process(_PidfileType.REPAIR,402 exit_status=256)403 self._run_dispatcher()404 self._check_statuses(queue_entry, HqeStatus.COMPLETED,405 HostStatus.REPAIR_FAILED)406 def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):407 pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]408 queue_entry = self._update_instance(queue_entry)409 pidfile_id = self.mock_drone_manager.pidfile_from_path(410 queue_entry.execution_path(), pidfile_name)411 self.assert_(pidfile_id._paired_with_pidfile)412 def _finish_job(self, queue_entry):413 self._check_statuses(queue_entry, HqeStatus.RUNNING)414 self.mock_drone_manager.finish_process(_PidfileType.JOB)415 self._run_dispatcher() # launches parsing416 self._check_statuses(queue_entry, HqeStatus.PARSING)417 self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)418 self._finish_parsing()419 def _finish_parsing(self):420 self.mock_drone_manager.finish_process(_PidfileType.PARSE)421 self._run_dispatcher()422 def _create_reverify_request(self):423 host = self.hosts[0]424 models.SpecialTask.schedule_special_task(425 host=host, task=models.SpecialTask.Task.VERIFY)426 return host427 def test_requested_reverify(self):428 host = self._create_reverify_request()429 self._run_dispatcher()430 self._check_host_status(host, HostStatus.VERIFYING)431 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)432 self._run_dispatcher()433 self._check_host_status(host, HostStatus.READY)434 def test_requested_reverify_failure(self):435 host = self._create_reverify_request()436 self._run_dispatcher()437 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,438 exit_status=256)439 self._run_dispatcher() # repair440 self._check_host_status(host, HostStatus.REPAIRING)441 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)442 self._run_dispatcher()443 self._check_host_status(host, HostStatus.READY)444 def _setup_for_do_not_verify(self):445 self._initialize_test()446 job, queue_entry = self._make_job_and_queue_entry()447 queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY448 queue_entry.host.save()449 return queue_entry450 def test_do_not_verify_job(self):451 queue_entry = self._setup_for_do_not_verify()452 self._run_dispatcher() # runs job directly453 self._finish_job(queue_entry)454 def test_do_not_verify_job_with_cleanup(self):455 queue_entry = self._setup_for_do_not_verify()456 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS457 queue_entry.job.save()458 self._run_dispatcher() # cleanup459 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)460 self._run_dispatcher() # job461 self._finish_job(queue_entry)462 def test_do_not_verify_pre_job_cleanup_failure(self):463 queue_entry = self._setup_for_do_not_verify()464 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS465 queue_entry.job.save()466 self._run_dispatcher() # cleanup467 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,468 exit_status=256)469 self._run_dispatcher() # failure ignored; job runs470 self._finish_job(queue_entry)471 def test_do_not_verify_post_job_cleanup_failure(self):472 queue_entry = self._setup_for_do_not_verify()473 queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS474 queue_entry.job.save()475 self._run_post_job_cleanup_failure_up_to_repair(queue_entry,476 include_verify=False)477 # failure ignored, host still set to Ready478 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)479 self._run_dispatcher() # nothing else runs480 self._assert_nothing_is_running()481 def test_do_not_verify_requested_reverify_failure(self):482 host = self._create_reverify_request()483 host.protection = host_protections.Protection.DO_NOT_VERIFY484 host.save()485 self._run_dispatcher()486 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,487 exit_status=256)488 self._run_dispatcher()489 self._check_host_status(host, HostStatus.READY) # ignore failure490 self._assert_nothing_is_running()491 def test_job_abort_in_verify(self):492 self._initialize_test()493 job = self._create_job(hosts=[1])494 queue_entries = list(job.hostqueueentry_set.all())495 self._run_dispatcher() # launches verify496 self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)497 job.hostqueueentry_set.update(aborted=True)498 self._run_dispatcher() # kills verify, launches cleanup499 self.assert_(self.mock_drone_manager.was_last_process_killed(500 _PidfileType.VERIFY, set([signal.SIGKILL])))501 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)502 self._run_dispatcher()503 def test_job_abort(self):504 self._initialize_test()505 job = self._create_job(hosts=[1])506 job.run_reset = False507 job.save()508 queue_entries = list(job.hostqueueentry_set.all())509 self._run_dispatcher() # launches job510 self._check_statuses(queue_entries[0], HqeStatus.RUNNING)511 job.hostqueueentry_set.update(aborted=True)512 self._run_dispatcher() # kills job, launches gathering513 self._check_statuses(queue_entries[0], HqeStatus.GATHERING)514 self.mock_drone_manager.finish_process(_PidfileType.GATHER)515 self._run_dispatcher() # launches parsing + cleanup516 queue_entry = job.hostqueueentry_set.all()[0]517 self._finish_parsing()518 # The abort will cause gathering to launch a cleanup.519 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)520 self._run_dispatcher()521 self.mock_drone_manager.finish_process(_PidfileType.RESET)522 self._run_dispatcher()523 def test_job_abort_queued_synchronous(self):524 self._initialize_test()525 job = self._create_job(hosts=[1,2])526 job.synch_count = 2527 job.save()528 job.hostqueueentry_set.update(aborted=True)529 self._run_dispatcher()530 for host_queue_entry in job.hostqueueentry_set.all():531 self.assertEqual(host_queue_entry.status,532 HqeStatus.ABORTED)533 def test_no_pidfile_leaking(self):534 self._initialize_test()535 self.test_simple_job()536 self.mock_drone_manager.refresh()537 self.assertEquals(self.mock_drone_manager._pidfiles, {})538 self.test_job_abort_in_verify()539 self.mock_drone_manager.refresh()540 self.assertEquals(self.mock_drone_manager._pidfiles, {})541 self.test_job_abort()542 self.mock_drone_manager.refresh()543 self.assertEquals(self.mock_drone_manager._pidfiles, {})544 def _make_job_and_queue_entry(self):545 job = self._create_job(hosts=[1])546 queue_entry = job.hostqueueentry_set.all()[0]547 return job, queue_entry548 def test_recover_running_no_process(self):549 # recovery should re-execute a Running HQE if no process is found550 _, queue_entry = self._make_job_and_queue_entry()551 queue_entry.status = HqeStatus.RUNNING552 queue_entry.execution_subdir = '1-myuser/host1'553 queue_entry.save()554 queue_entry.host.status = HostStatus.RUNNING555 queue_entry.host.save()556 self._initialize_test()557 self._run_dispatcher()558 self._finish_job(queue_entry)559 def test_recover_verifying_hqe_no_special_task(self):560 # recovery should move a Resetting HQE with no corresponding561 # Verify or Reset SpecialTask back to Queued.562 _, queue_entry = self._make_job_and_queue_entry()563 queue_entry.status = HqeStatus.RESETTING564 queue_entry.save()565 # make some dummy SpecialTasks that shouldn't count566 models.SpecialTask.objects.create(567 host=queue_entry.host,568 task=models.SpecialTask.Task.RESET,569 requested_by=models.User.current_user())570 models.SpecialTask.objects.create(571 host=queue_entry.host,572 task=models.SpecialTask.Task.CLEANUP,573 queue_entry=queue_entry,574 is_complete=True,575 requested_by=models.User.current_user())576 self._initialize_test()577 self._check_statuses(queue_entry, HqeStatus.QUEUED)578 def _test_recover_verifying_hqe_helper(self, task, pidfile_type):579 _, queue_entry = self._make_job_and_queue_entry()580 queue_entry.status = HqeStatus.VERIFYING581 queue_entry.save()582 special_task = models.SpecialTask.objects.create(583 host=queue_entry.host, task=task, queue_entry=queue_entry)584 self._initialize_test()585 self._run_dispatcher()586 self.mock_drone_manager.finish_process(pidfile_type)587 self._run_dispatcher()588 # don't bother checking the rest of the job execution, as long as the589 # SpecialTask ran590 def test_recover_verifying_hqe_with_cleanup(self):591 # recover an HQE that was in pre-job cleanup592 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,593 _PidfileType.CLEANUP)594 def test_recover_verifying_hqe_with_verify(self):595 # recover an HQE that was in pre-job verify596 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,597 _PidfileType.VERIFY)598 def test_recover_parsing(self):599 self._initialize_test()600 job, queue_entry = self._make_job_and_queue_entry()601 job.run_verify = False602 job.run_reset = False603 job.reboot_after = model_attributes.RebootAfter.NEVER604 job.save()605 self._run_dispatcher() # launches job606 self.mock_drone_manager.finish_process(_PidfileType.JOB)607 self._run_dispatcher() # launches parsing608 # now "restart" the scheduler609 self._create_dispatcher()610 self._initialize_test()611 self._run_dispatcher()612 self.mock_drone_manager.finish_process(_PidfileType.PARSE)613 self._run_dispatcher()614 def test_recover_parsing__no_process_already_aborted(self):615 _, queue_entry = self._make_job_and_queue_entry()616 queue_entry.execution_subdir = 'host1'617 queue_entry.status = HqeStatus.PARSING618 queue_entry.aborted = True619 queue_entry.save()620 self._initialize_test()621 self._run_dispatcher()622 def test_job_scheduled_just_after_abort(self):623 # test a pretty obscure corner case where a job is aborted while queued,624 # another job is ready to run, and throttling is active. the post-abort625 # cleanup must not be pre-empted by the second job.626 # This test kind of doesn't make sense anymore after verify+cleanup627 # were merged into reset. It should maybe just be removed.628 job1, queue_entry1 = self._make_job_and_queue_entry()629 queue_entry1.save()630 job2, queue_entry2 = self._make_job_and_queue_entry()631 job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY632 job2.save()633 self.mock_drone_manager.process_capacity = 0634 self._run_dispatcher() # schedule job1, but won't start verify635 job1.hostqueueentry_set.update(aborted=True)636 self.mock_drone_manager.process_capacity = 100637 self._run_dispatcher() # reset must run here, not verify for job2638 self._check_statuses(queue_entry1, HqeStatus.ABORTED,639 HostStatus.RESETTING)640 self.mock_drone_manager.finish_process(_PidfileType.RESET)641 self._run_dispatcher() # now verify starts for job2642 self._check_statuses(queue_entry2, HqeStatus.RUNNING,643 HostStatus.RUNNING)644 def test_reverify_interrupting_pre_job(self):645 # ensure things behave sanely if a reverify is scheduled in the middle646 # of pre-job actions647 _, queue_entry = self._make_job_and_queue_entry()648 self._run_dispatcher() # pre-job verify649 self._create_reverify_request()650 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,651 exit_status=256)652 self._run_dispatcher() # repair653 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)654 self._run_dispatcher() # reverify runs now655 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)656 self._run_dispatcher() # pre-job verify657 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)658 self._run_dispatcher() # and job runs...659 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)660 self._finish_job(queue_entry) # reverify has been deleted661 self._check_statuses(queue_entry, HqeStatus.COMPLETED,662 HostStatus.READY)663 self._assert_nothing_is_running()664 def test_reverify_while_job_running(self):665 # once a job is running, a reverify must not be allowed to preempt666 # Gathering667 _, queue_entry = self._make_job_and_queue_entry()668 self._run_pre_job_verify(queue_entry)669 self._run_dispatcher() # job runs670 self._create_reverify_request()671 # make job end with a signal, so gathering will run672 self.mock_drone_manager.finish_process(_PidfileType.JOB,673 exit_status=271)674 self._run_dispatcher() # gathering must start675 self.mock_drone_manager.finish_process(_PidfileType.GATHER)676 self._run_dispatcher() # parsing and cleanup677 self._finish_parsing()678 self._run_dispatcher() # now reverify runs679 self._check_statuses(queue_entry, HqeStatus.FAILED,680 HostStatus.VERIFYING)681 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)682 self._run_dispatcher()683 self._check_host_status(queue_entry.host, HostStatus.READY)684 def test_reverify_while_host_pending(self):685 # ensure that if a reverify is scheduled while a host is in Pending, it686 # won't run until the host is actually free687 job = self._create_job(hosts=[1,2])688 queue_entry = job.hostqueueentry_set.get(host__hostname='host1')689 job.synch_count = 2690 job.save()691 host2 = self.hosts[1]692 host2.locked = True693 host2.save()694 self._run_dispatcher() # verify host1695 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)696 self._run_dispatcher() # host1 Pending697 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)698 self._create_reverify_request()699 self._run_dispatcher() # nothing should happen here700 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)701 # now let the job run702 host2.locked = False703 host2.save()704 self._run_dispatcher() # verify host2705 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)706 self._run_dispatcher() # run job707 self._finish_job(queue_entry)708 # the reverify should now be running709 self._check_statuses(queue_entry, HqeStatus.COMPLETED,710 HostStatus.VERIFYING)711 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)712 self._run_dispatcher()713 self._check_host_status(queue_entry.host, HostStatus.READY)714 def test_throttling(self):715 job = self._create_job(hosts=[1,2,3])716 job.synch_count = 3717 job.save()718 queue_entries = list(job.hostqueueentry_set.all())719 def _check_hqe_statuses(*statuses):720 for queue_entry, status in zip(queue_entries, statuses):721 self._check_statuses(queue_entry, status)722 self.mock_drone_manager.process_capacity = 2723 self._run_dispatcher() # verify runs on 1 and 2724 queue_entries = list(job.hostqueueentry_set.all())725 _check_hqe_statuses(HqeStatus.QUEUED,726 HqeStatus.VERIFYING, HqeStatus.VERIFYING)727 self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)728 self.mock_drone_manager.finish_specific_process(729 'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)730 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)731 self._run_dispatcher() # verify runs on 3732 _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,733 HqeStatus.PENDING)734 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)735 self._run_dispatcher() # job won't run due to throttling736 _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,737 HqeStatus.STARTING)738 self._assert_nothing_is_running()739 self.mock_drone_manager.process_capacity = 3740 self._run_dispatcher() # now job runs741 _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,...

Full Screen

Full Screen

monitor_db_functional_unittest.py

Source:monitor_db_functional_unittest.py Github

copy

Full Screen

...118 def nonfinished_pidfile_ids(self):119 return [pidfile_id for pidfile_id, pidfile_contents120 in self._pidfiles.iteritems()121 if pidfile_contents.exit_status is None]122 def running_pidfile_ids(self):123 return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()124 if self._pidfiles[pidfile_id].process is not None]125 def pidfile_from_path(self, working_directory, pidfile_name):126 return self._pidfile_index[(working_directory, pidfile_name)]127 def attached_files(self, working_directory):128 """129 Return dict mapping path to contents for attached files with specified130 paths.131 """132 return dict((path, contents) for path, contents133 in self._attached_files.get(working_directory, [])134 if path is not None)135 # DroneManager emulation APIs for use by monitor_db136 def get_orphaned_autoserv_processes(self):137 return set()138 def total_running_processes(self):139 return sum(pidfile_id._num_processes140 for pidfile_id in self.nonfinished_pidfile_ids())141 def max_runnable_processes(self, username, drone_hostnames_allowed):142 return self.process_capacity - self.total_running_processes()143 def refresh(self):144 for pidfile_id in self._unregistered_pidfiles:145 # intentionally handle non-registered pidfiles silently146 self._pidfiles.pop(pidfile_id, None)147 self._unregistered_pidfiles = set()148 def execute_actions(self):149 # executing an "execute_command" causes a pidfile to be created150 for pidfile_id in self._future_pidfiles:151 # Process objects are opaque to monitor_db152 process = object()153 self._pidfiles[pidfile_id].process = process154 self._process_index[process] = pidfile_id155 self._future_pidfiles = []156 def attach_file_to_execution(self, result_dir, file_contents,157 file_path=None):158 self._attached_files.setdefault(result_dir, set()).add((file_path,159 file_contents))160 return 'attach_path'161 def _initialize_pidfile(self, pidfile_id):162 if pidfile_id not in self._pidfiles:163 assert pidfile_id.key() not in self._pidfile_index164 self._pidfiles[pidfile_id] = drone_manager.PidfileContents()165 self._pidfile_index[pidfile_id.key()] = pidfile_id166 def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):167 if working_directory.startswith('hosts/'):168 # such paths look like hosts/host1/1-verify, we'll grab the end169 type_string = working_directory.rsplit('-', 1)[1]170 pidfile_type = _PidfileType.get_value(type_string)171 else:172 pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]173 self._last_pidfile_id[pidfile_type] = pidfile_id174 def execute_command(self, command, working_directory, pidfile_name,175 num_processes, log_file=None, paired_with_pidfile=None,176 username=None, drone_hostnames_allowed=None):177 logging.debug('Executing %s in %s', command, working_directory)178 pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)179 if pidfile_id.key() in self._pidfile_index:180 pidfile_id = self._pidfile_index[pidfile_id.key()]181 pidfile_id._num_processes = num_processes182 pidfile_id._paired_with_pidfile = paired_with_pidfile183 self._future_pidfiles.append(pidfile_id)184 self._initialize_pidfile(pidfile_id)185 self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id186 self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)187 return pidfile_id188 def get_pidfile_contents(self, pidfile_id, use_second_read=False):189 if pidfile_id not in self._pidfiles:190 logging.debug('Request for nonexistent pidfile %s' % pidfile_id)191 return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())192 def is_process_running(self, process):193 return True194 def register_pidfile(self, pidfile_id):195 self._initialize_pidfile(pidfile_id)196 def unregister_pidfile(self, pidfile_id):197 self._unregistered_pidfiles.add(pidfile_id)198 def declare_process_count(self, pidfile_id, num_processes):199 pidfile_id.num_processes = num_processes200 def absolute_path(self, path):201 return 'absolute/' + path202 def write_lines_to_file(self, file_path, lines, paired_with_process=None):203 # TODO: record this204 pass205 def get_pidfile_id_from(self, execution_tag, pidfile_name):206 default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,207 num_processes=0)208 return self._pidfile_index.get((execution_tag, pidfile_name),209 default_pidfile)210 def kill_process(self, process):211 pidfile_id = self._process_index[process]212 self._killed_pidfiles.add(pidfile_id)213 self._set_pidfile_exit_status(pidfile_id, 271)214class MockEmailManager(NullMethodObject):215 _NULL_METHODS = ('send_queued_admin', 'send')216 def enqueue_admin(self, subject, message):217 logging.warn('enqueue_notify_email: %s', subject)218 logging.warn(message)219class SchedulerFunctionalTest(unittest.TestCase,220 test_utils.FrontendTestMixin):221 # some number of ticks after which the scheduler is presumed to have222 # stabilized, given no external changes223 _A_LOT_OF_TICKS = 10224 def setUp(self):225 self._frontend_common_setup()226 self._set_stubs()227 self._set_settings_values()228 self._create_dispatcher()229 logging.basicConfig(level=logging.DEBUG)230 def _create_dispatcher(self):231 self.dispatcher = monitor_db.Dispatcher()232 def tearDown(self):233 self._database.disconnect()234 self._frontend_common_teardown()235 def _set_stubs(self):236 self.mock_config = MockGlobalConfig()237 self.god.stub_with(settings, 'settings', self.mock_config)238 self.mock_drone_manager = MockDroneManager()239 drone_manager._set_instance(self.mock_drone_manager)240 self.mock_email_manager = MockEmailManager()241 self.god.stub_with(mail, "manager", self.mock_email_manager)242 self._database = (243 database_connection.TranslatingDatabase.get_test_database(244 translators=_DB_TRANSLATORS))245 self._database.connect(db_type='django')246 self.god.stub_with(monitor_db, '_db', self._database)247 self.god.stub_with(scheduler_models, '_db', self._database)248 monitor_db.initialize_globals()249 scheduler_models.initialize_globals()250 def _set_settings_values(self):251 self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',252 1)253 self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',254 999999)255 def _initialize_test(self):256 self.dispatcher.initialize()257 def _run_dispatcher(self):258 for _ in xrange(self._A_LOT_OF_TICKS):259 self.dispatcher.tick()260 def test_idle(self):261 self._initialize_test()262 self._run_dispatcher()263 def _assert_process_executed(self, working_directory, pidfile_name):264 process_was_executed = self.mock_drone_manager.was_process_executed(265 'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)266 self.assert_(process_was_executed,267 '%s/%s not executed' % (working_directory, pidfile_name))268 def _update_instance(self, model_instance):269 return type(model_instance).objects.get(pk=model_instance.pk)270 def _check_statuses(self, queue_entry, queue_entry_status,271 host_status=None):272 self._check_entry_status(queue_entry, queue_entry_status)273 if host_status:274 self._check_host_status(queue_entry.host, host_status)275 def _check_entry_status(self, queue_entry, status):276 # update from DB277 queue_entry = self._update_instance(queue_entry)278 self.assertEquals(queue_entry.status, status)279 def _check_host_status(self, host, status):280 # update from DB281 host = self._update_instance(host)282 self.assertEquals(host.status, status)283 def _run_pre_job_verify(self, queue_entry):284 self._run_dispatcher() # launches verify285 self._check_statuses(queue_entry, HqeStatus.VERIFYING,286 HostStatus.VERIFYING)287 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)288 def test_simple_job(self):289 self._initialize_test()290 job, queue_entry = self._make_job_and_queue_entry()291 self._run_pre_job_verify(queue_entry)292 self._run_dispatcher() # launches job293 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)294 self._finish_job(queue_entry)295 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)296 self._assert_nothing_is_running()297 def _setup_for_pre_job_cleanup(self):298 self._initialize_test()299 job, queue_entry = self._make_job_and_queue_entry()300 job.reboot_before = model_attributes.RebootBefore.ALWAYS301 job.save()302 return queue_entry303 def _run_pre_job_cleanup_job(self, queue_entry):304 self._run_dispatcher() # cleanup305 self._check_statuses(queue_entry, HqeStatus.VERIFYING,306 HostStatus.CLEANING)307 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)308 self._run_dispatcher() # verify309 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)310 self._run_dispatcher() # job311 self._finish_job(queue_entry)312 def test_pre_job_cleanup(self):313 queue_entry = self._setup_for_pre_job_cleanup()314 self._run_pre_job_cleanup_job(queue_entry)315 def _run_pre_job_cleanup_one_failure(self):316 queue_entry = self._setup_for_pre_job_cleanup()317 self._run_dispatcher() # cleanup318 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,319 exit_status=256)320 self._run_dispatcher() # repair321 self._check_statuses(queue_entry, HqeStatus.QUEUED,322 HostStatus.REPAIRING)323 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)324 return queue_entry325 def test_pre_job_cleanup_failure(self):326 queue_entry = self._run_pre_job_cleanup_one_failure()327 # from here the job should run as normal328 self._run_pre_job_cleanup_job(queue_entry)329 def test_pre_job_cleanup_double_failure(self):330 # TODO (showard): this test isn't perfect. in reality, when the second331 # cleanup fails, it copies its results over to the job directory using332 # copy_results_on_drone() and then parses them. since we don't handle333 # that, there appear to be no results at the job directory. the334 # scheduler handles this gracefully, parsing gets effectively skipped,335 # and this test passes as is. but we ought to properly test that336 # behavior.337 queue_entry = self._run_pre_job_cleanup_one_failure()338 self._run_dispatcher() # second cleanup339 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,340 exit_status=256)341 self._run_dispatcher()342 self._check_statuses(queue_entry, HqeStatus.FAILED,343 HostStatus.REPAIR_FAILED)344 # nothing else should run345 self._assert_nothing_is_running()346 def _assert_nothing_is_running(self):347 self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])348 def _setup_for_post_job_cleanup(self):349 self._initialize_test()350 job, queue_entry = self._make_job_and_queue_entry()351 job.reboot_after = model_attributes.RebootAfter.ALWAYS352 job.save()353 return queue_entry354 def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,355 include_verify=True):356 if include_verify:357 self._run_pre_job_verify(queue_entry)358 self._run_dispatcher() # job359 self.mock_drone_manager.finish_process(_PidfileType.JOB)360 self._run_dispatcher() # parsing + cleanup361 self.mock_drone_manager.finish_process(_PidfileType.PARSE)362 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,363 exit_status=256)364 self._run_dispatcher() # repair, HQE unaffected365 self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)366 self._run_dispatcher()367 return queue_entry368 def test_post_job_cleanup_failure(self):369 queue_entry = self._setup_for_post_job_cleanup()370 self._run_post_job_cleanup_failure_up_to_repair(queue_entry)371 self._check_statuses(queue_entry, HqeStatus.COMPLETED,372 HostStatus.REPAIRING)373 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)374 self._run_dispatcher()375 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)376 def test_post_job_cleanup_failure_repair_failure(self):377 queue_entry = self._setup_for_post_job_cleanup()378 self._run_post_job_cleanup_failure_up_to_repair(queue_entry)379 self.mock_drone_manager.finish_process(_PidfileType.REPAIR,380 exit_status=256)381 self._run_dispatcher()382 self._check_statuses(queue_entry, HqeStatus.COMPLETED,383 HostStatus.REPAIR_FAILED)384 def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):385 pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]386 queue_entry = self._update_instance(queue_entry)387 pidfile_id = self.mock_drone_manager.pidfile_from_path(388 queue_entry.execution_path(), pidfile_name)389 self.assert_(pidfile_id._paired_with_pidfile)390 def _finish_job(self, queue_entry):391 self.mock_drone_manager.finish_process(_PidfileType.JOB)392 self._run_dispatcher() # launches parsing + cleanup393 self._check_statuses(queue_entry, HqeStatus.PARSING,394 HostStatus.CLEANING)395 self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)396 self._finish_parsing_and_cleanup(queue_entry)397 def _finish_parsing_and_cleanup(self, queue_entry):398 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)399 self.mock_drone_manager.finish_process(_PidfileType.PARSE)400 self._run_dispatcher()401 self._check_entry_status(queue_entry, HqeStatus.ARCHIVING)402 self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)403 self._run_dispatcher()404 def _create_reverify_request(self):405 host = self.hosts[0]406 models.SpecialTask.schedule_special_task(407 host=host, task=models.SpecialTask.Task.VERIFY)408 return host409 def test_requested_reverify(self):410 host = self._create_reverify_request()411 self._run_dispatcher()412 self._check_host_status(host, HostStatus.VERIFYING)413 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)414 self._run_dispatcher()415 self._check_host_status(host, HostStatus.READY)416 def test_requested_reverify_failure(self):417 host = self._create_reverify_request()418 self._run_dispatcher()419 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,420 exit_status=256)421 self._run_dispatcher() # repair422 self._check_host_status(host, HostStatus.REPAIRING)423 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)424 self._run_dispatcher()425 self._check_host_status(host, HostStatus.READY)426 def _setup_for_do_not_verify(self):427 self._initialize_test()428 job, queue_entry = self._make_job_and_queue_entry()429 queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY430 queue_entry.host.save()431 return queue_entry432 def test_do_not_verify_job(self):433 queue_entry = self._setup_for_do_not_verify()434 self._run_dispatcher() # runs job directly435 self._finish_job(queue_entry)436 def test_do_not_verify_job_with_cleanup(self):437 queue_entry = self._setup_for_do_not_verify()438 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS439 queue_entry.job.save()440 self._run_dispatcher() # cleanup441 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)442 self._run_dispatcher() # job443 self._finish_job(queue_entry)444 def test_do_not_verify_pre_job_cleanup_failure(self):445 queue_entry = self._setup_for_do_not_verify()446 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS447 queue_entry.job.save()448 self._run_dispatcher() # cleanup449 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,450 exit_status=256)451 self._run_dispatcher() # failure ignored; job runs452 self._finish_job(queue_entry)453 def test_do_not_verify_post_job_cleanup_failure(self):454 queue_entry = self._setup_for_do_not_verify()455 self._run_post_job_cleanup_failure_up_to_repair(queue_entry,456 include_verify=False)457 # failure ignored, host still set to Ready458 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)459 self._run_dispatcher() # nothing else runs460 self._assert_nothing_is_running()461 def test_do_not_verify_requested_reverify_failure(self):462 host = self._create_reverify_request()463 host.protection = host_protections.Protection.DO_NOT_VERIFY464 host.save()465 self._run_dispatcher()466 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,467 exit_status=256)468 self._run_dispatcher()469 self._check_host_status(host, HostStatus.READY) # ignore failure470 self._assert_nothing_is_running()471 def test_job_abort_in_verify(self):472 self._initialize_test()473 job = self._create_job(hosts=[1])474 self._run_dispatcher() # launches verify475 job.hostqueueentry_set.update(aborted=True)476 self._run_dispatcher() # kills verify, launches cleanup477 self.assert_(self.mock_drone_manager.was_last_process_killed(478 _PidfileType.VERIFY))479 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)480 self._run_dispatcher()481 def test_job_abort(self):482 self._initialize_test()483 job = self._create_job(hosts=[1])484 job.run_verify = False485 job.save()486 self._run_dispatcher() # launches job487 job.hostqueueentry_set.update(aborted=True)488 self._run_dispatcher() # kills job, launches gathering489 self.assert_(self.mock_drone_manager.was_last_process_killed(490 _PidfileType.JOB))491 self.mock_drone_manager.finish_process(_PidfileType.GATHER)492 self._run_dispatcher() # launches parsing + cleanup493 queue_entry = job.hostqueueentry_set.all()[0]494 self._finish_parsing_and_cleanup(queue_entry)495 def test_job_abort_queued_synchronous(self):496 self._initialize_test()497 job = self._create_job(hosts=[1, 2])498 job.synch_count = 2499 job.save()500 job.hostqueueentry_set.update(aborted=True)501 self._run_dispatcher()502 for host_queue_entry in job.hostqueueentry_set.all():503 self.assertEqual(host_queue_entry.status,504 HqeStatus.ABORTED)505 def test_no_pidfile_leaking(self):506 self._initialize_test()507 self.test_simple_job()508 self.assertEquals(self.mock_drone_manager._pidfiles, {})509 self.test_job_abort_in_verify()510 self.assertEquals(self.mock_drone_manager._pidfiles, {})511 self.test_job_abort()512 self.assertEquals(self.mock_drone_manager._pidfiles, {})513 def _make_job_and_queue_entry(self):514 job = self._create_job(hosts=[1])515 queue_entry = job.hostqueueentry_set.all()[0]516 return job, queue_entry517 def test_recover_running_no_process(self):518 # recovery should re-execute a Running HQE if no process is found519 _, queue_entry = self._make_job_and_queue_entry()520 queue_entry.status = HqeStatus.RUNNING521 queue_entry.execution_subdir = '1-myuser/host1'522 queue_entry.save()523 queue_entry.host.status = HostStatus.RUNNING524 queue_entry.host.save()525 self._initialize_test()526 self._run_dispatcher()527 self._finish_job(queue_entry)528 def test_recover_verifying_hqe_no_special_task(self):529 # recovery should fail on a Verifing HQE with no corresponding530 # Verify or Cleanup SpecialTask531 _, queue_entry = self._make_job_and_queue_entry()532 queue_entry.status = HqeStatus.VERIFYING533 queue_entry.save()534 # make some dummy SpecialTasks that shouldn't count535 models.SpecialTask.objects.create(536 host=queue_entry.host,537 task=models.SpecialTask.Task.VERIFY,538 requested_by=models.User.current_user())539 models.SpecialTask.objects.create(540 host=queue_entry.host,541 task=models.SpecialTask.Task.CLEANUP,542 queue_entry=queue_entry,543 is_complete=True,544 requested_by=models.User.current_user())545 self.assertRaises(host_scheduler.SchedulerError, self._initialize_test)546 def _test_recover_verifying_hqe_helper(self, task, pidfile_type):547 _, queue_entry = self._make_job_and_queue_entry()548 queue_entry.status = HqeStatus.VERIFYING549 queue_entry.save()550 special_task = models.SpecialTask.objects.create(551 host=queue_entry.host, task=task, queue_entry=queue_entry)552 self._initialize_test()553 self._run_dispatcher()554 self.mock_drone_manager.finish_process(pidfile_type)555 self._run_dispatcher()556 # don't bother checking the rest of the job execution, as long as the557 # SpecialTask ran558 def test_recover_verifying_hqe_with_cleanup(self):559 # recover an HQE that was in pre-job cleanup560 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,561 _PidfileType.CLEANUP)562 def test_recover_verifying_hqe_with_verify(self):563 # recover an HQE that was in pre-job verify564 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,565 _PidfileType.VERIFY)566 def test_recover_pending_hqes_with_group(self):567 # recover a group of HQEs that are in Pending, in the same group (e.g.,568 # in a job with atomic hosts)569 job = self._create_job(hosts=[1, 2], atomic_group=1)570 job.save()571 job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)572 self._initialize_test()573 for queue_entry in job.hostqueueentry_set.all():574 self.assertEquals(queue_entry.status, HqeStatus.STARTING)575 def test_recover_parsing(self):576 self._initialize_test()577 job, queue_entry = self._make_job_and_queue_entry()578 job.run_verify = False579 job.reboot_after = model_attributes.RebootAfter.NEVER580 job.save()581 self._run_dispatcher() # launches job582 self.mock_drone_manager.finish_process(_PidfileType.JOB)583 self._run_dispatcher() # launches parsing584 # now "restart" the scheduler585 self._create_dispatcher()586 self._initialize_test()587 self._run_dispatcher()588 self.mock_drone_manager.finish_process(_PidfileType.PARSE)589 self._run_dispatcher()590 def test_recover_parsing__no_process_already_aborted(self):591 _, queue_entry = self._make_job_and_queue_entry()592 queue_entry.execution_subdir = 'host1'593 queue_entry.status = HqeStatus.PARSING594 queue_entry.aborted = True595 queue_entry.save()596 self._initialize_test()597 self._run_dispatcher()598 def test_job_scheduled_just_after_abort(self):599 # test a pretty obscure corner case where a job is aborted while queued,600 # another job is ready to run, and throttling is active. the post-abort601 # cleanup must not be pre-empted by the second job.602 job1, queue_entry1 = self._make_job_and_queue_entry()603 job2, queue_entry2 = self._make_job_and_queue_entry()604 self.mock_drone_manager.process_capacity = 0605 self._run_dispatcher() # schedule job1, but won't start verify606 job1.hostqueueentry_set.update(aborted=True)607 self.mock_drone_manager.process_capacity = 100608 self._run_dispatcher() # cleanup must run here, not verify for job2609 self._check_statuses(queue_entry1, HqeStatus.ABORTED,610 HostStatus.CLEANING)611 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)612 self._run_dispatcher() # now verify starts for job2613 self._check_statuses(queue_entry2, HqeStatus.VERIFYING,614 HostStatus.VERIFYING)615 def test_reverify_interrupting_pre_job(self):616 # ensure things behave sanely if a reverify is scheduled in the middle617 # of pre-job actions618 _, queue_entry = self._make_job_and_queue_entry()619 self._run_dispatcher() # pre-job verify620 self._create_reverify_request()621 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,622 exit_status=256)623 self._run_dispatcher() # repair624 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)625 self._run_dispatcher() # reverify runs now626 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)627 self._run_dispatcher() # pre-job verify628 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)629 self._run_dispatcher() # and job runs...630 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)631 self._finish_job(queue_entry) # reverify has been deleted632 self._check_statuses(queue_entry, HqeStatus.COMPLETED,633 HostStatus.READY)634 self._assert_nothing_is_running()635 def test_reverify_while_job_running(self):636 # once a job is running, a reverify must not be allowed to preempt637 # Gathering638 _, queue_entry = self._make_job_and_queue_entry()639 self._run_pre_job_verify(queue_entry)640 self._run_dispatcher() # job runs641 self._create_reverify_request()642 # make job end with a signal, so gathering will run643 self.mock_drone_manager.finish_process(_PidfileType.JOB,644 exit_status=271)645 self._run_dispatcher() # gathering must start646 self.mock_drone_manager.finish_process(_PidfileType.GATHER)647 self._run_dispatcher() # parsing and cleanup648 self._finish_parsing_and_cleanup(queue_entry)649 self._run_dispatcher() # now reverify runs650 self._check_statuses(queue_entry, HqeStatus.FAILED,651 HostStatus.VERIFYING)652 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)653 self._run_dispatcher()654 self._check_host_status(queue_entry.host, HostStatus.READY)655 def test_reverify_while_host_pending(self):656 # ensure that if a reverify is scheduled while a host is in Pending, it657 # won't run until the host is actually free658 job = self._create_job(hosts=[1, 2])659 queue_entry = job.hostqueueentry_set.get(host__hostname='host1')660 job.synch_count = 2661 job.save()662 host2 = self.hosts[1]663 host2.locked = True664 host2.save()665 self._run_dispatcher() # verify host1666 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)667 self._run_dispatcher() # host1 Pending668 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)669 self._create_reverify_request()670 self._run_dispatcher() # nothing should happen here671 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)672 # now let the job run673 host2.locked = False674 host2.save()675 self._run_dispatcher() # verify host2676 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)677 self._run_dispatcher() # run job678 self._finish_job(queue_entry)679 # need to explicitly finish host1's post-job cleanup680 self.mock_drone_manager.finish_specific_process(681 'hosts/host1/4-cleanup', drone_manager.AUTOSERV_PID_FILE)682 self._run_dispatcher()683 # the reverify should now be running684 self._check_statuses(queue_entry, HqeStatus.COMPLETED,685 HostStatus.VERIFYING)686 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)687 self._run_dispatcher()688 self._check_host_status(queue_entry.host, HostStatus.READY)689 def test_throttling(self):690 job = self._create_job(hosts=[1, 2, 3])691 job.synch_count = 3692 job.save()693 queue_entries = list(job.hostqueueentry_set.all())694 def _check_hqe_statuses(*statuses):695 for queue_entry, status in zip(queue_entries, statuses):696 self._check_statuses(queue_entry, status)697 self.mock_drone_manager.process_capacity = 2698 self._run_dispatcher() # verify runs on 1 and 2699 _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.VERIFYING,700 HqeStatus.VERIFYING)701 self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)702 self.mock_drone_manager.finish_specific_process(703 'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)704 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)705 self._run_dispatcher() # verify runs on 3706 _check_hqe_statuses(HqeStatus.PENDING, HqeStatus.PENDING,707 HqeStatus.VERIFYING)708 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)709 self._run_dispatcher() # job won't run due to throttling710 _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,711 HqeStatus.STARTING)712 self._assert_nothing_is_running()713 self.mock_drone_manager.process_capacity = 3714 self._run_dispatcher() # now job runs715 _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. LambdaTest Learning Hubs compile a list of step-by-step guides to help you be proficient with different test automation frameworks i.e. Selenium, Cypress, TestNG etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

NotHelpful