How to use the `_check_host_status` method in Autotest

Best Python code snippet using autotest_python

monitor_db_functional_test.py

Source: monitor_db_functional_test.py (GitHub)

copy

Full Screen

# NOTE(review): the methods below belong to the scheduler functional-test case
# in Autotest's monitor_db_functional_test.py. The enclosing class statement
# and the module imports (models, model_attributes, host_protections, signal,
# drone_manager, host_scheduler, plus HqeStatus, HostStatus, _PidfileType,
# _PIDFILE_TYPE_TO_PIDFILE) lie outside this captured snippet. The capture had
# collapsed all formatting and fused the original file's line numbers into the
# text; the code is reconstructed here verbatim with conventional formatting.
# General pattern of these tests: drive the dispatcher one cycle at a time via
# self._run_dispatcher(), complete fake processes via the mock drone manager,
# and assert on the resulting HQE/host status transitions.

def _check_statuses(self, queue_entry, queue_entry_status,
                    host_status=None):
    """Assert the HQE status and (optionally) its host's status."""
    self._check_entry_status(queue_entry, queue_entry_status)
    if host_status:
        self._check_host_status(queue_entry.host, host_status)

def _check_entry_status(self, queue_entry, status):
    """Assert the queue entry's status after re-reading it from the DB."""
    # update from DB
    queue_entry = self._update_instance(queue_entry)
    self.assertEquals(queue_entry.status, status)

def _check_host_status(self, host, status):
    """Assert the host's status after re-reading it from the DB."""
    # update from DB
    host = self._update_instance(host)
    self.assertEquals(host.status, status)

def _run_pre_job_verify(self, queue_entry):
    """Run one dispatcher cycle and complete the pre-job verify task."""
    self._run_dispatcher()  # launches verify
    self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                         HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)

def test_simple_job(self):
    # Happy path: verify -> job -> parse -> Completed/Ready.
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    self._run_pre_job_verify(queue_entry)
    self._run_dispatcher()  # launches job
    self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
    self._finish_job(queue_entry)
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
    self._assert_nothing_is_running()

def _setup_for_pre_job_reset(self):
    """Create a job that requires a reset before running; return its HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.reboot_before = model_attributes.RebootBefore.ALWAYS
    job.save()
    return queue_entry

def _run_pre_job_reset_job(self, queue_entry):
    """Drive a reboot_before=ALWAYS job through reset and completion."""
    self._run_dispatcher()  # reset
    self._check_statuses(queue_entry, HqeStatus.RESETTING,
                         HostStatus.RESETTING)
    self.mock_drone_manager.finish_process(_PidfileType.RESET)
    self._run_dispatcher()  # job
    self._finish_job(queue_entry)

def test_pre_job_reset(self):
    queue_entry = self._setup_for_pre_job_reset()
    self._run_pre_job_reset_job(queue_entry)

def _run_pre_job_reset_one_failure(self):
    """Fail one pre-job reset (exit 256), complete the repair, return HQE."""
    queue_entry = self._setup_for_pre_job_reset()
    self._run_dispatcher()  # reset
    self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                           exit_status=256)
    self._run_dispatcher()  # repair
    self._check_statuses(queue_entry, HqeStatus.QUEUED,
                         HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    return queue_entry

def test_pre_job_reset_failure(self):
    queue_entry = self._run_pre_job_reset_one_failure()
    # from here the job should run as normal
    self._run_pre_job_reset_job(queue_entry)

def test_pre_job_reset_double_failure(self):
    # TODO (showard): this test isn't perfect. in reality, when the second
    # reset fails, it copies its results over to the job directory using
    # copy_results_on_drone() and then parses them. since we don't handle
    # that, there appear to be no results at the job directory. the
    # scheduler handles this gracefully, parsing gets effectively skipped,
    # and this test passes as is. but we ought to properly test that
    # behavior.
    queue_entry = self._run_pre_job_reset_one_failure()
    self._run_dispatcher()  # second reset
    self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.FAILED,
                         HostStatus.REPAIR_FAILED)
    # nothing else should run
    self._assert_nothing_is_running()

def _assert_nothing_is_running(self):
    """Assert the mock drone manager has no live pidfiles."""
    self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])

def _setup_for_post_job_cleanup(self):
    """Create a job that requires a post-job cleanup; return its HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.reboot_after = model_attributes.RebootAfter.ALWAYS
    job.save()
    return queue_entry

def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                               include_verify=True):
    """Run the job, fail its post-job cleanup, and stop at the repair step."""
    if include_verify:
        self._run_pre_job_verify(queue_entry)
    self._run_dispatcher()  # job
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # parsing + cleanup
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()  # repair, HQE unaffected
    return queue_entry

def test_post_job_cleanup_failure(self):
    # A failed post-job cleanup repairs the host; the HQE stays Completed.
    queue_entry = self._setup_for_post_job_cleanup()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)

def test_post_job_cleanup_failure_repair_failure(self):
    queue_entry = self._setup_for_post_job_cleanup()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.REPAIR_FAILED)

def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
    """Assert the post-job process's pidfile is paired with the job's."""
    pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
    queue_entry = self._update_instance(queue_entry)
    pidfile_id = self.mock_drone_manager.pidfile_from_path(
            queue_entry.execution_path(), pidfile_name)
    self.assert_(pidfile_id._paired_with_pidfile)

def _finish_job(self, queue_entry):
    """Complete the running job process and drive it through parsing."""
    self._check_statuses(queue_entry, HqeStatus.RUNNING)
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # launches parsing
    self._check_statuses(queue_entry, HqeStatus.PARSING)
    self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
    self._finish_parsing()

def _finish_parsing(self):
    """Complete the parse process and run one more dispatcher cycle."""
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self._run_dispatcher()

def _create_reverify_request(self):
    """Schedule a manual Verify SpecialTask on host 0; return the host."""
    host = self.hosts[0]
    models.SpecialTask.schedule_special_task(
            host=host, task=models.SpecialTask.Task.VERIFY)
    return host

def test_requested_reverify(self):
    host = self._create_reverify_request()
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)

def test_requested_reverify_failure(self):
    # A failed reverify sends the host to repair, then back to Ready.
    host = self._create_reverify_request()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()  # repair
    self._check_host_status(host, HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)

def _setup_for_do_not_verify(self):
    """Create a job whose host has DO_NOT_VERIFY protection; return HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
    queue_entry.host.save()
    return queue_entry

def test_do_not_verify_job(self):
    queue_entry = self._setup_for_do_not_verify()
    self._run_dispatcher()  # runs job directly
    self._finish_job(queue_entry)

def test_do_not_verify_job_with_cleanup(self):
    queue_entry = self._setup_for_do_not_verify()
    queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
    queue_entry.job.save()
    self._run_dispatcher()  # cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()  # job
    self._finish_job(queue_entry)

def test_do_not_verify_pre_job_cleanup_failure(self):
    queue_entry = self._setup_for_do_not_verify()
    queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
    queue_entry.job.save()
    self._run_dispatcher()  # cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()  # failure ignored; job runs
    self._finish_job(queue_entry)

def test_do_not_verify_post_job_cleanup_failure(self):
    queue_entry = self._setup_for_do_not_verify()
    queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
    queue_entry.job.save()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                    include_verify=False)
    # failure ignored, host still set to Ready
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
    self._run_dispatcher()  # nothing else runs
    self._assert_nothing_is_running()

def test_do_not_verify_requested_reverify_failure(self):
    host = self._create_reverify_request()
    host.protection = host_protections.Protection.DO_NOT_VERIFY
    host.save()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)  # ignore failure
    self._assert_nothing_is_running()

def test_job_abort_in_verify(self):
    # Aborting during verify must SIGKILL the verify and run a cleanup.
    self._initialize_test()
    job = self._create_job(hosts=[1])
    queue_entries = list(job.hostqueueentry_set.all())
    self._run_dispatcher()  # launches verify
    self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()  # kills verify, launches cleanup
    self.assert_(self.mock_drone_manager.was_last_process_killed(
            _PidfileType.VERIFY, set([signal.SIGKILL])))
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()

def test_job_abort(self):
    # Aborting a running job: gather -> parse -> cleanup -> reset.
    self._initialize_test()
    job = self._create_job(hosts=[1])
    job.run_reset = False
    job.save()
    queue_entries = list(job.hostqueueentry_set.all())
    self._run_dispatcher()  # launches job
    self._check_statuses(queue_entries[0], HqeStatus.RUNNING)
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()  # kills job, launches gathering
    self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
    self.mock_drone_manager.finish_process(_PidfileType.GATHER)
    self._run_dispatcher()  # launches parsing + cleanup
    queue_entry = job.hostqueueentry_set.all()[0]
    self._finish_parsing()
    # The abort will cause gathering to launch a cleanup.
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.RESET)
    self._run_dispatcher()

def test_job_abort_queued_synchronous(self):
    self._initialize_test()
    job = self._create_job(hosts=[1,2])
    job.synch_count = 2
    job.save()
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()
    for host_queue_entry in job.hostqueueentry_set.all():
        self.assertEqual(host_queue_entry.status,
                         HqeStatus.ABORTED)

def test_no_pidfile_leaking(self):
    # After each scenario the drone manager must hold no stale pidfiles.
    self._initialize_test()
    self.test_simple_job()
    self.mock_drone_manager.refresh()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})
    self.test_job_abort_in_verify()
    self.mock_drone_manager.refresh()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})
    self.test_job_abort()
    self.mock_drone_manager.refresh()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})

def _make_job_and_queue_entry(self):
    """Create a one-host job; return (job, its single queue entry)."""
    job = self._create_job(hosts=[1])
    queue_entry = job.hostqueueentry_set.all()[0]
    return job, queue_entry

def test_recover_running_no_process(self):
    # recovery should re-execute a Running HQE if no process is found
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.RUNNING
    queue_entry.execution_subdir = '1-myuser/host1'
    queue_entry.save()
    queue_entry.host.status = HostStatus.RUNNING
    queue_entry.host.save()
    self._initialize_test()
    self._run_dispatcher()
    self._finish_job(queue_entry)

def test_recover_verifying_hqe_no_special_task(self):
    # recovery should move a Resetting HQE with no corresponding
    # Verify or Reset SpecialTask back to Queued.
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.RESETTING
    queue_entry.save()
    # make some dummy SpecialTasks that shouldn't count
    models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.RESET,
            requested_by=models.User.current_user())
    models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.CLEANUP,
            queue_entry=queue_entry,
            is_complete=True,
            requested_by=models.User.current_user())
    self._initialize_test()
    self._check_statuses(queue_entry, HqeStatus.QUEUED)

def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
    """Recover a Verifying HQE that has a pending SpecialTask of `task`."""
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.VERIFYING
    queue_entry.save()
    special_task = models.SpecialTask.objects.create(
            host=queue_entry.host, task=task, queue_entry=queue_entry)
    self._initialize_test()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(pidfile_type)
    self._run_dispatcher()
    # don't bother checking the rest of the job execution, as long as the
    # SpecialTask ran

def test_recover_verifying_hqe_with_cleanup(self):
    # recover an HQE that was in pre-job cleanup
    self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                            _PidfileType.CLEANUP)

def test_recover_verifying_hqe_with_verify(self):
    # recover an HQE that was in pre-job verify
    self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                            _PidfileType.VERIFY)

def test_recover_parsing(self):
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.run_verify = False
    job.run_reset = False
    job.reboot_after = model_attributes.RebootAfter.NEVER
    job.save()
    self._run_dispatcher()  # launches job
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # launches parsing
    # now "restart" the scheduler
    self._create_dispatcher()
    self._initialize_test()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self._run_dispatcher()

def test_recover_parsing__no_process_already_aborted(self):
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.execution_subdir = 'host1'
    queue_entry.status = HqeStatus.PARSING
    queue_entry.aborted = True
    queue_entry.save()
    self._initialize_test()
    self._run_dispatcher()

def test_job_scheduled_just_after_abort(self):
    # test a pretty obscure corner case where a job is aborted while queued,
    # another job is ready to run, and throttling is active. the post-abort
    # cleanup must not be pre-empted by the second job.
    # This test kind of doesn't make sense anymore after verify+cleanup
    # were merged into reset. It should maybe just be removed.
    job1, queue_entry1 = self._make_job_and_queue_entry()
    queue_entry1.save()
    job2, queue_entry2 = self._make_job_and_queue_entry()
    job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
    job2.save()
    self.mock_drone_manager.process_capacity = 0
    self._run_dispatcher()  # schedule job1, but won't start verify
    job1.hostqueueentry_set.update(aborted=True)
    self.mock_drone_manager.process_capacity = 100
    self._run_dispatcher()  # reset must run here, not verify for job2
    self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                         HostStatus.RESETTING)
    self.mock_drone_manager.finish_process(_PidfileType.RESET)
    self._run_dispatcher()  # now verify starts for job2
    self._check_statuses(queue_entry2, HqeStatus.RUNNING,
                         HostStatus.RUNNING)

def test_reverify_interrupting_pre_job(self):
    # ensure things behave sanely if a reverify is scheduled in the middle
    # of pre-job actions
    _, queue_entry = self._make_job_and_queue_entry()
    self._run_dispatcher()  # pre-job verify
    self._create_reverify_request()
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()  # repair
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    self._run_dispatcher()  # reverify runs now
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # pre-job verify
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # and job runs...
    self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
    self._finish_job(queue_entry)  # reverify has been deleted
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.READY)
    self._assert_nothing_is_running()

def test_reverify_while_job_running(self):
    # once a job is running, a reverify must not be allowed to preempt
    # Gathering
    _, queue_entry = self._make_job_and_queue_entry()
    self._run_pre_job_verify(queue_entry)
    self._run_dispatcher()  # job runs
    self._create_reverify_request()
    # make job end with a signal, so gathering will run
    self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                           exit_status=271)
    self._run_dispatcher()  # gathering must start
    self.mock_drone_manager.finish_process(_PidfileType.GATHER)
    self._run_dispatcher()  # parsing and cleanup
    self._finish_parsing()
    self._run_dispatcher()  # now reverify runs
    self._check_statuses(queue_entry, HqeStatus.FAILED,
                         HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_host_status(queue_entry.host, HostStatus.READY)

def test_reverify_while_host_pending(self):
    # ensure that if a reverify is scheduled while a host is in Pending, it
    # won't run until the host is actually free
    job = self._create_job(hosts=[1,2])
    queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
    job.synch_count = 2
    job.save()
    host2 = self.hosts[1]
    host2.locked = True
    host2.save()
    self._run_dispatcher()  # verify host1
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # host1 Pending
    self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
    self._create_reverify_request()
    self._run_dispatcher()  # nothing should happen here
    self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
    # now let the job run
    host2.locked = False
    host2.save()
    self._run_dispatcher()  # verify host2
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # run job
    self._finish_job(queue_entry)
    # the reverify should now be running
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_host_status(queue_entry.host, HostStatus.READY)

def test_throttling(self):
    # With process_capacity limited, verify/job/gather phases must queue up;
    # parsing is allowed to run despite throttling.
    job = self._create_job(hosts=[1,2,3])
    job.synch_count = 3
    job.save()
    queue_entries = list(job.hostqueueentry_set.all())
    def _check_hqe_statuses(*statuses):
        # Assert each HQE's status in order (host1, host2, host3).
        for queue_entry, status in zip(queue_entries, statuses):
            self._check_statuses(queue_entry, status)
    self.mock_drone_manager.process_capacity = 2
    self._run_dispatcher()  # verify runs on 1 and 2
    queue_entries = list(job.hostqueueentry_set.all())
    _check_hqe_statuses(HqeStatus.QUEUED,
                        HqeStatus.VERIFYING, HqeStatus.VERIFYING)
    self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)
    self.mock_drone_manager.finish_specific_process(
            'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # verify runs on 3
    _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
                        HqeStatus.PENDING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # job won't run due to throttling
    _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                        HqeStatus.STARTING)
    self._assert_nothing_is_running()
    self.mock_drone_manager.process_capacity = 3
    self._run_dispatcher()  # now job runs
    _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                        HqeStatus.RUNNING)
    self.mock_drone_manager.process_capacity = 2
    self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                           exit_status=271)
    self._run_dispatcher()  # gathering won't run due to throttling
    _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                        HqeStatus.GATHERING)
    self._assert_nothing_is_running()
    self.mock_drone_manager.process_capacity = 3
    self._run_dispatcher()  # now gathering runs
    self.mock_drone_manager.process_capacity = 0
    self.mock_drone_manager.finish_process(_PidfileType.GATHER)
    self._run_dispatcher()  # parsing runs despite throttling
    _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                        HqeStatus.PARSING)

def test_abort_starting_while_throttling(self):
    self._initialize_test()
    job = self._create_job(hosts=[1,2], synchronous=True)
    queue_entry = job.hostqueueentry_set.all()[0]
    job.run_verify = False
    job.run_reset = False
    job.reboot_after = model_attributes.RebootAfter.NEVER
    job.save()
    self.mock_drone_manager.process_capacity = 0
    self._run_dispatcher()  # go to starting, but don't start job
    self._check_statuses(queue_entry, HqeStatus.STARTING,
                         HostStatus.PENDING)
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.GATHERING,
                         HostStatus.RUNNING)
    self.mock_drone_manager.process_capacity = 5
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.ABORTED,
                         HostStatus.CLEANING)

def test_simple_metahost_assignment(self):
    job = self._create_job(metahosts=[1])
    self._run_dispatcher()
    entry = job.hostqueueentry_set.all()[0]
    self.assertEquals(entry.host.hostname, 'host1')
    self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
    # rest of job proceeds normally

def test_metahost_fail_verify(self):
    self.hosts[1].labels.add(self.labels[0])  # put label1 also on host2
    job = self._create_job(metahosts=[1])
    self._run_dispatcher()  # assigned to host1
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()  # host1 failed, gets reassigned to host2
    entry = job.hostqueueentry_set.all()[0]
    self.assertEquals(entry.host.hostname, 'host2')
    self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
    self._check_host_status(self.hosts[0], HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)

def test_hostless_job(self):
    job = self._create_job(hostless=True)
    entry = job.hostqueueentry_set.all()[0]
    self._run_dispatcher()
    self._check_entry_status(entry, HqeStatus.RUNNING)
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()
    self._check_entry_status(entry, HqeStatus.PARSING)
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self._run_dispatcher()
    self._check_entry_status(entry, HqeStatus.COMPLETED)

Full Screen

Full Screen

monitor_db_functional_unittest.py

Source: monitor_db_functional_unittest.py (GitHub)

copy

Full Screen

# NOTE(review): the methods below belong to an older variant of the same test
# case, from monitor_db_functional_unittest.py (cleanup/archive-era scheduler
# flow: pre-job cleanup instead of reset, plus an ARCHIVE step after parsing).
# The enclosing class statement and module imports lie outside this captured
# snippet; the capture's collapsed formatting and fused line numbers are
# reconstructed here verbatim with conventional formatting. The final method
# was truncated mid-body in the capture (see note at the end).

def _check_statuses(self, queue_entry, queue_entry_status,
                    host_status=None):
    """Assert the HQE status and (optionally) its host's status."""
    self._check_entry_status(queue_entry, queue_entry_status)
    if host_status:
        self._check_host_status(queue_entry.host, host_status)

def _check_entry_status(self, queue_entry, status):
    """Assert the queue entry's status after re-reading it from the DB."""
    # update from DB
    queue_entry = self._update_instance(queue_entry)
    self.assertEquals(queue_entry.status, status)

def _check_host_status(self, host, status):
    """Assert the host's status after re-reading it from the DB."""
    # update from DB
    host = self._update_instance(host)
    self.assertEquals(host.status, status)

def _run_pre_job_verify(self, queue_entry):
    """Run one dispatcher cycle and complete the pre-job verify task."""
    self._run_dispatcher()  # launches verify
    self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                         HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)

def test_simple_job(self):
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    self._run_pre_job_verify(queue_entry)
    self._run_dispatcher()  # launches job
    self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
    self._finish_job(queue_entry)
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
    self._assert_nothing_is_running()

def _setup_for_pre_job_cleanup(self):
    """Create a job that requires a pre-job cleanup; return its HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.reboot_before = model_attributes.RebootBefore.ALWAYS
    job.save()
    return queue_entry

def _run_pre_job_cleanup_job(self, queue_entry):
    """Drive a reboot_before=ALWAYS job: cleanup -> verify -> job."""
    self._run_dispatcher()  # cleanup
    self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                         HostStatus.CLEANING)
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()  # verify
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()  # job
    self._finish_job(queue_entry)

def test_pre_job_cleanup(self):
    queue_entry = self._setup_for_pre_job_cleanup()
    self._run_pre_job_cleanup_job(queue_entry)

def _run_pre_job_cleanup_one_failure(self):
    """Fail one pre-job cleanup, complete the repair, return the HQE."""
    queue_entry = self._setup_for_pre_job_cleanup()
    self._run_dispatcher()  # cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()  # repair
    self._check_statuses(queue_entry, HqeStatus.QUEUED,
                         HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    return queue_entry

def test_pre_job_cleanup_failure(self):
    queue_entry = self._run_pre_job_cleanup_one_failure()
    # from here the job should run as normal
    self._run_pre_job_cleanup_job(queue_entry)

def test_pre_job_cleanup_double_failure(self):
    # TODO (showard): this test isn't perfect. in reality, when the second
    # cleanup fails, it copies its results over to the job directory using
    # copy_results_on_drone() and then parses them. since we don't handle
    # that, there appear to be no results at the job directory. the
    # scheduler handles this gracefully, parsing gets effectively skipped,
    # and this test passes as is. but we ought to properly test that
    # behavior.
    queue_entry = self._run_pre_job_cleanup_one_failure()
    self._run_dispatcher()  # second cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.FAILED,
                         HostStatus.REPAIR_FAILED)
    # nothing else should run
    self._assert_nothing_is_running()

def _assert_nothing_is_running(self):
    """Assert the mock drone manager has no live pidfiles."""
    self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])

def _setup_for_post_job_cleanup(self):
    """Create a job that requires a post-job cleanup; return its HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.reboot_after = model_attributes.RebootAfter.ALWAYS
    job.save()
    return queue_entry

def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                               include_verify=True):
    """Run the job, fail its post-job cleanup, and stop at the repair step.

    Unlike the newer variant, this flow also completes an ARCHIVE process
    after the repair cycle is launched.
    """
    if include_verify:
        self._run_pre_job_verify(queue_entry)
    self._run_dispatcher()  # job
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # parsing + cleanup
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()  # repair, HQE unaffected
    self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
    self._run_dispatcher()
    return queue_entry

def test_post_job_cleanup_failure(self):
    queue_entry = self._setup_for_post_job_cleanup()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)

def test_post_job_cleanup_failure_repair_failure(self):
    queue_entry = self._setup_for_post_job_cleanup()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                         HostStatus.REPAIR_FAILED)

def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
    """Assert the post-job process's pidfile is paired with the job's."""
    pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
    queue_entry = self._update_instance(queue_entry)
    pidfile_id = self.mock_drone_manager.pidfile_from_path(
            queue_entry.execution_path(), pidfile_name)
    self.assert_(pidfile_id._paired_with_pidfile)

def _finish_job(self, queue_entry):
    """Complete the job process, then parsing + cleanup + archiving."""
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # launches parsing + cleanup
    self._check_statuses(queue_entry, HqeStatus.PARSING,
                         HostStatus.CLEANING)
    self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
    self._finish_parsing_and_cleanup(queue_entry)

def _finish_parsing_and_cleanup(self, queue_entry):
    """Complete cleanup + parse, then the archive step."""
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self._run_dispatcher()
    self._check_entry_status(queue_entry, HqeStatus.ARCHIVING)
    self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
    self._run_dispatcher()

def _create_reverify_request(self):
    """Schedule a manual Verify SpecialTask on host 0; return the host."""
    host = self.hosts[0]
    models.SpecialTask.schedule_special_task(
            host=host, task=models.SpecialTask.Task.VERIFY)
    return host

def test_requested_reverify(self):
    host = self._create_reverify_request()
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.VERIFYING)
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)

def test_requested_reverify_failure(self):
    host = self._create_reverify_request()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()  # repair
    self._check_host_status(host, HostStatus.REPAIRING)
    self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)

def _setup_for_do_not_verify(self):
    """Create a job whose host has DO_NOT_VERIFY protection; return HQE."""
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
    queue_entry.host.save()
    return queue_entry

def test_do_not_verify_job(self):
    queue_entry = self._setup_for_do_not_verify()
    self._run_dispatcher()  # runs job directly
    self._finish_job(queue_entry)

def test_do_not_verify_job_with_cleanup(self):
    queue_entry = self._setup_for_do_not_verify()
    queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
    queue_entry.job.save()
    self._run_dispatcher()  # cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()  # job
    self._finish_job(queue_entry)

def test_do_not_verify_pre_job_cleanup_failure(self):
    queue_entry = self._setup_for_do_not_verify()
    queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
    queue_entry.job.save()
    self._run_dispatcher()  # cleanup
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                           exit_status=256)
    self._run_dispatcher()  # failure ignored; job runs
    self._finish_job(queue_entry)

def test_do_not_verify_post_job_cleanup_failure(self):
    queue_entry = self._setup_for_do_not_verify()
    self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                    include_verify=False)
    # failure ignored, host still set to Ready
    self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
    self._run_dispatcher()  # nothing else runs
    self._assert_nothing_is_running()

def test_do_not_verify_requested_reverify_failure(self):
    host = self._create_reverify_request()
    host.protection = host_protections.Protection.DO_NOT_VERIFY
    host.save()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                           exit_status=256)
    self._run_dispatcher()
    self._check_host_status(host, HostStatus.READY)  # ignore failure
    self._assert_nothing_is_running()

def test_job_abort_in_verify(self):
    self._initialize_test()
    job = self._create_job(hosts=[1])
    self._run_dispatcher()  # launches verify
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()  # kills verify, launches cleanup
    self.assert_(self.mock_drone_manager.was_last_process_killed(
            _PidfileType.VERIFY))
    self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
    self._run_dispatcher()

def test_job_abort(self):
    self._initialize_test()
    job = self._create_job(hosts=[1])
    job.run_verify = False
    job.save()
    self._run_dispatcher()  # launches job
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()  # kills job, launches gathering
    self.assert_(self.mock_drone_manager.was_last_process_killed(
            _PidfileType.JOB))
    self.mock_drone_manager.finish_process(_PidfileType.GATHER)
    self._run_dispatcher()  # launches parsing + cleanup
    queue_entry = job.hostqueueentry_set.all()[0]
    self._finish_parsing_and_cleanup(queue_entry)

def test_job_abort_queued_synchronous(self):
    self._initialize_test()
    job = self._create_job(hosts=[1, 2])
    job.synch_count = 2
    job.save()
    job.hostqueueentry_set.update(aborted=True)
    self._run_dispatcher()
    for host_queue_entry in job.hostqueueentry_set.all():
        self.assertEqual(host_queue_entry.status,
                         HqeStatus.ABORTED)

def test_no_pidfile_leaking(self):
    # After each scenario the drone manager must hold no stale pidfiles.
    self._initialize_test()
    self.test_simple_job()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})
    self.test_job_abort_in_verify()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})
    self.test_job_abort()
    self.assertEquals(self.mock_drone_manager._pidfiles, {})

def _make_job_and_queue_entry(self):
    """Create a one-host job; return (job, its single queue entry)."""
    job = self._create_job(hosts=[1])
    queue_entry = job.hostqueueentry_set.all()[0]
    return job, queue_entry

def test_recover_running_no_process(self):
    # recovery should re-execute a Running HQE if no process is found
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.RUNNING
    queue_entry.execution_subdir = '1-myuser/host1'
    queue_entry.save()
    queue_entry.host.status = HostStatus.RUNNING
    queue_entry.host.save()
    self._initialize_test()
    self._run_dispatcher()
    self._finish_job(queue_entry)

def test_recover_verifying_hqe_no_special_task(self):
    # recovery should fail on a Verifying HQE with no corresponding
    # Verify or Cleanup SpecialTask
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.VERIFYING
    queue_entry.save()
    # make some dummy SpecialTasks that shouldn't count
    models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.VERIFY,
            requested_by=models.User.current_user())
    models.SpecialTask.objects.create(
            host=queue_entry.host,
            task=models.SpecialTask.Task.CLEANUP,
            queue_entry=queue_entry,
            is_complete=True,
            requested_by=models.User.current_user())
    self.assertRaises(host_scheduler.SchedulerError, self._initialize_test)

def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
    """Recover a Verifying HQE that has a pending SpecialTask of `task`."""
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.status = HqeStatus.VERIFYING
    queue_entry.save()
    special_task = models.SpecialTask.objects.create(
            host=queue_entry.host, task=task, queue_entry=queue_entry)
    self._initialize_test()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(pidfile_type)
    self._run_dispatcher()
    # don't bother checking the rest of the job execution, as long as the
    # SpecialTask ran

def test_recover_verifying_hqe_with_cleanup(self):
    # recover an HQE that was in pre-job cleanup
    self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                            _PidfileType.CLEANUP)

def test_recover_verifying_hqe_with_verify(self):
    # recover an HQE that was in pre-job verify
    self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                            _PidfileType.VERIFY)

def test_recover_pending_hqes_with_group(self):
    # recover a group of HQEs that are in Pending, in the same group (e.g.,
    # in a job with atomic hosts)
    job = self._create_job(hosts=[1, 2], atomic_group=1)
    job.save()
    job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)
    self._initialize_test()
    for queue_entry in job.hostqueueentry_set.all():
        self.assertEquals(queue_entry.status, HqeStatus.STARTING)

def test_recover_parsing(self):
    self._initialize_test()
    job, queue_entry = self._make_job_and_queue_entry()
    job.run_verify = False
    job.reboot_after = model_attributes.RebootAfter.NEVER
    job.save()
    self._run_dispatcher()  # launches job
    self.mock_drone_manager.finish_process(_PidfileType.JOB)
    self._run_dispatcher()  # launches parsing
    # now "restart" the scheduler
    self._create_dispatcher()
    self._initialize_test()
    self._run_dispatcher()
    self.mock_drone_manager.finish_process(_PidfileType.PARSE)
    self._run_dispatcher()

def test_recover_parsing__no_process_already_aborted(self):
    _, queue_entry = self._make_job_and_queue_entry()
    queue_entry.execution_subdir = 'host1'
    queue_entry.status = HqeStatus.PARSING
    queue_entry.aborted = True
    queue_entry.save()
    self._initialize_test()
    self._run_dispatcher()

def test_job_scheduled_just_after_abort(self):
    # test a pretty obscure corner case where a job is aborted while queued,
    # another job is ready to run, and throttling is active.
    # NOTE(review): the source capture was truncated here, mid-method; the
    # remainder of this test body is not visible in the snippet.
    pass
the post-abort601 # cleanup must not be pre-empted by the second job.602 job1, queue_entry1 = self._make_job_and_queue_entry()603 job2, queue_entry2 = self._make_job_and_queue_entry()604 self.mock_drone_manager.process_capacity = 0605 self._run_dispatcher() # schedule job1, but won't start verify606 job1.hostqueueentry_set.update(aborted=True)607 self.mock_drone_manager.process_capacity = 100608 self._run_dispatcher() # cleanup must run here, not verify for job2609 self._check_statuses(queue_entry1, HqeStatus.ABORTED,610 HostStatus.CLEANING)611 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)612 self._run_dispatcher() # now verify starts for job2613 self._check_statuses(queue_entry2, HqeStatus.VERIFYING,614 HostStatus.VERIFYING)615 def test_reverify_interrupting_pre_job(self):616 # ensure things behave sanely if a reverify is scheduled in the middle617 # of pre-job actions618 _, queue_entry = self._make_job_and_queue_entry()619 self._run_dispatcher() # pre-job verify620 self._create_reverify_request()621 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,622 exit_status=256)623 self._run_dispatcher() # repair624 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)625 self._run_dispatcher() # reverify runs now626 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)627 self._run_dispatcher() # pre-job verify628 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)629 self._run_dispatcher() # and job runs...630 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)631 self._finish_job(queue_entry) # reverify has been deleted632 self._check_statuses(queue_entry, HqeStatus.COMPLETED,633 HostStatus.READY)634 self._assert_nothing_is_running()635 def test_reverify_while_job_running(self):636 # once a job is running, a reverify must not be allowed to preempt637 # Gathering638 _, queue_entry = self._make_job_and_queue_entry()639 self._run_pre_job_verify(queue_entry)640 self._run_dispatcher() # job runs641 
self._create_reverify_request()642 # make job end with a signal, so gathering will run643 self.mock_drone_manager.finish_process(_PidfileType.JOB,644 exit_status=271)645 self._run_dispatcher() # gathering must start646 self.mock_drone_manager.finish_process(_PidfileType.GATHER)647 self._run_dispatcher() # parsing and cleanup648 self._finish_parsing_and_cleanup(queue_entry)649 self._run_dispatcher() # now reverify runs650 self._check_statuses(queue_entry, HqeStatus.FAILED,651 HostStatus.VERIFYING)652 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)653 self._run_dispatcher()654 self._check_host_status(queue_entry.host, HostStatus.READY)655 def test_reverify_while_host_pending(self):656 # ensure that if a reverify is scheduled while a host is in Pending, it657 # won't run until the host is actually free658 job = self._create_job(hosts=[1, 2])659 queue_entry = job.hostqueueentry_set.get(host__hostname='host1')660 job.synch_count = 2661 job.save()662 host2 = self.hosts[1]663 host2.locked = True664 host2.save()665 self._run_dispatcher() # verify host1666 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)667 self._run_dispatcher() # host1 Pending668 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)669 self._create_reverify_request()670 self._run_dispatcher() # nothing should happen here671 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)672 # now let the job run673 host2.locked = False674 host2.save()675 self._run_dispatcher() # verify host2676 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)677 self._run_dispatcher() # run job678 self._finish_job(queue_entry)679 # need to explicitly finish host1's post-job cleanup680 self.mock_drone_manager.finish_specific_process(681 'hosts/host1/4-cleanup', drone_manager.AUTOSERV_PID_FILE)682 self._run_dispatcher()683 # the reverify should now be running684 self._check_statuses(queue_entry, HqeStatus.COMPLETED,685 HostStatus.VERIFYING)686 
self.mock_drone_manager.finish_process(_PidfileType.VERIFY)687 self._run_dispatcher()688 self._check_host_status(queue_entry.host, HostStatus.READY)689 def test_throttling(self):690 job = self._create_job(hosts=[1, 2, 3])691 job.synch_count = 3692 job.save()693 queue_entries = list(job.hostqueueentry_set.all())694 def _check_hqe_statuses(*statuses):695 for queue_entry, status in zip(queue_entries, statuses):696 self._check_statuses(queue_entry, status)697 self.mock_drone_manager.process_capacity = 2698 self._run_dispatcher() # verify runs on 1 and 2699 _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.VERIFYING,700 HqeStatus.VERIFYING)701 self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)702 self.mock_drone_manager.finish_specific_process(703 'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE)704 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)705 self._run_dispatcher() # verify runs on 3706 _check_hqe_statuses(HqeStatus.PENDING, HqeStatus.PENDING,707 HqeStatus.VERIFYING)708 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)709 self._run_dispatcher() # job won't run due to throttling710 _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,711 HqeStatus.STARTING)712 self._assert_nothing_is_running()713 self.mock_drone_manager.process_capacity = 3714 self._run_dispatcher() # now job runs715 _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,716 HqeStatus.RUNNING)717 self.mock_drone_manager.process_capacity = 2718 self.mock_drone_manager.finish_process(_PidfileType.JOB,719 exit_status=271)720 self._run_dispatcher() # gathering won't run due to throttling721 _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,722 HqeStatus.GATHERING)723 self._assert_nothing_is_running()724 self.mock_drone_manager.process_capacity = 3725 self._run_dispatcher() # now gathering runs726 self.mock_drone_manager.process_capacity = 0727 self.mock_drone_manager.finish_process(_PidfileType.GATHER)728 self._run_dispatcher() # 
parsing runs despite throttling729 _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,730 HqeStatus.PARSING)731 def test_abort_starting_while_throttling(self):732 self._initialize_test()733 job = self._create_job(hosts=[1, 2], synchronous=True)734 queue_entry = job.hostqueueentry_set.all()[0]735 job.run_verify = False736 job.reboot_after = model_attributes.RebootAfter.NEVER737 job.save()738 self.mock_drone_manager.process_capacity = 0739 self._run_dispatcher() # go to starting, but don't start job740 self._check_statuses(queue_entry, HqeStatus.STARTING,741 HostStatus.PENDING)742 job.hostqueueentry_set.update(aborted=True)743 self._run_dispatcher()744 self._check_statuses(queue_entry, HqeStatus.GATHERING,745 HostStatus.RUNNING)746 self.mock_drone_manager.process_capacity = 5747 self._run_dispatcher()748 self._check_statuses(queue_entry, HqeStatus.ABORTED,749 HostStatus.CLEANING)750 def test_simple_atomic_group_job(self):751 job = self._create_job(atomic_group=1)752 self._run_dispatcher() # expand + verify753 queue_entries = job.hostqueueentry_set.all()754 self.assertEquals(len(queue_entries), 2)755 self.assertEquals(queue_entries[0].host.hostname, 'host5')756 self.assertEquals(queue_entries[1].host.hostname, 'host6')757 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)758 self._run_dispatcher() # delay task started waiting759 self.mock_drone_manager.finish_specific_process(760 'hosts/host6/1-verify', drone_manager.AUTOSERV_PID_FILE)761 self._run_dispatcher() # job starts now762 for entry in queue_entries:763 self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)764 # rest of job proceeds normally765 def test_simple_metahost_assignment(self):766 job = self._create_job(metahosts=[1])767 self._run_dispatcher()768 entry = job.hostqueueentry_set.all()[0]769 self.assertEquals(entry.host.hostname, 'host1')770 self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)771 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)772 
self._run_dispatcher()773 self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)774 # rest of job proceeds normally775 def test_metahost_fail_verify(self):776 self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2777 job = self._create_job(metahosts=[1])778 self._run_dispatcher() # assigned to host1779 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,780 exit_status=256)781 self._run_dispatcher() # host1 failed, gets reassigned to host2782 entry = job.hostqueueentry_set.all()[0]783 self.assertEquals(entry.host.hostname, 'host2')784 self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)785 self._check_host_status(self.hosts[0], HostStatus.REPAIRING)786 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)787 self._run_dispatcher()788 self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)789 def test_hostless_job(self):790 job = self._create_job(hostless=True)791 entry = job.hostqueueentry_set.all()[0]792 self._run_dispatcher()793 self._check_entry_status(entry, HqeStatus.RUNNING)794 self.mock_drone_manager.finish_process(_PidfileType.JOB)795 self._run_dispatcher()796 self._check_entry_status(entry, HqeStatus.PARSING)797 self.mock_drone_manager.finish_process(_PidfileType.PARSE)798 self._run_dispatcher()799 self._check_entry_status(entry, HqeStatus.ARCHIVING)...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful