How to use the _process_perf_metric_test_result method in lisa

The Python snippet below, taken from lisa's ch_tests_tool.py, shows _process_perf_metric_test_result being called in the Cloud Hypervisor test tool, alongside the helper methods that surround it.

ch_tests_tool.py

Source: ch_tests_tool.py (GitHub)



...
                    shell=True,
                )
                if result.exit_code == 0:
                    status = TestStatus.PASSED
                    metrics = self._process_perf_metric_test_result(result.stdout)
                else:
                    status = TestStatus.FAILED
                    trace = f"Testcase '{testcase}' failed: {result.stderr}"
                    failed_testcases.append(testcase)
            except Exception as e:
                self._log.info(f"Testcase failed, testcase name: {testcase}")
                status = TestStatus.FAILED
                trace = str(e)
                failed_testcases.append(testcase)
            msg = metrics if status == TestStatus.PASSED else trace
            self._send_subtest_msg(
                test_id=test_result.id_,
                environment=environment,
                test_name=testcase,
                test_status=status,
                test_message=msg,
            )
            # Write stdout of testcase to log as per given requirement
            with open(testcase_log_file, "w") as f:
                f.write(result.stdout)
        assert_that(
            failed_testcases, f"Failed Testcases: {failed_testcases}"
        ).is_empty()

    def _initialize(self, *args: Any, **kwargs: Any) -> None:
        tool_path = self.get_tool_path(use_global=True)
        self.repo_root = tool_path / "cloud-hypervisor"
        self.cmd_path = self.repo_root / "scripts" / "dev_cli.sh"

    def _install(self) -> bool:
        git = self.node.tools[Git]
        git.clone(self.repo, self.get_tool_path(use_global=True))
        if isinstance(self.node.os, CBLMariner):
            daemon_json_file = PurePath("/etc/docker/daemon.json")
            daemon_json = '{"default-ulimits":{"nofile":{"Hard":65535,"Name":"nofile","Soft":65535}}}'  # noqa: E501
            self.node.tools[Echo].write_to_file(
                daemon_json, daemon_json_file, sudo=True
            )

        self.node.execute("groupadd -f docker", expected_exit_code=0)
        username = self.node.tools[Whoami].get_username()
        res = self.node.execute("getent group docker", expected_exit_code=0)
        if username not in res.stdout:  # if current user is not in docker group
            self.node.execute(f"usermod -a -G docker {username}", sudo=True)
            # reboot for group membership change to take effect
            self.node.reboot()

        self.node.tools[Docker].start()
        return self._check_exists()

    def _extract_test_results(self, output: str) -> List[CloudHypervisorTestResult]:
        results: List[CloudHypervisorTestResult] = []

        # Cargo will output test status for each test separately in JSON format. Parse
        # the output line by line to obtain the list of all tests run along with their
        # outcomes.
        #
        # Example output:
        # { "type": "test", "event": "ok", "name": "integration::test_vfio" }
        lines = output.split("\n")
        for line in lines:
            result = {}
            try:
                result = json.loads(line)
            except json.decoder.JSONDecodeError:
                continue
            if type(result) is not dict:
                continue
            if "type" not in result or result["type"] != "test":
                continue
            if "event" not in result or result["event"] not in ["ok", "failed"]:
                continue
            status = TestStatus.PASSED if result["event"] == "ok" else TestStatus.FAILED
            results.append(
                CloudHypervisorTestResult(
                    name=result["name"],
                    status=status,
                )
            )
        return results

    def _send_subtest_msg(
        self,
        test_id: str,
        environment: Environment,
        test_name: str,
        test_status: TestStatus,
        test_message: str = "",
    ) -> None:
        subtest_msg = create_test_result_message(
            SubTestMessage, test_id, environment, test_name, test_status, test_message
        )
        notifier.notify(subtest_msg)

    def _list_perf_metrics_tests(self, hypervisor: str = "kvm") -> List[str]:
        tests_list = []
        result = self.run(
            f"tests --hypervisor {hypervisor} --metrics -- -- --list-tests",
            timeout=self.TIME_OUT,
            force_run=True,
            cwd=self.repo_root,
            shell=True,
            expected_exit_code=0,
        )

        stdout = result.stdout

        # Ex. String for below regex
        # "boot_time_ms" (test_timeout=2s,test_iterations=10)
        regex = '\\"(.*)\\" \\('
        pattern = re.compile(regex)
        tests_list = pattern.findall(stdout)

        self._log.debug(f"Testcases found: {tests_list}")
        return tests_list

    def _process_perf_metric_test_result(self, output: str) -> str:
        # Sample Output
        # "git_human_readable": "v27.0",
        # "git_revision": "2ba6a9bfcfd79629aecf77504fa554ab821d138e",
        # "git_commit_date": "Thu Sep 29 17:56:21 2022 +0100",
        # "date": "Wed Oct 12 03:51:38 UTC 2022",
        # "results": [
        #     {
        #         "name": "block_multi_queue_read_MiBps",
        #         "mean": 158.64382311768824,
        #         "std_dev": 7.685502103050337,
        #         "max": 173.9743994350565,
        #         "min": 154.10646435356466
        #     }
        # ]
        ...
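The snippet is cut off right before the body of _process_perf_metric_test_result, so the excerpt only shows its signature and the sample report it receives. As a rough sketch of what a method with that signature could do with such a report, here is a hedged, stand-alone approximation; it is not lisa's actual implementation, and it assumes the tool's stdout is the JSON metrics document shown in the sample comment (summarize_metrics and the formatting are made up for illustration):

import json

# Illustrative sketch only: NOT the lisa implementation.
# Assumes the output is a JSON metrics report and flattens each entry in
# "results" into one readable summary line per metric.
def summarize_metrics(output: str) -> str:
    report = json.loads(output)
    summary = []
    for entry in report.get("results", []):
        summary.append(
            f'{entry["name"]}: mean={entry["mean"]:.2f} '
            f'(std_dev={entry["std_dev"]:.2f}, '
            f'min={entry["min"]:.2f}, max={entry["max"]:.2f})'
        )
    return "\n".join(summary)

sample_report = """
{
  "git_human_readable": "v27.0",
  "date": "Wed Oct 12 03:51:38 UTC 2022",
  "results": [
    {
      "name": "block_multi_queue_read_MiBps",
      "mean": 158.64382311768824,
      "std_dev": 7.685502103050337,
      "max": 173.9743994350565,
      "min": 154.10646435356466
    }
  ]
}
"""
print(summarize_metrics(sample_report))
# block_multi_queue_read_MiBps: mean=158.64 (std_dev=7.69, min=154.11, max=173.97)

The resulting string is what gets attached to the subtest message (test_message=msg) when a metrics testcase passes, so a compact one-line-per-metric format keeps the notification readable.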

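For the discovery step, _list_perf_metrics_tests pulls the quoted test names out of the --list-tests output with the regex '\\"(.*)\\" \\('. A quick standalone check of that pattern against the example line quoted in the source comment:

import re

# Same pattern as in _list_perf_metrics_tests: capture the quoted test name
# that precedes the "(test_timeout=...,test_iterations=...)" suffix.
pattern = re.compile('\\"(.*)\\" \\(')

sample_line = '"boot_time_ms" (test_timeout=2s,test_iterations=10)'
print(pattern.findall(sample_line))  # ['boot_time_ms']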

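Similarly, _extract_test_results relies on cargo emitting one JSON event per line and keeps only "test" events whose outcome is "ok" or "failed". A small standalone demonstration of that filtering idea, using the example event from the source comment (the surrounding sample lines and variable names here are illustrative):

import json

# One JSON event per line; non-JSON noise and non-"test" events are skipped,
# mirroring the filtering done in _extract_test_results.
sample_output = """\
{ "type": "suite", "event": "started", "test_count": 1 }
{ "type": "test", "event": "ok", "name": "integration::test_vfio" }
not-json noise is tolerated
"""

outcomes = {}
for line in sample_output.splitlines():
    try:
        event = json.loads(line)
    except json.decoder.JSONDecodeError:
        continue
    if (
        isinstance(event, dict)
        and event.get("type") == "test"
        and event.get("event") in ("ok", "failed")
    ):
        outcomes[event["name"]] = event["event"]

print(outcomes)  # {'integration::test_vfio': 'ok'}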
