How to use the pytest_runtest_teardown hook in Pytest

Best Python code snippet using pytest
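
pytest_runtest_teardown is the hook pytest calls around the teardown phase of each test item: implementing it in a conftest.py or a plugin lets you run code before and after the test's fixture finalizers execute. A minimal hookwrapper sketch, assuming a conftest.py at the root of your test suite (the printed message is purely illustrative):

# conftest.py
import pytest

@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_teardown(item, nextitem):
    # runs before pytest starts the teardown phase for this test
    print('tearing down {}'.format(item.nodeid))
    yield  # fixture finalizers execute here
    # runs after all finalizers have completed

The conftest.py below shows the hook in a real plugin: it wraps pytest_runtest_setup, pytest_runtest_call and pytest_runtest_teardown to profile how long pytest spends in each phase and exports the timings to a Prometheus node_exporter textfile.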

conftest.py

Source: conftest.py (GitHub)

import cProfile
import json
import os
import pstats
import pytest
import logging
import re
from faker import Faker
from docker import Client  # legacy docker-py (< 2.0) API; renamed to APIClient in 2.x

try:
    from StringIO import StringIO  # Python 2
except ImportError:
    from io import StringIO  # Python 3


PROFILE_RESULTS_FILE = 'reports/global.prof'
TOASTER_TIMINGS_JSON = '/tmp/toaster-timings.json'
NODE_EXPORTER_METRIC_FILE = '/var/lib/node_exporter/textfile_collector/salt_toaster.prom'


logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


class Handler(logging.Handler):
    """Logging handler that collects formatted records on the pytest namespace."""

    def emit(self, record):
        pytest.logentries.append(self.format(record))  # pylint: disable=no-member


class ExtraSaltPlugin(object):

    @pytest.hookimpl()
    def pytest_namespace(self):
        # note: pytest_namespace is deprecated and was removed in pytest 4.0
        return dict(logentries=[])

    @pytest.hookimpl(hookwrapper=True)
    def pytest_sessionstart(self, session):
        handler = Handler()
        logging.root.addHandler(handler)
        yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_terminal_summary(self, terminalreporter):
        for item in pytest.logentries:  # pylint: disable=no-member
            terminalreporter.write_line(item)
        yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_setup(self, item):
        # log each test module's name the first time one of its tests runs
        if item.module.__name__ not in pytest.logentries:  # pylint: disable=no-member
            logger.info(item.module.__name__)
        yield


@pytest.fixture(scope="session")
def docker_client():
    client = Client(base_url='unix://var/run/docker.sock', timeout=180)
    return client


@pytest.fixture(autouse=True)
def tagschecker(request):
    tags = set(request.config.getini('TAGS'))

    tags_marker = request.node.get_closest_marker('tags')
    xfailtags_marker = request.node.get_closest_marker('xfailtags')
    skiptags_marker = request.node.get_closest_marker('skiptags')

    # xfail wins over skip; a test is skipped when it demands tags that are
    # not active, or when one of its skiptags is active
    if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
        request.node.add_marker(pytest.mark.xfail())
    elif (
        tags_marker and tags.isdisjoint(set(tags_marker.args)) or
        skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
    ):
        pytest.skip('skipped for these tags: {}'.format(tags))


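# Example usage of the markers consumed by tagschecker above (a sketch; the
# tag values 'sles12' and 'devel' are hypothetical):
#
#   @pytest.mark.tags('sles12')     # runs only when 'sles12' is in TAGS
#   def test_sles12_only(master):
#       ...
#
#   @pytest.mark.skiptags('devel')  # skipped when 'devel' is in TAGS
#   def test_not_on_devel(minion):
#       ...

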
@pytest.fixture(scope='module')
def module_config(request):
    fake = Faker()
    return {
        "masters": [
            {
                "minions": [
                    {
                        "config": {
                            "container__config__name": 'minion_{0}_{1}_{2}'.format(
                                fake.word(), fake.word(), os.environ.get('ST_JOB_ID', '')),  # pylint: disable=no-member
                            "container__config__image": (
                                request.config.getini('MINION_IMAGE') or
                                request.config.getini('IMAGE')
                            )
                        }
                    }
                ]
            }
        ]
    }


@pytest.fixture(scope="module")
def master(setup):
    config, initconfig = setup
    return config['masters'][0]['fixture']


@pytest.fixture(scope="module")
def minion(setup):
    config, initconfig = setup
    minions = config['masters'][0]['minions']
    return minions[0]['fixture'] if minions else None


class SaltToasterException(Exception):
    pass


class ToasterTestsProfiling(object):
    """Toaster Tests Profiling plugin for pytest."""

    AVAILABLE_MODES = ['boolean', 'cumulative', 'deltas']

    global_profile = None
    mode = None
    metrics = {}

    def __init__(self, mode="default"):
        self.global_profile = cProfile.Profile()
        self.global_profile.enable()
        if mode in self.AVAILABLE_MODES:
            self.mode = mode
        else:
            # "default" is not in AVAILABLE_MODES, so calling without an
            # explicit mode raises
            raise SaltToasterException("Mode '{}' is not supported".format(mode))
        from_json = self.mode == "cumulative"
        self.metrics = self.read_initial_values(from_json=from_json)

    def read_initial_values(self, from_json=False):
        timings = {
            'pytest_runtest_setup': 0,
            'pytest_runtest_call': 0,
            'pytest_runtest_teardown': 0
        }
        if from_json:
            # Read possible values from the JSON file
            try:
                with open(TOASTER_TIMINGS_JSON) as infile:
                    timings.update(json.load(infile))
            except IOError as exc:
                logger.error("Failed to read JSON file: {}".format(exc))
        return timings

    def export_metrics_to_prometheus(self, metrics):
        # Export metrics via the Prometheus node_exporter textfile collector
        if self.mode == "boolean":
            metrics_header = \
'''
# HELP node_salt_toaster Pytest step being executed at the moment (1 = yes, 0 = no).
# TYPE node_salt_toaster gauge
'''
        else:
            metrics_header = \
'''
# HELP node_salt_toaster Seconds pytest spent in each Salt toaster step.
# TYPE node_salt_toaster counter
'''
        metrics_str = metrics_header + \
'''
node_salt_toaster{{step="pytest_runtest_setup"}} {pytest_runtest_setup}
node_salt_toaster{{step="pytest_runtest_call"}} {pytest_runtest_call}
node_salt_toaster{{step="pytest_runtest_teardown"}} {pytest_runtest_teardown}
'''
        try:
            with open(NODE_EXPORTER_METRIC_FILE, 'w') as metrics_file:
                metrics_file.write(
                    metrics_str.format(
                        pytest_runtest_setup=metrics['pytest_runtest_setup'],
                        pytest_runtest_call=metrics['pytest_runtest_call'],
                        pytest_runtest_teardown=metrics['pytest_runtest_teardown'],
                    ).lstrip()
                )
        except IOError as exc:
            logger.error("Failed to export metrics to Prometheus node "
                         "exporter file {}: {}".format(NODE_EXPORTER_METRIC_FILE, exc))

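    # With the format strings above, the textfile collector ends up with
    # content like the following (values illustrative, boolean mode):
    #
    #   # HELP node_salt_toaster Pytest step being executed at the moment (1 = yes, 0 = no).
    #   # TYPE node_salt_toaster gauge
    #   node_salt_toaster{step="pytest_runtest_setup"} 0
    #   node_salt_toaster{step="pytest_runtest_call"} 1
    #   node_salt_toaster{step="pytest_runtest_teardown"} 0
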
    def accumulate_values_to_json(self, values, json_filename):
        # Accumulate current values with the initial ones
        for item in self.metrics.keys():
            values[item] += self.metrics[item]
        with open(json_filename, 'w') as outfile:
            json.dump(values, outfile)
        self.export_metrics_to_prometheus(values)

    def export_metrics_delta(self, old_metrics, new_metrics, json_filename):
        deltas = {}
        for item in self.metrics.keys():
            deltas[item] = new_metrics[item] - old_metrics[item]
        with open(json_filename, 'w') as outfile:
            json.dump(new_metrics, outfile)
        self.metrics = new_metrics
        self.export_metrics_to_prometheus(deltas)

    def process_stats(self):
        timings = {
            'pytest_runtest_setup': 0,
            'pytest_runtest_call': 0,
            'pytest_runtest_teardown': 0
        }
        # dump the profile collected so far, then keep profiling
        self.global_profile.disable()
        self.global_profile.dump_stats(PROFILE_RESULTS_FILE)
        self.global_profile.enable()
        stream = StringIO()
        stats = pstats.Stats(PROFILE_RESULTS_FILE, stream=stream)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_setup', 1)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_call', 1)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_teardown', 1)
        # pick the cumulative-time column out of each matching pstats row
        for line in stream.getvalue().split('\n'):
            if re.match(r'.+\d+.+\d+\.\d+.+\d+\.\d+.+\d+\.\d+.+\d+\.\d+.*', line):
                line_list = [item for item in line.split(' ') if item]
                if 'pytest_runtest_setup' in line:
                    timings['pytest_runtest_setup'] = float(line_list[3])
                elif 'pytest_runtest_call' in line:
                    timings['pytest_runtest_call'] = float(line_list[3])
                elif 'pytest_runtest_teardown' in line:
                    timings['pytest_runtest_teardown'] = float(line_list[3])
        if self.mode == "deltas":
            self.export_metrics_delta(self.metrics, timings, TOASTER_TIMINGS_JSON)
        elif self.mode == "cumulative":
            self.accumulate_values_to_json(timings, TOASTER_TIMINGS_JSON)

    def process_stats_switch_on(self, stepname):
        self.metrics[stepname] = 1
        self.export_metrics_to_prometheus(self.metrics)

    def process_stats_switch_off(self, stepname):
        self.metrics[stepname] = 0
        self.export_metrics_to_prometheus(self.metrics)

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_setup(self, item):  # @UnusedVariable
        if self.mode == "boolean":
            self.process_stats_switch_on("pytest_runtest_setup")
            yield
            self.process_stats_switch_off("pytest_runtest_setup")
        else:
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_call(self, item):  # @UnusedVariable
        if self.mode == "boolean":
            self.process_stats_switch_on("pytest_runtest_call")
            yield
            self.process_stats_switch_off("pytest_runtest_call")
        else:
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_teardown(self, item, nextitem):  # @UnusedVariable
        # as a hookwrapper, code before the yield runs before the teardown
        # phase and code after it runs once all finalizers have executed
        if self.mode == "boolean":
            self.process_stats_switch_on("pytest_runtest_teardown")
            yield
            self.process_stats_switch_off("pytest_runtest_teardown")
        elif self.mode in ["cumulative", "deltas"]:
            yield
            self.process_stats()
        else:
            yield

    def pytest_terminal_summary(self, terminalreporter):
        self.global_profile.disable()
        self.global_profile.dump_stats(PROFILE_RESULTS_FILE)
        terminalreporter.write_sep(
            "-", "generated cProfile stats file on: {}".format(PROFILE_RESULTS_FILE))
        terminalreporter.write_sep("-", "Salt Toaster Profiling Stats")
        stats = pstats.Stats(self.global_profile, stream=terminalreporter)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_setup', 1)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_call', 1)
        stats.sort_stats('cumulative').print_stats('pytest_runtest_teardown', 1)


def pytest_configure(config):
    plugin = ExtraSaltPlugin()
    config.pluginmanager.register(plugin, 'ExtraSaltPlugin')
    config.pluginmanager.register(ToasterTestsProfiling(mode="boolean"))
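
The snippet above also depends on pieces defined elsewhere in its repository: a setup fixture (consumed by master and minion) and the TAGS, IMAGE, and MINION_IMAGE ini options read via request.config.getini(). Custom ini options must be registered before they can be read; a minimal sketch of that registration, with assumed help strings and defaults:

def pytest_addoption(parser):
    # register the ini options that tagschecker and module_config read;
    # the help strings and defaults here are assumptions
    parser.addini('TAGS', 'tags used to select, skip or xfail tests', type='args', default=[])
    parser.addini('IMAGE', 'default docker image for test containers', default='')
    parser.addini('MINION_IMAGE', 'docker image for salt minion containers', default='')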
285