Best Python code snippet using playwright-python
analysis.py
Source: analysis.py
...
        dtype='object'
    )
    return results


def parse_results(results):
    test_events = extract_test_events(results)
    student_info, student_info_changes = extract_student_info(results)
    student_assignments = extract_student_assignments(results)
    return test_events, student_info, student_info_changes, student_assignments


def extract_test_events(
    results
):
    test_events = (
        results
        .rename(columns={
            'TermTested': 'term_school_year',
            'DistrictName': 'legal_entity',
            'Subject': 'subject',
            'Course': 'course',
            'StudentID': 'student_id_nwea',
            'TestDate': 'test_date',
            'StartRIT': 'rit_score',
            'StartRITSEM': 'rit_score_sem',
            'StartPercentile': 'percentile',
...

test_zmap.py
Source: test_zmap.py
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from future.builtins import *  # NOQA

import os
import unittest

from obspy.core.event import read_events
from obspy.core.utcdatetime import UTCDateTime
from obspy.core.util import NamedTemporaryFile, get_example_file
from obspy.io.zmap import core as zmap

_STD_ZMAP_FIELDS = ('lon', 'lat', 'year', 'month', 'day', 'mag', 'depth',
                    'hour', 'minute', 'second')
_EXT_ZMAP_FIELDS = ('h_err', 'z_err', 'm_err')
_ORIGIN_FIELDS = ('lon', 'lat', 'year', 'month', 'day', 'depth', 'hour',
                  'minute', 'second', 'h_err', 'z_err')
_MAGNITUDE_FIELDS = ('mag', 'm_err')


class ZMAPTestCase(unittest.TestCase):
    """
    Test suite for obspy.io.zmap.core
    """

    def setUp(self):
        data_dir = os.path.join(os.path.dirname(__file__), 'data')
        path_to_catalog = os.path.join(data_dir, 'neries_events.xml')
        self.catalog = read_events(path_to_catalog)
        self.zmap_fields = _STD_ZMAP_FIELDS
        # Extract our favorite test event from the catalog
        test_event_id = 'quakeml:eu.emsc/event/20120404_0000041'
        self.test_event = next(e for e in self.catalog.events
                               if e.resource_id.id == test_event_id)
        self.test_data = {
            'lon': '79.689000', 'lat': '41.818000', 'month': '4',
            'year': '2012.258465590847', 'day': '4', 'hour': '14',
            'minute': '21', 'second': '42.3', 'depth': '1.000000',
            'mag': '4.400000'
        }

    def tearDown(self):
        # Make sure events are deleted before the next test to prevent
        # resource identifier warnings
        self.catalog = None
        self.test_event = None

    def test_serialize(self):
        """
        Test serialization to zmap format
        """
        pickler = zmap.Pickler()
        # test full event (including origin/magnitude)
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string(self.test_data), dump)
        self.assertEqual(dump.count('\n'), 3)
        # no preferred origin
        oid = self.test_event.preferred_origin_id
        self.test_event.preferred_origin_id = None
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string({'mag': '4.400000'}), dump)
        self.test_event.preferred_origin_id = oid
        # no preferred magnitude
        self.test_event.preferred_magnitude_id = None
        dump = pickler.dumps(self.catalog)
        test_data = self.test_data.copy()
        del test_data['mag']
        self.assertIn(self._expected_string(test_data), dump)

    def test_plugin_interface(self):
        """
        Test if zmap writing works via obspy's plugin interface
        """
        with NamedTemporaryFile() as f:
            self.catalog.write(f, format='ZMAP')
            f.seek(0)
            file_content = f.read().decode('utf-8')
        self.assertIn(self._expected_string(self.test_data), file_content)

    def test_dump_to_file(self):
        """
        Test output to pre-opened file
        """
        with NamedTemporaryFile() as f:
            zmap._write_zmap(self.catalog, f)
            f.seek(0)
            file_content = f.read().decode('utf-8')
        self.assertIn(self._expected_string(self.test_data), file_content)

    def test_dump_to_filename(self):
        """
        Test output to file with a filename specified
        """
        with NamedTemporaryFile() as f:
            zmap._write_zmap(self.catalog, f.name)
            f.seek(0)
            file_content = f.read().decode('utf-8')
        self.assertIn(self._expected_string(self.test_data), file_content)

    def test_dump_with_uncertainty(self):
        """
        Test export of non-standard (CSEP) uncertainty fields
        """
        self.zmap_fields += _EXT_ZMAP_FIELDS
        self.test_data.update({'h_err': 'NaN', 'z_err': '0.000000',
                               'm_err': '0.000000'})
        pickler = zmap.Pickler(with_uncertainties=True)
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string(self.test_data), dump)

    def test_ou_hz_error(self):
        """
        Test hz error extraction from origin_uncertainty
        """
        self.zmap_fields += _EXT_ZMAP_FIELDS
        self.test_data.update({'h_err': '1.000000', 'z_err': '0.000000',
                               'm_err': '0.000000'})
        pickler = zmap.Pickler(with_uncertainties=True)
        o = self.test_event.preferred_origin()
        o.origin_uncertainty.preferred_description = 'horizontal uncertainty'
        o.origin_uncertainty.horizontal_uncertainty = 1.0
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string(self.test_data), dump)
        # with unsupported preferred_description
        self.test_data.update({'h_err': 'NaN', 'z_err': '0.000000',
                               'm_err': '0.000000'})
        o.origin_uncertainty.preferred_description = 'uncertainty ellipse'
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string(self.test_data), dump)

    def test_lat_lon_hz_error(self):
        """
        Test hz error extraction from lat/lon
        """
        self.zmap_fields += _EXT_ZMAP_FIELDS
        self.test_data.update({'h_err': '0.138679', 'z_err': '0.000000',
                               'm_err': '0.000000'})
        pickler = zmap.Pickler(with_uncertainties=True)
        o = self.test_event.preferred_origin()
        o.latitude_errors.uncertainty = .001
        o.longitude_errors.uncertainty = .001
        dump = pickler.dumps(self.catalog)
        self.assertIn(self._expected_string(self.test_data), dump)

    def test_is_zmap(self):
        """
        Test zmap format detection
        """
        # Regular ZMAP
        test_events = [self.test_data, dict(self.test_data, mag='5.1')]
        with NamedTemporaryFile() as f:
            f.write(self._serialize(test_events).encode('utf-8'))
            self.assertTrue(zmap._is_zmap(f.name))
            # Pre-opened file
            f.seek(0)
            self.assertTrue(zmap._is_zmap(f))
        # Extended ZMAP (13 columns)
        self.zmap_fields += _EXT_ZMAP_FIELDS
        self.test_data.update({'h_err': '0.138679', 'z_err': '0.000000',
                               'm_err': '0.000000'})
        test_events = [self.test_data, dict(self.test_data, mag='5.1')]
        with NamedTemporaryFile() as f:
            f.write(self._serialize(test_events).encode('utf-8'))
            self.assertTrue(zmap._is_zmap(f.name))
        # ZMAP string
        test_string = self._serialize(test_events)
        self.assertTrue(zmap._is_zmap(test_string))
        # Non-ZMAP string
        test_string = '0.000000\t' + test_string
        self.assertFalse(zmap._is_zmap(test_string + '\n'))
        # Non-ZMAP file (14 columns)
        self.zmap_fields += ('dummy',)
        self.test_data.update({'dummy': '0'})
        test_events = [self.test_data, dict(self.test_data, mag='5.1')]
        with NamedTemporaryFile() as f:
            f.write(self._serialize(test_events).encode('utf-8'))
            self.assertFalse(zmap._is_zmap(f.name))
        # Non-ZMAP file (non-numeric columns)
        self.zmap_fields = _STD_ZMAP_FIELDS + _EXT_ZMAP_FIELDS
        self.test_data.update({'mag': 'bad'})
        test_events = [self.test_data]
        with NamedTemporaryFile() as f:
            f.write(self._serialize(test_events).encode('utf-8'))
            self.assertFalse(zmap._is_zmap(f.name))

    def test_is_zmap_binary_files(self):
        """
        Test zmap format detection on non-ZMAP (e.g. binary) files, see #1022.
        """
        # Non-ZMAP file, binary
        for filename in ["test.mseed", "test.sac"]:
            file_ = get_example_file(filename)
            self.assertFalse(zmap._is_zmap(file_))

    def test_deserialize(self):
        """
        Test ZMAP deserialization to catalog
        """
        # Regular ZMAP
        test_events = [self.test_data, dict(self.test_data, mag='5.1')]
        zmap_str = self._serialize(test_events)
        catalog = zmap.Unpickler().loads(zmap_str)
        self._assert_zmap_equal(catalog, test_events)
        # Leniency (1 to 13 or more columns (extra columns are ignored))
        self.zmap_fields += _EXT_ZMAP_FIELDS + ('extra',)
        self.test_data.update({'h_err': '0.138679', 'z_err': '0.000000',
                               'm_err': '0.000000', 'extra': '0.000000'})
        data = {}
        for field in self.zmap_fields:
            data[field] = self.test_data[field]
            test_events = [data, dict(data, lon='0')]
            zmap_str = self._serialize(test_events, fill_nans=False)
            catalog = zmap.Unpickler().loads(zmap_str)
            self._assert_zmap_equal(catalog, test_events)
        # Deserialize accepts a year without the weird fractional part that
        # redundantly defines the point in time within the year.
        test_events = [dict(e, year=int(float(e['year'])))
                       for e in test_events]
        zmap_str = self._serialize(test_events)
        catalog = zmap.Unpickler().loads(zmap_str)
        self._assert_zmap_equal(catalog, test_events)

    def test_read(self):
        # via file, file name, plugin interface
        test_events = [self.test_data, dict(self.test_data, lon='5.1')]
        zmap_str = self._serialize(test_events)
        with NamedTemporaryFile() as f:
            f.write(zmap_str.encode('utf-8'))
            catalog = zmap._read_zmap(f.name)
            self._assert_zmap_equal(catalog, test_events)
            f.seek(0)
            catalog = zmap._read_zmap(f)
            self._assert_zmap_equal(catalog, test_events)
            catalog = read_events(f.name)
            self._assert_zmap_equal(catalog, test_events)
        # direct ZMAP string
        catalog = zmap._read_zmap(zmap_str)
        self._assert_zmap_equal(catalog, test_events)

    def _assert_zmap_equal(self, catalog, dicts):
        """
        Compares a zmap imported catalog with test event dictionaries
        """
        self.assertEqual(len(catalog), len(dicts))
        for event, test_dict in zip(catalog, dicts):
            origin = event.preferred_origin()
            if any(k in test_dict for k in _ORIGIN_FIELDS):
                self.assertNotEqual(None, origin)
            magnitude = event.preferred_magnitude()
            if any(k in test_dict for k in _MAGNITUDE_FIELDS):
                self.assertNotEqual(None, magnitude)
            d = dict((k, float(v) if v != 'NaN' else None)
                     for (k, v) in test_dict.items())
            if 'lon' in d:
                self.assertEqual(d['lon'], origin.longitude)
            if 'lat' in d:
                self.assertEqual(d['lat'], origin.latitude)
            if 'depth' in d:
                self.assertEqual(d['depth'] * 1000, origin.depth)
            if 'z_err' in d:
                self.assertEqual(d['z_err'] * 1000,
                                 origin.depth_errors.uncertainty)
            if 'h_err' in d:
                self.assertEqual(d['h_err'],
                                 origin.origin_uncertainty.horizontal_uncertainty)
                self.assertEqual('horizontal uncertainty',
                                 origin.origin_uncertainty.preferred_description)
            if 'year' in d:
                year = d['year']
                comps = ['year', 'month', 'day', 'hour', 'minute', 'second']
                if year % 1 != 0:
                    start = UTCDateTime(int(year), 1, 1)
                    end = UTCDateTime(int(year) + 1, 1, 1)
                    utc = start + (year % 1) * (end - start)
                elif any(d.get(k, 0) > 0 for k in comps[1:]):
                    utc = UTCDateTime(*[int(d.get(k)) for k in comps])
                self.assertEqual(utc, event.preferred_origin().time)
            if 'mag' in d:
                self.assertEqual(d['mag'], magnitude.mag)
            if 'm_err' in d:
                self.assertEqual(d['m_err'], magnitude.mag_errors.uncertainty)

    def _serialize(self, test_dicts, fill_nans=True):
        zmap_str = ''
        for d in test_dicts:
            if fill_nans:
                zmap_str += '\t'.join(str(d[f]) if f in d else 'NaN'
                                      for f in self.zmap_fields) + '\n'
            else:
                zmap_str += '\t'.join(str(d[f]) for f in self.zmap_fields
                                      if f in d) + '\n'
        return zmap_str

    def _expected_string(self, zmap_dict):
        """
        Returns the expected string from a ZMAP dump.

        zmap_dict contains (string) values for all the fields that are expected
        to have specific values. All other fields default to 'NaN'.
        """
        full_zmap = dict.fromkeys(self.zmap_fields, 'NaN')
        full_zmap.update(zmap_dict)
        string = '\t'.join(full_zmap[f] for f in self.zmap_fields)
        return string


def suite():
    return unittest.makeSuite(ZMAPTestCase, 'test')


if __name__ == '__main__':
...

test_lib.py
Source: test_lib.py
# -*- coding: utf-8 -*-
"""Storage related functions and classes for testing."""

from __future__ import unicode_literals

from dfdatetime import filetime as dfdatetime_filetime

from plaso.containers import events
from plaso.containers import time_events
from plaso.containers import windows_events
from plaso.lib import definitions
from plaso.lib import timelib

from tests import test_lib as shared_test_lib


class StorageTestCase(shared_test_lib.BaseTestCase):
  """The unit test case for a storage object."""

  # pylint: disable=protected-access

  def _CreateTestEvents(self):
    """Creates events for testing.

    Returns:
      list[EventObject]: events.
    """
    test_events = []

    filetime = dfdatetime_filetime.Filetime()

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = 'MY AutoRun key'
    event_data.parser = 'UNKNOWN'
    event_data.regvalue = {'Value': 'c:/Temp/evil.exe'}

    filetime.CopyFromDateTimeString('2012-04-20 22:38:46.929596')
    event = time_events.DateTimeValuesEvent(
        filetime, definitions.TIME_DESCRIPTION_WRITTEN)
    self._MergeEventAndEventData(event, event_data)
    test_events.append(event)

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = (
        'HKEY_CURRENT_USER\\Secret\\EvilEmpire\\Malicious_key')
    event_data.parser = 'UNKNOWN'
    event_data.regvalue = {'Value': 'send all the exes to the other world'}

    filetime.CopyFromDateTimeString('2012-04-20 23:56:46.929596')
    event = time_events.DateTimeValuesEvent(
        filetime, definitions.TIME_DESCRIPTION_WRITTEN)
    self._MergeEventAndEventData(event, event_data)
    test_events.append(event)

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = 'HKEY_CURRENT_USER\\Windows\\Normal'
    event_data.parser = 'UNKNOWN'
    event_data.regvalue = {'Value': 'run all the benign stuff'}

    filetime.CopyFromDateTimeString('2012-04-20 16:44:46')
    event = time_events.DateTimeValuesEvent(
        filetime, definitions.TIME_DESCRIPTION_WRITTEN)
    self._MergeEventAndEventData(event, event_data)
    test_events.append(event)

    timestamp = timelib.Timestamp.CopyFromString('2009-04-05 12:27:39')
    # TODO: refactor to use event data.
    event = time_events.TimestampEvent(
        timestamp, definitions.TIME_DESCRIPTION_WRITTEN,
        data_type='text:entry')
    event.hostname = 'nomachine'
    event.offset = 12
    event.parser = 'UNKNOWN'
    event.text = (
        'This is a line by someone not reading the log line properly. And '
        'since this log line exceeds the accepted 80 chars it will be '
        'shortened.')
    event.username = 'johndoe'
    test_events.append(event)

    return test_events

  def _CreateTestEventTags(self, test_events):
    """Creates the event tags for testing.

    Args:
      test_events (list[EventObject]): test events.

    Returns:
      list[EventTag]: event tags.
    """
    event_tags = []

    event_identifier = test_events[0].GetIdentifier()
    event_tag = events.EventTag(comment='My comment')
    event_tag.SetEventIdentifier(event_identifier)
    event_tags.append(event_tag)

    event_identifier = test_events[1].GetIdentifier()
    event_tag = events.EventTag()
    event_tag.SetEventIdentifier(event_identifier)
    event_tag.AddLabel('Malware')
    event_tags.append(event_tag)

    event_identifier = test_events[2].GetIdentifier()
    event_tag = events.EventTag(comment='This is interesting')
    event_tag.SetEventIdentifier(event_identifier)
    event_tag.AddLabels(['Malware', 'Benign'])
    event_tags.append(event_tag)

    event_identifier = test_events[1].GetIdentifier()
    event_tag = events.EventTag()
    event_tag.SetEventIdentifier(event_identifier)
    event_tag.AddLabel('Interesting')
    event_tags.append(event_tag)

    return event_tags

  # TODO: remove after event data refactor.
  def _MergeEventAndEventData(self, event, event_data):
    """Merges the event data with the event.

    Args:
      event (EventObject): event.
      event_data (EventData): event data.
    """
    for attribute_name, attribute_value in event_data.GetAttributes():
...

test_annot.py
Source: test_annot.py
...
    start_seconds.extend([np.nan, np.nan])
    stop_seconds.extend([np.nan, 0])
    return names, start_seconds, stop_seconds


@pytest.fixture()
def test_events(test_lists):
    names, start_seconds, stop_seconds = test_lists
    et = Events.from_lists(names, start_seconds, stop_seconds)
    return et


def test_from_ds(test_data):
    names, times = test_data
    names = xr.DataArray(name='event_names', data=np.array(names, dtype='U128'),
                         dims=['index'])
    names.attrs['allowed_names'] = list(set(names.data))
    times = xr.DataArray(name='event_times', data=times,
                         dims=['index', 'event_time'],
                         coords={'event_time': ['start_seconds', 'stop_seconds']})
    ds = xr.Dataset({da.name: da for da in [names, times]})
    ds.attrs['pulse'] = 'event'
    ds.attrs['sine'] = 'segment'
    et = Events.from_dataset(ds)
...

LambdaTest's Playwright tutorial gives a broader overview of the Playwright automation framework, its distinctive features, and its use cases, with examples to deepen your understanding of Playwright testing. The tutorial offers end-to-end guidance, from installing the Playwright framework through best practices and advanced concepts.
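For orientation, here is a minimal sketch of what a playwright-python script looks like. It is not taken from any of the snippets above; it assumes Playwright for Python has been installed (pip install playwright, then playwright install) and uses the synchronous API, with example.com as a placeholder URL.

from playwright.sync_api import sync_playwright

with sync_playwright() as p:
    # Launch a headless Chromium instance; Firefox and WebKit work the same way.
    browser = p.chromium.launch(headless=True)
    page = browser.new_page()
    # Navigate to a page and assert on its title.
    page.goto("https://example.com")
    assert "Example" in page.title()
    browser.close()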
