Best Python code snippets using autotest_python
test_scraper.py
Source: test_scraper.py
#!/usr/bin/env python
"""Unit tests for market_data.scraper.Scraper (Yahoo equity price scraping)."""
import os
import sys
import inspect

# Make the package root importable when the tests are run from this directory.
file_path = os.path.dirname(inspect.getfile(inspect.currentframe()))
sys.path.insert(0, os.path.split(os.path.split(file_path)[0])[0])

import unittest
from unittest.mock import patch
import datetime

from market_data.scraper import Scraper, InvalidSourceError
from market_data.data import EquityData, InvalidTickerError, InvalidDateError
from market_data.data import EmptyDateListError
import market_data.tests.utils as test_utils


class ScraperTests(unittest.TestCase):
    """Construction-time validation of the Scraper data source."""

    def test_scrape_from_valid_source(self):
        scraper = Scraper('yahoo')
        self.assertIsInstance(scraper, Scraper)
        self.assertEqual(scraper.source, 'yahoo')

    def test_invalid_source_scrape_error(self):
        with self.assertRaises(InvalidSourceError):
            scraper = Scraper('google')


def load_test_data():
    """Return the raw bytes of the canned AMZN Yahoo page used as a fixture."""
    test_file = r'market_data/tests/amzn_scrape_test_data.html'
    with open(test_file, 'rb') as f:
        data = f.read()
    return data


class ScraperYahooEquityPricesTests(unittest.TestCase):
    """Single-date scraping via Scraper.scrape_equity_data."""

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_returns_correct_equity_data(self, mock_urlopen):
        ticker, dt, expected_data = test_utils.get_expected_equity_data()
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        results = scraper.scrape_equity_data(ticker, dt)
        self.assertIsInstance(results, EquityData)
        self.assertEqual(results, expected_data,
                         msg=f'res: {results} != ex: {expected_data}')

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_equity_data_with_date_and_time(self, mock_urlopen):
        ticker, dt, expected_data = test_utils.get_expected_equity_data()
        scraper = Scraper('yahoo')
        # A timestamp carrying a time-of-day must still match on the date.
        dt = datetime.datetime(dt.year, dt.month, dt.day, 19, 35, 33)
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        results = scraper.scrape_equity_data(ticker, dt)
        self.assertIsInstance(results, EquityData)
        self.assertEqual(results, expected_data,
                         msg=f'res: {results} != ex: {expected_data}')

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_invalid_date(self, mock_urlopen):
        ticker = 'AMZN'
        dt = datetime.datetime(2019, 9, 3)  # date absent from the fixture page
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        with self.assertRaises(InvalidDateError):
            results = scraper.scrape_equity_data(ticker, dt)

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_invalid_ticker(self, mock_urlopen):
        ticker = 'AMZNN'
        dt = datetime.datetime(2019, 8, 23)
        scraper = Scraper('yahoo')
        # NOTE(review): status is deliberately left unset here (unlike the
        # tests above), presumably simulating a failed page load — confirm.
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.read.return_value = b''
        with self.assertRaises(InvalidTickerError):
            results = scraper.scrape_equity_data(ticker, dt)

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_page_found_but_invalid_ticker(self, mock_urlopen):
        ticker = 'AMZNN'
        dt = datetime.datetime(2019, 8, 23)
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = b'<HTML><body></body></HTML>'
        with self.assertRaises(InvalidTickerError):
            results = scraper.scrape_equity_data(ticker, dt)


class ScraperYahooEquityMultipleDatesTests(unittest.TestCase):
    """Multi-date scraping via Scraper.scrape_eq_multiple_dates.

    The original tests rebound the patched ``mock_urlopen`` parameter to its
    context-manager return value, shadowing the mock; the local is renamed to
    ``mock_urlopen_context`` for consistency with the class above (behavior
    is unchanged).
    """

    def setUp(self):
        self.ticker = 'AMZN'

    @patch('urllib.request.urlopen', autospec=True)
    def test_scrape_invalid_ticker(self, mock_urlopen):
        ticker = 'AMZNN'
        dt = datetime.datetime(2019, 8, 23)
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.read.return_value = b''
        with self.assertRaises(InvalidTickerError):
            _, _ = scraper.scrape_eq_multiple_dates(ticker, [dt])

    @patch('urllib.request.urlopen', autospec=True)
    def test_scraper_empty_date_list_input(self, mock_urlopen):
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        with self.assertRaises(EmptyDateListError):
            _, _ = scraper.scrape_eq_multiple_dates(self.ticker, [])

    @patch('urllib.request.urlopen', autospec=True)
    def test_scraper_single_date(self, mock_urlopen):
        ticker, dt, expected_data = test_utils.get_expected_equity_data()
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        data, errors = scraper.scrape_eq_multiple_dates(ticker, [dt])
        self.assertEqual(len(data), 1)
        self.assertEqual(len(errors), 0)
        self.assertIsInstance(data[0][1], EquityData)
        self.assertEqual(data[0][0], dt.date())
        self.assertEqual(data[0][1], expected_data,
                         msg=f'res: {data[0][1]} != ex: {expected_data}')

    @patch('urllib.request.urlopen', autospec=True)
    def test_scraper_multiple_valid_dates(self, mock_urlopen):
        test_data = test_utils.load_test_data()
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        dt_1 = datetime.datetime(2019, 8, 26)
        dt_2 = datetime.datetime(2019, 8, 23)
        dt_3 = datetime.datetime(2019, 8, 27)
        dates = (dt_1, dt_2, dt_3)
        data, errors = scraper.scrape_eq_multiple_dates(self.ticker, dates)
        self.assertEqual(len(data), 3)
        self.assertEqual(len(errors), 0)
        for d in data:
            self.assertIsInstance(d[1], EquityData)
        for i, date in enumerate(dates):
            self.assertEqual(data[i][0], date.date())
            expected_data = test_utils.get_test_data(test_data, self.ticker,
                                                     date)
            self.assertEqual(data[i][1], expected_data,
                             msg=f'res: {data[i][1]} != ex: {expected_data}')

    @patch('urllib.request.urlopen', autospec=True)
    def test_valid_and_non_valid_dates(self, mock_urlopen):
        test_data = test_utils.load_test_data()
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        dt_1 = datetime.datetime(2019, 8, 26)
        dt_2 = datetime.datetime(2019, 8, 23)
        dt_3 = datetime.datetime(2019, 9, 2)   # not present in the fixture
        dt_4 = datetime.datetime(2019, 8, 27)
        dt_5 = datetime.datetime(2019, 9, 4)   # not present in the fixture
        dates = (dt_1, dt_2, dt_3, dt_4, dt_5)
        data, errors = scraper.scrape_eq_multiple_dates(self.ticker, dates)
        self.assertEqual(len(data), 3)
        self.assertEqual(len(errors), 2)
        # data checks
        for d in data:
            self.assertIsInstance(d[1], EquityData)
        for i, date in enumerate((dt_1, dt_2, dt_4)):
            self.assertEqual(data[i][0], date.date())
            expected_data = test_utils.get_test_data(test_data, self.ticker,
                                                     date)
            self.assertEqual(data[i][1], expected_data,
                             msg=f'res: {data[i][1]} != ex: {expected_data}')
        # error checks
        for i, date in enumerate((dt_3, dt_5)):
            self.assertIsInstance(errors[i], InvalidDateError)
            self.assertEqual(str(date.date()), str(errors[i]))

    @patch('urllib.request.urlopen', autospec=True)
    def test_no_valid_dates(self, mock_urlopen):
        scraper = Scraper('yahoo')
        mock_urlopen_context = mock_urlopen.return_value.__enter__.return_value
        mock_urlopen_context.status = 200
        mock_urlopen_context.read.return_value = load_test_data()
        dt_1 = datetime.datetime(2019, 9, 2)
        dt_2 = datetime.datetime(2019, 9, 4)
        dates = (dt_1, dt_2)
        data, errors = scraper.scrape_eq_multiple_dates(self.ticker, dates)
        self.assertEqual(len(data), 0)
        self.assertEqual(len(errors), 2)
        for i, error in enumerate(errors):
            self.assertIsInstance(error, InvalidDateError)
            self.assertEqual(str(dates[i].date()), str(error))


if __name__ == '__main__':
    # NOTE(review): the scraped source is truncated after this guard; the
    # standard unittest entry point is assumed.
    unittest.main()
Source: test_mldata.py
@with_setup(setup_tmpdata, teardown_tmpdata)
def test_download():
    """Test that fetch_mldata is able to download and cache a data set."""
    _urlopen_ref = datasets.mldata.urlopen
    datasets.mldata.urlopen = mock_mldata_urlopen({
        'mock': {
            'label': sp.ones((150,)),
            'data': sp.ones((150, 4)),
        },
    })
    try:
        mock = fetch_mldata('mock', data_home=tmpdir)
        for n in ["COL_NAMES", "DESCR", "target", "data"]:
            assert_in(n, mock)
        assert_equal(mock.target.shape, (150,))
        assert_equal(mock.data.shape, (150, 4))
        assert_raises(datasets.mldata.HTTPError,
                      fetch_mldata, 'not_existing_name')
    finally:
        # Always restore the real urlopen so later tests are unaffected.
        datasets.mldata.urlopen = _urlopen_ref


@with_setup(setup_tmpdata, teardown_tmpdata)
def test_fetch_one_column():
    """A single-column data set is exposed as `data` with no `target`."""
    _urlopen_ref = datasets.mldata.urlopen
    try:
        dataname = 'onecol'
        # create fake data set in cache
        x = sp.arange(6).reshape(2, 3)
        datasets.mldata.urlopen = mock_mldata_urlopen({dataname: {'x': x}})
        dset = fetch_mldata(dataname, data_home=tmpdir)
        for n in ["COL_NAMES", "DESCR", "data"]:
            assert_in(n, dset)
        assert_not_in("target", dset)
        assert_equal(dset.data.shape, (2, 3))
        assert_array_equal(dset.data, x)
        # transposing the data array
        dset = fetch_mldata(dataname, transpose_data=False, data_home=tmpdir)
        assert_equal(dset.data.shape, (3, 2))
    finally:
        datasets.mldata.urlopen = _urlopen_ref


@with_setup(setup_tmpdata, teardown_tmpdata)
def test_fetch_multiple_column():
    """Column-to-role mapping: by default names, by order, by number, by name."""
    _urlopen_ref = datasets.mldata.urlopen
    try:
        # create fake data set in cache
        x = sp.arange(6).reshape(2, 3)
        y = sp.array([1, -1])
        z = sp.arange(12).reshape(4, 3)
        # by default
        dataname = 'threecol-default'
        datasets.mldata.urlopen = mock_mldata_urlopen({
            dataname: (
                {
                    'label': y,
                    'data': x,
                    'z': z,
                },
                ['z', 'data', 'label'],
            ),
        })
        dset = fetch_mldata(dataname, data_home=tmpdir)
        for n in ["COL_NAMES", "DESCR", "target", "data", "z"]:
            assert_in(n, dset)
        assert_not_in("x", dset)
        assert_not_in("y", dset)
        assert_array_equal(dset.data, x)
        assert_array_equal(dset.target, y)
        assert_array_equal(dset.z, z.T)
        # by order
        dataname = 'threecol-order'
        datasets.mldata.urlopen = mock_mldata_urlopen({
            dataname: ({'y': y, 'x': x, 'z': z},
                       ['y', 'x', 'z']), })
        dset = fetch_mldata(dataname, data_home=tmpdir)
        for n in ["COL_NAMES", "DESCR", "target", "data", "z"]:
            assert_in(n, dset)
        assert_not_in("x", dset)
        assert_not_in("y", dset)
        assert_array_equal(dset.data, x)
        assert_array_equal(dset.target, y)
        assert_array_equal(dset.z, z.T)
        # by number
        dataname = 'threecol-number'
        datasets.mldata.urlopen = mock_mldata_urlopen({
            dataname: ({'y': y, 'x': x, 'z': z},
                       ['z', 'x', 'y']),
        })
        dset = fetch_mldata(dataname, target_name=2, data_name=0,
                            data_home=tmpdir)
        for n in ["COL_NAMES", "DESCR", "target", "data", "x"]:
            assert_in(n, dset)
        assert_not_in("y", dset)
        assert_not_in("z", dset)
        assert_array_equal(dset.data, z)
        assert_array_equal(dset.target, y)
        # by name
        dset = fetch_mldata(dataname, target_name='y', data_name='z',
                            data_home=tmpdir)
        # NOTE(review): the scraped source is truncated here — the remaining
        # by-name assertions are not visible in this chunk.
    finally:
        # NOTE(review): this restore was cut off by the truncation; it mirrors
        # the sibling tests above and is required for the `try:` to be valid.
        datasets.mldata.urlopen = _urlopen_ref
Right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 automation test minutes FREE!
