Best Python code snippet using pandera_python
test_proximity.py
Source:test_proximity.py  
import dask.array as da
import numpy as np
import pytest
import xarray as xr
from xrspatial import allocation, direction, euclidean_distance, great_circle_distance, proximity
from xrspatial.proximity import _calc_direction
from xrspatial.tests.general_checks import general_output_checks

def test_great_circle_distance():
    # invalid x_coord
    ys = [0, 0, -91, 91]
    xs = [-181, 181, 0, 0]
    for x, y in zip(xs, ys):
        with pytest.raises(Exception) as e_info:
            great_circle_distance(x1=0, x2=x, y1=0, y2=y)
            assert e_info

@pytest.fixture
def test_raster(backend):
    height, width = 4, 6
    # create test raster, all non-zero cells are unique,
    # this is to test allocation and direction against corresponding proximity
    data = np.asarray([[0., 0., 0., 0., 0., 2.],
                       [0., 0., 1., 0., 0., 0.],
                       [0., np.inf, 3., 0., 0., 0.],
                       [4., 0., 0., 0., np.nan, 0.]])
    _lon = np.linspace(-20, 20, width)
    _lat = np.linspace(20, -20, height)
    raster = xr.DataArray(data, dims=['lat', 'lon'])
    raster['lon'] = _lon
    raster['lat'] = _lat
    if 'dask' in backend:
        raster.data = da.from_array(data, chunks=(4, 3))
    return raster

@pytest.fixture
def result_default_proximity():
    # DEFAULT SETTINGS
    expected_result = np.array([
        [20.82733247, 15.54920505, 13.33333333, 15.54920505,  8., 0.],
        [16., 8., 0., 8., 15.54920505, 13.33333333],
        [13.33333333, 8., 0., 8., 16., 24.],
        [0., 8., 13.33333333, 15.54920505, 20.82733247, 27.45501371]
    ], dtype=np.float32)
    return expected_result

@pytest.fixture
def result_target_proximity():
    target_values = [2, 3]
    expected_result = np.array([
        [31.09841011, 27.84081736, 24., 16., 8., 0.],
        [20.82733247, 15.54920505, 13.33333333, 15.54920505, 15.54920505, 13.33333333],
        [16., 8., 0., 8., 16., 24.],
        [20.82733247, 15.54920505, 13.33333333, 15.54920505, 20.82733247, 27.45501371]
    ], dtype=np.float32)
    return target_values, expected_result

@pytest.fixture
def result_manhattan_proximity():
    # distance_metric SETTING: MANHATTAN
    expected_result = np.array([
        [29.33333333, 21.33333333, 13.33333333, 16., 8., 0.],
        [16., 8., 0., 8., 16., 13.33333333],
        [13.33333333, 8., 0., 8., 16., 24.],
        [0., 8., 13.33333333, 21.33333333, 29.33333333, 37.33333333]
    ], dtype=np.float32)
    return expected_result

@pytest.fixture
def result_great_circle_proximity():
    # distance_metric SETTING: GREAT_CIRCLE
    expected_result = np.array([
        [2278099.27025501, 1717528.97437217, 1484259.87724365, 1673057.17235307, 836769.1780019, 0],
        [1768990.54084204, 884524.60324856, 0, 884524.60324856, 1717528.97437217, 1484259.87724365],
        [1484259.87724365, 884524.60324856, 0, 884524.60324856, 1768990.54084204, 2653336.85436932],
        [0, 836769.1780019, 1484259.87724365, 1717528.97437217, 2278099.27025501, 2986647.12982316]
    ], dtype=np.float32)
    return expected_result

@pytest.fixture
def result_max_distance_proximity():
    # max_distance setting
    max_distance = 10
    expected_result = np.array([
        [np.nan, np.nan, np.nan, np.nan, 8., 0.],
        [np.nan, 8., 0., 8., np.nan, np.nan],
        [np.nan, 8., 0., 8., np.nan, np.nan],
        [0., 8., np.nan, np.nan, np.nan, np.nan]
    ], dtype=np.float32)
    return max_distance, expected_result

@pytest.fixture
def result_default_allocation():
    expected_result = np.array([
        [1., 1., 1., 1., 2., 2.],
        [1., 1., 1., 1., 2., 2.],
        [4., 3., 3., 3., 3., 3.],
        [4., 4., 3., 3., 3., 3.]
    ], dtype=np.float32)
    return expected_result

@pytest.fixture
def result_max_distance_allocation():
    # max_distance setting
    max_distance = 10
    expected_result = np.array([
        [np.nan, np.nan, np.nan, np.nan, 2., 2.],
        [np.nan, 1., 1., 1., np.nan, np.nan],
        [np.nan, 3., 3., 3., np.nan, np.nan],
        [4., 4., np.nan, np.nan, np.nan, np.nan]
    ], dtype=np.float32)
    return max_distance, expected_result

@pytest.fixture
def result_default_direction():
    expected_result = np.array([
        [50.194427, 30.963757, 360., 329.03625, 90., 0.],
        [90., 90., 0., 270., 149.03624, 180.],
        [360., 90., 0., 270., 270., 270.],
        [0., 270., 180., 210.96376, 230.19443, 240.9454]
    ], dtype=np.float32)
    return expected_result

@pytest.fixture
def result_max_distance_direction():
    # max_distance setting
    max_distance = 10
    expected_result = np.array([
        [np.nan, np.nan, np.nan, np.nan, 90., 0.],
        [np.nan, 90., 0., 270., np.nan, np.nan],
        [np.nan, 90., 0., 270., np.nan, np.nan],
        [0., 270., np.nan, np.nan, np.nan, np.nan]
    ], dtype=np.float32)
    return max_distance, expected_result

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_default_proximity(test_raster, result_default_proximity):
    default_prox = proximity(test_raster, x='lon', y='lat')
    general_output_checks(test_raster, default_prox, result_default_proximity, verify_dtype=True)

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_target_proximity(test_raster, result_target_proximity):
    target_values, expected_result = result_target_proximity
    target_prox = proximity(test_raster, x='lon', y='lat', target_values=target_values)
    general_output_checks(test_raster, target_prox, expected_result, verify_dtype=True)

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_manhattan_proximity(test_raster, result_manhattan_proximity):
    manhattan_prox = proximity(test_raster, x='lon', y='lat', distance_metric='MANHATTAN')
    general_output_checks(
        test_raster, manhattan_prox, result_manhattan_proximity, verify_dtype=True
    )

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_great_circle_proximity(test_raster, result_great_circle_proximity):
    great_circle_prox = proximity(test_raster, x='lon', y='lat', distance_metric='GREAT_CIRCLE')
    general_output_checks(
        test_raster, great_circle_prox, result_great_circle_proximity, verify_dtype=True
    )

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_max_distance_proximity(test_raster, result_max_distance_proximity):
    max_distance, expected_result = result_max_distance_proximity
    max_distance_prox = proximity(test_raster, x='lon', y='lat', max_distance=max_distance)
    general_output_checks(test_raster, max_distance_prox, expected_result, verify_dtype=True)

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_default_allocation(test_raster, result_default_allocation):
    allocation_agg = allocation(test_raster, x='lon', y='lat')
    general_output_checks(test_raster, allocation_agg, result_default_allocation, verify_dtype=True)

@pytest.mark.parametrize("backend", ['numpy'])
def test_default_allocation_against_proximity(test_raster, result_default_proximity):
    allocation_agg = allocation(test_raster, x='lon', y='lat')
    # check against corresponding proximity
    xcoords = allocation_agg['lon'].data
    ycoords = allocation_agg['lat'].data
    for y in range(test_raster.shape[0]):
        for x in range(test_raster.shape[1]):
            a = allocation_agg.data[y, x]
            py, px = np.where(test_raster.data == a)
            # non-zero cells in raster are unique, thus len(px)=len(py)=1
            d = euclidean_distance(xcoords[x], xcoords[px[0]], ycoords[y], ycoords[py[0]])
            np.testing.assert_allclose(result_default_proximity[y, x], d)

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_max_distance_allocation(test_raster, result_max_distance_allocation):
    max_distance, expected_result = result_max_distance_allocation
    max_distance_alloc = allocation(test_raster, x='lon', y='lat', max_distance=max_distance)
    general_output_checks(test_raster, max_distance_alloc, expected_result, verify_dtype=True)

def test_calc_direction():
    n = 3
    x1, y1 = 1, 1
    output = np.zeros(shape=(n, n))
    for y2 in range(n):
        for x2 in range(n):
            output[y2, x2] = _calc_direction(x2, x1, y2, y1)
    expected_output = np.asarray([[135, 180, 225],
                                  [90,  0,   270],
                                  [45,  360, 315]])
    # set a tolerance of 1e-5
    tolerance = 1e-5
    assert (abs(output-expected_output) <= tolerance).all()

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_default_direction(test_raster, result_default_direction):
    direction_agg = direction(test_raster, x='lon', y='lat')
    general_output_checks(test_raster, direction_agg, result_default_direction)

@pytest.mark.parametrize("backend", ['numpy'])
def test_default_direction_against_allocation(test_raster, result_default_allocation):
    direction_agg = direction(test_raster, x='lon', y='lat')
    xcoords = direction_agg['lon'].data
    ycoords = direction_agg['lat'].data
    for y in range(test_raster.shape[0]):
        for x in range(test_raster.shape[1]):
            a = result_default_allocation.data[y, x]
            py, px = np.where(test_raster.data == a)
            # non-zero cells in raster are unique, thus len(px)=len(py)=1
            d = _calc_direction(xcoords[x], xcoords[px[0]], ycoords[y], ycoords[py[0]])
            np.testing.assert_allclose(direction_agg.data[y, x], d)

@pytest.mark.parametrize("backend", ['numpy', 'dask+numpy'])
def test_max_distance_direction(test_raster, result_max_distance_direction):
    max_distance, expected_result = result_max_distance_direction
    max_distance_direction = direction(test_raster, x='lon', y='lat', max_distance=max_distance)
...
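For orientation, here is a minimal standalone sketch (not part of test_proximity.py) of the proximity tools exercised above. It builds the same raster as the test_raster fixture and uses only the calls and keyword arguments that appear in the tests (x, y, target_values, distance_metric, max_distance); the variable names are illustrative.

# Minimal sketch built from the test fixture above.
import numpy as np
import xarray as xr
from xrspatial import allocation, direction, proximity

data = np.asarray([[0., 0., 0., 0., 0., 2.],
                   [0., 0., 1., 0., 0., 0.],
                   [0., np.inf, 3., 0., 0., 0.],
                   [4., 0., 0., 0., np.nan, 0.]])
raster = xr.DataArray(data, dims=['lat', 'lon'])
raster['lon'] = np.linspace(-20, 20, 6)
raster['lat'] = np.linspace(20, -20, 4)

# Distance from each cell to the nearest non-zero "target" cell
# (Euclidean by default, which the tests check against euclidean_distance).
prox = proximity(raster, x='lon', y='lat')
# Only treat cells valued 2 or 3 as targets, or switch the distance metric.
prox_targets = proximity(raster, x='lon', y='lat', target_values=[2, 3])
prox_gc = proximity(raster, x='lon', y='lat', distance_metric='GREAT_CIRCLE')
# Value of the nearest target and the direction towards it; cells farther
# than max_distance come back as NaN in the expected results above.
alloc = allocation(raster, x='lon', y='lat', max_distance=10)
direc = direction(raster, x='lon', y='lat')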
test_classify.py
Source:test_classify.py
import numpy as np
import pytest
import xarray as xr
from xrspatial import binary, equal_interval, natural_breaks, quantile, reclassify
from xrspatial.tests.general_checks import (create_test_raster, cuda_and_cupy_available,
                                            general_output_checks)

def input_data(backend='numpy'):
    elevation = np.array([
        [-np.inf,  2.,  3.,  4., np.nan],
        [5.,  6.,  7.,  8.,  9.],
        [10., 11., 12., 13., 14.],
        [15., 16., 17., 18., np.inf],
    ])
    raster = create_test_raster(elevation, backend, attrs={'res': (10.0, 10.0)})
    return raster

@pytest.fixture
def result_binary():
    values = [1, 2, 3]
    expected_result = np.asarray([
        [0, 1, 1, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0]
    ], dtype=np.float32)
    return values, expected_result

def test_binary_numpy(result_binary):
    values, expected_result = result_binary
    numpy_agg = input_data()
    numpy_result = binary(numpy_agg, values)
    general_output_checks(numpy_agg, numpy_result, expected_result)

def test_binary_dask_numpy(result_binary):
    values, expected_result = result_binary
    dask_agg = input_data(backend='dask')
    dask_result = binary(dask_agg, values)
    general_output_checks(dask_agg, dask_result, expected_result)

@cuda_and_cupy_available
def test_binary_cupy(result_binary):
    values, expected_result = result_binary
    cupy_agg = input_data(backend='cupy')
    cupy_result = binary(cupy_agg, values)
    general_output_checks(cupy_agg, cupy_result, expected_result)

@cuda_and_cupy_available
def test_binary_dask_cupy(result_binary):
    values, expected_result = result_binary
    dask_cupy_agg = input_data(backend='dask+cupy')
    dask_cupy_result = binary(dask_cupy_agg, values)
    general_output_checks(dask_cupy_agg, dask_cupy_result, expected_result)

@pytest.fixture
def result_reclassify():
    bins = [10, 15, np.inf]
    new_values = [1, 2, 3]
    expected_result = np.asarray([
        [np.nan, 1., 1., 1., np.nan],
        [1., 1., 1., 1., 1.],
        [1., 2., 2., 2., 2.],
        [2., 3., 3., 3., np.nan]
    ], dtype=np.float32)
    return bins, new_values, expected_result

def test_reclassify_numpy_mismatch_length():
    bins = [10]
    new_values = [1, 2, 3]
    numpy_agg = input_data()
    msg = 'bins and new_values mismatch. Should have same length.'
    with pytest.raises(ValueError, match=msg):
        reclassify(numpy_agg, bins, new_values)

def test_reclassify_numpy(result_reclassify):
    bins, new_values, expected_result = result_reclassify
    numpy_agg = input_data()
    numpy_result = reclassify(numpy_agg, bins=bins, new_values=new_values)
    general_output_checks(numpy_agg, numpy_result, expected_result, verify_dtype=True)

def test_reclassify_dask_numpy(result_reclassify):
    bins, new_values, expected_result = result_reclassify
    dask_agg = input_data(backend='dask')
    dask_result = reclassify(dask_agg, bins=bins, new_values=new_values)
    general_output_checks(dask_agg, dask_result, expected_result, verify_dtype=True)

@cuda_and_cupy_available
def test_reclassify_cupy(result_reclassify):
    bins, new_values, expected_result = result_reclassify
    cupy_agg = input_data(backend='cupy')
    cupy_result = reclassify(cupy_agg, bins=bins, new_values=new_values)
    general_output_checks(cupy_agg, cupy_result, expected_result, verify_dtype=True)

@cuda_and_cupy_available
def test_reclassify_dask_cupy(result_reclassify):
    bins, new_values, expected_result = result_reclassify
    dask_cupy_agg = input_data(backend='dask+cupy')
    dask_cupy_result = reclassify(dask_cupy_agg, bins=bins, new_values=new_values)
    general_output_checks(dask_cupy_agg, dask_cupy_result, expected_result, verify_dtype=True)

@pytest.fixture
def result_quantile():
    k = 5
    expected_result = np.asarray([
        [np.nan, 0., 0., 0., np.nan],
        [0., 1., 1., 1., 2.],
        [2., 2., 3., 3., 3.],
        [4., 4., 4., 4., np.nan]
    ], dtype=np.float32)
    return k, expected_result

def test_quantile_not_enough_unique_values():
    agg = input_data()
    n_uniques = np.isfinite(agg.data).sum()
    k = n_uniques + 1
    result_quantile = quantile(agg, k=k)
    n_uniques_result = np.isfinite(result_quantile.data).sum()
    np.testing.assert_allclose(n_uniques_result, n_uniques)

def test_quantile_numpy(result_quantile):
    k, expected_result = result_quantile
    numpy_agg = input_data()
    numpy_quantile = quantile(numpy_agg, k=k)
    general_output_checks(numpy_agg, numpy_quantile, expected_result, verify_dtype=True)

def test_quantile_dask_numpy(result_quantile):
    # Note that dask's percentile algorithm is approximate, while numpy's is exact.
    # This may cause some differences between results of vanilla numpy and
    # dask version of the input agg.
    # https://github.com/dask/dask/issues/3099
    dask_numpy_agg = input_data('dask+numpy')
    k, expected_result = result_quantile
    dask_quantile = quantile(dask_numpy_agg, k=k)
    general_output_checks(dask_numpy_agg, dask_quantile)
    dask_quantile = dask_quantile.compute()
    unique_elements = np.unique(
        dask_quantile.data[np.isfinite(dask_quantile.data)]
    )
    assert len(unique_elements) == k

@cuda_and_cupy_available
def test_quantile_cupy(result_quantile):
    k, expected_result = result_quantile
    cupy_agg = input_data('cupy')
    cupy_result = quantile(cupy_agg, k=k)
    general_output_checks(cupy_agg, cupy_result, expected_result, verify_dtype=True)

@pytest.fixture
def result_natural_breaks():
    k = 5
    expected_result = np.asarray([
        [np.nan, 0., 0., 0., np.nan],
        [1., 1., 1., 2., 2.],
        [2., 3., 3., 3., 3.],
        [4., 4., 4., 4., np.nan]
    ], dtype=np.float32)
    return k, expected_result

@pytest.fixture
def result_natural_breaks_num_sample():
    k = 5
    num_sample = 8
    expected_result = np.asarray([
        [np.nan, 0., 0., 0., np.nan],
        [0., 1., 1., 1., 2.],
        [2., 3., 3., 3., 3.],
        [4., 4., 4., 4., np.nan]
    ], dtype=np.float32)
    return k, num_sample, expected_result

def test_natural_breaks_not_enough_unique_values():
    agg = input_data()
    n_uniques = np.isfinite(agg.data).sum()
    k = n_uniques + 1
    result_natural_breaks = natural_breaks(agg, k=k)
    n_uniques_result = np.isfinite(result_natural_breaks.data).sum()
    np.testing.assert_allclose(n_uniques_result, n_uniques)

def test_natural_breaks_numpy(result_natural_breaks):
    numpy_agg = input_data()
    k, expected_result = result_natural_breaks
    numpy_natural_breaks = natural_breaks(numpy_agg, k=k)
    general_output_checks(numpy_agg, numpy_natural_breaks, expected_result, verify_dtype=True)

def test_natural_breaks_numpy_num_sample(result_natural_breaks_num_sample):
    numpy_agg = input_data()
    k, num_sample, expected_result = result_natural_breaks_num_sample
    numpy_natural_breaks = natural_breaks(numpy_agg, k=k, num_sample=num_sample)
    general_output_checks(numpy_agg, numpy_natural_breaks, expected_result, verify_dtype=True)

def test_natural_breaks_cpu_deterministic():
    results = []
    elevation = np.arange(100).reshape(10, 10)
    agg = xr.DataArray(elevation, attrs={'res': (10.0, 10.0)})
    k = 5
    numIters = 3
    for i in range(numIters):
        # vanilla numpy
        numpy_natural_breaks = natural_breaks(agg, k=k)
        general_output_checks(agg, numpy_natural_breaks)
        unique_elements = np.unique(
            numpy_natural_breaks.data[np.isfinite(numpy_natural_breaks.data)]
        )
        assert len(unique_elements) == k
        results.append(numpy_natural_breaks)
    # Check that the code is deterministic.
    # Multiple runs on same data should produce same results
    for i in range(numIters-1):
        np.testing.assert_allclose(
            results[i].data, results[i+1].data, equal_nan=True
        )

@pytest.fixture
def result_equal_interval():
    k = 3
    expected_result = np.asarray([
        [np.nan, 0., 0., 0., np.nan],
        [0., 0., 0., 1., 1.],
        [1., 1., 1., 2., 2.],
        [2., 2., 2., 2., np.nan]
    ], dtype=np.float32)
    return k, expected_result

def test_equal_interval_numpy(result_equal_interval):
    k, expected_result = result_equal_interval
    numpy_agg = input_data('numpy')
    numpy_result = equal_interval(numpy_agg, k=k)
    general_output_checks(numpy_agg, numpy_result, expected_result, verify_dtype=True)

def test_equal_interval_dask_numpy(result_equal_interval):
    k, expected_result = result_equal_interval
    dask_agg = input_data('dask+numpy')
    dask_numpy_result = equal_interval(dask_agg, k=k)
    general_output_checks(dask_agg, dask_numpy_result, expected_result, verify_dtype=True)

@cuda_and_cupy_available
def test_equal_interval_cupy(result_equal_interval):
    k, expected_result = result_equal_interval
    cupy_agg = input_data(backend='cupy')
    cupy_result = equal_interval(cupy_agg, k=k)
...
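Likewise, a minimal standalone sketch (not part of test_classify.py) of the classification functions covered above; it builds a plain NumPy-backed raster the way test_natural_breaks_cpu_deterministic does and reuses the parameters from the fixtures (values, bins, new_values, k, num_sample).

# Minimal sketch, assuming only the calls exercised in the tests above.
import numpy as np
import xarray as xr
from xrspatial import binary, equal_interval, natural_breaks, quantile, reclassify

elevation = np.arange(100).reshape(10, 10)
agg = xr.DataArray(elevation, attrs={'res': (10.0, 10.0)})

# 1 where a cell's value is in the given list, 0 elsewhere.
mask = binary(agg, [1, 2, 3])
# Map value ranges to new class values; bins and new_values must have the same length.
classes = reclassify(agg, bins=[10, 15, np.inf], new_values=[1, 2, 3])
# Data-driven classifications into k classes.
q = quantile(agg, k=5)
nb = natural_breaks(agg, k=5, num_sample=8)
ei = equal_interval(agg, k=3)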
features.py
Source:features.py
...
        return cls(**f)

    def validate(self, example: "Features"):
        assert example.keys() == self.keys()
        def verify_shape(key): return self[key].shape == example[key].shape
        def verify_dtype(key): return self[key].dtype == example[key].dtype
        return all(filter(verify_shape, filter(verify_dtype, self.keys())))

    def reshape(self, example: dict):
        example = OrderedDict(example)
        for key, value in self.items():
            example[key] = example[key] \
                .reshape(value.shape)   \
                .astype(value.dtype)
        return example

    def __hash__(self):
        return int(self.md5, 16)

    def __repr__(self):
        __features_repr__ = ", ".join(f'{key}={value}'
                                      for key, value in self.items())
        return f'{self.__class__.__name__}({__features_repr__})'
...
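One note on the validate method in this snippet: all(filter(verify_shape, filter(verify_dtype, self.keys()))) filters the keys by the two predicates and then checks the truthiness of the surviving key strings, so a mismatching field is silently dropped from the iterable rather than making the result False. A hedged sketch of what the method appears to intend, applying both checks to every key, could look like this (an illustration, not the snippet author's code):

def validate(self, example: "Features") -> bool:
    # Sketch only: require every field to match in both shape and dtype.
    assert example.keys() == self.keys()
    return all(
        self[key].shape == example[key].shape
        and self[key].dtype == example[key].dtype
        for key in self.keys()
    )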
