How to use the arrays method in hypothesis

Best Python code snippets using hypothesis
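
In Hypothesis itself, test arrays come from the arrays() strategy in hypothesis.extra.numpy. Before the scraped snippets below, here is a minimal sketch of typical usage; the dtype, the shape bounds, and the stacking property being checked are illustrative assumptions rather than anything taken from the snippets on this page.

import numpy as np
from hypothesis import given, strategies as st
from hypothesis.extra.numpy import arrays, array_shapes

# arrays(dtype, shape, elements=...) returns a strategy that draws ndarrays;
# the shape argument may itself be a strategy such as array_shapes().
@given(arrays(dtype=np.int64,
              shape=array_shapes(min_dims=1, max_dims=3),
              elements=st.integers(-1000, 1000)))
def test_stacking_adds_a_leading_axis(arr):
    # Illustrative property: stacking an array with itself prepends one axis
    # of length 2 and preserves the dtype.
    stacked = np.stack((arr, arr))
    assert stacked.shape == (2,) + arr.shape
    assert stacked.dtype == arr.dtype

Run under pytest, Hypothesis picks the shapes and element values automatically on each example, instead of the hand-written random inputs used in the snippets below.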

shape_base.py

Source: shape_base.py (GitHub)


from __future__ import division, absolute_import, print_function

__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',
           'stack', 'vstack']

from . import numeric as _nx
from .numeric import array, asanyarray, newaxis
from .multiarray import normalize_axis_index

def atleast_1d(*arys):
    """
    Convert inputs to arrays with at least one dimension.
    Scalar inputs are converted to 1-dimensional arrays, whilst
    higher-dimensional inputs are preserved.
    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more input arrays.
    Returns
    -------
    ret : ndarray
        An array, or list of arrays, each with ``a.ndim >= 1``.
        Copies are made only if necessary.
    See Also
    --------
    atleast_2d, atleast_3d
    Examples
    --------
    >>> np.atleast_1d(1.0)
    array([ 1.])
    >>> x = np.arange(9.0).reshape(3,3)
    >>> np.atleast_1d(x)
    array([[ 0., 1., 2.],
           [ 3., 4., 5.],
           [ 6., 7., 8.]])
    >>> np.atleast_1d(x) is x
    True
    >>> np.atleast_1d(1, [3, 4])
    [array([1]), array([3, 4])]
    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1)
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def atleast_2d(*arys):
    """
    View inputs as arrays with at least two dimensions.
    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted
        to arrays. Arrays that already have two or more dimensions are
        preserved.
    Returns
    -------
    res, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 2``.
        Copies are avoided where possible, and views with two or more
        dimensions are returned.
    See Also
    --------
    atleast_1d, atleast_3d
    Examples
    --------
    >>> np.atleast_2d(3.0)
    array([[ 3.]])
    >>> x = np.arange(3.0)
    >>> np.atleast_2d(x)
    array([[ 0., 1., 2.]])
    >>> np.atleast_2d(x).base is x
    True
    >>> np.atleast_2d(1, [1, 2], [[1, 2]])
    [array([[1]]), array([[1, 2]]), array([[1, 2]])]
    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1)
        elif ary.ndim == 1:
            result = ary[newaxis,:]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def atleast_3d(*arys):
    """
    View inputs as arrays with at least three dimensions.
    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted to
        arrays. Arrays that already have three or more dimensions are
        preserved.
    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 3``. Copies are
        avoided where possible, and views with three or more dimensions are
        returned. For example, a 1-D array of shape ``(N,)`` becomes a view
        of shape ``(1, N, 1)``, and a 2-D array of shape ``(M, N)`` becomes a
        view of shape ``(M, N, 1)``.
    See Also
    --------
    atleast_1d, atleast_2d
    Examples
    --------
    >>> np.atleast_3d(3.0)
    array([[[ 3.]]])
    >>> x = np.arange(3.0)
    >>> np.atleast_3d(x).shape
    (1, 3, 1)
    >>> x = np.arange(12.0).reshape(4,3)
    >>> np.atleast_3d(x).shape
    (4, 3, 1)
    >>> np.atleast_3d(x).base is x.base  # x is a reshape, so not base itself
    True
    >>> for arr in np.atleast_3d([1, 2], [[1, 2]], [[[1, 2]]]):
    ...     print(arr, arr.shape)
    ...
    [[[1]
      [2]]] (1, 2, 1)
    [[[1]
      [2]]] (1, 2, 1)
    [[[1 2]]] (1, 1, 2)
    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1, 1)
        elif ary.ndim == 1:
            result = ary[newaxis,:, newaxis]
        elif ary.ndim == 2:
            result = ary[:,:, newaxis]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def vstack(tup):
    """
    Stack arrays in sequence vertically (row wise).
    This is equivalent to concatenation along the first axis after 1-D arrays
    of shape `(N,)` have been reshaped to `(1,N)`. Rebuilds arrays divided by
    `vsplit`.
    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.
    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the first axis.
        1-D arrays must have the same length.
    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays, will be at least 2-D.
    See Also
    --------
    stack : Join a sequence of arrays along a new axis.
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third dimension).
    concatenate : Join a sequence of arrays along an existing axis.
    vsplit : Split array into a list of multiple sub-arrays vertically.
    block : Assemble arrays from blocks.
    Examples
    --------
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.vstack((a,b))
    array([[1, 2, 3],
           [2, 3, 4]])
    >>> a = np.array([[1], [2], [3]])
    >>> b = np.array([[2], [3], [4]])
    >>> np.vstack((a,b))
    array([[1],
           [2],
           [3],
           [2],
           [3],
           [4]])
    """
    return _nx.concatenate([atleast_2d(_m) for _m in tup], 0)

def hstack(tup):
    """
    Stack arrays in sequence horizontally (column wise).
    This is equivalent to concatenation along the second axis, except for 1-D
    arrays where it concatenates along the first axis. Rebuilds arrays divided
    by `hsplit`.
    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.
    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the second axis,
        except 1-D arrays which can be any length.
    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.
    See Also
    --------
    stack : Join a sequence of arrays along a new axis.
    vstack : Stack arrays in sequence vertically (row wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    concatenate : Join a sequence of arrays along an existing axis.
    hsplit : Split array along second axis.
    block : Assemble arrays from blocks.
    Examples
    --------
    >>> a = np.array((1,2,3))
    >>> b = np.array((2,3,4))
    >>> np.hstack((a,b))
    array([1, 2, 3, 2, 3, 4])
    >>> a = np.array([[1],[2],[3]])
    >>> b = np.array([[2],[3],[4]])
    >>> np.hstack((a,b))
    array([[1, 2],
           [2, 3],
           [3, 4]])
    """
    arrs = [atleast_1d(_m) for _m in tup]
    # As a special case, dimension 0 of 1-dimensional arrays is "horizontal"
    if arrs and arrs[0].ndim == 1:
        return _nx.concatenate(arrs, 0)
    else:
        return _nx.concatenate(arrs, 1)

def stack(arrays, axis=0, out=None):
    """
    Join a sequence of arrays along a new axis.
    The `axis` parameter specifies the index of the new axis in the dimensions
    of the result. For example, if ``axis=0`` it will be the first dimension
    and if ``axis=-1`` it will be the last dimension.
    .. versionadded:: 1.10.0
    Parameters
    ----------
    arrays : sequence of array_like
        Each array must have the same shape.
    axis : int, optional
        The axis in the result array along which the input arrays are stacked.
    out : ndarray, optional
        If provided, the destination to place the result. The shape must be
        correct, matching that of what stack would have returned if no
        out argument were specified.
    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.
    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    split : Split array into a list of multiple sub-arrays of equal size.
    block : Assemble arrays from blocks.
    Examples
    --------
    >>> arrays = [np.random.randn(3, 4) for _ in range(10)]
    >>> np.stack(arrays, axis=0).shape
    (10, 3, 4)
    >>> np.stack(arrays, axis=1).shape
    (3, 10, 4)
    >>> np.stack(arrays, axis=2).shape
    (3, 4, 10)
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.stack((a, b))
    array([[1, 2, 3],
           [2, 3, 4]])
    >>> np.stack((a, b), axis=-1)
    array([[1, 2],
           [2, 3],
           [3, 4]])
    """
    arrays = [asanyarray(arr) for arr in arrays]
    if not arrays:
        raise ValueError('need at least one array to stack')
    shapes = set(arr.shape for arr in arrays)
    if len(shapes) != 1:
        raise ValueError('all input arrays must have the same shape')
    result_ndim = arrays[0].ndim + 1
    axis = normalize_axis_index(axis, result_ndim)
    sl = (slice(None),) * axis + (_nx.newaxis,)
    expanded_arrays = [arr[sl] for arr in arrays]
    return _nx.concatenate(expanded_arrays, axis=axis, out=out)

def _block_check_depths_match(arrays, parent_index=[]):
    """
    Recursive function checking that the depths of nested lists in `arrays`
    all match. Mismatch raises a ValueError as described in the block
    docstring below.
    The entire index (rather than just the depth) needs to be calculated
    for each innermost list, in case an error needs to be raised, so that
    the index of the offending list can be printed as part of the error.
    The parameter `parent_index` is the full index of `arrays` within the
    nested lists passed to _block_check_depths_match at the top of the
    recursion.
    The return value is a pair. The first item returned is the full index
    of an element (specifically the first element) from the bottom of the
    nesting in `arrays`. An empty list at the bottom of the nesting is
    represented by a `None` index.
    The second item is the maximum of the ndims of the arrays nested in
    `arrays`.
    """
    def format_index(index):
        idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
        return 'arrays' + idx_str
    if type(arrays) is tuple:
        # not strictly necessary, but saves us from:
        #  - more than one way to do things - no point treating tuples like
        #    lists
        #  - horribly confusing behaviour that results when tuples are
        #    treated like ndarray
        raise TypeError(
            '{} is a tuple. '
            'Only lists can be used to arrange blocks, and np.block does '
            'not allow implicit conversion from tuple to ndarray.'.format(
                format_index(parent_index)
            )
        )
    elif type(arrays) is list and len(arrays) > 0:
        idxs_ndims = (_block_check_depths_match(arr, parent_index + [i])
                      for i, arr in enumerate(arrays))
        first_index, max_arr_ndim = next(idxs_ndims)
        for index, ndim in idxs_ndims:
            if ndim > max_arr_ndim:
                max_arr_ndim = ndim
            if len(index) != len(first_index):
                raise ValueError(
                    "List depths are mismatched. First element was at depth "
                    "{}, but there is an element at depth {} ({})".format(
                        len(first_index),
                        len(index),
                        format_index(index)
                    )
                )
        return first_index, max_arr_ndim
    elif type(arrays) is list and len(arrays) == 0:
        # We've 'bottomed out' on an empty list
        return parent_index + [None], 0
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        return parent_index, _nx.ndim(arrays)

def _block(arrays, max_depth, result_ndim):
    """
    Internal implementation of block. `arrays` is the argument passed to
    block. `max_depth` is the depth of nested lists within `arrays` and
    `result_ndim` is the greatest of the dimensions of the arrays in
    `arrays` and the depth of the lists in `arrays` (see block docstring
    for details).
    """
    def atleast_nd(a, ndim):
        # Ensures `a` has at least `ndim` dimensions by prepending
        # ones to `a.shape` as necessary
        return array(a, ndmin=ndim, copy=False, subok=True)
    def block_recursion(arrays, depth=0):
        if depth < max_depth:
            if len(arrays) == 0:
                raise ValueError('Lists cannot be empty')
            arrs = [block_recursion(arr, depth+1) for arr in arrays]
            return _nx.concatenate(arrs, axis=-(max_depth-depth))
        else:
            # We've 'bottomed out' - arrays is either a scalar or an array
            # type(arrays) is not list
            return atleast_nd(arrays, result_ndim)
    try:
        return block_recursion(arrays)
    finally:
        # recursive closures have a cyclic reference to themselves, which
        # requires gc to collect (gh-10620). To avoid this problem, for
        # performance and PyPy friendliness, we break the cycle:
        block_recursion = None

def block(arrays):
    """
    Assemble an nd-array from nested lists of blocks.
    Blocks in the innermost lists are concatenated (see `concatenate`) along
    the last dimension (-1), then these are concatenated along the
    second-last dimension (-2), and so on until the outermost list is reached.
    Blocks can be of any dimension, but will not be broadcasted using the normal
    rules. Instead, leading axes of size 1 are inserted, to make ``block.ndim``
    the same for all blocks. This is primarily useful for working with scalars,
    and means that code like ``np.block([v, 1])`` is valid, where
    ``v.ndim == 1``.
    When the nested list is two levels deep, this allows block matrices to be
    constructed from their components.
    .. versionadded:: 1.13.0
    Parameters
    ----------
    arrays : nested list of array_like or scalars (but not tuples)
        If passed a single ndarray or scalar (a nested list of depth 0), this
        is returned unmodified (and not copied).
        Elements shapes must match along the appropriate axes (without
        broadcasting), but leading 1s will be prepended to the shape as
        necessary to make the dimensions match.
    Returns
    -------
    block_array : ndarray
        The array assembled from the given blocks.
        The dimensionality of the output is equal to the greatest of:
        * the dimensionality of all the inputs
        * the depth to which the input list is nested
    Raises
    ------
    ValueError
        * If list depths are mismatched - for instance, ``[[a, b], c]`` is
          illegal, and should be spelt ``[[a, b], [c]]``
        * If lists are empty - for instance, ``[[a, b], []]``
    See Also
    --------
    concatenate : Join a sequence of arrays together.
    stack : Stack arrays in sequence along a new dimension.
    hstack : Stack arrays in sequence horizontally (column wise).
    vstack : Stack arrays in sequence vertically (row wise).
    dstack : Stack arrays in sequence depth wise (along third dimension).
    vsplit : Split array into a list of multiple sub-arrays vertically.
    Notes
    -----
    When called with only scalars, ``np.block`` is equivalent to an ndarray
    call. So ``np.block([[1, 2], [3, 4]])`` is equivalent to
    ``np.array([[1, 2], [3, 4]])``.
    This function does not enforce that the blocks lie on a fixed grid.
    ``np.block([[a, b], [c, d]])`` is not restricted to arrays of the form::
        AAAbb
        AAAbb
        cccDD
    But is also allowed to produce, for some ``a, b, c, d``::
        AAAbb
        AAAbb
        cDDDD
    Since concatenation happens along the last axis first, `block` is _not_
    capable of producing the following directly::
        AAAbb
        cccbb
        cccDD
    Matlab's "square bracket stacking", ``[A, B, ...; p, q, ...]``, is
    equivalent to ``np.block([[A, B, ...], [p, q, ...]])``.
    Examples
    --------
    The most common use of this function is to build a block matrix
    >>> A = np.eye(2) * 2
    >>> B = np.eye(3) * 3
    >>> np.block([
    ...     [A,               np.zeros((2, 3))],
    ...     [np.ones((3, 2)), B               ]
    ... ])
    array([[ 2., 0., 0., 0., 0.],
           [ 0., 2., 0., 0., 0.],
           [ 1., 1., 3., 0., 0.],
           [ 1., 1., 0., 3., 0.],
           [ 1., 1., 0., 0., 3.]])
    With a list of depth 1, `block` can be used as `hstack`
    >>> np.block([1, 2, 3])    # hstack([1, 2, 3])
    array([1, 2, 3])
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([a, b, 10])   # hstack([a, b, 10])
    array([1, 2, 3, 2, 3, 4, 10])
    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([A, B])       # hstack([A, B])
    array([[1, 1, 2, 2],
           [1, 1, 2, 2]])
    With a list of depth 2, `block` can be used in place of `vstack`:
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([2, 3, 4])
    >>> np.block([[a], [b]])   # vstack([a, b])
    array([[1, 2, 3],
           [2, 3, 4]])
    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([[A], [B]])   # vstack([A, B])
    array([[1, 1],
           [1, 1],
           [2, 2],
           [2, 2]])
    It can also be used in places of `atleast_1d` and `atleast_2d`
    >>> a = np.array(0)
    >>> b = np.array([1])
    >>> np.block([a])          # atleast_1d(a)
    array([0])
    >>> np.block([b])          # atleast_1d(b)
    array([1])
    >>> np.block([[a]])        # atleast_2d(a)
    array([[0]])
    >>> np.block([[b]])        # atleast_2d(b)
    array([[1]])
    """
    bottom_index, arr_ndim = _block_check_depths_match(arrays)
    list_ndim = len(bottom_index)
...
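
The snippet above is NumPy's own implementation of `atleast_1d`, `atleast_2d`, `atleast_3d`, `stack`, `vstack`, `hstack` and `block`; it does not call Hypothesis. As a hedged sketch, the `arrays()` strategy could drive property tests against those functions along the following lines; the specific properties asserted are assumptions chosen for illustration, based on the docstring examples above.

import numpy as np
from hypothesis import given, strategies as st
from hypothesis.extra.numpy import arrays, array_shapes

# 1-D float vectors of length 1..10 with finite elements.
vectors = arrays(dtype=np.float64,
                 shape=array_shapes(min_dims=1, max_dims=1, max_side=10),
                 elements=st.floats(allow_nan=False, allow_infinity=False))

@given(vectors)
def test_atleast_2d_prepends_an_axis(v):
    # Per the docstring above, a shape-(N,) vector becomes a (1, N) view.
    assert np.atleast_2d(v).shape == (1,) + v.shape

@given(vectors, vectors)
def test_block_of_rows_matches_vstack(a, b):
    # The block docstring notes that np.block([[a], [b]]) behaves like vstack;
    # only equal-length vectors are compared here.
    if a.shape == b.shape:
        np.testing.assert_array_equal(np.vstack((a, b)), np.block([[a], [b]]))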


test_convert.py

Source: test_convert.py (GitHub)


...
    def check_device(self, array, device):
        if device is not None:
            self.assertIsInstance(array, cuda.ndarray)
            self.assertEqual(array.device.id, device)

    def check_concat_arrays(self, arrays, device=None):
        array = dataset.concat_examples(arrays, device)
        self.assertEqual(array.shape, (len(arrays),) + arrays[0].shape)
        self.check_device(array, device)
        for x, y in zip(array, arrays):
            numpy.testing.assert_array_equal(
                cuda.to_cpu(x), cuda.to_cpu(y))

    def test_concat_arrays_cpu(self):
        arrays = self.get_arrays_to_concat(numpy)
        self.check_concat_arrays(arrays)

    @attr.gpu
    def test_concat_arrays_gpu(self):
        arrays = self.get_arrays_to_concat(cuda.cupy)
        self.check_concat_arrays(arrays)

    @attr.gpu
    def test_concat_arrays_to_gpu(self):
        arrays = self.get_arrays_to_concat(numpy)
        self.check_concat_arrays(arrays, cuda.Device().id)

    def get_tuple_arrays_to_concat(self, xp):
        return [(xp.random.rand(2, 3), xp.random.rand(3, 4))
                for _ in range(5)]

    def check_concat_tuples(self, tuples, device=None):
        arrays = dataset.concat_examples(tuples, device)
        self.assertEqual(len(arrays), len(tuples[0]))
        for i in range(len(arrays)):
            shape = (len(tuples),) + tuples[0][i].shape
            self.assertEqual(arrays[i].shape, shape)
            self.check_device(arrays[i], device)
            for x, y in zip(arrays[i], tuples):
                numpy.testing.assert_array_equal(
                    cuda.to_cpu(x), cuda.to_cpu(y[i]))

    def test_concat_tuples_cpu(self):
        tuples = self.get_tuple_arrays_to_concat(numpy)
        self.check_concat_tuples(tuples)

    @attr.gpu
    def test_concat_tuples_gpu(self):
        tuples = self.get_tuple_arrays_to_concat(cuda.cupy)
        self.check_concat_tuples(tuples)

    @attr.gpu
    def test_concat_tuples_to_gpu(self):
        tuples = self.get_tuple_arrays_to_concat(numpy)
        self.check_concat_tuples(tuples, cuda.Device().id)

    def get_dict_arrays_to_concat(self, xp):
        return [{'x': xp.random.rand(2, 3), 'y': xp.random.rand(3, 4)}
                for _ in range(5)]

    def check_concat_dicts(self, dicts, device=None):
        arrays = dataset.concat_examples(dicts, device)
        self.assertEqual(frozenset(arrays.keys()), frozenset(dicts[0].keys()))
        for key in arrays:
            shape = (len(dicts),) + dicts[0][key].shape
            self.assertEqual(arrays[key].shape, shape)
            self.check_device(arrays[key], device)
            for x, y in zip(arrays[key], dicts):
                numpy.testing.assert_array_equal(
                    cuda.to_cpu(x), cuda.to_cpu(y[key]))

    def test_concat_dicts_cpu(self):
        dicts = self.get_dict_arrays_to_concat(numpy)
        self.check_concat_dicts(dicts)

    @attr.gpu
    def test_concat_dicts_gpu(self):
        dicts = self.get_dict_arrays_to_concat(cuda.cupy)
        self.check_concat_dicts(dicts)

    @attr.gpu
    def test_concat_dicts_to_gpu(self):
        dicts = self.get_dict_arrays_to_concat(numpy)
        self.check_concat_dicts(dicts, cuda.Device().id)


class TestConcatExamplesWithPadding(unittest.TestCase):

    def check_concat_arrays_padding(self, xp):
        arrays = [xp.random.rand(3, 4),
                  xp.random.rand(2, 5),
                  xp.random.rand(4, 3)]
        array = dataset.concat_examples(arrays, padding=0)
        self.assertEqual(array.shape, (3, 4, 5))
        self.assertEqual(type(array), type(arrays[0]))
        arrays = [cuda.to_cpu(a) for a in arrays]
        array = cuda.to_cpu(array)
        numpy.testing.assert_array_equal(array[0, :3, :4], arrays[0])
        numpy.testing.assert_array_equal(array[0, 3:, :], 0)
        numpy.testing.assert_array_equal(array[0, :, 4:], 0)
        numpy.testing.assert_array_equal(array[1, :2, :5], arrays[1])
        numpy.testing.assert_array_equal(array[1, 2:, :], 0)
        numpy.testing.assert_array_equal(array[2, :4, :3], arrays[2])
        numpy.testing.assert_array_equal(array[2, :, 3:], 0)

    def test_concat_arrays_padding_cpu(self):
        self.check_concat_arrays_padding(numpy)

    @attr.gpu
    def test_concat_arrays_padding_gpu(self):
        self.check_concat_arrays_padding(cuda.cupy)

    def check_concat_tuples_padding(self, xp):
        tuples = [
            (xp.random.rand(3, 4), xp.random.rand(2, 5)),
            (xp.random.rand(4, 4), xp.random.rand(3, 4)),
            (xp.random.rand(2, 5), xp.random.rand(2, 6)),
        ]
        arrays = dataset.concat_examples(tuples, padding=0)
        self.assertEqual(len(arrays), 2)
        self.assertEqual(arrays[0].shape, (3, 4, 5))
        self.assertEqual(arrays[1].shape, (3, 3, 6))
        self.assertEqual(type(arrays[0]), type(tuples[0][0]))
        self.assertEqual(type(arrays[1]), type(tuples[0][1]))
        for i in range(len(tuples)):
            tuples[i] = cuda.to_cpu(tuples[i][0]), cuda.to_cpu(tuples[i][1])
        arrays = tuple(cuda.to_cpu(array) for array in arrays)
        numpy.testing.assert_array_equal(arrays[0][0, :3, :4], tuples[0][0])
        numpy.testing.assert_array_equal(arrays[0][0, 3:, :], 0)
        numpy.testing.assert_array_equal(arrays[0][0, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays[0][1, :4, :4], tuples[1][0])
        numpy.testing.assert_array_equal(arrays[0][1, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays[0][2, :2, :5], tuples[2][0])
        numpy.testing.assert_array_equal(arrays[0][2, 2:, :], 0)
        numpy.testing.assert_array_equal(arrays[1][0, :2, :5], tuples[0][1])
        numpy.testing.assert_array_equal(arrays[1][0, 2:, :], 0)
        numpy.testing.assert_array_equal(arrays[1][0, :, 5:], 0)
        numpy.testing.assert_array_equal(arrays[1][1, :3, :4], tuples[1][1])
        numpy.testing.assert_array_equal(arrays[1][1, 3:, :], 0)
        numpy.testing.assert_array_equal(arrays[1][1, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays[1][2, :2, :6], tuples[2][1])
        numpy.testing.assert_array_equal(arrays[1][2, 2:, :], 0)

    def test_concat_tuples_padding_cpu(self):
        self.check_concat_tuples_padding(numpy)

    @attr.gpu
    def test_concat_tuples_padding_gpu(self):
        self.check_concat_tuples_padding(cuda.cupy)

    def check_concat_dicts_padding(self, xp):
        dicts = [
            {'x': xp.random.rand(3, 4), 'y': xp.random.rand(2, 5)},
            {'x': xp.random.rand(4, 4), 'y': xp.random.rand(3, 4)},
            {'x': xp.random.rand(2, 5), 'y': xp.random.rand(2, 6)},
        ]
        arrays = dataset.concat_examples(dicts, padding=0)
        self.assertIn('x', arrays)
        self.assertIn('y', arrays)
        self.assertEqual(arrays['x'].shape, (3, 4, 5))
        self.assertEqual(arrays['y'].shape, (3, 3, 6))
        self.assertEqual(type(arrays['x']), type(dicts[0]['x']))
        self.assertEqual(type(arrays['y']), type(dicts[0]['y']))
        for d in dicts:
            d['x'] = cuda.to_cpu(d['x'])
            d['y'] = cuda.to_cpu(d['y'])
        arrays = {'x': cuda.to_cpu(arrays['x']), 'y': cuda.to_cpu(arrays['y'])}
        numpy.testing.assert_array_equal(arrays['x'][0, :3, :4], dicts[0]['x'])
        numpy.testing.assert_array_equal(arrays['x'][0, 3:, :], 0)
        numpy.testing.assert_array_equal(arrays['x'][0, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays['x'][1, :4, :4], dicts[1]['x'])
        numpy.testing.assert_array_equal(arrays['x'][1, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays['x'][2, :2, :5], dicts[2]['x'])
        numpy.testing.assert_array_equal(arrays['x'][2, 2:, :], 0)
        numpy.testing.assert_array_equal(arrays['y'][0, :2, :5], dicts[0]['y'])
        numpy.testing.assert_array_equal(arrays['y'][0, 2:, :], 0)
        numpy.testing.assert_array_equal(arrays['y'][0, :, 5:], 0)
        numpy.testing.assert_array_equal(arrays['y'][1, :3, :4], dicts[1]['y'])
        numpy.testing.assert_array_equal(arrays['y'][1, 3:, :], 0)
        numpy.testing.assert_array_equal(arrays['y'][1, :, 4:], 0)
        numpy.testing.assert_array_equal(arrays['y'][2, :2, :6], dicts[2]['y'])
        numpy.testing.assert_array_equal(arrays['y'][2, 2:, :], 0)

    def test_concat_dicts_padding_cpu(self):
        self.check_concat_dicts_padding(numpy)

    @attr.gpu
    def test_concat_dicts_padding_gpu(self):
        self.check_concat_dicts_padding(cuda.cupy)


@testing.parameterize(
    {'padding': None},
    {'padding': 0},
)
class TestConcatExamplesWithBuiltInTypes(unittest.TestCase):

    int_arrays = [1, 2, 3]
    float_arrays = [1.0, 2.0, 3.0]

    def check_device(self, array, device):
        if device is not None and device >= 0:
            self.assertIsInstance(array, cuda.ndarray)
            self.assertEqual(array.device.id, device)
        else:
            self.assertIsInstance(array, numpy.ndarray)

    def check_concat_arrays(self, arrays, device, expected_type):
        array = dataset.concat_examples(arrays, device, self.padding)
        self.assertEqual(array.shape, (len(arrays),))
        self.check_device(array, device)
        for x, y in zip(array, arrays):
            if cuda.get_array_module(x) == numpy:
                numpy.testing.assert_array_equal(
                    numpy.array(x),
                    numpy.array(y, dtype=expected_type))
            else:
                numpy.testing.assert_array_equal(
                    cuda.to_cpu(x),
                    numpy.array(y, dtype=expected_type))

    def test_concat_arrays_cpu(self):
        for device in (-1, None):
            self.check_concat_arrays(self.int_arrays,
                                     device=device,
                                     expected_type=numpy.int64)
            self.check_concat_arrays(self.float_arrays,
                                     device=device,
                                     expected_type=numpy.float64)

    @attr.gpu
    def test_concat_arrays_gpu(self):
        self.check_concat_arrays(self.int_arrays,
                                 device=cuda.Device().id,
                                 expected_type=numpy.int64)
        self.check_concat_arrays(self.float_arrays,
                                 device=cuda.Device().id,
                                 expected_type=numpy.float64)


def get_xp(gpu):
    if gpu:
        return cuda.cupy
    else:
        return numpy


@testing.parameterize(
    {'device': None, 'src_gpu': False, 'dst_gpu': False},
    {'device': -1, 'src_gpu': False, 'dst_gpu': False},
)
class TestToDeviceCPU(unittest.TestCase):

    def test_to_device(self):
        src_xp = get_xp(self.src_gpu)
...
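
The tests above feed Chainer's `dataset.concat_examples` with hand-built batches of `numpy.random` / `cupy.random` arrays. Below is a hedged sketch of the same fixed-shape property driven by Hypothesis `arrays()` instead; the `from chainer import dataset` import mirrors what the snippet appears to use, and the batch-size and shape bounds are assumptions.

import numpy as np
from hypothesis import given, strategies as st
from hypothesis.extra.numpy import arrays, array_shapes
from chainer import dataset  # assumed to be the `dataset` module used above

# Draw one shape, then a small batch of float arrays all sharing that shape,
# mirroring get_arrays_to_concat() but with Hypothesis choosing the data.
batches = array_shapes(min_dims=1, max_dims=3, max_side=4).flatmap(
    lambda shape: st.lists(
        arrays(np.float64, shape, elements=st.floats(0, 1)),
        min_size=1, max_size=5))

@given(batches)
def test_concat_examples_stacks_along_a_new_axis(batch):
    merged = dataset.concat_examples(batch)
    # The same shape property that check_concat_arrays() asserts above.
    assert merged.shape == (len(batch),) + batch[0].shape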

