How to use the transpose method in Hypothesis

Best Python code snippets using Hypothesis

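Hypothesis itself does not ship a transpose method: it is a property-based testing library, and "transpose" usually appears as the behaviour under test or inside the code it exercises, as in the snippets below. As orientation, here is a minimal sketch (not taken from any of the snippets) of how a transpose could be checked with Hypothesis and its numpy extra; the test names and the choice of dtype are illustrative assumptions.

# Minimal sketch, assuming numpy and the hypothesis[numpy] extra are installed.
# The test names and dtype are illustrative, not from the snippets below.
import numpy as np
from hypothesis import given
from hypothesis.extra.numpy import arrays, array_shapes


@given(arrays(dtype=np.int32, shape=array_shapes(min_dims=2, max_dims=2)))
def test_transpose_is_involutive(matrix):
    # Transposing twice returns the original matrix.
    assert np.array_equal(matrix.T.T, matrix)


@given(arrays(dtype=np.int32, shape=array_shapes(min_dims=2, max_dims=2)))
def test_transpose_swaps_indices(matrix):
    # Element (i, j) of the input shows up at (j, i) in the transpose.
    transposed = matrix.T
    for i in range(matrix.shape[0]):
        for j in range(matrix.shape[1]):
            assert transposed[j, i] == matrix[i, j]

Hypothesis generates many random 2-D arrays for each test and shrinks any failure it finds to a minimal counterexample.
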
eliminate_transposes.py

Source: eliminate_transposes.py (GitHub)


...
    find_compatible_transpose)

LOG = logging.getLogger("nntool." + __name__)


def reverse_reshape(trans, from_shape, to_shape):
    """reverses the effect of this reshape on the transpose"""
    return find_compatible_transpose(find_combination(from_shape, to_shape), trans)


def reverses_transpose(trans1, trans2, dim=None):
    """Checks if one transpose reverses another. If a dim is provided then
    look if the transpose sequence produces an equivalent dim to cope with 1s in
    dimensions."""
    if trans1 is None or trans2 is None:
        return False
    if dim and dim.layout_shape == dim.calc_transpose(trans1).calc_transpose(trans2).layout_shape:
        return True
    for idx, val in enumerate(trans1):
        if trans2[val] != idx:
            return False
    return True


def search_up_for_reverse(G, visited_edges, node, out_idx, transpose, edge_list):
    """Search up the graph for transpose sequences"""
    LOG.debug("looking up at %s", node.name)
    if not isinstance(node, Transposable) and (len(G.out_edges(node.name)) > 1 or isinstance(node, SensitiveToOrder)):
        LOG.debug("rejected %s - sensitive to order or multi output", node.name)
        return []
    if isinstance(node, Transposable) and node.transpose_out:
        if reverses_transpose(node.transpose_out[out_idx], transpose, node.out_dims[out_idx]):
            LOG.debug("accepted %s - transpose out", node.name)
            return [(node, edge_list, 'out', out_idx)]
        else:
            LOG.debug("rejected %s - transpose out - does not reverse", node.name)
            return []
    # if the node is a concat/split then we cannot proceed further since the
    # concat/split must happen on axis 0 and the transposes were already set up for
    # this to happen
    if isinstance(node, (ConcatParameters, SplitParameters)):
        LOG.debug("rejected %s - concat/split", node.name)
        return []
    if isinstance(node, ReshapeParameters):
        new_transpose = reverse_reshape(transpose, node.shape, node.old_shape)
        if new_transpose is None:
            LOG.debug("rejected %s - transpose in - does not reverse", node.name)
            return []
        transpose = new_transpose
        if node.transpose_in and reverses_transpose(node.transpose_in[0], transpose):
            LOG.debug("accepted %s - transpose in", node.name)
            return [(node, edge_list, "in", 0)]
    if isinstance(node, Transposable) and node.transpose_in:
        LOG.debug("rejected %s - transposable", node.name)
        return []
    return search_up_edges(G, visited_edges, node, transpose, edge_list)


def search_up_edges(G, visited_edges, node, transpose, edge_list, start_edge=None):
    all_nodes = []
    for edge in ([start_edge] if start_edge else G.variable_in_edges(node.name)):
        if edge in visited_edges:
            return []
        next_res = search_up_for_reverse(
            G, visited_edges | {edge},
            edge.from_node,
            edge.from_idx,
            transpose,
            edge_list + [edge])
        if not next_res:
            return []
        all_nodes += next_res
    return all_nodes


def search_down_for_reverse(G, visited_edges, node, in_idx, transpose, edge_list=None):
    """Search down the graph for transpose sequences"""
    LOG.debug("looking down at %s", node.name)
    if not isinstance(node, (Transposable)):
        if len(G.variable_in_edges(node.name)) > 1 or isinstance(node, SensitiveToOrder):
            LOG.debug("rejected %s - sensitive to order or multi input", node.name)
            return []
    if edge_list is None:
        edge_list = []
    if isinstance(node, Transposable):
        if node.transpose_in:
            if reverses_transpose(transpose, node.transpose_in[in_idx], node.in_dims[in_idx]):
                LOG.debug("accepted %s - transpose in", node.name)
                return [(node, edge_list, "in", in_idx)]
            else:
                LOG.debug("rejected %s - transpose in - does not reverse", node.name)
                return []
        elif len(transpose) == 1:
            LOG.debug("accepted %s transpose length 1 - transpose in", node.name)
            return [(node, edge_list, "in", in_idx)]
    # if the node is a concat/split then we cannot proceed further since the
    # concat/split must happen on axis 0 and the transposes were already set up for
    # this to happen
    if isinstance(node, (ConcatParameters, SplitParameters)):
        LOG.debug("rejected %s - concat/split", node.name)
        return []
    # if there is a reshape then the dimensionality of the transpose
    # that we are examining may change and that may or may not be compatible
    # with reversing the transpose
    if isinstance(node, ReshapeParameters):
        new_transpose = reverse_reshape(transpose, node.old_shape, node.shape)
        if new_transpose is None:
            LOG.debug("rejected %s - transpose out - does not reverse", node.name)
            return []
        transpose = new_transpose
        if node.transpose_out:
            if reverses_transpose(transpose, node.transpose_out[0]):
                LOG.debug("accepted %s - transpose out", node.name)
                return [(node, edge_list, "out", 0)]
        elif len(transpose) == 1:
            LOG.debug("accepted %s transpose length 1 - transpose out", node.name)
            return [(node, edge_list, "out", 0)]
    if isinstance(node, Transposable) and node.transpose_out:
        LOG.debug("rejected %s - transposable", node.name)
        return []
    return search_down_edges(G, visited_edges, node, transpose, edge_list)


def search_down_edges(G, visited_edges, node, transpose, edge_list, start_edge=None):
    all_nodes = []
    for edge in ([start_edge] if start_edge else G.out_edges(node.name)):
        if edge in visited_edges:
            return []
        next_res = search_down_for_reverse(
            G, visited_edges | {edge}, edge.to_node, edge.to_idx, transpose, edge_list + [edge])
        if not next_res:
            return []
        all_nodes += next_res
    return all_nodes


def search_for_reverses(G):
    results = []
    # visited edges contains all edges included in found transpose pairs
    visited_edges = set()
    for transpose_node in [node for node in G.nodes() if isinstance(node, Transposable)]:
        # for each transpose node we look up and down from the transpose in and transpose out
        # respectively to see if another transpose reverses this one with nothing
        # inbetween that is transpose sensitive
        if transpose_node.transpose_in:
            for edge in G.in_edges(transpose_node.name):
                # this can be true in the case where a node has constant inputs
                # it probably should be eliminated and all nodes transposed uniformly
                if edge.to_idx >= len(transpose_node.transpose_in):
                    continue
                trans = transpose_node.transpose_in[edge.to_idx]
                if trans is None:
                    continue
                result = search_up_edges(G, visited_edges, transpose_node,
                                         transpose_node.transpose_in[edge.to_idx], [], start_edge=edge)
                for r in result:
                    visited_edges |= set(r[1])
                    # result is (from_node, from_transpose_dir, from_idx), (to_node, to_transpose_dir, to_idx),
                    # edge list, transpose (from_node)
                    results.append(
                        (
                            (r[0], r[2], r[3]),
                            (transpose_node, 'in', edge.to_idx),
                            r[1][::-1],
                            getattr(r[0], "transpose_" + r[2])[r[3]]
                        )
                    )
        if transpose_node.transpose_out:
            for edge in G.out_edges(transpose_node.name):
                trans = transpose_node.transpose_out[edge.from_idx]
                if trans is None:
                    continue
                result = search_down_edges(G, visited_edges, transpose_node,
                                           trans, [], start_edge=edge)
                for r in result:
                    visited_edges |= set(r[1])
                    results.append(
                        (
                            (transpose_node, 'out', edge.from_idx),
                            (r[0], r[2], r[3]),
                            r[1],
                            transpose_node.transpose_out[edge.from_idx]
                        )
                    )
    return results


def process_result(res):
    LOG.info("eliminating transpose between %s[%s] and %s[%s]",
             res[0][0].name, res[0][1], res[1][0].name, res[1][1])
    transpose = res[3]
    for edge in res[2]:
        to_node = edge.to_node
        if isinstance(to_node, ReshapeParameters) and not to_node.transpose_in:
            LOG.info("eliminating input transpose on %s", to_node.name)
            transpose = reverse_reshape(transpose, to_node.old_shape, to_node.shape)
            to_node.shape.transpose(transpose)
        elif isinstance(to_node, StridedSliceParameters):
            LOG.info("transpose strided slice %s", to_node.name)
            to_node.act_slice = [to_node.act_slice[idx] for idx in transpose]
            to_node.out_shape = [to_node.out_shape[idx] for idx in transpose]
    for node, direction, edge_idx in [res[idx] for idx in range(2)]:
        trans = getattr(node, "transpose_" + direction)
        # This transpose node may have been selected because it only has
        # one dimension in which case there may not actually be a transpose
        if trans:
            trans[edge_idx] = None
            if all(elem is None for elem in trans):
                setattr(node, "transpose_" + direction, None)


def eliminate_transposes(G):
    """Eliminates unnecessary transposes from the graph. Valid transposes are those that have no...
...
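
The core predicate in this pass is reverses_transpose, which asks whether applying one axis permutation after another lands back on the original layout. Shown below is an illustrative sketch only (plain tuples and numpy, none of nntool's Dim handling): the check amounts to verifying that the second permutation is the inverse of the first.

# Illustrative sketch of the inverse-permutation check, without nntool's Dim objects.
import numpy as np

def is_inverse_permutation(trans1, trans2):
    # Mirrors the loop in reverses_transpose: trans2 must send trans1[idx] back to idx.
    return all(trans2[val] == idx for idx, val in enumerate(trans1))

chw_to_hwc = (1, 2, 0)   # e.g. CHW -> HWC
hwc_to_chw = (2, 0, 1)   # its inverse
assert is_inverse_permutation(chw_to_hwc, hwc_to_chw)

x = np.arange(24).reshape(2, 3, 4)
# Applying the two transposes back to back is a no-op, which is exactly the
# pattern the pass detects and eliminates from the graph.
assert np.array_equal(np.transpose(np.transpose(x, chw_to_hwc), hwc_to_chw), x)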


sparse_csr_matrix_grad.py

Source: sparse_csr_matrix_grad.py (GitHub)


...
        sparse_csr_matrix_ops.sparse_matrix_mul(grad, beta), None, None)

@ops.RegisterGradient("SparseMatrixTranspose")
def _SparseMatrixTransposeGrad(op, grad):
  """Gradient for sparse_matrix_transpose op."""
  return sparse_csr_matrix_ops.sparse_matrix_transpose(
      grad, type=op.get_attr("type"), conjugate=op.get_attr("conjugate"))

@ops.RegisterGradient("SparseMatrixSoftmax")
def _SparseMatrixSoftmaxGrad(op, grad_softmax):
  """Gradient for sparse_matrix_softmax op."""
  softmax = op.outputs[0]
  return sparse_csr_matrix_ops.sparse_matrix_softmax_grad(
      softmax, grad_softmax, type=op.get_attr("type"))

@ops.RegisterGradient("SparseMatrixMatMul")
def _SparseMatrixMatMulGrad(op, grad):
  """Gradient for sparse_matrix_mat_mul op."""
  # input to sparse_matrix_mat_mul is (A, B) with CSR A and dense B.
  # Output is dense:
  #   C = opA(A) . opB(B) if transpose_output = false
  #   C = (opA(A) . opB(B))' = opB(B)' . opA(A)' if transpose_output = true.
...
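
The registered gradient above encodes the standard rule that the gradient of a transpose is just the incoming gradient transposed (and conjugated for complex types). Below is a rough sketch of the same rule with the public dense TensorFlow 2 API, not the CSR kernels registered here, and with made-up values.

# Sketch with the public dense API (assumed TF 2.x): the gradient flowing
# through tf.transpose comes back transposed.
import tensorflow as tf

x = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])          # shape (2, 3)
upstream = tf.constant([[1.0, 0.0], [0.0, 1.0], [2.0, 3.0]])  # shape (3, 2), matches x^T

with tf.GradientTape() as tape:
    y = tf.transpose(x)

grad = tape.gradient(y, x, output_gradients=upstream)
# grad equals tf.transpose(upstream), i.e. shape (2, 3)
print(grad.numpy())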


matmul_benchmark.py

Source: matmul_benchmark.py (GitHub)


# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for Matmul operator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import itertools
import time

import numpy as np

from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test


def build_graph(device, n, m, k, transpose_a, transpose_b, dtype):
  """Build a graph containing a sequence of matmul operations.

  Args:
    device: String, the device to run on.
    n: tensor A's first dimension size.
    m: tensor A's second dimension size.
    k: tensor B's second dimension size.
    transpose_a: boolean value to show if tensor A is transposed.
    transpose_b: boolean value to show if tensor B is transposed.
    dtype: numpy data type of the input tensor.

  Returns:
    A matmul operation to run()
  """
  with ops.device('%s' % device):
    if not transpose_a:
      x = variables.Variable(random_ops.random_uniform([n, m], dtype=dtype))
    else:
      x = variables.Variable(random_ops.random_uniform([m, n], dtype=dtype))
    if not transpose_b:
      y = variables.Variable(random_ops.random_uniform([m, k], dtype=dtype))
    else:
      y = variables.Variable(random_ops.random_uniform([k, m], dtype=dtype))
    z = math_ops.matmul(x, y, transpose_a=transpose_a, transpose_b=transpose_b)
    return control_flow_ops.group(z)


class MatmulBenchmark(test.Benchmark):
  """Benchmark matmul!"""

  def run_graph(self, device, n, m, k, transpose_a, transpose_b, num_iters,
                dtype):
    """Run the graph and print its execution time.

    Args:
      device: String, the device to run on.
      n: tensor A's first dimension size.
      m: tensor A's second dimension size.
      k: tensor B's second dimension size.
      transpose_a: boolean value to show if tensor A is transposed.
      transpose_b: boolean value to show if tensor B is transposed.
      num_iters: number of iterations to run the benchmark.
      dtype: numpy data type of the input tensor.

    Returns:
      The duration of the run in seconds.
    """
    graph = ops.Graph()
    with graph.as_default():
      output = build_graph(device, n, m, k, transpose_a, transpose_b, dtype)
      with session_lib.Session(graph=graph) as session:
        variables.global_variables_initializer().run()
        for _ in range(500):
          session.run(output)
        start_time = time.time()
        for _ in range(num_iters):
          session.run(output)
        duration = (time.time() - start_time)
        num_items = n * m * k * 2
        throughput = num_items * num_iters / duration / 1e9
        print('%s %s input_info:%s %d %.4fsec, %.4fGitems/s.' %
              (device, str(dtype), str(n) + 'x' + str(m) + 'x' + str(k) +
               ',ta:' + str(transpose_a) + '.tb:' + str(transpose_b), num_iters,
               duration, throughput))
      name_template = ('matmul_{device}_{dtype}_input_info_{inputinfo}')
      self.report_benchmark(
          name=name_template.format(
              device=device,
              dtype=str(dtype).replace(' ', ''),
              inputinfo=str(n) + 'x' + str(m) + 'x' + str(k) + ',ta:' +
              str(transpose_a) + ',tb:' + str(transpose_b)).replace(' ', ''),
          iters=num_iters,
          wall_time=duration)
      return duration

  def run_test_gpu(self, n, m, k, transpose_a, transpose_b, dtype, num_iters):
    self.run_graph(test.gpu_device_name(), n, m, k, transpose_a, transpose_b,
                   num_iters, dtype)

  def test_round(self, num_iters):
    dtypes = [np.float32, np.float64]
    for dtype in dtypes:
      for n, m, (transpose_a, transpose_b) in itertools.product(
          [512, 1024], [1, 8, 16, 128], [(False, False), (True, False),
                                         (False, True)]):
        k = n
        self.run_test_gpu(n, m, k, transpose_a, transpose_b, dtype, num_iters)
      for n, m, k, (transpose_a, transpose_b) in itertools.product(
          [200], [1, 8, 20], [10000], [(False, False), (True, False),
                                       (False, True)]):
        self.run_test_gpu(n, m, k, transpose_a, transpose_b, dtype, num_iters)
      for (n, m, k), (transpose_a, transpose_b) in itertools.product(
          [(200, 20, 20000), (1, 10000, 200)], [(False, False), (True, False),
                                                (False, True)]):
        self.run_test_gpu(n, m, k, transpose_a, transpose_b, dtype, num_iters)

  def benchmark_matmul(self):
    self.test_round(num_iters=200)


if __name__ == '__main__':
...
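
The benchmark drives math_ops.matmul through the TF1 graph/session API, timing num_iters runs after a 500-iteration warm-up and reporting Gitems/s. Below is a rough eager-mode sketch of one transpose_a configuration; the sizes, iteration count, and missing warm-up are illustrative assumptions, not the file's own settings.

# Rough eager-mode sketch (assumed TF 2.x); sizes and iteration count are illustrative.
import time
import tensorflow as tf

n, m, k = 512, 128, 512
a = tf.random.uniform([m, n])   # stored transposed, so we pass transpose_a=True
b = tf.random.uniform([m, k])

iters = 100
start = time.time()
for _ in range(iters):
    c = tf.matmul(a, b, transpose_a=True)   # computes a^T . b without materializing a^T
duration = time.time() - start
print('%.4f sec, %.2f Gitems/s' % (duration, n * m * k * 2 * iters / duration / 1e9))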


transpose-test.js

Source: transpose-test.js (GitHub)


...
    topic: function() {
      return d3.transpose;
    },
    "transposes a square matrix": function(transpose) {
      assert.deepEqual(d3.transpose([[1, 2], [3, 4]]), [[1, 3], [2, 4]]);
    },
    "transposes a non-square matrix": function(transpose) {
      assert.deepEqual(d3.transpose([[1, 2, 3, 4, 5], [2, 4, 6, 8, 10]]), [[1, 2], [2, 4], [3, 6], [4, 8], [5, 10]]);
    },
    "transposes a single-row matrix": function(transpose) {
      assert.deepEqual(d3.transpose([[1, 2, 3, 4, 5]]), [[1], [2], [3], [4], [5]]);
    },
    "transposes an empty matrix": function(transpose) {
      assert.deepEqual(d3.transpose([]), []);
    },
    "ignores extra elements given an irregular matrix": function(transpose) {
      assert.deepEqual(d3.transpose([[1, 2], [3, 4], [5, 6, 7]]), [[1, 3, 5], [2, 4, 6]]);
    }
  }
});
...
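
For comparison in this article's main language, Python's built-in zip gives the same results as d3.transpose for every case in this suite, including the irregular matrix (zip stops at the shortest row, which matches d3's output here). A small hypothetical sketch:

# Hypothetical Python counterpart of the d3.transpose cases above.
def transpose(matrix):
    return [list(row) for row in zip(*matrix)]

assert transpose([[1, 2], [3, 4]]) == [[1, 3], [2, 4]]
assert transpose([[1, 2, 3, 4, 5], [2, 4, 6, 8, 10]]) == [[1, 2], [2, 4], [3, 6], [4, 8], [5, 10]]
assert transpose([[1, 2, 3, 4, 5]]) == [[1], [2], [3], [4], [5]]
assert transpose([]) == []
assert transpose([[1, 2], [3, 4], [5, 6, 7]]) == [[1, 3, 5], [2, 4, 6]]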


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Hypothesis automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

