How to use the do_forward method in localstack

Best Python code snippet using localstack_python
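
The snippets below come from a small autograd library, imported as vg, in which every op exposes a do_forward class method: call it with input nodes (plus any keyword arguments the op takes) and it returns a new node wired into the computation graph, so that calling backward() on a downstream node fills in the gradients of everything upstream. Here is a minimal sketch of such a call; the vugrad import name and the exact TensorNode/op API are assumptions inferred from the snippets below:

import numpy as np
import vugrad as vg  # assumption: the package the snippets import as vg

x = vg.TensorNode(np.asarray([[1.0, -2.0]]))  # wrap a raw ndarray in a graph node
y = vg.Sigmoid.do_forward(x)                  # apply an op; the application is recorded in the graph
z = vg.Sum.do_forward(y)                      # chain ops by passing nodes along
z.backward()                                  # backprop from the scalar output fills x.grad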

tests.py

Source: tests.py (GitHub)

...
        """
        Test the backprop using finite differences
        :return:
        """
        finite_differences(lambda x: vg.Sum.do_forward(x))

    def test_fd1(self):
        """
        Test the backprop using finite differences
        :return:
        """
        finite_differences(lambda x: vg.Sum.do_forward(vg.Sigmoid.do_forward(x)), input='rand')

    def test_fd2(self):
        """
        Test the backprop using finite differences
        :return:
        """
        finite_differences(input='rand', function=lambda x:
            vg.Sum.do_forward(
                vg.Sigmoid.do_forward(
                    vg.MatrixMultiply.do_forward(x, x)
                )))

    def test_fd3(self):
        """
        Test the backprop using finite differences
        :return:
        """
        def fn(x):
            x = vg.Exp.do_forward(x)
            x = vg.Normalize.do_forward(x)
            return vg.Sum.do_forward(x)

        finite_differences(
            # input=np.asarray([[10.2, 20.4]]),
            input=np.asarray([[0.6931471805599453, 0.0]]),
            # input=np.random.randn(10, 2),
            function=fn)

    def test_mlp(self):
        fd_mlp()

    def testmax(self):
        x = np.asarray([[0., 1.], [4., 5.], [9., 0.]])
        ctx = {}
        vg.RowMax.forward(ctx, x)
        grad = vg.RowMax.backward(ctx, np.asarray([.1, .2, .3]))
        self.assertTrue((np.asarray([[0., .1], [0., .2], [.3, 0.]]) == grad).all())

    def testsum(self):
        x = np.asarray([[0., 1.], [4., 0.], [9., 0.]])
        ctx = {}
        vg.RowSum.forward(ctx, x)
        grad = vg.RowSum.backward(ctx, np.arange(3.0) + 0.1)
        self.assertTrue((np.asarray([[0.1, 0.1], [1.1, 1.1], [2.1, 2.1]]) == grad).all())

    def testlogsoftmax(self):
        x = np.asarray([[0., 0.], [2., 0.], [3., 0.]])
        x = vg.TensorNode(x)
        s = np.exp(vg.logsoftmax(x).value).sum(axis=1)
        self.assertTrue(((s - 1.0) ** 2. < 1e-10).all())

    def testlogsoftmax2(self):
        x = np.random.randn(4, 5)
        x = vg.TensorNode(x)
        els = np.exp(vg.logsoftmax(x).value)
        s = vg.softmax(x).value
        self.assertTrue(((els - s) ** 2. < 1e-7).all())

    def testdiamond(self):
        a = vg.TensorNode(np.asarray([1.0]))
        b = vg.Id.do_forward(a)
        c1, c2 = vg.Id.do_forward(b), vg.Id.do_forward(b)
        d = c1 + c2

        a.name = 'a'
        b.name = 'b'
        c1.name = 'c1'
        c2.name = 'c2'
        d.name = 'd'
        # a.debug = True

        d.backward()
        self.assertEqual(2.0, float(a.grad))

    def testdoublediamond(self):
        a0 = vg.TensorNode(np.asarray([1.0]))
        a = vg.Id.do_forward(a0)
        b1, b2 = vg.Id.do_forward(a), vg.Id.do_forward(a)
        c1, c2, c3, c4 = vg.Id.do_forward(b1), vg.Id.do_forward(b1), vg.Id.do_forward(b2), vg.Id.do_forward(b2)
        d1 = c1 + c2
        d2 = c3 + c4
        e = d1 + d2

        e.backward()
        self.assertEqual(4.0, float(a.grad))

    def testseqdiamond(self):
        a = vg.TensorNode(np.asarray([1.0]))
        b = vg.Id.do_forward(a)
        c1, c2 = vg.Id.do_forward(b), vg.Id.do_forward(b)
        d = c1 + c2
        e = vg.Id.do_forward(d)
        f = e + a
        g1, g2 = vg.Id.do_forward(f), vg.Id.do_forward(f)
        h = g1 + g2

        h.backward()
        self.assertEqual(2.0, float(f.grad))
        self.assertEqual(2.0, float(e.grad))
        self.assertEqual(2.0, float(c1.grad))
        self.assertEqual(4.0, float(b.grad))
...
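
In the tests above, do_forward is called as a class method on ops such as vg.Sum and vg.Id, while the raw forward/backward pair (see testmax and testsum) is called with an explicit ctx dictionary. In a library of this style, do_forward is typically the thin wrapper that connects the two: it unwraps the input nodes, runs the numerical forward pass, and wraps the result in a new node that remembers its origin. A minimal sketch of that pattern, assuming a simplified Op/TensorNode design rather than the library's actual internals:

import numpy as np

class TensorNode:
    # Holds a value, a gradient accumulator, and the op application that produced it.
    def __init__(self, value, source=None):
        self.value = value
        self.grad = np.zeros_like(value)
        self.source = source  # the OpNode that produced this node, or None for inputs

class OpNode:
    # Records one application of an op: the op class, its context, and its input nodes.
    def __init__(self, op, ctx, inputs):
        self.op, self.ctx, self.inputs = op, ctx, inputs

class Op:
    @classmethod
    def do_forward(cls, *inputs, **kwargs):
        raw = [i.value for i in inputs]        # unwrap the raw ndarrays
        ctx = {}                               # forward stores here whatever backward needs
        out = cls.forward(ctx, *raw, **kwargs)
        # Wrap the result in a node that remembers how it was computed,
        # so that backward() can later walk the graph.
        return TensorNode(out, source=OpNode(cls, ctx, inputs))

class Sum(Op):
    @staticmethod
    def forward(ctx, x):
        ctx['shape'] = x.shape                 # remembered for the backward pass
        return np.sum(x)

    @staticmethod
    def backward(ctx, goutput):
        # The gradient of a sum broadcasts the output gradient over the input.
        return np.full(ctx['shape'], goutput)

Under a design like this, vg.Sum.do_forward(x) both computes the sum and records the op application, which is what lets d.backward() traverse the diamond-shaped graphs in testdiamond and its variants.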

functions.py

Source: functions.py (GitHub)

...
    :param outputs: Predictions from the model, a distribution over the classes
    :param targets: True class values, given as integers
    :return: A single loss value: the lower the value, the better the outputs match the targets.
    """
    logprobs = Log.do_forward(outputs)
    return logceloss(logprobs, targets)

def logceloss(logprobs, targets):
    """
    Implementation of the cross-entropy loss from log-probabilities.
    We separate this from the celoss, because computing the probabilities explicitly (as done there) is numerically
    unstable. It's much more stable to compute the log-probabilities directly, using the log-softmax function.
    :param logprobs:
    :param targets:
    :return:
    """
    # The log probability of the correct class, per instance
    per_instance = Select.do_forward(logprobs, indices=targets)
    # The loss sums all these. The higher the better, so we return the negative of this.
    return Sum.do_forward(per_instance) * -1.0

def sigmoid(x):
    """
    Wrap the sigmoid op in a function (just for symmetry with the softmax).
    :param x:
    :return:
    """
    return Sigmoid.do_forward(x)

def softmax(x):
    """
    Applies a row-wise softmax to a matrix.
    NB: Softmax is almost never computed like this in serious settings. It's much better
    to start from logits and use the logsumexp trick, returning
    `log(softmax(x))`. See the logsoftmax function below.
    :param x: A matrix.
    :return: A matrix of the same size as x, with normalized rows.
    """
    return Normalize.do_forward(Exp.do_forward(x))

def logsoftmax(x):
    """
    Computes the logarithm of the softmax.
    This function uses the "log-sum-exp trick" to compute the logarithm of the softmax
    in a numerically stable fashion.
    Here is a good explanation: https://gregorygundersen.com/blog/2020/02/09/log-sum-exp/
    :param x: A matrix.
    :return: A matrix of the same size as x, with normalized rows.
    """
    # -- Max over the rows and expand back to the size of x
    xcols = x.value.shape[1]
    xmax = RowMax.do_forward(x)
    xmax = Unsqueeze.do_forward(xmax, dim=1)
    xmax = Expand.do_forward(xmax, repeats=xcols, dim=1)
    assert xmax.value.shape == x.value.shape, f'{xmax.value.shape} {x.value.shape}'

    diff = x - xmax

    denominator = RowSum.do_forward(Exp.do_forward(diff))
    denominator = Log.do_forward(denominator)
    denominator = Unsqueeze.do_forward(denominator, dim=1)
    denominator = Expand.do_forward(denominator, repeats=xcols, dim=1)
    assert denominator.value.shape == x.value.shape, f'{denominator.value.shape} {x.value.shape}'

    res = diff - denominator
...
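
Putting the pieces together, a hypothetical end-to-end call computes stable log-probabilities and a cross-entropy loss, then backpropagates through every do_forward application. The vugrad import name and the module-level exposure of logceloss are assumptions; TensorNode and logsoftmax follow the test file above:

import numpy as np
import vugrad as vg  # assumption: the package imported as vg in the tests

# A small batch of logits: 2 instances, 3 classes, plus the true class per instance
x = vg.TensorNode(np.asarray([[1.0, 2.0, 0.5],
                              [0.1, 0.1, 3.0]]))
targets = np.asarray([1, 2])

logprobs = vg.logsoftmax(x)             # numerically stable via the log-sum-exp trick
loss = vg.logceloss(logprobs, targets)  # assumption: logceloss is exposed at module level

loss.backward()                         # fills x.grad by walking the recorded graph
print(loss.value, x.grad)

Because logceloss works on log-probabilities directly, this path never exponentiates large logits, which is the numerical-stability point the docstrings above make.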

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, such as Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run localstack automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
