# How to use complex_step method in grail

Best Python code snippet using grail_python

besselk.py

Source:besselk.py

`1"""2Module providing convenient and fast derivatives of log(scipy.special.k1e)3See the function: logK1e4Asymptotes:5 Kn(x) ~ sqrt(pi/2x) exp(-x) as x -> inf6 7 Kn(x) ~ 1/2 Gamma(n) / (1/2 x)^n, as x -> 0, for n > 08 K0(x) ~ -log(x), as x -> 09 10 11"""12import scipy.special as special13import numpy as np14from lib.deriv.adtools import cs15def k012e(x):16 """Returns k0e(x), k1e(x) and k2e(x) for real or complex x.17 18 For real x, the fast exponentially scaled K_n Bessel functions, k0e end k1e 19 are defined in scipy.special, but not ke2, which is computed here from the 20 other two using the recurion: 21 K_n(x) = K_{n-2}(x) + 2(n-1)/z * K_{n-1}(x) 22 23 For complex x, kve(0,x) and kve(1,x) and the recursion are used.24 25 Returns the outputs all three functions, evaluated at x.26 """27 if np.iscomplexobj(x):28 k0 = special.kve(0,x)29 k1 = special.kve(1,x)30 k2 = k0 + 2.0*k1/x31 else:32 k0 = special.k0e(x)33 k1 = special.k1e(x)34 k2 = k0 + 2.0*k1/x35 return k0, k1, k236#def ddxlogK1e(z):37 """38 ddx K1(z) = (-1/2) * ( K0(z) + K2(z) )39 ddx log(exp(z)K1(z)) 40 = 1 + [ddx K1(z)] / K1(z)41 = 1 - [K0(z) + K2(z)] / [2*K1(z)]42 43 """ 44def k0e(x):45 if np.iscomplexobj(x):46 return special.kve(0,x)47 else:48 return special.k0e(x)49def k1e(x):50 if np.iscomplexobj(x):51 return special.kve(1,x)52 else:53 return special.k1e(x)54class csret():55 def __init__(self,z):56 self.val = np.real(z)57 self.deriv = 1e20*np.imag(z)58 59def cs2(f,x):60 z1, back = f(x+1e-20j)61 z2 = back(1.0)62 return csret(z1), csret(z2)63def logK1e(x, deriv = False, complex_step = True):64 """65 log(scipy.special.k1e), with derivative capabilities.66 67 k1e(x) is Bessel function K1(x), scaled by exp(x) 68 69 parameters:70 71 x: real or complex ndarray72 deriv: Bool, optional, default = False73 - False: return only function values74 - True: return function values and a backpropagation function.75 76 - When complex_step=True and x is real, the first derivative is computed 77 with complex step 
differentiation. This is faster.78 - Otherwise, an explicit first derivative calculation is used.79 80 The whole function can be wrapped in a complex step differentiation. Then81 the function value, the complex-step first derivative, the explicit first82 derivative and the second derivative can all be recovered from the real and83 imaginary parts of the two return values.84 85 86 """87 88 89 complexx = np.iscomplexobj(x)90 complex_step = complex_step and not complexx91 92 if not deriv: return np.log(k1e(x))93 if complex_step:94 y, dx = cs(k1e,x)95 return np.log(y), lambda dy: dx(dy/y)96 k0 = k0e(x)97 k1 = k1e(x)98 k2 = k0 + 2.0*k1/x99 ddx = 1 - (k0 + k2) / (2*k1)100 return np.log(k1), lambda dy: dy * ddx 101def logK1e_2ndderiv(x):102 """Slow and probably inaccurate for large x. For testing only."""103 f = special.k1e(x)104 k1 = special.kvp(1,x,1)105 k2 = special.kvp(1,x,2)106 e = np.exp(x)107 f1 = f + e*k1108 f2 = f1 + e*(k1+k2)109 return (f2*f - f1**2) / f**2110if __name__ == "__main__":111 print("Running test script for module besselk\n")112 113 from numpy.random import randn114# n = 10115# m = 3116# x = randn(n,m)**2117 x = randn(2,1)**2118 119 y0 = logK1e(x)120 y1, back = cs(logK1e,x); ddx1 = back(1.0)121 y2, back = logK1e(x, deriv = True); ddx2 = back(1.0) 122 y3, back = logK1e(x, deriv = True, complex_step = False); ddx3 = back(1.0) 123 124 r1, r2 = cs2(lambda x: logK1e(x,True), x)125 y4, ddx4 = r1.val, r1.deriv126 ddx5, d2dx5 = r2.val, r2.deriv127 128 d2dx6 = logK1e_2ndderiv(x)129 130 print('comparing values:')131 for yi in (y1,y2,y3,y4):132 print(abs(y0-yi).max())133 print('\ncomparing derivatives:')134 for ddxi in (ddx2,ddx3,ddx4,ddx5):135 print(abs(ddx1-ddxi).max())136 print('\ncomparing 2nd derivatives:')137 print(abs(d2dx5-d2dx6).max())138 139 ...`

extendedDFM.py

Source:extendedDFM.py

`1#!/usr/bin/env python32# -*- coding: utf-8 -*-3"""4Created on Wed Jan 31 15:25:37 20185@author: congshanzhang6"""7import numpy as np8import pandas as pd9import statsmodels.api as sm10import matplotlib.pyplot as plt11from statsmodels.tsa.statespace import tools12class ExtendedDFM(sm.tsa.DynamicFactor):13 def __init__(self, endog, **kwargs):14 # Setup the model as if we had a factor order of 415 super(ExtendedDFM, self).__init__(16 endog, k_factors=1, factor_order=4, error_order=2,17 **kwargs)18 # Note: `self.parameters` is an ordered dict with the19 # keys corresponding to parameter types, and the values20 # the number of parameters of that type.21 # Add the new parameters22 self.parameters['new_loadings'] = 323 # Cache a slice for the location of the 4 factor AR24 # parameters (a_1, ..., a_4) in the full parameter vector25 offset = (self.parameters['factor_loadings'] +26 self.parameters['exog'] +27 self.parameters['error_cov'])28 self._params_factor_ar = np.s_[offset:offset+2]29 self._params_factor_zero = np.s_[offset+2:offset+4]30 @property31 def start_params(self):32 # Add three new loading parameters to the end of the parameter33 # vector, initialized to zeros (for simplicity; they could34 # be initialized any way you like)35 return np.r_[super(ExtendedDFM, self).start_params, 0, 0, 0]36 37 @property38 def param_names(self):39 # Add the corresponding names for the new loading parameters40 # (the name can be anything you like)41 return super(ExtendedDFM, self).param_names + [42 'loading.L%d.f1.%s' % (i, self.endog_names[3]) for i in range(1,4)]43 def transform_params(self, unconstrained):44 # Perform the typical DFM transformation (w/o the new parameters)45 constrained = super(ExtendedDFM, self).transform_params(46 unconstrained[:-3])47 # Redo the factor AR constraint, since we only want an AR(2),48 # and the previous constraint was for an AR(4)49 ar_params = unconstrained[self._params_factor_ar]50 constrained[self._params_factor_ar] = (51 
tools.constrain_stationary_univariate(ar_params))52 # Return all the parameters53 return np.r_[constrained, unconstrained[-3:]]54 def untransform_params(self, constrained):55 # Perform the typical DFM untransformation (w/o the new parameters)56 unconstrained = super(ExtendedDFM, self).untransform_params(57 constrained[:-3])58 # Redo the factor AR unconstraint, since we only want an AR(2),59 # and the previous unconstraint was for an AR(4)60 ar_params = constrained[self._params_factor_ar]61 unconstrained[self._params_factor_ar] = (62 tools.unconstrain_stationary_univariate(ar_params))63 # Return all the parameters64 return np.r_[unconstrained, constrained[-3:]]65 def update(self, params, transformed=True, complex_step=False):66 # Peform the transformation, if required67 if not transformed:68 params = self.transform_params(params)69 params[self._params_factor_zero] = 070 71 # Now perform the usual DFM update, but exclude our new parameters72 super(ExtendedDFM, self).update(params[:-3], transformed=True, complex_step=complex_step)73 # Finally, set our new parameters in the design matrix...`

test_merging.py

Source:test_merging.py

`1from tests.config_data import (2 complex_step,3 complex_step_alt,4 complex_steps_merged,5 echo_step,6)7from valohai_yaml.objs import Config8def test_merging():9 a = Config.parse([echo_step])10 b = Config.parse([complex_step])11 c = a.merge_with(b)12 assert len(c.steps) == 213 for step in a.steps.keys() & b.steps.keys():14 assert step in c.steps15def test_merging_conflict():16 a = Config.parse([complex_step])17 b = Config.parse([complex_step_alt])18 c = a.merge_with(b)19 expected = Config.parse([complex_steps_merged])...`

## Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.