How to use suppressed_stdout method in Test_junkie

Best Python code snippet using test_junkie

test_optimize.py

Source: test_optimize.py (GitHub)


1"""2Unit tests for optimization routines from optimize.py3Authors:4 Ed Schofield, Nov 20055 Andrew Straw, April 20086To run it in its simplest form::7 nosetests test_optimize.py8"""9from __future__ import division, print_function, absolute_import10import warnings11import itertools12import numpy as np13from numpy.testing import (assert_raises, assert_allclose, assert_equal,14 assert_, TestCase, run_module_suite, dec,15 assert_almost_equal, assert_warns,16 assert_array_less)17from scipy._lib._testutils import suppressed_stdout18from scipy import optimize19def test_check_grad():20 # Verify if check_grad is able to estimate the derivative of the21 # logistic function.22 def logit(x):23 return 1 / (1 + np.exp(-x))24 def der_logit(x):25 return np.exp(-x) / (1 + np.exp(-x))**226 x0 = np.array([1.5])27 r = optimize.check_grad(logit, der_logit, x0)28 assert_almost_equal(r, 0)29 r = optimize.check_grad(logit, der_logit, x0, epsilon=1e-6)30 assert_almost_equal(r, 0)31 # Check if the epsilon parameter is being considered.32 r = abs(optimize.check_grad(logit, der_logit, x0, epsilon=1e-1) - 0)33 assert_(r > 1e-7)34class CheckOptimize(object):35 """ Base test case for a simple constrained entropy maximization problem36 (the machine translation example of Berger et al in37 Computational Linguistics, vol 22, num 1, pp 39--72, 1996.)38 """39 def setUp(self):40 self.F = np.array([[1,1,1],[1,1,0],[1,0,1],[1,0,0],[1,0,0]])41 self.K = np.array([1., 0.3, 0.5])42 self.startparams = np.zeros(3, np.float64)43 self.solution = np.array([0., -0.524869316, 0.487525860])44 self.maxiter = 100045 self.funccalls = 046 self.gradcalls = 047 self.trace = []48 def func(self, x):49 self.funccalls += 150 if self.funccalls > 6000:51 raise RuntimeError("too many iterations in optimization routine")52 log_pdot = np.dot(self.F, x)53 logZ = np.log(sum(np.exp(log_pdot)))54 f = logZ - np.dot(self.K, x)55 self.trace.append(x)56 return f57 def grad(self, x):58 self.gradcalls += 159 log_pdot = np.dot(self.F, x)60 logZ = np.log(sum(np.exp(log_pdot)))61 p = np.exp(log_pdot - logZ)62 return np.dot(self.F.transpose(), p) - self.K63 def hess(self, x):64 log_pdot = np.dot(self.F, x)65 logZ = np.log(sum(np.exp(log_pdot)))66 p = np.exp(log_pdot - logZ)67 return np.dot(self.F.T,68 np.dot(np.diag(p), self.F - np.dot(self.F.T, p)))69 def hessp(self, x, p):70 return np.dot(self.hess(x), p)71class CheckOptimizeParameterized(CheckOptimize):72 @suppressed_stdout73 def test_cg(self):74 # conjugate gradient optimization routine75 if self.use_wrapper:76 opts = {'maxiter': self.maxiter, 'disp': self.disp,77 'return_all': False}78 res = optimize.minimize(self.func, self.startparams, args=(),79 method='CG', jac=self.grad,80 options=opts)81 params, fopt, func_calls, grad_calls, warnflag = \82 res['x'], res['fun'], res['nfev'], res['njev'], res['status']83 else:84 retval = optimize.fmin_cg(self.func, self.startparams,85 self.grad, (), maxiter=self.maxiter,86 full_output=True, disp=self.disp,87 retall=False)88 (params, fopt, func_calls, grad_calls, warnflag) = retval89 assert_allclose(self.func(params), self.func(self.solution),90 atol=1e-6)91 # Ensure that function call counts are 'known good'; these are from92 # Scipy 0.7.0. 
Don't allow them to increase.93 assert_(self.funccalls == 9, self.funccalls)94 assert_(self.gradcalls == 7, self.gradcalls)95 # Ensure that the function behaves the same; this is from Scipy 0.7.096 assert_allclose(self.trace[2:4],97 [[0, -0.5, 0.5],98 [0, -5.05700028e-01, 4.95985862e-01]],99 atol=1e-14, rtol=1e-7)100 @suppressed_stdout101 def test_bfgs(self):102 # Broyden-Fletcher-Goldfarb-Shanno optimization routine103 if self.use_wrapper:104 opts = {'maxiter': self.maxiter, 'disp': self.disp,105 'return_all': False}106 res = optimize.minimize(self.func, self.startparams,107 jac=self.grad, method='BFGS', args=(),108 options=opts)109 params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag = (110 res['x'], res['fun'], res['jac'], res['hess_inv'],111 res['nfev'], res['njev'], res['status'])112 else:113 retval = optimize.fmin_bfgs(self.func, self.startparams, self.grad,114 args=(), maxiter=self.maxiter,115 full_output=True, disp=self.disp,116 retall=False)117 (params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag) = retval118 assert_allclose(self.func(params), self.func(self.solution),119 atol=1e-6)120 # Ensure that function call counts are 'known good'; these are from121 # Scipy 0.7.0. Don't allow them to increase.122 assert_(self.funccalls == 10, self.funccalls)123 assert_(self.gradcalls == 8, self.gradcalls)124 # Ensure that the function behaves the same; this is from Scipy 0.7.0125 assert_allclose(self.trace[6:8],126 [[0, -5.25060743e-01, 4.87748473e-01],127 [0, -5.24885582e-01, 4.87530347e-01]],128 atol=1e-14, rtol=1e-7)129 @suppressed_stdout130 def test_bfgs_infinite(self):131 # Test corner case where -Inf is the minimum. See gh-2019.132 func = lambda x: -np.e**-x133 fprime = lambda x: -func(x)134 x0 = [0]135 olderr = np.seterr(over='ignore')136 try:137 if self.use_wrapper:138 opts = {'disp': self.disp}139 x = optimize.minimize(func, x0, jac=fprime, method='BFGS',140 args=(), options=opts)['x']141 else:142 x = optimize.fmin_bfgs(func, x0, fprime, disp=self.disp)143 assert_(not np.isfinite(func(x)))144 finally:145 np.seterr(**olderr)146 @suppressed_stdout147 def test_powell(self):148 # Powell (direction set) optimization routine149 if self.use_wrapper:150 opts = {'maxiter': self.maxiter, 'disp': self.disp,151 'return_all': False}152 res = optimize.minimize(self.func, self.startparams, args=(),153 method='Powell', options=opts)154 params, fopt, direc, numiter, func_calls, warnflag = (155 res['x'], res['fun'], res['direc'], res['nit'],156 res['nfev'], res['status'])157 else:158 retval = optimize.fmin_powell(self.func, self.startparams,159 args=(), maxiter=self.maxiter,160 full_output=True, disp=self.disp,161 retall=False)162 (params, fopt, direc, numiter, func_calls, warnflag) = retval163 assert_allclose(self.func(params), self.func(self.solution),164 atol=1e-6)165 # Ensure that function call counts are 'known good'; these are from166 # Scipy 0.7.0. Don't allow them to increase.167 #168 # However, some leeway must be added: the exact evaluation169 # count is sensitive to numerical error, and floating-point170 # computations are not bit-for-bit reproducible across171 # machines, and when using e.g. MKL, data alignment172 # etc. 
affect the rounding error.173 #174 assert_(self.funccalls <= 116 + 20, self.funccalls)175 assert_(self.gradcalls == 0, self.gradcalls)176 # Ensure that the function behaves the same; this is from Scipy 0.7.0177 assert_allclose(self.trace[34:39],178 [[0.72949016, -0.44156936, 0.47100962],179 [0.72949016, -0.44156936, 0.48052496],180 [1.45898031, -0.88313872, 0.95153458],181 [0.72949016, -0.44156936, 0.47576729],182 [1.72949016, -0.44156936, 0.47576729]],183 atol=1e-14, rtol=1e-7)184 @suppressed_stdout185 def test_neldermead(self):186 # Nelder-Mead simplex algorithm187 if self.use_wrapper:188 opts = {'maxiter': self.maxiter, 'disp': self.disp,189 'return_all': False}190 res = optimize.minimize(self.func, self.startparams, args=(),191 method='Nelder-mead', options=opts)192 params, fopt, numiter, func_calls, warnflag, final_simplex = (193 res['x'], res['fun'], res['nit'], res['nfev'],194 res['status'], res['final_simplex'])195 else:196 retval = optimize.fmin(self.func, self.startparams,197 args=(), maxiter=self.maxiter,198 full_output=True, disp=self.disp,199 retall=False)200 (params, fopt, numiter, func_calls, warnflag) = retval201 assert_allclose(self.func(params), self.func(self.solution),202 atol=1e-6)203 # Ensure that function call counts are 'known good'; these are from204 # Scipy 0.7.0. Don't allow them to increase.205 assert_(self.funccalls == 167, self.funccalls)206 assert_(self.gradcalls == 0, self.gradcalls)207 # Ensure that the function behaves the same; this is from Scipy 0.7.0208 assert_allclose(self.trace[76:78],209 [[0.1928968, -0.62780447, 0.35166118],210 [0.19572515, -0.63648426, 0.35838135]],211 atol=1e-14, rtol=1e-7)212 @suppressed_stdout213 def test_neldermead_initial_simplex(self):214 # Nelder-Mead simplex algorithm215 simplex = np.zeros((4, 3))216 simplex[...] = self.startparams217 for j in range(3):218 simplex[j+1,j] += 0.1219 if self.use_wrapper:220 opts = {'maxiter': self.maxiter, 'disp': False,221 'return_all': True, 'initial_simplex': simplex}222 res = optimize.minimize(self.func, self.startparams, args=(),223 method='Nelder-mead', options=opts)224 params, fopt, numiter, func_calls, warnflag = \225 res['x'], res['fun'], res['nit'], res['nfev'], \226 res['status']227 assert_allclose(res['allvecs'][0], simplex[0])228 else:229 retval = optimize.fmin(self.func, self.startparams,230 args=(), maxiter=self.maxiter,231 full_output=True, disp=False, retall=False,232 initial_simplex=simplex)233 (params, fopt, numiter, func_calls, warnflag) = retval234 assert_allclose(self.func(params), self.func(self.solution),235 atol=1e-6)236 # Ensure that function call counts are 'known good'; these are from237 # Scipy 0.17.0. Don't allow them to increase.238 assert_(self.funccalls == 100, self.funccalls)239 assert_(self.gradcalls == 0, self.gradcalls)240 # Ensure that the function behaves the same; this is from Scipy 0.15.0241 assert_allclose(self.trace[50:52],242 [[0.14687474, -0.5103282, 0.48252111],243 [0.14474003, -0.5282084, 0.48743951]],244 atol=1e-14, rtol=1e-7)245 @suppressed_stdout246 def test_neldermead_initial_simplex_bad(self):247 # Check it fails with a bad simplices248 bad_simplices = []249 simplex = np.zeros((3, 2))250 simplex[...] 
= self.startparams[:2]251 for j in range(2):252 simplex[j+1,j] += 0.1253 bad_simplices.append(simplex)254 simplex = np.zeros((3, 3))255 bad_simplices.append(simplex)256 for simplex in bad_simplices:257 if self.use_wrapper:258 opts = {'maxiter': self.maxiter, 'disp': False,259 'return_all': False, 'initial_simplex': simplex}260 assert_raises(ValueError,261 optimize.minimize, self.func, self.startparams, args=(),262 method='Nelder-mead', options=opts)263 else:264 assert_raises(ValueError, optimize.fmin, self.func, self.startparams,265 args=(), maxiter=self.maxiter,266 full_output=True, disp=False, retall=False,267 initial_simplex=simplex)268 @suppressed_stdout269 def test_ncg(self):270 # line-search Newton conjugate gradient optimization routine271 if self.use_wrapper:272 opts = {'maxiter': self.maxiter, 'disp': self.disp,273 'return_all': False}274 retval = optimize.minimize(self.func, self.startparams,275 method='Newton-CG', jac=self.grad,276 args=(), options=opts)['x']277 else:278 retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,279 args=(), maxiter=self.maxiter,280 full_output=False, disp=self.disp,281 retall=False)282 params = retval283 assert_allclose(self.func(params), self.func(self.solution),284 atol=1e-6)285 # Ensure that function call counts are 'known good'; these are from286 # Scipy 0.7.0. Don't allow them to increase.287 assert_(self.funccalls == 7, self.funccalls)288 assert_(self.gradcalls <= 22, self.gradcalls) # 0.13.0289 #assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0290 #assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0291 #assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0292 # Ensure that the function behaves the same; this is from Scipy 0.7.0293 assert_allclose(self.trace[3:5],294 [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],295 [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],296 atol=1e-6, rtol=1e-7)297 @suppressed_stdout298 def test_ncg_hess(self):299 # Newton conjugate gradient with Hessian300 if self.use_wrapper:301 opts = {'maxiter': self.maxiter, 'disp': self.disp,302 'return_all': False}303 retval = optimize.minimize(self.func, self.startparams,304 method='Newton-CG', jac=self.grad,305 hess=self.hess,306 args=(), options=opts)['x']307 else:308 retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,309 fhess=self.hess,310 args=(), maxiter=self.maxiter,311 full_output=False, disp=self.disp,312 retall=False)313 params = retval314 assert_allclose(self.func(params), self.func(self.solution),315 atol=1e-6)316 # Ensure that function call counts are 'known good'; these are from317 # Scipy 0.7.0. 
Don't allow them to increase.318 assert_(self.funccalls == 7, self.funccalls)319 assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0320 # assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0321 # assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0322 # Ensure that the function behaves the same; this is from Scipy 0.7.0323 assert_allclose(self.trace[3:5],324 [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],325 [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],326 atol=1e-6, rtol=1e-7)327 @suppressed_stdout328 def test_ncg_hessp(self):329 # Newton conjugate gradient with Hessian times a vector p.330 if self.use_wrapper:331 opts = {'maxiter': self.maxiter, 'disp': self.disp,332 'return_all': False}333 retval = optimize.minimize(self.func, self.startparams,334 method='Newton-CG', jac=self.grad,335 hessp=self.hessp,336 args=(), options=opts)['x']337 else:338 retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,339 fhess_p=self.hessp,340 args=(), maxiter=self.maxiter,341 full_output=False, disp=self.disp,342 retall=False)343 params = retval344 assert_allclose(self.func(params), self.func(self.solution),345 atol=1e-6)346 # Ensure that function call counts are 'known good'; these are from347 # Scipy 0.7.0. Don't allow them to increase.348 assert_(self.funccalls == 7, self.funccalls)349 assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0350 # assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0351 # assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0352 # Ensure that the function behaves the same; this is from Scipy 0.7.0353 assert_allclose(self.trace[3:5],354 [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],355 [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],356 atol=1e-6, rtol=1e-7)357def test_neldermead_xatol_fatol():358 # gh4484359 # test we can call with fatol, xatol specified360 func = lambda x: x[0]**2 + x[1]**2361 optimize._minimize._minimize_neldermead(func, [1, 1], maxiter=2,362 xatol=1e-3, fatol=1e-3)363 assert_warns(DeprecationWarning,364 optimize._minimize._minimize_neldermead,365 func, [1, 1], xtol=1e-3, ftol=1e-3, maxiter=2)366class TestOptimizeWrapperDisp(CheckOptimizeParameterized):367 use_wrapper = True368 disp = True369class TestOptimizeWrapperNoDisp(CheckOptimizeParameterized):370 use_wrapper = True371 disp = False372class TestOptimizeNoWrapperDisp(CheckOptimizeParameterized):373 use_wrapper = False374 disp = True375class TestOptimizeNoWrapperNoDisp(CheckOptimizeParameterized):376 use_wrapper = False377 disp = False378class TestOptimizeSimple(CheckOptimize):379 def test_bfgs_nan(self):380 # Test corner case where nan is fed to optimizer. See gh-2067.381 func = lambda x: x382 fprime = lambda x: np.ones_like(x)383 x0 = [np.nan]384 with np.errstate(over='ignore', invalid='ignore'):385 x = optimize.fmin_bfgs(func, x0, fprime, disp=False)386 assert_(np.isnan(func(x)))387 def test_bfgs_nan_return(self):388 # Test corner cases where fun returns NaN. 
See gh-4793.389 # First case: NaN from first call.390 func = lambda x: np.nan391 with np.errstate(invalid='ignore'):392 result = optimize.minimize(func, 0)393 assert_(np.isnan(result['fun']))394 assert_(result['success'] is False)395 # Second case: NaN from second call.396 func = lambda x: 0 if x == 0 else np.nan397 fprime = lambda x: np.ones_like(x) # Steer away from zero.398 with np.errstate(invalid='ignore'):399 result = optimize.minimize(func, 0, jac=fprime)400 assert_(np.isnan(result['fun']))401 assert_(result['success'] is False)402 def test_bfgs_numerical_jacobian(self):403 # BFGS with numerical jacobian and a vector epsilon parameter.404 # define the epsilon parameter using a random vector405 epsilon = np.sqrt(np.finfo(float).eps) * np.random.rand(len(self.solution))406 params = optimize.fmin_bfgs(self.func, self.startparams,407 epsilon=epsilon, args=(),408 maxiter=self.maxiter, disp=False)409 assert_allclose(self.func(params), self.func(self.solution),410 atol=1e-6)411 def test_bfgs_gh_2169(self):412 def f(x):413 if x < 0:414 return 1.79769313e+308415 else:416 return x + 1./x417 xs = optimize.fmin_bfgs(f, [10.], disp=False)418 assert_allclose(xs, 1.0, rtol=1e-4, atol=1e-4)419 def test_l_bfgs_b(self):420 # limited-memory bound-constrained BFGS algorithm421 retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,422 self.grad, args=(),423 maxiter=self.maxiter)424 (params, fopt, d) = retval425 assert_allclose(self.func(params), self.func(self.solution),426 atol=1e-6)427 # Ensure that function call counts are 'known good'; these are from428 # Scipy 0.7.0. Don't allow them to increase.429 assert_(self.funccalls == 7, self.funccalls)430 assert_(self.gradcalls == 5, self.gradcalls)431 # Ensure that the function behaves the same; this is from Scipy 0.7.0432 assert_allclose(self.trace[3:5],433 [[0., -0.52489628, 0.48753042],434 [0., -0.52489628, 0.48753042]],435 atol=1e-14, rtol=1e-7)436 def test_l_bfgs_b_numjac(self):437 # L-BFGS-B with numerical jacobian438 retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,439 approx_grad=True,440 maxiter=self.maxiter)441 (params, fopt, d) = retval442 assert_allclose(self.func(params), self.func(self.solution),443 atol=1e-6)444 def test_l_bfgs_b_funjac(self):445 # L-BFGS-B with combined objective function and jacobian446 def fun(x):447 return self.func(x), self.grad(x)448 retval = optimize.fmin_l_bfgs_b(fun, self.startparams,449 maxiter=self.maxiter)450 (params, fopt, d) = retval451 assert_allclose(self.func(params), self.func(self.solution),452 atol=1e-6)453 def test_minimize_l_bfgs_b(self):454 # Minimize with L-BFGS-B method455 opts = {'disp': False, 'maxiter': self.maxiter}456 r = optimize.minimize(self.func, self.startparams,457 method='L-BFGS-B', jac=self.grad,458 options=opts)459 assert_allclose(self.func(r.x), self.func(self.solution),460 atol=1e-6)461 # approximate jacobian462 ra = optimize.minimize(self.func, self.startparams,463 method='L-BFGS-B', options=opts)464 assert_allclose(self.func(ra.x), self.func(self.solution),465 atol=1e-6)466 # check that function evaluations in approximate jacobian are counted467 assert_(ra.nfev > r.nfev)468 def test_minimize_l_bfgs_b_ftol(self):469 # Check that the `ftol` parameter in l_bfgs_b works as expected470 v0 = None471 for tol in [1e-1, 1e-4, 1e-7, 1e-10]:472 opts = {'disp': False, 'maxiter': self.maxiter, 'ftol': tol}473 sol = optimize.minimize(self.func, self.startparams,474 method='L-BFGS-B', jac=self.grad,475 options=opts)476 v = self.func(sol.x)477 if v0 is None:478 v0 = v479 else:480 
assert_(v < v0)481 assert_allclose(v, self.func(self.solution), rtol=tol)482 def test_minimize_l_bfgs_maxls(self):483 # check that the maxls is passed down to the Fortran routine484 sol = optimize.minimize(optimize.rosen, np.array([-1.2,1.0]),485 method='L-BFGS-B', jac=optimize.rosen_der,486 options={'disp': False, 'maxls': 1})487 assert_(not sol.success)488 def test_minimize_l_bfgs_b_maxfun_interruption(self):489 # gh-6162490 f = optimize.rosen491 g = optimize.rosen_der492 values = []493 x0 = np.ones(7) * 1000494 def objfun(x):495 value = f(x)496 values.append(value)497 return value498 # Look for an interesting test case.499 # Request a maxfun that stops at a particularly bad function500 # evaluation somewhere between 100 and 300 evaluations.501 low, medium, high = 30, 100, 300502 optimize.fmin_l_bfgs_b(objfun, x0, fprime=g, maxfun=high)503 v, k = max((y, i) for i, y in enumerate(values[medium:]))504 maxfun = medium + k505 # If the minimization strategy is reasonable,506 # the minimize() result should not be worse than the best507 # of the first 30 function evaluations.508 target = min(values[:low])509 xmin, fmin, d = optimize.fmin_l_bfgs_b(f, x0, fprime=g, maxfun=maxfun)510 assert_array_less(fmin, target)511 def test_custom(self):512 # This function comes from the documentation example.513 def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1,514 maxiter=100, callback=None, **options):515 bestx = x0516 besty = fun(x0)517 funcalls = 1518 niter = 0519 improved = True520 stop = False521 while improved and not stop and niter < maxiter:522 improved = False523 niter += 1524 for dim in range(np.size(x0)):525 for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]:526 testx = np.copy(bestx)527 testx[dim] = s528 testy = fun(testx, *args)529 funcalls += 1530 if testy < besty:531 besty = testy532 bestx = testx533 improved = True534 if callback is not None:535 callback(bestx)536 if maxfev is not None and funcalls >= maxfev:537 stop = True538 break539 return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,540 nfev=funcalls, success=(niter > 1))541 x0 = [1.35, 0.9, 0.8, 1.1, 1.2]542 res = optimize.minimize(optimize.rosen, x0, method=custmin,543 options=dict(stepsize=0.05))544 assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4)545 def test_minimize_tol_parameter(self):546 # Check that the minimize() tol= argument does something547 def func(z):548 x, y = z549 return x**2*y**2 + x**4 + 1550 def dfunc(z):551 x, y = z552 return np.array([2*x*y**2 + 4*x**3, 2*x**2*y])553 for method in ['nelder-mead', 'powell', 'cg', 'bfgs',554 'newton-cg', 'l-bfgs-b', 'tnc',555 'cobyla', 'slsqp']:556 if method in ('nelder-mead', 'powell', 'cobyla'):557 jac = None558 else:559 jac = dfunc560 sol1 = optimize.minimize(func, [1, 1], jac=jac, tol=1e-10,561 method=method)562 sol2 = optimize.minimize(func, [1, 1], jac=jac, tol=1.0,563 method=method)564 assert_(func(sol1.x) < func(sol2.x),565 "%s: %s vs. 
%s" % (method, func(sol1.x), func(sol2.x)))566 def test_no_increase(self):567 # Check that the solver doesn't return a value worse than the568 # initial point.569 def func(x):570 return (x - 1)**2571 def bad_grad(x):572 # purposefully invalid gradient function, simulates a case573 # where line searches start failing574 return 2*(x - 1) * (-1) - 2575 def check(method):576 x0 = np.array([2.0])577 f0 = func(x0)578 jac = bad_grad579 if method in ['nelder-mead', 'powell', 'cobyla']:580 jac = None581 sol = optimize.minimize(func, x0, jac=jac, method=method,582 options=dict(maxiter=20))583 assert_equal(func(sol.x), sol.fun)584 dec.knownfailureif(method == 'slsqp', "SLSQP returns slightly worse")(lambda: None)()585 assert_(func(sol.x) <= f0)586 for method in ['nelder-mead', 'powell', 'cg', 'bfgs',587 'newton-cg', 'l-bfgs-b', 'tnc',588 'cobyla', 'slsqp']:589 yield check, method590 def test_slsqp_respect_bounds(self):591 # Regression test for gh-3108592 def f(x):593 return sum((x - np.array([1., 2., 3., 4.]))**2)594 def cons(x):595 a = np.array([[-1, -1, -1, -1], [-3, -3, -2, -1]])596 return np.concatenate([np.dot(a, x) + np.array([5, 10]), x])597 x0 = np.array([0.5, 1., 1.5, 2.])598 res = optimize.minimize(f, x0, method='slsqp',599 constraints={'type': 'ineq', 'fun': cons})600 assert_allclose(res.x, np.array([0., 2, 5, 8])/3, atol=1e-12)601 def test_minimize_automethod(self):602 def f(x):603 return x**2604 def cons(x):605 return x - 2606 x0 = np.array([10.])607 sol_0 = optimize.minimize(f, x0)608 sol_1 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}])609 sol_2 = optimize.minimize(f, x0, bounds=[(5, 10)])610 sol_3 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(5, 10)])611 sol_4 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(1, 10)])612 for sol in [sol_0, sol_1, sol_2, sol_3, sol_4]:613 assert_(sol.success)614 assert_allclose(sol_0.x, 0, atol=1e-7)615 assert_allclose(sol_1.x, 2, atol=1e-7)616 assert_allclose(sol_2.x, 5, atol=1e-7)617 assert_allclose(sol_3.x, 5, atol=1e-7)618 assert_allclose(sol_4.x, 2, atol=1e-7)619 def test_minimize_coerce_args_param(self):620 # Regression test for gh-3503621 def Y(x, c):622 return np.sum((x-c)**2)623 def dY_dx(x, c=None):624 return 2*(x-c)625 c = np.array([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5])626 xinit = np.random.randn(len(c))627 optimize.minimize(Y, xinit, jac=dY_dx, args=(c), method="BFGS")628 def test_initial_step_scaling(self):629 # Check that optimizer initial step is not huge even if the630 # function and gradients are631 scales = [1e-50, 1, 1e50]632 methods = ['CG', 'BFGS', 'L-BFGS-B', 'Newton-CG']633 def f(x):634 if first_step_size[0] is None and x[0] != x0[0]:635 first_step_size[0] = abs(x[0] - x0[0])636 if abs(x).max() > 1e4:637 raise AssertionError("Optimization stepped far away!")638 return scale*(x[0] - 1)**2639 def g(x):640 return np.array([scale*(x[0] - 1)])641 for scale, method in itertools.product(scales, methods):642 if method in ('CG', 'BFGS'):643 options = dict(gtol=scale*1e-8)644 else:645 options = dict()646 if scale < 1e-10 and method in ('L-BFGS-B', 'Newton-CG'):647 # XXX: return initial point if they see small gradient648 continue649 x0 = [-1.0]650 first_step_size = [None]651 res = optimize.minimize(f, x0, jac=g, method=method,652 options=options)653 err_msg = "{0} {1}: {2}: {3}".format(method, scale, first_step_size,654 res)655 assert_(res.success, err_msg)656 assert_allclose(res.x, [1.0], err_msg=err_msg)657 assert_(res.nit <= 3, err_msg)658 if scale > 
1e-10:659 if method in ('CG', 'BFGS'):660 assert_allclose(first_step_size[0], 1.01, err_msg=err_msg)661 else:662 # Newton-CG and L-BFGS-B use different logic for the first step,663 # but are both scaling invariant with step sizes ~ 1664 assert_(first_step_size[0] > 0.5 and first_step_size[0] < 3,665 err_msg)666 else:667 # step size has upper bound of ||grad||, so line668 # search makes many small steps669 pass670class TestLBFGSBBounds(TestCase):671 def setUp(self):672 self.bounds = ((1, None), (None, None))673 self.solution = (1, 0)674 def fun(self, x, p=2.0):675 return 1.0 / p * (x[0]**p + x[1]**p)676 def jac(self, x, p=2.0):677 return x**(p - 1)678 def fj(self, x, p=2.0):679 return self.fun(x, p), self.jac(x, p)680 def test_l_bfgs_b_bounds(self):681 x, f, d = optimize.fmin_l_bfgs_b(self.fun, [0, -1],682 fprime=self.jac,683 bounds=self.bounds)684 assert_(d['warnflag'] == 0, d['task'])685 assert_allclose(x, self.solution, atol=1e-6)686 def test_l_bfgs_b_funjac(self):687 # L-BFGS-B with fun and jac combined and extra arguments688 x, f, d = optimize.fmin_l_bfgs_b(self.fj, [0, -1], args=(2.0, ),689 bounds=self.bounds)690 assert_(d['warnflag'] == 0, d['task'])691 assert_allclose(x, self.solution, atol=1e-6)692 def test_minimize_l_bfgs_b_bounds(self):693 # Minimize with method='L-BFGS-B' with bounds694 res = optimize.minimize(self.fun, [0, -1], method='L-BFGS-B',695 jac=self.jac, bounds=self.bounds)696 assert_(res['success'], res['message'])697 assert_allclose(res.x, self.solution, atol=1e-6)698class TestOptimizeScalar(TestCase):699 def setUp(self):700 self.solution = 1.5701 def fun(self, x, a=1.5):702 """Objective function"""703 return (x - a)**2 - 0.8704 def test_brent(self):705 x = optimize.brent(self.fun)706 assert_allclose(x, self.solution, atol=1e-6)707 x = optimize.brent(self.fun, brack=(-3, -2))708 assert_allclose(x, self.solution, atol=1e-6)709 x = optimize.brent(self.fun, full_output=True)710 assert_allclose(x[0], self.solution, atol=1e-6)711 x = optimize.brent(self.fun, brack=(-15, -1, 15))712 assert_allclose(x, self.solution, atol=1e-6)713 def test_golden(self):714 x = optimize.golden(self.fun)715 assert_allclose(x, self.solution, atol=1e-6)716 x = optimize.golden(self.fun, brack=(-3, -2))717 assert_allclose(x, self.solution, atol=1e-6)718 x = optimize.golden(self.fun, full_output=True)719 assert_allclose(x[0], self.solution, atol=1e-6)720 x = optimize.golden(self.fun, brack=(-15, -1, 15))721 assert_allclose(x, self.solution, atol=1e-6)722 def test_fminbound(self):723 x = optimize.fminbound(self.fun, 0, 1)724 assert_allclose(x, 1, atol=1e-4)725 x = optimize.fminbound(self.fun, 1, 5)726 assert_allclose(x, self.solution, atol=1e-6)727 x = optimize.fminbound(self.fun, np.array([1]), np.array([5]))728 assert_allclose(x, self.solution, atol=1e-6)729 assert_raises(ValueError, optimize.fminbound, self.fun, 5, 1)730 def test_fminbound_scalar(self):731 try:732 optimize.fminbound(self.fun, np.zeros((1, 2)), 1)733 self.fail("exception not raised")734 except ValueError as e:735 assert_('must be scalar' in str(e))736 x = optimize.fminbound(self.fun, 1, np.array(5))737 assert_allclose(x, self.solution, atol=1e-6)738 def test_minimize_scalar(self):739 # combine all tests above for the minimize_scalar wrapper740 x = optimize.minimize_scalar(self.fun).x741 assert_allclose(x, self.solution, atol=1e-6)742 x = optimize.minimize_scalar(self.fun, method='Brent')743 assert_(x.success)744 x = optimize.minimize_scalar(self.fun, method='Brent',745 options=dict(maxiter=3))746 assert_(not x.success)747 x = 
optimize.minimize_scalar(self.fun, bracket=(-3, -2),748 args=(1.5, ), method='Brent').x749 assert_allclose(x, self.solution, atol=1e-6)750 x = optimize.minimize_scalar(self.fun, method='Brent',751 args=(1.5,)).x752 assert_allclose(x, self.solution, atol=1e-6)753 x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),754 args=(1.5, ), method='Brent').x755 assert_allclose(x, self.solution, atol=1e-6)756 x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),757 args=(1.5, ), method='golden').x758 assert_allclose(x, self.solution, atol=1e-6)759 x = optimize.minimize_scalar(self.fun, method='golden',760 args=(1.5,)).x761 assert_allclose(x, self.solution, atol=1e-6)762 x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),763 args=(1.5, ), method='golden').x764 assert_allclose(x, self.solution, atol=1e-6)765 x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,),766 method='Bounded').x767 assert_allclose(x, 1, atol=1e-4)768 x = optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ),769 method='bounded').x770 assert_allclose(x, self.solution, atol=1e-6)771 x = optimize.minimize_scalar(self.fun, bounds=(np.array([1]),772 np.array([5])),773 args=(np.array([1.5]), ),774 method='bounded').x775 assert_allclose(x, self.solution, atol=1e-6)776 assert_raises(ValueError, optimize.minimize_scalar, self.fun,777 bounds=(5, 1), method='bounded', args=(1.5, ))778 assert_raises(ValueError, optimize.minimize_scalar, self.fun,779 bounds=(np.zeros(2), 1), method='bounded', args=(1.5, ))780 x = optimize.minimize_scalar(self.fun, bounds=(1, np.array(5)),781 method='bounded').x782 assert_allclose(x, self.solution, atol=1e-6)783 def test_minimize_scalar_custom(self):784 # This function comes from the documentation example.785 def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1,786 maxiter=100, callback=None, **options):787 bestx = (bracket[1] + bracket[0]) / 2.0788 besty = fun(bestx)789 funcalls = 1790 niter = 0791 improved = True792 stop = False793 while improved and not stop and niter < maxiter:794 improved = False795 niter += 1796 for testx in [bestx - stepsize, bestx + stepsize]:797 testy = fun(testx, *args)798 funcalls += 1799 if testy < besty:800 besty = testy801 bestx = testx802 improved = True803 if callback is not None:804 callback(bestx)805 if maxfev is not None and funcalls >= maxfev:806 stop = True807 break808 return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,809 nfev=funcalls, success=(niter > 1))810 res = optimize.minimize_scalar(self.fun, bracket=(0, 4), method=custmin,811 options=dict(stepsize=0.05))812 assert_allclose(res.x, self.solution, atol=1e-6)813 def test_minimize_scalar_coerce_args_param(self):814 # Regression test for gh-3503815 optimize.minimize_scalar(self.fun, args=1.5)816def test_brent_negative_tolerance():817 assert_raises(ValueError, optimize.brent, np.cos, tol=-.01)818class TestNewtonCg(object):819 def test_rosenbrock(self):820 x0 = np.array([-1.2, 1.0])821 sol = optimize.minimize(optimize.rosen, x0,822 jac=optimize.rosen_der,823 hess=optimize.rosen_hess,824 tol=1e-5,825 method='Newton-CG')826 assert_(sol.success, sol.message)827 assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4)828 def test_himmelblau(self):829 x0 = np.array(himmelblau_x0)830 sol = optimize.minimize(himmelblau,831 x0,832 jac=himmelblau_grad,833 hess=himmelblau_hess,834 method='Newton-CG',835 tol=1e-6)836 assert_(sol.success, sol.message)837 assert_allclose(sol.x, himmelblau_xopt, rtol=1e-4)838 assert_allclose(sol.fun, himmelblau_min, atol=1e-4)839class 
TestRosen(TestCase):840 def test_hess(self):841 # Compare rosen_hess(x) times p with rosen_hess_prod(x,p). See gh-1775842 x = np.array([3, 4, 5])843 p = np.array([2, 2, 2])844 hp = optimize.rosen_hess_prod(x, p)845 dothp = np.dot(optimize.rosen_hess(x), p)846 assert_equal(hp, dothp)847def himmelblau(p):848 """849 R^2 -> R^1 test function for optimization. The function has four local850 minima where himmelblau(xopt) == 0.851 """852 x, y = p853 a = x*x + y - 11854 b = x + y*y - 7855 return a*a + b*b856def himmelblau_grad(p):857 x, y = p858 return np.array([4*x**3 + 4*x*y - 42*x + 2*y**2 - 14,859 2*x**2 + 4*x*y + 4*y**3 - 26*y - 22])860def himmelblau_hess(p):861 x, y = p862 return np.array([[12*x**2 + 4*y - 42, 4*x + 4*y],863 [4*x + 4*y, 4*x + 12*y**2 - 26]])864himmelblau_x0 = [-0.27, -0.9]865himmelblau_xopt = [3, 2]866himmelblau_min = 0.0867def test_minimize_multiple_constraints():868 # Regression test for gh-4240.869 def func(x):870 return np.array([25 - 0.2 * x[0] - 0.4 * x[1] - 0.33 * x[2]])871 def func1(x):872 return np.array([x[1]])873 def func2(x):874 return np.array([x[2]])875 cons = ({'type': 'ineq', 'fun': func},876 {'type': 'ineq', 'fun': func1},877 {'type': 'ineq', 'fun': func2})878 f = lambda x: -1 * (x[0] + x[1] + x[2])879 res = optimize.minimize(f, [0, 0, 0], method='SLSQP', constraints=cons)880 assert_allclose(res.x, [125, 0, 0], atol=1e-10)881class TestOptimizeResultAttributes(TestCase):882 # Test that all minimizers return an OptimizeResult containing883 # all the OptimizeResult attributes884 def setUp(self):885 self.x0 = [5, 5]886 self.func = optimize.rosen887 self.jac = optimize.rosen_der888 self.hess = optimize.rosen_hess889 self.hessp = optimize.rosen_hess_prod890 self.bounds = [(0., 10.), (0., 10.)]891 def test_attributes_present(self):892 methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Newton-CG',893 'L-BFGS-B', 'TNC', 'COBYLA', 'SLSQP', 'dogleg',894 'trust-ncg']895 attributes = ['nit', 'nfev', 'x', 'success', 'status', 'fun',896 'message']897 skip = {'COBYLA': ['nit']}898 for method in methods:899 with warnings.catch_warnings():900 warnings.simplefilter("ignore")901 res = optimize.minimize(self.func, self.x0, method=method,902 jac=self.jac, hess=self.hess,903 hessp=self.hessp)904 for attribute in attributes:905 if method in skip and attribute in skip[method]:906 continue907 assert_(hasattr(res, attribute))908 assert_(attribute in dir(res))909class TestBrute:910 # Test the "brute force" method911 def setUp(self):912 self.params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)913 self.rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))914 self.solution = np.array([-1.05665192, 1.80834843])915 def f1(self, z, *params):916 x, y = z917 a, b, c, d, e, f, g, h, i, j, k, l, scale = params918 return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f)919 def f2(self, z, *params):920 x, y = z921 a, b, c, d, e, f, g, h, i, j, k, l, scale = params922 return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale))923 def f3(self, z, *params):924 x, y = z925 a, b, c, d, e, f, g, h, i, j, k, l, scale = params926 return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale))927 def func(self, z, *params):928 return self.f1(z, *params) + self.f2(z, *params) + self.f3(z, *params)929 @suppressed_stdout930 def test_brute(self):931 # test fmin932 resbrute = optimize.brute(self.func, self.rranges, args=self.params,933 full_output=True, finish=optimize.fmin)934 assert_allclose(resbrute[0], self.solution, atol=1e-3)935 assert_allclose(resbrute[1], self.func(self.solution, *self.params),936 atol=1e-3)937 # test 
minimize938 resbrute = optimize.brute(self.func, self.rranges, args=self.params,939 full_output=True,940 finish=optimize.minimize)941 assert_allclose(resbrute[0], self.solution, atol=1e-3)942 assert_allclose(resbrute[1], self.func(self.solution, *self.params),943 atol=1e-3)944class TestIterationLimits(TestCase):945 # Tests that optimisation does not give up before trying requested946 # number of iterations or evaluations. And that it does not succeed947 # by exceeding the limits.948 def setUp(self):949 self.funcalls = 0950 def slow_func(self, v):951 self.funcalls += 1952 r,t = np.sqrt(v[0]**2+v[1]**2), np.arctan2(v[0],v[1])953 return np.sin(r*20 + t)+r*0.5954 def test_neldermead_limit(self):955 self.check_limits("Nelder-Mead", 200)956 def test_powell_limit(self):957 self.check_limits("powell", 1000)958 def check_limits(self, method, default_iters):959 for start_v in [[0.1,0.1], [1,1], [2,2]]:960 for mfev in [50, 500, 5000]:961 self.funcalls = 0962 res = optimize.minimize(self.slow_func, start_v,963 method=method, options={"maxfev":mfev})964 assert_(self.funcalls == res["nfev"])965 if res["success"]:966 assert_(res["nfev"] < mfev)967 else:968 assert_(res["nfev"] >= mfev)969 for mit in [50, 500,5000]:970 res = optimize.minimize(self.slow_func, start_v,971 method=method, options={"maxiter":mit})972 if res["success"]:973 assert_(res["nit"] <= mit)974 else:975 assert_(res["nit"] >= mit)976 for mfev,mit in [[50,50], [5000,5000],[5000,np.inf]]:977 self.funcalls = 0978 res = optimize.minimize(self.slow_func, start_v,979 method=method, options={"maxiter":mit, "maxfev":mfev})980 assert_(self.funcalls == res["nfev"])981 if res["success"]:982 assert_(res["nfev"] < mfev and res["nit"] <= mit)983 else:984 assert_(res["nfev"] >= mfev or res["nit"] >= mit)985 for mfev,mit in [[np.inf,None], [None,np.inf]]:986 self.funcalls = 0987 res = optimize.minimize(self.slow_func, start_v,988 method=method, options={"maxiter":mit, "maxfev":mfev})989 assert_(self.funcalls == res["nfev"])990 if res["success"]:991 if mfev is None:992 assert_(res["nfev"] < default_iters*2)993 else:994 assert_(res["nit"] <= default_iters*2)995 else:996 assert_(res["nfev"] >= default_iters*2 or997 res["nit"] >= default_iters*2)998if __name__ == "__main__":...
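In the snippet above the decorator's whole job is visible in one line: put @suppressed_stdout on any test whose optimizer is allowed to print a convergence report (disp=True), and that output is discarded while the test body runs. Below is a minimal stand-alone sketch of the same usage; the test itself is hypothetical, and it assumes an older SciPy release that still ships the private, nose-based helper scipy._lib._testutils.suppressed_stdout (newer releases no longer do).

# Hypothetical example -- not part of SciPy's suite. Requires an older SciPy
# that still provides scipy._lib._testutils.suppressed_stdout, plus nose.
import numpy as np

from scipy import optimize
from scipy._lib._testutils import suppressed_stdout


@suppressed_stdout
def test_rosen_bfgs_runs_quietly():
    # disp=True makes fmin_bfgs print a convergence summary; the decorator
    # points sys.stdout at os.devnull for the duration of the call, so the
    # test log stays clean.
    xopt = optimize.fmin_bfgs(optimize.rosen, [-1.2, 1.0],
                              fprime=optimize.rosen_der, disp=True)
    assert np.allclose(xopt, [1.0, 1.0], atol=1e-4)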


_testutils.py

Source: _testutils.py (GitHub)


...
        except (ValueError, KeyError):
            pass
        return dec.knownfailureif(True, msg)(func)
    return deco


def suppressed_stdout(f):
    import nose

    def pwrapper(*arg, **kwargs):
        oldstdout = sys.stdout
        sys.stdout = open(os.devnull, 'w')
        try:
            return f(*arg, **kwargs)
        finally:
            sys.stdout.close()
            sys.stdout = oldstdout
    return nose.tools.make_decorator(f)(pwrapper)


@decorator
def xslow(func, *a, **kw):
    try:
        v = int(os.environ.get('SCIPY_XSLOW', '0'))
...
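As the source shows, suppressed_stdout is only a few lines: it swaps sys.stdout for a handle on os.devnull, restores the original stream in a finally block, and wraps the test with nose.tools.make_decorator so the wrapper keeps the original function's name for nose's test collector. If nose is not available, a roughly equivalent decorator can be built from the standard library alone; the sketch below is an illustration under that assumption, not SciPy's (or Test_junkie's) API.

# Hypothetical nose-free stand-in with the same behaviour: silence anything
# the wrapped function prints to stdout, then hand back its return value.
import functools
import io
from contextlib import redirect_stdout


def suppressed_stdout(f):
    @functools.wraps(f)  # plays the role of nose.tools.make_decorator
    def wrapper(*args, **kwargs):
        # redirect_stdout restores sys.stdout even if the call raises,
        # so no explicit try/finally is needed.
        with redirect_stdout(io.StringIO()):
            return f(*args, **kwargs)
    return wrapper


@suppressed_stdout
def noisy():
    print("progress output that would otherwise clutter the test log")
    return 42


assert noisy() == 42  # the print is swallowed, the return value is not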


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Test_junkie automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 minutes of automation testing for free!

