  1. """
  2. basinhopping: The basinhopping global optimization algorithm
  3. """
  4. from __future__ import division, print_function, absolute_import
  5. import numpy as np
  6. import math
  7. from numpy import cos, sin
  8. import scipy.optimize
  9. from scipy._lib._util import check_random_state
  10. __all__ = ['basinhopping']


class Storage(object):
    """
    Class used to store the lowest energy structure
    """
    def __init__(self, minres):
        self._add(minres)

    def _add(self, minres):
        self.minres = minres
        self.minres.x = np.copy(minres.x)

    def update(self, minres):
        if minres.fun < self.minres.fun:
            self._add(minres)
            return True
        else:
            return False

    def get_lowest(self):
        return self.minres


class BasinHoppingRunner(object):
    """This class implements the core of the basinhopping algorithm.

    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly. Signature should
        be ``x_new = step_taking(x)``. Note that `x` may be modified in-place.
    accept_tests : list of callables
        Each test is passed the kwargs `f_new`, `x_new`, `f_old` and
        `x_old`. These tests will be used to judge whether or not to accept
        the step. The acceptable return values are True, False, or ``"force
        accept"``. If any of the tests return False then the step is rejected.
        If ``"force accept"`` is returned, it overrides any other tests in
        order to accept the step. This can be used, for example, to forcefully
        escape from a local minimum that ``basinhopping`` is trapped in.
    disp : bool, optional
        Display status messages.
    """
    def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")

        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(minres)

        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one Monte Carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.
        """
        # Take a random step. Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")

        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # accept the move based on self.accept_tests. If any test is False,
        # then reject the step. If any test returns the special string
        # 'force accept', then accept the step regardless. This can be used
        # to forcefully escape from a local minimum if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if testres == 'force accept':
                accept = True
                break
            elif testres is None:
                raise ValueError("accept_tests must return True, False, or "
                                 "'force accept'")
            elif not testres:
                accept = False

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return accept, minres

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm
        """
        self.nstep += 1
        new_global_min = False

        accept, minres = self._monte_carlo_step()

        if accept:
            self.energy = minres.fun
            self.x = np.copy(minres.x)
            new_global_min = self.storage.update(minres)

        # print some information
        if self.disp:
            self.print_report(minres.fun, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        self.xtrial = minres.x
        self.energy_trial = minres.fun
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        minres = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              "lowest_f %g" % (self.nstep, self.energy, energy_trial,
                               accept, minres.fun))


class AdaptiveStepsize(object):
    """
    Class to implement adaptive stepsize.

    This class wraps the step taking class and modifies the stepsize to
    ensure the true acceptance rate is as close as possible to the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine. Must contain modifiable attribute
        takestep.stepsize
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update
    """
    def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
                 verbose=True):
        self.takestep = takestep
        self.target_accept_rate = accept_rate
        self.interval = interval
        self.factor = factor
        self.verbose = verbose

        self.nstep = 0
        self.nstep_tot = 0
        self.naccept = 0

    def __call__(self, x):
        return self.take_step(x)

    def _adjust_step_size(self):
        old_stepsize = self.takestep.stepsize
        accept_rate = float(self.naccept) / self.nstep
        if accept_rate > self.target_accept_rate:
            # We're accepting too many steps. This generally means we're
            # trapped in a basin. Take bigger steps.
            self.takestep.stepsize /= self.factor
        else:
            # We're not accepting enough steps. Take smaller steps.
            self.takestep.stepsize *= self.factor
        if self.verbose:
            print("adaptive stepsize: acceptance rate %f target %f new "
                  "stepsize %g old stepsize %g" % (accept_rate,
                  self.target_accept_rate, self.takestep.stepsize,
                  old_stepsize))
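    # For example, with the default factor of 0.9 each update either grows
    # the stepsize by a factor of 1/0.9 (about 11%) or shrinks it by 10%.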

    def take_step(self, x):
        self.nstep += 1
        self.nstep_tot += 1
        if self.nstep % self.interval == 0:
            self._adjust_step_size()
        return self.takestep(x)

    def report(self, accept, **kwargs):
        "called by basinhopping to report the result of the step"
        if accept:
            self.naccept += 1


class RandomDisplacement(object):
    """
    Add a random displacement of maximum size `stepsize` to each coordinate

    Calling this updates `x` in-place.

    Parameters
    ----------
    stepsize : float, optional
        Maximum stepsize in any dimension
    random_state : None or `np.random.RandomState` instance, optional
        The random number generator that generates the displacements
    """
    def __init__(self, stepsize=0.5, random_state=None):
        self.stepsize = stepsize
        self.random_state = check_random_state(random_state)

    def __call__(self, x):
        x += self.random_state.uniform(-self.stepsize, self.stepsize,
                                       np.shape(x))
        return x


class MinimizerWrapper(object):
    """
    wrap a minimizer function as a minimizer class
    """
    def __init__(self, minimizer, func=None, **kwargs):
        self.minimizer = minimizer
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0):
        if self.func is None:
            return self.minimizer(x0, **self.kwargs)
        else:
            return self.minimizer(self.func, x0, **self.kwargs)


class Metropolis(object):
    """
    Metropolis acceptance criterion

    Parameters
    ----------
    T : float
        The "temperature" parameter for the accept or reject criterion.
    random_state : None or `np.random.RandomState` object
        Random number generator used for acceptance test
    """
    def __init__(self, T, random_state=None):
        # Avoid ZeroDivisionError since "MBH can be regarded as a special case
        # of the BH framework with the Metropolis criterion, where temperature
        # T = 0." (Reject all steps that increase energy.)
        self.beta = 1.0 / T if T != 0 else float('inf')
        self.random_state = check_random_state(random_state)

    def accept_reject(self, energy_new, energy_old):
        """
        If new energy is lower than old, it will always be accepted.
        If new is higher than old, there is a chance it will be accepted,
        less likely for larger differences.
        """
        w = math.exp(min(0, -float(energy_new - energy_old) * self.beta))
        rand = self.random_state.rand()
        return w >= rand
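
    # Worked example (illustrative numbers): with T = 1, so beta = 1, an
    # uphill move of 0.5 in function value is accepted with probability
    # w = exp(-0.5), about 0.61; any downhill move gives w = 1 and is
    # always accepted.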

    def __call__(self, **kwargs):
        """
        f_new and f_old are mandatory in kwargs
        """
        return bool(self.accept_reject(kwargs["f_new"],
                    kwargs["f_old"]))


def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None,
                 seed=None):
    """
    Find the global minimum of a function using the basin-hopping algorithm

    Basin-hopping is a two-phase method that combines a global stepping
    algorithm with local minimization at each step. Designed to mimic
    the natural process of energy minimization of clusters of atoms, it works
    well for similar problems with "funnel-like, but rugged" energy landscapes
    [5]_.

    As the step-taking, step acceptance, and minimization methods are all
    customizable, this function can also be used to implement other two-phase
    methods.

    Parameters
    ----------
    func : callable ``f(x, *args)``
        Function to be optimized. ``args`` can be passed as an optional item
        in the dict ``minimizer_kwargs``
    x0 : array_like
        Initial guess.
    niter : integer, optional
        The number of basin-hopping iterations
    T : float, optional
        The "temperature" parameter for the accept or reject criterion.
        Higher "temperatures" mean that larger jumps in function value will be
        accepted. For best results ``T`` should be comparable to the
        separation (in function value) between local minima.
    stepsize : float, optional
        Maximum step size for use in the random displacement.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the local minimizer
        ``scipy.optimize.minimize()``. Some important options could be:

            method : str
                The minimization method (e.g. ``"L-BFGS-B"``)
            args : tuple
                Extra arguments passed to the objective function (``func``)
                and its derivatives (Jacobian, Hessian).

    take_step : callable ``take_step(x)``, optional
        Replace the default step-taking routine with this routine. The default
        step-taking routine is a random displacement of the coordinates, but
        other step-taking algorithms may be better for some systems.
        ``take_step`` can optionally have the attribute ``take_step.stepsize``.
        If this attribute exists, then ``basinhopping`` will adjust
        ``take_step.stepsize`` in order to try to optimize the global minimum
        search.
    accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=f_old, x_old=x_old)``, optional
        Define a test which will be used to judge whether or not to accept the
        step. This will be used in addition to the Metropolis test based on
        "temperature" ``T``. The acceptable return values are True,
        False, or ``"force accept"``. If any of the tests return False
        then the step is rejected. If ``"force accept"`` is returned, it
        overrides any other tests in order to accept the step. This can be
        used, for example, to forcefully escape from a local minimum that
        ``basinhopping`` is trapped in.
    callback : callable, ``callback(x, f, accept)``, optional
        A callback function which will be called for all minima found. ``x``
        and ``f`` are the coordinates and function value of the trial minimum,
        and ``accept`` is whether or not that minimum was accepted. This can
        be used, for example, to save the lowest N minima found. Also,
        ``callback`` can be used to specify a user defined stop criterion by
        optionally returning True to stop the ``basinhopping`` routine.
    interval : integer, optional
        interval for how often to update the ``stepsize``
    disp : bool, optional
        Set to True to print status messages
    niter_success : integer, optional
        Stop the run if the global minimum candidate remains the same for this
        number of iterations.
    seed : int or `np.random.RandomState`, optional
        If `seed` is not specified, the `np.random.RandomState` singleton is
        used. If `seed` is an int, a new `np.random.RandomState` instance is
        used, seeded with `seed`. If `seed` is already a
        `np.random.RandomState` instance, then that instance is used.
        Specify `seed` for repeatable minimizations. The random numbers
        generated with this seed only affect the default Metropolis
        `accept_test` and the default `take_step`. If you supply your own
        `take_step` and `accept_test`, and these functions use random
        number generation, then those functions are responsible for the state
        of their random number generator.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``fun`` the value
        of the function at the solution, and ``message`` which describes the
        cause of the termination. The ``OptimizeResult`` object returned by
        the selected minimizer at the lowest minimum is also contained within
        this object and can be accessed through the
        ``lowest_optimization_result`` attribute. See `OptimizeResult` for a
        description of other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping
        step. ``minimizer_kwargs`` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_
    [3]_ [4]_. The algorithm in its current form was described by David Wales
    and Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative with each cycle composed of the following
    features

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard
    Monte Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry. It is
    particularly useful when the function has many minima separated by large
    barriers. See the Cambridge Cluster Database
    http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
    that have been optimized primarily using basin-hopping. This database
    includes minimization problems exceeding 300 degrees of freedom.

    See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN)
    for a Fortran implementation of basin-hopping. This implementation has
    many different variations of the procedure described above, including more
    advanced step taking algorithms and alternate acceptance criteria.

    For stochastic global optimization there is no way to determine if the
    true global minimum has actually been found. Instead, as a consistency
    check, the algorithm can be run from a number of different random starting
    points to ensure the lowest minimum found in each example has converged to
    the global minimum. For this reason ``basinhopping`` will by default
    simply run for the number of iterations ``niter`` and return the lowest
    minimum found. It is left to the user to ensure that this is in fact the
    global minimum.

    Choosing ``stepsize``: This is a crucial parameter in ``basinhopping``
    and depends on the problem being solved. The step is chosen uniformly in
    the region from x0-stepsize to x0+stepsize, in each dimension. Ideally it
    should be comparable to the typical separation (in argument values)
    between local minima of the function being optimized. ``basinhopping``
    will, by default, adjust ``stepsize`` to find an optimal value, but this
    may take many iterations. You will get quicker results if you set a
    sensible initial value for ``stepsize``.

    Choosing ``T``: The parameter ``T`` is the "temperature" used in the
    Metropolis criterion. Basinhopping steps are always accepted if
    ``func(xnew) < func(xold)``. Otherwise, they are accepted with
    probability::

        exp( -(func(xnew) - func(xold)) / T )

    So, for best results, ``T`` should be comparable to the typical
    difference (in function values) between local minima. (The height of
    "walls" between local minima is irrelevant.)

    If ``T`` is 0, the algorithm becomes Monotonic Basin-Hopping, in which all
    steps that increase energy are rejected.

    .. versionadded:: 0.12.0

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University
        Press, Cambridge, UK.
    .. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping
        and the Lowest Energy Structures of Lennard-Jones Clusters Containing
        up to 110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to
        the multiple-minima problem in protein folding, Proc. Natl. Acad.
        Sci. USA, 1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.
    .. [5] Olson, B., Hashmi, I., Molloy, K., and Shehu, A., Basin Hopping as
        a General and Versatile Optimization Framework for the
        Characterization of Biological Macromolecules, Advances in Artificial
        Intelligence, Volume 2012 (2012), Article ID 674832,
        :doi:`10.1155/2012/674832`

    Examples
    --------
    The following example is a one-dimensional minimization problem, with many
    local minima superimposed on a parabola.

    >>> from scipy.optimize import basinhopping
    >>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0 = [1.]

    Basinhopping, internally, uses a local minimization algorithm. We will use
    the parameter ``minimizer_kwargs`` to tell basinhopping which algorithm to
    use and how to set up that minimizer. This parameter will be passed to
    ``scipy.optimize.minimize()``.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x0) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x0) = -1.0009

    Next consider a two-dimensional minimization problem. Also, this time we
    will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                            0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm. Also, we must
    tell the minimizer that our function returns both energy and gradient
    (Jacobian).

    >>> minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109

    Here is an example using a custom step-taking routine. Imagine you want
    the first coordinate to take larger steps than the rest of the
    coordinates. This can be implemented like so:

    >>> class MyTakeStep(object):
    ...     def __init__(self, stepsize=0.5):
    ...         self.stepsize = stepsize
    ...     def __call__(self, x):
    ...         s = self.stepsize
    ...         x[0] += np.random.uniform(-2.*s, 2.*s)
    ...         x[1:] += np.random.uniform(-s, s, x[1:].shape)
    ...         return x

    Since ``MyTakeStep.stepsize`` exists, basinhopping will adjust the
    magnitude of ``stepsize`` to optimize the search. We'll use the same 2-D
    function as before.

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109

    Now let's do an example using a custom callback function which prints the
    value of every minimum found.

    >>> def print_fun(x, f, accepted):
    ...         print("at minimum %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> np.random.seed(1)
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun)
    at minimum 0.4159 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.1021 accepted 1
    at minimum -0.1021 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 2.2945 accepted 0
    at minimum -0.1021 accepted 1
    at minimum -1.0109 accepted 1
    at minimum -1.0109 accepted 1

    The minimum at -1.0109 is actually the global minimum, found already on
    the 8th iteration.

    Now let's implement bounds on the problem using a custom ``accept_test``:

    >>> class MyBounds(object):
    ...     def __init__(self, xmax=[1.1, 1.1], xmin=[-1.1, -1.1]):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)
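
    For repeatable runs, the ``seed`` parameter can also be supplied; a
    minimal sketch (the exact sequence of minima visited depends on the
    random stream):

    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, seed=10)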
  523. """
    x0 = np.array(x0)

    # set up the np.random.RandomState generator
    rng = check_random_state(seed)

    # set up minimizer
    if minimizer_kwargs is None:
        minimizer_kwargs = dict()
    wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
                                         **minimizer_kwargs)

    # set up step-taking algorithm
    if take_step is not None:
        if not callable(take_step):
            raise TypeError("take_step must be callable")
        # if take_step.stepsize exists then use AdaptiveStepsize to control
        # take_step.stepsize
        if hasattr(take_step, "stepsize"):
            take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
                                                 verbose=disp)
        else:
            take_step_wrapped = take_step
    else:
        # use default
        displace = RandomDisplacement(stepsize=stepsize, random_state=rng)
        take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
                                             verbose=disp)

    # set up accept tests
    accept_tests = []
    if accept_test is not None:
        if not callable(accept_test):
            raise TypeError("accept_test must be callable")
        accept_tests = [accept_test]

    # use default
    metropolis = Metropolis(T, random_state=rng)
    accept_tests.append(metropolis)

    if niter_success is None:
        niter_success = niter + 2
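    # (niter + 2 acts as a sentinel: ``count`` below can never exceed it
    # within ``niter`` iterations, so by default the early-stop test never
    # fires.)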

    bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
                            accept_tests, disp=disp)

    # start main iteration loop
    count, i = 0, 0
    message = ["requested number of basinhopping iterations completed"
               " successfully"]
    for i in range(niter):
        new_global_min = bh.one_cycle()

        if callable(callback):
            # should we pass a copy of x?
            val = callback(bh.xtrial, bh.energy_trial, bh.accept)
            if val is not None:
                if val:
                    message = ["callback function requested stop early by "
                               "returning True"]
                    break

        count += 1
        if new_global_min:
            count = 0
        elif count > niter_success:
            message = ["success condition satisfied"]
            break

    # prepare return object
    res = bh.res
    res.lowest_optimization_result = bh.storage.get_lowest()
    res.x = np.copy(res.lowest_optimization_result.x)
    res.fun = res.lowest_optimization_result.fun
    res.message = message
    res.nit = i + 1
    return res


def _test_func2d_nograd(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
         + 1.010876184442655)
    return f


def _test_func2d(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] -
         0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252)
    df = np.zeros(2)
    df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
    df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
    return f, df


if __name__ == "__main__":
    print("\n\nminimize a 2d function without gradient")
    # minimum expected at ~[-0.195, -0.1]
    kwargs = {"method": "L-BFGS-B"}
    x0 = np.array([1.0, 1.])
    scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs)
    ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs,
                       niter=200, disp=False)
    print("minimum expected at func([-0.195, -0.1]) = 0.0")
    print(ret)

    print("\n\ntry a harder 2d problem")
    kwargs = {"method": "L-BFGS-B", "jac": True}
    x0 = np.array([1.0, 1.0])
    ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
                       disp=False)
    print("minimum expected at ~func([-0.19415263, -0.19415263]) = 0")
    print(ret)