  1. """
  2. shgo: The simplicial homology global optimisation algorithm
  3. """
  4. from __future__ import division, print_function, absolute_import
  5. import numpy as np
  6. import time
  7. import logging
  8. import warnings
  9. from scipy import spatial
  10. from scipy.optimize import OptimizeResult, minimize
  11. from scipy.optimize._shgo_lib import sobol_seq
  12. from scipy.optimize._shgo_lib.triangulation import Complex
  13. __all__ = ['shgo']


def shgo(func, bounds, args=(), constraints=None, n=100, iters=1, callback=None,
         minimizer_kwargs=None, options=None, sampling_method='simplicial'):
    """
    Finds the global minimum of a function using SHG optimization.

    SHGO stands for "simplicial homology global optimization".

    Parameters
    ----------
    func : callable
        The objective function to be minimized. Must be in the form
        ``f(x, *args)``, where ``x`` is the argument in the form of a 1-D array
        and ``args`` is a tuple of any additional fixed parameters needed to
        completely specify the function.
    bounds : sequence
        Bounds for variables. ``(min, max)`` pairs for each element in ``x``,
        defining the lower and upper bounds for the optimizing argument of
        `func`. It is required to have ``len(bounds) == len(x)``.
        ``len(bounds)`` is used to determine the number of parameters in ``x``.
        Use ``None`` for one of min or max when there is no bound in that
        direction. By default bounds are ``(None, None)``.
    args : tuple, optional
        Any additional fixed parameters needed to completely specify the
        objective function.
    constraints : dict or sequence of dict, optional
        Constraints definition. Constraint functions on ``R^n``, in the form::

            g(x) >= 0 applied as g : R^n -> R^m
            h(x) == 0 applied as h : R^n -> R^p

        Each constraint is defined in a dictionary with fields:

            type : str
                Constraint type: 'eq' for equality, 'ineq' for inequality.
            fun : callable
                The function defining the constraint.
            jac : callable, optional
                The Jacobian of `fun` (only for SLSQP).
            args : sequence, optional
                Extra arguments to be passed to the function and Jacobian.

        Equality constraint means that the constraint function result is to
        be zero whereas inequality means that it is to be non-negative.
        Note that COBYLA only supports inequality constraints.

        .. note::

            Only the COBYLA and SLSQP local minimize methods currently
            support constraint arguments. If the ``constraints`` sequence
            used in the local optimization problem is not defined in
            ``minimizer_kwargs`` and a constrained method is used then the
            global ``constraints`` will be used.
            (Defining a ``constraints`` sequence in ``minimizer_kwargs``
            means that ``constraints`` will not be added, so if equality
            constraints and so forth need to be added then the inequality
            functions in ``constraints`` need to be added to
            ``minimizer_kwargs`` too.)
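
        For example, a single inequality constraint ``x_0 + x_1 <= 1`` can
        be written in the required non-negative form as (an illustrative
        sketch only)::

            constraints = {'type': 'ineq',
                           'fun': lambda x: 1.0 - x[0] - x[1]}
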
    n : int, optional
        Number of sampling points used in the construction of the simplicial
        complex. Note that this argument is only used for ``sobol`` and other
        arbitrary `sampling_method`\ s.
    iters : int, optional
        Number of iterations used in the construction of the simplicial
        complex.
    callback : callable, optional
        Called after each iteration, as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the minimizer
        ``scipy.optimize.minimize``. Some important options could be:

            * method : str
                The minimization method (e.g. ``SLSQP``).
            * args : tuple
                Extra arguments passed to the objective function (``func``)
                and its derivatives (Jacobian, Hessian).
            * options : dict, optional
                Note that by default the tolerance is specified as
                ``{ftol: 1e-12}``
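
        For example, to use a different local solver than the default
        ``SLSQP`` (an illustrative sketch only)::

            minimizer_kwargs = {'method': 'COBYLA'}
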
    options : dict, optional
        A dictionary of solver options. Many of the options specified for the
        global routine are also passed to the scipy.optimize.minimize routine.
        The options that are also passed to the local routine are marked with
        "(L)".

        Stopping criteria: the algorithm will terminate if any of the
        specified criteria are met. However, the default algorithm does not
        require any to be specified:

        * maxfev : int (L)
            Maximum number of function evaluations in the feasible domain.
            (Note that only methods that support this option will terminate
            the routine at precisely the specified value. Otherwise the
            criterion will only terminate during a global iteration.)
        * f_min
            Specify the minimum objective function value, if it is known.
            (A usage sketch follows this list.)
        * f_tol : float
            Precision goal for the value of f in the stopping
            criterion. Note that the global routine will also
            terminate if a sampling point in the global routine is
            within this tolerance.
        * maxiter : int
            Maximum number of iterations to perform.
        * maxev : int
            Maximum number of sampling evaluations to perform (includes
            searching in infeasible points).
        * maxtime : float
            Maximum processing runtime allowed.
        * minhgrd : int
            Minimum homology group rank differential. The homology group of
            the objective function is calculated (approximately) during every
            iteration. The rank of this group has a one-to-one correspondence
            with the number of locally convex subdomains in the objective
            function (after adequate sampling points each of these subdomains
            contains a unique global minimum). If the difference in the hgr is
            0 between iterations for ``minhgrd`` specified iterations the
            algorithm will terminate.
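
        For example, if the global minimum of the Eggholder function from the
        Examples section is known, the routine can be stopped once it is
        reached to within a tolerance (an illustrative sketch only)::

            options = {'f_min': -959.6407, 'f_tol': 1e-6}
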
        Objective function knowledge:

        * symmetry : bool
            Specify True if the objective function contains symmetric
            variables. The search space (and therefore performance) is
            decreased by O(n!).
        * jac : bool or callable, optional
            Jacobian (gradient) of objective function. Only for CG, BFGS,
            Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg. If ``jac`` is
            a boolean and is True, ``fun`` is assumed to return the gradient
            along with the objective function. If False, the gradient will be
            estimated numerically. ``jac`` can also be a callable returning
            the gradient of the objective. In this case, it must accept the
            same arguments as ``fun``. (Passed to `scipy.optimize.minimize`
            automatically)
        * hess, hessp : callable, optional
            Hessian (matrix of second-order derivatives) of objective function
            or Hessian of objective function times an arbitrary vector p.
            Only for Newton-CG, dogleg, trust-ncg. Only one of ``hessp`` or
            ``hess`` needs to be given. If ``hess`` is provided, then
            ``hessp`` will be ignored. If neither ``hess`` nor ``hessp`` is
            provided, then the Hessian product will be approximated using
            finite differences on ``jac``. ``hessp`` must compute the Hessian
            times an arbitrary vector. (Passed to `scipy.optimize.minimize`
            automatically)

        Algorithm settings:

        * minimize_every_iter : bool
            If True then promising global sampling points will be passed to a
            local minimisation routine every iteration. If False then only the
            final minimiser pool will be run. Defaults to False.
        * local_iter : int
            Only evaluate a few of the best minimiser pool candidates every
            iteration. If False all potential points are passed to the local
            minimisation routine.
        * infty_constraints : bool
            If True then any sampling points generated which are outside the
            feasible domain will be saved and given an objective function
            value of ``inf``. If False then these points will be discarded.
            Using this functionality could lead to higher performance with
            respect to function evaluations before the global minimum is
            found; specifying False will use less memory at the cost of a
            slight decrease in performance. Defaults to True.

        Feedback:

        * disp : bool (L)
            Set to True to print convergence messages.

    sampling_method : str or function, optional
        Current built in sampling method options are ``sobol`` and
        ``simplicial``. The default ``simplicial`` uses less memory and
        provides the theoretical guarantee of convergence to the global
        minimum in finite time. The ``sobol`` method is faster in terms of
        sampling point generation at the cost of higher memory resources and
        the loss of guaranteed convergence. It is more appropriate for most
        "easier" problems where the convergence is relatively fast.
        User defined sampling functions must accept two arguments, ``n``
        sampling points of dimension ``dim`` per call, and output an array of
        sampling points with shape ``n x dim``; a minimal sketch of such a
        function is shown below.
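
        A minimal sketch, assuming uniformly distributed points in the unit
        hypercube are acceptable (``custom_sampling`` is a hypothetical
        name)::

            def custom_sampling(n, dim):
                # Hypothetical sampler: n uniform points in [0, 1]**dim
                return np.random.uniform(size=(n, dim))
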
    Returns
    -------
    res : OptimizeResult
        The optimization result represented as a `OptimizeResult` object.
        Important attributes are:
        ``x`` the solution array corresponding to the global minimum,
        ``fun`` the function output at the global solution,
        ``xl`` an ordered list of local minima solutions,
        ``funl`` the function output at the corresponding local solutions,
        ``success`` a Boolean flag indicating if the optimizer exited
        successfully,
        ``message`` which describes the cause of the termination,
        ``nfev`` the total number of objective function evaluations including
        the sampling calls,
        ``nlfev`` the total number of objective function evaluations
        culminating from all local search optimisations,
        ``nit`` number of iterations performed by the global routine.

    Notes
    -----
    Global optimization using simplicial homology global optimisation [1]_.
    Appropriate for solving general purpose NLP and blackbox optimisation
    problems to global optimality (low dimensional problems).

    In general, the optimization problems are of the form::

        minimize f(x) subject to

        g_i(x) >= 0,  i = 1,...,m
        h_j(x)  = 0,  j = 1,...,p

    where x is a vector of one or more variables. ``f(x)`` is the objective
    function ``R^n -> R``, ``g_i(x)`` are the inequality constraints, and
    ``h_j(x)`` are the equality constraints.

    Optionally, the lower and upper bounds for each element in x can also be
    specified using the `bounds` argument.

    While most of the theoretical advantages of SHGO are only proven for when
    ``f(x)`` is a Lipschitz smooth function, the algorithm is also proven to
    converge to the global optimum for the more general case where ``f(x)`` is
    non-continuous, non-convex and non-smooth, if the default sampling method
    is used [1]_.

    The local search method may be specified using the ``minimizer_kwargs``
    parameter which is passed on to ``scipy.optimize.minimize``. By default
    the ``SLSQP`` method is used. In general it is recommended to use the
    ``SLSQP`` or ``COBYLA`` local minimization if inequality constraints
    are defined for the problem since the other methods do not use
    constraints.

    The ``sobol`` method points are generated using the Sobol (1967) [2]_
    sequence. The primitive polynomials and various sets of initial direction
    numbers for generating Sobol sequences are provided by [3]_ by Frances Kuo
    and Stephen Joe. The original program sobol.cc (MIT) is available and
    described at http://web.maths.unsw.edu.au/~fkuo/sobol/ and was translated
    to Python 3 by Carl Sandrock 2016-03-31.

    References
    ----------
    .. [1] Endres, SC, Sandrock, C, Focke, WW (2018) "A simplicial homology
           algorithm for lipschitz optimisation", Journal of Global
           Optimization.
    .. [2] Sobol, IM (1967) "The distribution of points in a cube and the
           approximate evaluation of integrals", USSR Comput. Math. Math.
           Phys. 7, 86-112.
    .. [3] Joe, SW and Kuo, FY (2008) "Constructing Sobol sequences with
           better two-dimensional projections", SIAM J. Sci. Comput. 30,
           2635-2654.
    .. [4] Hock, W and Schittkowski, K (1981) "Test examples for nonlinear
           programming codes", Lecture Notes in Economics and Mathematical
           Systems, 187. Springer-Verlag, New York.
           http://www.ai7.uni-bayreuth.de/test_problem_coll.pdf
    .. [5] Wales, DJ (2015) "Perspective: Insight into reaction coordinates
           and dynamics from the potential energy landscape",
           Journal of Chemical Physics, 142(13), 2015.
    Examples
    --------
    First consider the problem of minimizing the Rosenbrock function, `rosen`:

    >>> from scipy.optimize import rosen, shgo
    >>> bounds = [(0, 2), (0, 2), (0, 2), (0, 2), (0, 2)]
    >>> result = shgo(rosen, bounds)
    >>> result.x, result.fun
    (array([ 1.,  1.,  1.,  1.,  1.]), 2.9203923741900809e-18)

    Note that bounds determine the dimensionality of the objective function
    and are therefore a required input, however you can specify empty bounds
    using ``None`` or objects like ``np.inf`` which will be converted to large
    float numbers.

    >>> bounds = [(None, None), ]*4
    >>> result = shgo(rosen, bounds)
    >>> result.x
    array([ 0.99999851,  0.99999704,  0.99999411,  0.9999882 ])

    Next we consider the Eggholder function, a problem with several local
    minima and one global minimum. We will demonstrate the use of arguments
    and the capabilities of `shgo`.
    (https://en.wikipedia.org/wiki/Test_functions_for_optimization)

    >>> def eggholder(x):
    ...     return (-(x[1] + 47.0)
    ...             * np.sin(np.sqrt(abs(x[0]/2.0 + (x[1] + 47.0))))
    ...             - x[0] * np.sin(np.sqrt(abs(x[0] - (x[1] + 47.0))))
    ...             )
    ...
    >>> bounds = [(-512, 512), (-512, 512)]

    `shgo` has two built-in low discrepancy sampling sequences. First we will
    input 30 initial sampling points of the Sobol sequence:

    >>> result = shgo(eggholder, bounds, n=30, sampling_method='sobol')
    >>> result.x, result.fun
    (array([ 512.        ,  404.23180542]), -959.64066272085051)

    `shgo` also returns any other local minima that were found; these can be
    accessed with:

    >>> result.xl
    array([[ 512.        ,  404.23180542],
           [ 283.07593402, -487.12566542],
           [-294.66820039, -462.01964031],
           [-105.87688985,  423.15324143],
           [-242.97923629,  274.38032063],
           [-506.25823477,    6.3131022 ],
           [-408.71981195, -156.10117154],
           [ 150.23210485,  301.31378508],
           [  91.00922754, -391.28375925],
           [ 202.8966344 , -269.38042147],
           [ 361.66625957, -106.96490692],
           [-219.40615102, -244.06022436],
           [ 151.59603137, -100.61082677]])
    >>> result.funl
    array([-959.64066272, -718.16745962, -704.80659592, -565.99778097,
           -559.78685655, -557.36868733, -507.87385942, -493.9605115 ,
           -426.48799655, -421.15571437, -419.31194957, -410.98477763,
           -202.53912972])

    These results are useful in applications where there are many global
    minima and the values of other global minima are desired or where the
    local minima can provide insight into the system (for example morphologies
    in physical chemistry [5]_).

    If we want to find a larger number of local minima, we can increase the
    number of sampling points or the number of iterations. We'll increase the
    number of sampling points to 60 and the number of iterations from the
    default of 1 to 5. This gives us 60 x 5 = 300 initial sampling points.

    >>> result_2 = shgo(eggholder, bounds, n=60, iters=5, sampling_method='sobol')
    >>> len(result.xl), len(result_2.xl)
    (13, 39)

    Note the difference between, e.g., ``n=180, iters=1`` and ``n=60,
    iters=3``. In the first case the promising points contained in the
    minimiser pool are processed only once. In the latter case they are
    processed every 60 sampling points for a total of 3 times.

    To demonstrate solving problems with non-linear constraints consider the
    following example from Hock and Schittkowski problem 73 (cattle-feed)
    [4]_::

        minimize: f = 24.55 * x_1 + 26.75 * x_2 + 39 * x_3 + 40.50 * x_4

        subject to: 2.3 * x_1 + 5.6 * x_2 + 11.1 * x_3 + 1.3 * x_4 - 5 >= 0,

                    12 * x_1 + 11.9 * x_2 + 41.8 * x_3 + 52.1 * x_4 - 21
                        - 1.645 * sqrt(0.28 * x_1**2 + 0.19 * x_2**2 +
                                       20.5 * x_3**2 + 0.62 * x_4**2) >= 0,

                    x_1 + x_2 + x_3 + x_4 - 1 == 0,

                    1 >= x_i >= 0 for all i

    The approximate answer given in [4]_ is::

        f([0.6355216, -0.12e-11, 0.3127019, 0.05177655]) = 29.894378

    >>> def f(x):  # (cattle-feed)
    ...     return 24.55*x[0] + 26.75*x[1] + 39*x[2] + 40.50*x[3]
    ...
    >>> def g1(x):
    ...     return 2.3*x[0] + 5.6*x[1] + 11.1*x[2] + 1.3*x[3] - 5  # >=0
    ...
    >>> def g2(x):
    ...     return (12*x[0] + 11.9*x[1] + 41.8*x[2] + 52.1*x[3] - 21
    ...             - 1.645 * np.sqrt(0.28*x[0]**2 + 0.19*x[1]**2
    ...                               + 20.5*x[2]**2 + 0.62*x[3]**2)
    ...             )  # >=0
    ...
    >>> def h1(x):
    ...     return x[0] + x[1] + x[2] + x[3] - 1  # == 0
    ...
    >>> cons = ({'type': 'ineq', 'fun': g1},
    ...         {'type': 'ineq', 'fun': g2},
    ...         {'type': 'eq', 'fun': h1})
    >>> bounds = [(0, 1.0), ]*4
    >>> res = shgo(f, bounds, iters=3, constraints=cons)
    >>> res
         fun: 29.894378159142136
        funl: array([29.89437816])
     message: 'Optimization terminated successfully.'
        nfev: 119
         nit: 3
       nlfev: 40
       nlhev: 0
       nljev: 5
     success: True
           x: array([6.35521569e-01, 1.13700270e-13, 3.12701881e-01, 5.17765506e-02])
          xl: array([[6.35521569e-01, 1.13700270e-13, 3.12701881e-01, 5.17765506e-02]])
    >>> g1(res.x), g2(res.x), h1(res.x)
    (-5.0626169922907138e-14, -2.9594104944408173e-12, 0.0)

    """
    # Initiate SHGO class
    shc = SHGO(func, bounds, args=args, constraints=constraints, n=n,
               iters=iters, callback=callback,
               minimizer_kwargs=minimizer_kwargs,
               options=options, sampling_method=sampling_method)

    # Run the algorithm, process results and test success
    shc.construct_complex()

    if not shc.break_routine:
        if shc.disp:
            print("Successfully completed construction of complex.")

    # Test post iterations success
    if len(shc.LMC.xl_maps) == 0:
        # If sampling failed to find pool, return lowest sampled point
        # with a warning
        shc.find_lowest_vertex()
        shc.break_routine = True
        shc.fail_routine(mes="Failed to find a feasible minimiser point. "
                             "Lowest sampling point = {}".format(shc.f_lowest))
        shc.res.fun = shc.f_lowest
        shc.res.x = shc.x_lowest
        shc.res.nfev = shc.fn

    # Confirm the routine ran successfully
    if not shc.break_routine:
        shc.res.message = 'Optimization terminated successfully.'
        shc.res.success = True

    # Return the final results
    return shc.res


class SHGO(object):
    def __init__(self, func, bounds, args=(), constraints=None, n=None,
                 iters=None, callback=None, minimizer_kwargs=None,
                 options=None, sampling_method='sobol'):

        # Input checks
        methods = ['sobol', 'simplicial']
        if sampling_method not in methods:
            raise ValueError(("Unknown sampling_method specified."
                              " Valid methods: {}").format(', '.join(methods)))

        # Initiate class
        self.func = func
        self.bounds = bounds
        self.args = args
        self.callback = callback

        # Bounds
        abound = np.array(bounds, float)
        self.dim = np.shape(abound)[0]  # Dimensionality of problem

        # Set non-finite values to large floats
        infind = ~np.isfinite(abound)
        abound[infind[:, 0], 0] = -1e50
        abound[infind[:, 1], 1] = 1e50

        # Check if bounds are correctly specified
        bnderr = abound[:, 0] > abound[:, 1]
        if bnderr.any():
            raise ValueError('Error: lb > ub in bounds {}.'
                             .format(', '.join(str(b) for b in bnderr)))

        self.bounds = abound

        # Constraints
        # Process constraint dict sequence:
        if constraints is not None:
            self.min_cons = constraints
            self.g_cons = []
            self.g_args = []
            if (type(constraints) is not tuple) and (type(constraints)
                                                     is not list):
                constraints = (constraints,)

            for cons in constraints:
                if cons['type'] == 'ineq':
                    self.g_cons.append(cons['fun'])
                    try:
                        self.g_args.append(cons['args'])
                    except KeyError:
                        self.g_args.append(())
            self.g_cons = tuple(self.g_cons)
            self.g_args = tuple(self.g_args)
        else:
            self.g_cons = None
            self.g_args = None

        # Define local minimization keyword arguments
        # Start with defaults
        self.minimizer_kwargs = {'args': self.args,
                                 'method': 'SLSQP',
                                 'bounds': self.bounds,
                                 'options': {},
                                 'callback': self.callback
                                 }
        if minimizer_kwargs is not None:
            # Overwrite with supplied values
            self.minimizer_kwargs.update(minimizer_kwargs)
        else:
            self.minimizer_kwargs['options'] = {'ftol': 1e-12}

        if (self.minimizer_kwargs['method'] in ('SLSQP', 'COBYLA') and
                (minimizer_kwargs is not None and
                 'constraints' not in minimizer_kwargs and
                 constraints is not None) or
                (self.g_cons is not None)):
            self.minimizer_kwargs['constraints'] = self.min_cons

        # Process options dict
        if options is not None:
            self.init_options(options)
        else:  # Default settings:
            self.f_min_true = None
            self.minimize_every_iter = False

            # Algorithm limits
            self.maxiter = None
            self.maxfev = None
            self.maxev = None
            self.maxtime = None
            self.f_min_true = None
            self.minhgrd = None

            # Objective function knowledge
            self.symmetry = False

            # Algorithm functionality
            self.local_iter = False
            self.infty_cons_sampl = True

            # Feedback
            self.disp = False

        # Remove unknown arguments in self.minimizer_kwargs
        # Start with arguments all the solvers have in common
        self.min_solver_args = ['fun', 'x0', 'args',
                                'callback', 'options', 'method']
        # then add the ones unique to specific solvers
        solver_args = {
            '_custom': ['jac', 'hess', 'hessp', 'bounds', 'constraints'],
            'nelder-mead': [],
            'powell': [],
            'cg': ['jac'],
            'bfgs': ['jac'],
            'newton-cg': ['jac', 'hess', 'hessp'],
            'l-bfgs-b': ['jac', 'bounds'],
            'tnc': ['jac', 'bounds'],
            'cobyla': ['constraints'],
            'slsqp': ['jac', 'bounds', 'constraints'],
            'dogleg': ['jac', 'hess'],
            'trust-ncg': ['jac', 'hess', 'hessp'],
            'trust-krylov': ['jac', 'hess', 'hessp'],
            'trust-exact': ['jac', 'hess'],
        }
        method = self.minimizer_kwargs['method']
        self.min_solver_args += solver_args[method.lower()]

        # Only retain the known arguments
        def _restrict_to_keys(dictionary, goodkeys):
            """Remove keys from dictionary if not in goodkeys - inplace"""
            existingkeys = set(dictionary)
            for key in existingkeys - set(goodkeys):
                dictionary.pop(key, None)

        _restrict_to_keys(self.minimizer_kwargs, self.min_solver_args)
        _restrict_to_keys(self.minimizer_kwargs['options'],
                          self.min_solver_args + ['ftol'])

        # Algorithm controls
        # Global controls
        self.stop_global = False  # Used in the stopping_criteria method
        self.break_routine = False  # Break the algorithm globally
        self.iters = iters  # Iterations to be run
        self.iters_done = 0  # Iterations completed
        self.n = n  # Sampling points per iteration
        self.nc = n  # Sampling points to sample in current iteration
        self.n_prc = 0  # Processed points (used to track Delaunay iters)
        self.n_sampled = 0  # To track no. of sampling points already generated
        self.fn = 0  # Number of feasible sampling points evaluations performed
        self.hgr = 0  # Homology group rank

        # Default settings if no sampling criteria.
        if self.iters is None:
            self.iters = 1
        if self.n is None:
            self.n = 100
            self.nc = self.n

        if not ((self.maxiter is None) and (self.maxfev is None) and (
                self.maxev is None)
                and (self.minhgrd is None) and (self.f_min_true is None)):
            self.iters = None

        # Set complex construction mode based on a provided stopping criteria:
        # Choose complex constructor
        if sampling_method == 'simplicial':
            self.iterate_complex = self.iterate_hypercube
            self.minimizers = self.simplex_minimizers
            self.sampling_method = sampling_method

        elif (sampling_method == 'sobol') or (type(sampling_method) is not
                                              str):
            self.iterate_complex = self.iterate_delaunay
            self.minimizers = self.delaunay_complex_minimisers
            # Sampling method used
            if sampling_method == 'sobol':
                self.sampling_method = sampling_method
                self.sampling = self.sampling_sobol
                self.Sobol = sobol_seq.Sobol()  # Init Sobol class
                if self.dim < 40:
                    self.sobol_points = self.sobol_points_40
                else:
                    self.sobol_points = self.sobol_points_10k
            else:
                # A user defined sampling method:
                self.sampling = sampling_method

        # Local controls
        self.stop_l_iter = False  # Local minimisation iterations
        self.stop_complex_iter = False  # Sampling iterations

        # Initiate storage objects used in algorithm classes
        self.minimizer_pool = []

        # Cache of local minimizers mapped
        self.LMC = LMapCache()

        # Initialize return object
        self.res = OptimizeResult()  # scipy.optimize.OptimizeResult object
        self.res.nfev = 0  # Includes each sampling point as func evaluation
        self.res.nlfev = 0  # Local function evals for all minimisers
        self.res.nljev = 0  # Local Jacobian evals for all minimisers
        self.res.nlhev = 0  # Local Hessian evals for all minimisers

    # Initiation aids
    def init_options(self, options):
        """
        Initiates the options.

        Can also be useful to change parameters after class initiation.

        Parameters
        ----------
        options : dict

        Returns
        -------
        None
        """
        self.minimizer_kwargs['options'].update(options)

        # Default settings:
        self.minimize_every_iter = options.get('minimize_every_iter', False)

        # Algorithm limits
        # Maximum number of iterations to perform.
        self.maxiter = options.get('maxiter', None)
        # Maximum number of function evaluations in the feasible domain
        self.maxfev = options.get('maxfev', None)
        # Maximum number of sampling evaluations (includes searching in
        # infeasible points)
        self.maxev = options.get('maxev', None)
        # Maximum processing runtime allowed
        self.init = time.time()
        self.maxtime = options.get('maxtime', None)
        if 'f_min' in options:
            # Specify the minimum objective function value, if it is known.
            self.f_min_true = options['f_min']
            self.f_tol = options.get('f_tol', 1e-4)
        else:
            self.f_min_true = None

        self.minhgrd = options.get('minhgrd', None)

        # Objective function knowledge
        self.symmetry = 'symmetry' in options

        # Algorithm functionality
        # Only evaluate a few of the best candidates
        self.local_iter = options.get('local_iter', False)

        self.infty_cons_sampl = options.get('infty_constraints', True)

        # Feedback
        self.disp = options.get('disp', False)

    # Iteration properties
    # Main construction loop:
    def construct_complex(self):
        """
        Construct for `iters` iterations.

        If uniform sampling is used, every iteration adds 'n' sampling points.

        Iterations stop if a stopping criterion (e.g. maximum sampling points
        or processing time) has been met.
        """
        if self.disp:
            print('Splitting first generation')

        while not self.stop_global:
            if self.break_routine:
                break
            # Iterate complex, process minimisers
            self.iterate()
            self.stopping_criteria()

        # Build minimiser pool
        # Final iteration only needed if pools weren't minimised every
        # iteration
        if not self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()

        self.res.nit = self.iters_done + 1

    def find_minima(self):
        """
        Construct the minimiser pool, map the minimisers to local minima
        and sort the results into a global return object.
        """
        self.minimizers()
        if len(self.X_min) != 0:
            # Minimise the pool of minimisers with local minimisation methods
            # Note that if Options['local_iter'] is an `int` instead of the
            # default value False then only that number of candidates will be
            # minimised
            self.minimise_pool(self.local_iter)
            # Sort results and build the global return object
            self.sort_result()

            # Lowest values used to report in case of failures
            self.f_lowest = self.res.fun
            self.x_lowest = self.res.x
        else:
            self.find_lowest_vertex()

    def find_lowest_vertex(self):
        # Find the lowest objective function value on one of
        # the vertices of the simplicial complex
        if self.sampling_method == 'simplicial':
            self.f_lowest = np.inf
            for x in self.HC.V.cache:
                if self.HC.V[x].f < self.f_lowest:
                    self.f_lowest = self.HC.V[x].f
                    self.x_lowest = self.HC.V[x].x_a
            if self.f_lowest == np.inf:  # no feasible point
                self.f_lowest = None
                self.x_lowest = None
        else:
            if self.fn == 0:
                self.f_lowest = None
                self.x_lowest = None
            else:
                self.f_I = np.argsort(self.F, axis=-1)
                self.f_lowest = self.F[self.f_I[0]]
                self.x_lowest = self.C[self.f_I[0]]

    # Stopping criteria functions:
    def finite_iterations(self):
        if self.iters is not None:
            if self.iters_done >= (self.iters - 1):
                self.stop_global = True

        if self.maxiter is not None:  # Stop for infeasible sampling
            if self.iters_done >= (self.maxiter - 1):
                self.stop_global = True
        return self.stop_global

    def finite_fev(self):
        # Finite function evals in the feasible domain
        if self.fn >= self.maxfev:
            self.stop_global = True
        return self.stop_global

    def finite_ev(self):
        # Finite evaluations including infeasible sampling points
        if self.n_sampled >= self.maxev:
            self.stop_global = True

    def finite_time(self):
        if (time.time() - self.init) >= self.maxtime:
            self.stop_global = True

    def finite_precision(self):
        """
        Stop the algorithm if the final function value is known

        Specify in options (with ``self.f_min_true = options['f_min']``)
        and the tolerance with ``f_tol = options['f_tol']``
        """
        # If no minimiser has been found use the lowest sampling value
        if len(self.LMC.xl_maps) == 0:
            self.find_lowest_vertex()

        # Function to stop algorithm at specified percentage error:
        if self.f_lowest == 0.0:
            if self.f_min_true == 0.0:
                if self.f_lowest <= self.f_tol:
                    self.stop_global = True
        else:
            pe = (self.f_lowest - self.f_min_true) / abs(self.f_min_true)
            if self.f_lowest <= self.f_min_true:
                self.stop_global = True
                if abs(pe) >= 2 * self.f_tol:
                    warnings.warn("A much lower value than the expected "
                                  "global minimum f* = {} was found: "
                                  "f_lowest = {}".format(self.f_min_true,
                                                         self.f_lowest))
            if pe <= self.f_tol:
                self.stop_global = True

        return self.stop_global

    def finite_homology_growth(self):
        if self.LMC.size == 0:
            return  # no reason to stop yet.
        self.hgrd = self.LMC.size - self.hgr

        self.hgr = self.LMC.size
        if self.hgrd <= self.minhgrd:
            self.stop_global = True
        return self.stop_global

    def stopping_criteria(self):
        """
        Various stopping criteria run every iteration

        Returns
        -------
        stop : bool
        """
        if self.maxiter is not None:
            self.finite_iterations()
        if self.iters is not None:
            self.finite_iterations()
        if self.maxfev is not None:
            self.finite_fev()
        if self.maxev is not None:
            self.finite_ev()
        if self.maxtime is not None:
            self.finite_time()
        if self.f_min_true is not None:
            self.finite_precision()
        if self.minhgrd is not None:
            self.finite_homology_growth()

    def iterate(self):
        self.iterate_complex()

        # Build minimiser pool
        if self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()  # Process minimiser pool

        # Algorithm updates
        self.iters_done += 1

    def iterate_hypercube(self):
        """
        Iterate a subdivision of the complex

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        # Iterate the complex
        if self.n_sampled == 0:
            # Initial triangulation of the hyper-rectangle
            self.HC = Complex(self.dim, self.func, self.args,
                              self.symmetry, self.bounds, self.g_cons,
                              self.g_args)
        else:
            self.HC.split_generation()

        # feasible sampling points counted by the triangulation.py routines
        self.fn = self.HC.V.nfev
        self.n_sampled = self.HC.V.size  # nevs counted in triangulation.py
        return

    def iterate_delaunay(self):
        """
        Build a complex of Delaunay triangulated points

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        self.nc += self.n
        self.sampled_surface(infty_cons_sampl=self.infty_cons_sampl)
        self.n_sampled = self.nc
        return

    # Hypercube minimizers
    def simplex_minimizers(self):
        """
        Returns the indexes of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for x in self.HC.V.cache:
            if self.HC.V[x].minimiser():
                if self.disp:
                    logging.info('=' * 60)
                    logging.info(
                        'v.x = {} is minimiser'.format(self.HC.V[x].x_a))
                    logging.info(
                        'v.f = {} is minimiser'.format(self.HC.V[x].f))
                    logging.info('=' * 30)

                if self.HC.V[x] not in self.minimizer_pool:
                    self.minimizer_pool.append(self.HC.V[x])

                if self.disp:
                    logging.info('Neighbours:')
                    logging.info('=' * 30)
                    for vn in self.HC.V[x].nn:
                        logging.info('x = {} || f = {}'.format(vn.x, vn.f))

                    logging.info('=' * 60)

        self.minimizer_pool_F = []
        self.X_min = []
        # normalized tuple in the Vertex cache
        self.X_min_cache = {}  # Cache used in hypercube sampling

        for v in self.minimizer_pool:
            self.X_min.append(v.x_a)
            self.minimizer_pool_F.append(v.f)
            self.X_min_cache[tuple(v.x_a)] = v.x

        self.minimizer_pool_F = np.array(self.minimizer_pool_F)
        self.X_min = np.array(self.X_min)

        # TODO: Only do this if global mode
        self.sort_min_pool()

        return self.X_min

    # Local minimisation
    # Minimiser pool processing
    def minimise_pool(self, force_iter=False):
        """
        This processing method can optionally minimise only the best candidate
        solutions in the minimiser pool

        Parameters
        ----------
        force_iter : int
            Number of starting minimisers to process (can be specified
            globally or locally)
        """
        # Find first local minimum
        # NOTE: Since we always minimize this value regardless it is a waste
        # to build the topograph first before minimizing
        lres_f_min = self.minimize(self.X_min[0], ind=self.minimizer_pool[0])

        # Trim minimised point from current minimiser set
        self.trim_min_pool(0)

        # Force processing to only the given number of iterations
        if force_iter:
            self.local_iter = force_iter

        while not self.stop_l_iter:
            # Global stopping criteria:
            if self.f_min_true is not None:
                if (lres_f_min.fun - self.f_min_true) / abs(
                        self.f_min_true) <= self.f_tol:
                    self.stop_l_iter = True
                    break

            # Note first iteration is outside loop:
            if self.local_iter is not None:
                if self.disp:
                    logging.info(
                        'SHGO.iters in function minimise_pool = {}'.format(
                            self.local_iter))
                self.local_iter -= 1
                if self.local_iter == 0:
                    self.stop_l_iter = True
                    break

            if np.shape(self.X_min)[0] == 0:
                self.stop_l_iter = True
                break

            # Construct topograph from current minimiser set
            # (NOTE: This is a very small topograph using only the minimiser
            # pool; it might be worth using some graph theory tools instead.)
            self.g_topograph(lres_f_min.x, self.X_min)

            # Find local minimum at the minimiser with the greatest Euclidean
            # distance from the current solution
            ind_xmin_l = self.Z[:, -1]
            lres_f_min = self.minimize(self.Ss[-1, :], self.minimizer_pool[-1])

            # Trim minimised point from current minimiser set
            self.trim_min_pool(ind_xmin_l)

        # Reset controls
        self.stop_l_iter = False
        return

    def sort_min_pool(self):
        # Sort to find minimum func value in min_pool
        self.ind_f_min = np.argsort(self.minimizer_pool_F)
        self.minimizer_pool = np.array(self.minimizer_pool)[self.ind_f_min]
        self.minimizer_pool_F = np.array(self.minimizer_pool_F)[
            self.ind_f_min]
        return

    def trim_min_pool(self, trim_ind):
        self.X_min = np.delete(self.X_min, trim_ind, axis=0)
        self.minimizer_pool_F = np.delete(self.minimizer_pool_F, trim_ind)
        self.minimizer_pool = np.delete(self.minimizer_pool, trim_ind)
        return

    def g_topograph(self, x_min, X_min):
        """
        Returns the topographical vector stemming from the specified value
        ``x_min`` for the current feasible set ``X_min`` with True boolean
        values indicating positive entries and False values indicating
        negative entries.
        """
        x_min = np.array([x_min])
        self.Y = spatial.distance.cdist(x_min, X_min, 'euclidean')
        # Find sorted indexes of spatial distances:
        self.Z = np.argsort(self.Y, axis=-1)

        self.Ss = X_min[self.Z][0]
        self.minimizer_pool = self.minimizer_pool[self.Z]
        self.minimizer_pool = self.minimizer_pool[0]
        return self.Ss

    # Local bound functions
    def construct_lcb_simplicial(self, v_min):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimiser vertex

        Returns
        -------
        cbounds : list of lists
            List of size dim with length-2 list of bounds for each dimension
        """
        cbounds = [[x_b_i[0], x_b_i[1]] for x_b_i in self.bounds]
        # Loop over all bounds
        for vn in v_min.nn:
            for i, x_i in enumerate(vn.x_a):
                # Lower bound
                if (x_i < v_min.x_a[i]) and (x_i > cbounds[i][0]):
                    cbounds[i][0] = x_i
                # Upper bound
                if (x_i > v_min.x_a[i]) and (x_i < cbounds[i][1]):
                    cbounds[i][1] = x_i
        if self.disp:
            logging.info('cbounds found for v_min.x_a = {}'.format(v_min.x_a))
            logging.info('cbounds = {}'.format(cbounds))
        return cbounds

    def construct_lcb_delaunay(self, v_min, ind=None):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimiser vertex

        Returns
        -------
        cbounds : list of lists
            List of size dim with length-2 list of bounds for each dimension
        """
        cbounds = []
        for x_b_i in self.bounds:
            cbounds.append([x_b_i[0], x_b_i[1]])
        return cbounds

    # Minimize a starting point locally
    def minimize(self, x_min, ind=None):
        """
        This function is used to calculate the local minima using the
        specified sampling point as a starting value.

        Parameters
        ----------
        x_min : vector of floats
            Current starting point to minimise.

        Returns
        -------
        lres : OptimizeResult
            The local optimization result represented as a `OptimizeResult`
            object.
        """
        # Use minima maps if vertex was already run
        if self.disp:
            logging.info('Vertex minimiser maps = {}'.format(self.LMC.v_maps))

        if self.LMC[x_min].lres is not None:
            return self.LMC[x_min].lres

        # TODO: Check discarded bound rules

        if self.callback is not None:
            print('Callback for '
                  'minimizer starting at {}:'.format(x_min))

        if self.disp:
            print('Starting '
                  'minimization at {}...'.format(x_min))

        if self.sampling_method == 'simplicial':
            x_min_t = tuple(x_min)
            # Find the normalized tuple in the Vertex cache:
            x_min_t_norm = self.X_min_cache[tuple(x_min_t)]
            x_min_t_norm = tuple(x_min_t_norm)
            g_bounds = self.construct_lcb_simplicial(self.HC.V[x_min_t_norm])
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

            if self.disp:
                print('bounds in kwarg:')
                print(self.minimizer_kwargs['bounds'])
        else:
            g_bounds = self.construct_lcb_delaunay(x_min, ind=ind)
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

        # Local minimization using scipy.optimize.minimize:
        lres = minimize(self.func, x_min, **self.minimizer_kwargs)

        if self.disp:
            print('lres = {}'.format(lres))

        # Local function evals for all minimisers
        self.res.nlfev += lres.nfev
        if 'njev' in lres:
            self.res.nljev += lres.njev
        if 'nhev' in lres:
            self.res.nlhev += lres.nhev

        try:  # Needed to unwrap 1x1 numpy arrays returned by some solvers
            lres.fun = lres.fun[0]
        except (IndexError, TypeError):
            pass

        # Append minima maps
        self.LMC[x_min]  # Ensure a cache entry exists for this start point
        self.LMC.add_res(x_min, lres, bounds=g_bounds)

        return lres

    # Post local minimisation processing
    def sort_result(self):
        """
        Sort results and build the global return object
        """
        # Sort results in local minima cache
        results = self.LMC.sort_cache_result()
        self.res.xl = results['xl']
        self.res.funl = results['funl']
        self.res.x = results['x']
        self.res.fun = results['fun']

        # Add local func evals to sampling func evals
        # Count the number of feasible vertices and add to local func evals:
        self.res.nfev = self.fn + self.res.nlfev
        return self.res

    # Algorithm controls
    def fail_routine(self, mes=("Failed to converge")):
        self.break_routine = True
        self.res.success = False
        self.X_min = [None]
        self.res.message = mes

    def sampled_surface(self, infty_cons_sampl=False):
        """
        Sample the function surface.

        There are two modes: if ``infty_cons_sampl`` is True then the sampled
        points that are generated outside the feasible domain will be
        assigned an ``inf`` value in accordance with SHGO rules.
        This guarantees convergence and usually requires fewer objective
        function evaluations at the computational cost of more Delaunay
        triangulation points.

        If ``infty_cons_sampl`` is False then the infeasible points are
        discarded and only a subspace of the sampled points are used. This
        comes at the cost of the loss of guaranteed convergence and usually
        requires more objective function evaluations.
        """
        # Generate sampling points
        if self.disp:
            print('Generating sampling points')
        self.sampling(self.nc, self.dim)

        if not infty_cons_sampl:
            # Find subspace of feasible points
            if self.g_cons is not None:
                self.sampling_subspace()

        # Sort remaining samples
        self.sorted_samples()

        # Find objective function references
        self.fun_ref()

        self.n_sampled = self.nc

    def delaunay_complex_minimisers(self):
        # Construct complex minimisers on the current sampling set.
        # if self.fn >= (self.dim + 1):
        if self.fn >= (self.dim + 2):
            # TODO: Check on strange Qhull error where the number of vertices
            # required for an initial simplex is higher than n + 1?
            if self.dim < 2:  # Scalar objective functions
                if self.disp:
                    print('Constructing 1D minimizer pool')

                self.ax_subspace()
                self.surface_topo_ref()
                self.minimizers_1D()

            else:  # Multivariate functions.
                if self.disp:
                    print('Constructing Gabriel graph and minimizer pool')

                if self.iters == 1:
                    self.delaunay_triangulation(grow=False)
                else:
                    self.delaunay_triangulation(grow=True, n_prc=self.n_prc)
                    self.n_prc = self.C.shape[0]

                if self.disp:
                    print('Triangulation completed, building minimizer pool')

                self.delaunay_minimizers()

            if self.disp:
                logging.info(
                    "Minimiser pool = SHGO.X_min = {}".format(self.X_min))
        else:
            if self.disp:
                print(
                    'Not enough sampling points found in the feasible domain.')
            self.minimizer_pool = [None]
            try:
                self.X_min
            except AttributeError:
                self.X_min = []

    def sobol_points_40(self, n, d, skip=0):
        """
        Wrapper for ``sobol_seq.i4_sobol_generate``

        Generate N sampling points in D dimensions
        """
        # Pass `skip` through so repeated calls continue the sequence
        # (previously hardcoded to skip=0, silently ignoring the argument)
        points = self.Sobol.i4_sobol_generate(d, n, skip=skip)
        return points

    def sobol_points_10k(self, N, D):
        """
        sobol.cc by Frances Kuo and Stephen Joe translated to Python 3 by
        Carl Sandrock 2016-03-31

        The original program is available and described at
        http://web.maths.unsw.edu.au/~fkuo/sobol/
        """
        import gzip
        import os
        path = os.path.join(os.path.dirname(__file__), '_shgo_lib',
                            'sobol_vec.gz')
        f = gzip.open(path, 'rb')
        unsigned = "uint64"
        # swallow header
        next(f)

        L = int(np.log(N) // np.log(2.0)) + 1

        C = np.ones(N, dtype=unsigned)
        for i in range(1, N):
            value = i
            while value & 1:
                value >>= 1
                C[i] += 1

        points = np.zeros((N, D), dtype='double')

        # XXX: This appears not to set the first element of V
        V = np.empty(L + 1, dtype=unsigned)
        for i in range(1, L + 1):
            V[i] = 1 << (32 - i)

        X = np.empty(N, dtype=unsigned)
        X[0] = 0
        for i in range(1, N):
            X[i] = X[i - 1] ^ V[C[i - 1]]
            points[i, 0] = X[i] / 2 ** 32

        for j in range(1, D):
            F_int = [int(item) for item in next(f).strip().split()]
            (d, s, a), m = F_int[:3], [0] + F_int[3:]

            if L <= s:
                for i in range(1, L + 1):
                    V[i] = m[i] << (32 - i)
            else:
                for i in range(1, s + 1):
                    V[i] = m[i] << (32 - i)
                for i in range(s + 1, L + 1):
                    V[i] = V[i - s] ^ (
                        V[i - s] >> np.array(s, dtype=unsigned))
                    for k in range(1, s):
                        V[i] ^= np.array(
                            (((a >> (s - 1 - k)) & 1) * V[i - k]),
                            dtype=unsigned)

            X[0] = 0
            for i in range(1, N):
                X[i] = X[i - 1] ^ V[C[i - 1]]
                points[i, j] = X[i] / 2 ** 32  # *** the actual points

        f.close()
        return points

    def sampling_sobol(self, n, dim):
        """
        Generates uniform sampling points in a hypercube and scales the points
        to the bound limits.
        """
        # Generate sampling points.
        # Generate uniform sample points in [0, 1]^m \subset R^m
        if self.n_sampled == 0:
            self.C = self.sobol_points(n, dim)
        else:
            self.C = self.sobol_points(n, dim, skip=self.n_sampled)
        # Distribute over bounds
        for i in range(len(self.bounds)):
            self.C[:, i] = (self.C[:, i] *
                            (self.bounds[i][1] - self.bounds[i][0])
                            + self.bounds[i][0])
        return self.C

    def sampling_subspace(self):
        """Find subspace of feasible points from g_func definition"""
        # Subspace of feasible points.
        for ind, g in enumerate(self.g_cons):
            self.C = self.C[g(self.C.T, *self.g_args[ind]) >= 0.0]
            if self.C.size == 0:
                self.res.message = ('No sampling point found within the '
                                    + 'feasible set. Increasing sampling '
                                    + 'size.')
                # TODO: Write a unittest to see if algorithm is increasing
                # sampling correctly for both 1D and >1D cases
                if self.disp:
                    print(self.res.message)

    def sorted_samples(self):  # Validated
        """Find indexes of the sorted sampling points"""
        self.Ind_sorted = np.argsort(self.C, axis=0)
        self.Xs = self.C[self.Ind_sorted]
        return self.Ind_sorted, self.Xs

    def ax_subspace(self):  # Validated
        """
        Finds the subspace vectors along each component axis.
        """
        self.Ci = []
        self.Xs_i = []
        self.Ii = []
        for i in range(self.dim):
            self.Ci.append(self.C[:, i])
            self.Ii.append(self.Ind_sorted[:, i])
            self.Xs_i.append(self.Xs[:, i])

    def fun_ref(self):
        """
        Find the objective function output reference table
        """
        # TODO: Replace with cached wrapper
        # Note: This process can be pooled easily
        # Obj. function returns to be used as reference table:
        f_cache_bool = False
        if self.fn > 0:  # Store old function evaluations
            Ftemp = self.F
            fn_old = self.fn
            f_cache_bool = True

        self.F = np.zeros(np.shape(self.C)[0])
        # NOTE: It might be easier to replace this with a cached
        #       objective function
        for i in range(self.fn, np.shape(self.C)[0]):
            eval_f = True
            if self.g_cons is not None:
                for g in self.g_cons:
                    if g(self.C[i, :], *self.args) < 0.0:
                        eval_f = False
                        break  # Breaks the g loop

            if eval_f:
                self.F[i] = self.func(self.C[i, :], *self.args)
                self.fn += 1
            elif self.infty_cons_sampl:
                self.F[i] = np.inf
                self.fn += 1
        if f_cache_bool:
            if fn_old > 0:  # Restore saved function evaluations
                self.F[0:fn_old] = Ftemp

        return self.F

    def surface_topo_ref(self):  # Validated
        """
        Find the BD and FD finite differences along each component vector.
        """
        # Replace numpy inf, -inf and nan objects with floating point numbers
        # nan --> float
        self.F[np.isnan(self.F)] = np.inf
        # inf, -inf --> floats
        self.F = np.nan_to_num(self.F)

        self.Ft = self.F[self.Ind_sorted]
        self.Ftp = np.diff(self.Ft, axis=0)  # FD
        self.Ftm = np.diff(self.Ft[::-1], axis=0)[::-1]  # BD

    def sample_topo(self, ind):
        # Find the position of the sample in the component axial directions
        self.Xi_ind_pos = []
        self.Xi_ind_topo_i = []

        for i in range(self.dim):
            for x, I_ind in zip(self.Ii[i], range(len(self.Ii[i]))):
                if x == ind:
                    self.Xi_ind_pos.append(I_ind)

            # Use the topo reference tables to find if point is a minimizer on
            # the current axis

            # First check if index is on the boundary of the sampling points:
            if self.Xi_ind_pos[i] == 0:
                # if boundary is in basin
                self.Xi_ind_topo_i.append(self.Ftp[:, i][0] > 0)

            elif self.Xi_ind_pos[i] == self.fn - 1:
                # Largest value at sample size
                self.Xi_ind_topo_i.append(self.Ftp[:, i][self.fn - 2] < 0)

            # Find axial reference for other points
            else:
                Xi_ind_top_p = self.Ftp[:, i][self.Xi_ind_pos[i]] > 0
                Xi_ind_top_m = self.Ftm[:, i][self.Xi_ind_pos[i] - 1] > 0
                self.Xi_ind_topo_i.append(Xi_ind_top_p and Xi_ind_top_m)

        # The point is a minimizer only if it is a minimizer along every axis
        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo

    def minimizers_1D(self):
        """
        Returns the indexes of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for ind in range(self.fn):
            min_bool = self.sample_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()
        if len(self.minimizer_pool) != 0:
            self.X_min = self.C[self.minimizer_pool]
        else:
            # If function is called again and no pool is found, unbreak:
            self.X_min = []

        return self.X_min

    def delaunay_triangulation(self, grow=False, n_prc=0):
        if not grow:
            self.Tri = spatial.Delaunay(self.C)
        else:
            if hasattr(self, 'Tri'):
                self.Tri.add_points(self.C[n_prc:, :])
            else:
                self.Tri = spatial.Delaunay(self.C, incremental=True)

        return self.Tri

    @staticmethod
    def find_neighbors_delaunay(pindex, triang):
        """
        Returns the indexes of points connected to ``pindex`` on the Gabriel
        chain subgraph of the Delaunay triangulation.
        """
        return triang.vertex_neighbor_vertices[1][
               triang.vertex_neighbor_vertices[0][pindex]:
               triang.vertex_neighbor_vertices[0][pindex + 1]]

    def sample_delaunay_topo(self, ind):
        self.Xi_ind_topo_i = []

        # Find the position of the sample in the component Gabriel chain
        G_ind = self.find_neighbors_delaunay(ind, self.Tri)

        # Find finite difference between each point
        for g_i in G_ind:
            rel_topo_bool = self.F[ind] < self.F[g_i]
            self.Xi_ind_topo_i.append(rel_topo_bool)

        # Check if minimizer
        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo

    def delaunay_minimizers(self):
        """
        Returns the indexes of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can easily be parallelized
        if self.disp:
            logging.info('self.fn = {}'.format(self.fn))
            logging.info('self.nc = {}'.format(self.nc))
            logging.info('np.shape(self.C)'
                         ' = {}'.format(np.shape(self.C)))
        for ind in range(self.fn):
            min_bool = self.sample_delaunay_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()

        if self.disp:
            logging.info('self.minimizer_pool = {}'.format(
                self.minimizer_pool))

        if len(self.minimizer_pool) != 0:
            self.X_min = self.C[self.minimizer_pool]
        else:
            self.X_min = []  # Empty pool breaks main routine

        return self.X_min


class LMap:
    def __init__(self, v):
        self.v = v
        self.x_l = None
        self.lres = None
        self.f_min = None
        self.lbounds = []


class LMapCache:
    def __init__(self):
        self.cache = {}

        # Lists for search queries
        self.v_maps = []
        self.xl_maps = []
        self.f_maps = []
        self.lbound_maps = []
        self.size = 0

    def __getitem__(self, v):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        try:
            return self.cache[v]
        except KeyError:
            xval = LMap(v)
            self.cache[v] = xval
            return self.cache[v]

    def add_res(self, v, lres, bounds=None):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        self.cache[v].x_l = lres.x
        self.cache[v].lres = lres
        self.cache[v].f_min = lres.fun
        self.cache[v].lbounds = bounds

        # Update cache size
        self.size += 1

        # Cache lists for search queries
        self.v_maps.append(v)
        self.xl_maps.append(lres.x)
        self.f_maps.append(lres.fun)
        self.lbound_maps.append(bounds)

    def sort_cache_result(self):
        """
        Sort results and build the global return object
        """
        results = {}
        # Sort results and save
        self.xl_maps = np.array(self.xl_maps)
        self.f_maps = np.array(self.f_maps)

        # Sorted indexes in Func_min
        ind_sorted = np.argsort(self.f_maps)

        # Save ordered list of minima
        results['xl'] = self.xl_maps[ind_sorted]  # Ordered x vals
        results['funl'] = self.f_maps[ind_sorted]
        results['funl'] = results['funl'].T

        # Find global of all minimisers
        results['x'] = self.xl_maps[ind_sorted[0]]  # Save global minima
        results['fun'] = self.f_maps[ind_sorted[0]]  # Save global fun value

        self.xl_maps = np.ndarray.tolist(self.xl_maps)
        self.f_maps = np.ndarray.tolist(self.f_maps)
        return results