1""" 

2shgo: The simplicial homology global optimisation algorithm 

3""" 

4 

5import numpy as np 

6import time 

7import logging 

8import warnings 

9from scipy import spatial 

10from scipy.optimize import OptimizeResult, minimize 

11from scipy.optimize._shgo_lib import sobol_seq 

12from scipy.optimize._shgo_lib.triangulation import Complex 

13 

14 

15__all__ = ['shgo'] 

16 

17 

def shgo(func, bounds, args=(), constraints=None, n=100, iters=1, callback=None,
         minimizer_kwargs=None, options=None, sampling_method='simplicial'):
    """
    Finds the global minimum of a function using SHG optimization.

    SHGO stands for "simplicial homology global optimization".

    Parameters
    ----------
    func : callable
        The objective function to be minimized. Must be in the form
        ``f(x, *args)``, where ``x`` is the argument in the form of a 1-D array
        and ``args`` is a tuple of any additional fixed parameters needed to
        completely specify the function.
    bounds : sequence
        Bounds for variables. ``(min, max)`` pairs for each element in ``x``,
        defining the lower and upper bounds for the optimizing argument of
        `func`. It is required to have ``len(bounds) == len(x)``.
        ``len(bounds)`` is used to determine the number of parameters in ``x``.
        Use ``None`` for one of min or max when there is no bound in that
        direction. By default bounds are ``(None, None)``.
    args : tuple, optional
        Any additional fixed parameters needed to completely specify the
        objective function.
    constraints : dict or sequence of dict, optional
        Constraints definition.
        Functions of ``R^n`` in the form::

            g(x) >= 0 applied as g : R^n -> R^m
            h(x) == 0 applied as h : R^n -> R^p

        Each constraint is defined in a dictionary with fields:

            type : str
                Constraint type: 'eq' for equality, 'ineq' for inequality.
            fun : callable
                The function defining the constraint.
            jac : callable, optional
                The Jacobian of `fun` (only for SLSQP).
            args : sequence, optional
                Extra arguments to be passed to the function and Jacobian.

        Equality constraint means that the constraint function result is to
        be zero whereas inequality means that it is to be non-negative.
        Note that COBYLA only supports inequality constraints.

        .. note::

            Only the COBYLA and SLSQP local minimize methods currently
            support constraint arguments. If no ``constraints`` sequence
            is defined in ``minimizer_kwargs`` and a constrained method is
            used, then the global ``constraints`` will be used.
            (Defining a ``constraints`` sequence in ``minimizer_kwargs``
            means that the global ``constraints`` will not be added, so if
            equality constraints and so forth need to be added then the
            inequality functions in ``constraints`` need to be added to
            ``minimizer_kwargs`` too.)

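        As a sketch of the note above (``g`` and ``h`` are hypothetical
        constraint functions), the global inequality constraints can be
        repeated in ``minimizer_kwargs`` alongside any extra equality
        constraints needed for the local searches::

            cons = ({'type': 'ineq', 'fun': g},)
            minimizer_kwargs = {'method': 'SLSQP',
                                'constraints': ({'type': 'ineq', 'fun': g},
                                                {'type': 'eq', 'fun': h})}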

    n : int, optional
        Number of sampling points used in the construction of the simplicial
        complex. Note that this argument is only used for ``sobol`` and other
        arbitrary ``sampling_method`` inputs.
    iters : int, optional
        Number of iterations used in the construction of the simplicial
        complex.
    callback : callable, optional
        Called after each iteration, as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the minimizer
        ``scipy.optimize.minimize``. Some important options could be:

            * method : str
                The minimization method (e.g. ``SLSQP``).
            * args : tuple
                Extra arguments passed to the objective function (``func``)
                and its derivatives (Jacobian, Hessian).
            * options : dict, optional
                Note that by default the tolerance is specified as
                ``{ftol: 1e-12}``

    options : dict, optional
        A dictionary of solver options. Many of the options specified for the
        global routine are also passed to the ``scipy.optimize.minimize``
        routine. The options that are also passed to the local routine are
        marked with "(L)".

        Stopping criteria: the algorithm will terminate if any of the
        specified criteria are met. However, the default algorithm does not
        require any to be specified:

            * maxfev : int (L)
                Maximum number of function evaluations in the feasible domain.
                (Note that only methods that support this option will
                terminate the routine at precisely the specified value.
                Otherwise the criterion will only be checked at the end of a
                global iteration.)
            * f_min
                Specify the minimum objective function value, if it is known.
            * f_tol : float
                Precision goal for the value of f in the stopping
                criterion. Note that the global routine will also
                terminate if a sampling point in the global routine is
                within this tolerance.
            * maxiter : int
                Maximum number of iterations to perform.
            * maxev : int
                Maximum number of sampling evaluations to perform (includes
                searching in infeasible points).
            * maxtime : float
                Maximum processing runtime allowed.
            * minhgrd : int
                Minimum homology group rank differential. The homology group
                of the objective function is calculated (approximately)
                during every iteration. The rank of this group has a
                one-to-one correspondence with the number of locally convex
                subdomains in the objective function (after adequate sampling
                points, each of these subdomains contains a unique global
                minimum). If the difference in the hgr is 0 between
                iterations for ``minhgrd`` specified iterations the
                algorithm will terminate.

        Objective function knowledge:

            * symmetry : bool
                Specify True if the objective function contains symmetric
                variables. The search space (and therefore performance) is
                decreased by O(n!).

            * jac : bool or callable, optional
                Jacobian (gradient) of objective function. Only for CG, BFGS,
                Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg. If ``jac``
                is a boolean and is True, ``fun`` is assumed to return the
                gradient along with the objective function. If False, the
                gradient will be estimated numerically. ``jac`` can also be a
                callable returning the gradient of the objective. In this
                case, it must accept the same arguments as ``fun``. (Passed
                to `scipy.optimize.minimize` automatically)

            * hess, hessp : callable, optional
                Hessian (matrix of second-order derivatives) of objective
                function or Hessian of objective function times an arbitrary
                vector p. Only for Newton-CG, dogleg, trust-ncg. Only one of
                ``hessp`` or ``hess`` needs to be given. If ``hess`` is
                provided, then ``hessp`` will be ignored. If neither ``hess``
                nor ``hessp`` is provided, then the Hessian product will be
                approximated using finite differences on ``jac``. ``hessp``
                must compute the Hessian times an arbitrary vector. (Passed
                to `scipy.optimize.minimize` automatically)

        Algorithm settings:

            * minimize_every_iter : bool
                If True then promising global sampling points will be passed
                to a local minimization routine every iteration. If False
                then only the final minimizer pool will be run. Defaults to
                False.
            * local_iter : int
                Only evaluate a few of the best minimizer pool candidates
                every iteration. If False all potential points are passed to
                the local minimization routine.
            * infty_constraints : bool
                If True then any sampling points generated which are outside
                the feasible domain will be saved and given an objective
                function value of ``inf``. If False then these points will be
                discarded. Using this functionality could lead to higher
                performance with respect to function evaluations before the
                global minimum is found; specifying False will use less
                memory at the cost of a slight decrease in performance.
                Defaults to True.

        Feedback:

            * disp : bool (L)
                Set to True to print convergence messages.

    sampling_method : str or function, optional
        Current built-in sampling method options are ``sobol`` and
        ``simplicial``. The default ``simplicial`` uses less memory and
        provides the theoretical guarantee of convergence to the global
        minimum in finite time. The ``sobol`` method is faster in terms of
        sampling point generation at the cost of higher memory resources and
        the loss of guaranteed convergence. It is more appropriate for most
        "easier" problems where the convergence is relatively fast.
        User defined sampling functions must accept two arguments, ``n``
        (the number of sampling points) and ``dim`` (the dimension), per
        call, and output an array of sampling points with shape ``n x dim``.

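        As a sketch, a minimal user defined sampling function (assuming
        uniform pseudo-random sampling is acceptable; the returned
        unit-hypercube points are scaled to ``bounds`` internally)::

            import numpy as np

            def custom_sampling(n, dim):
                # n points in the unit hypercube [0, 1]^dim
                return np.random.uniform(size=(n, dim))

            result = shgo(func, bounds, sampling_method=custom_sampling)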

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an `OptimizeResult` object.
        Important attributes are:
        ``x`` the solution array corresponding to the global minimum,
        ``fun`` the function output at the global solution,
        ``xl`` an ordered list of local minima solutions,
        ``funl`` the function output at the corresponding local solutions,
        ``success`` a Boolean flag indicating if the optimizer exited
        successfully,
        ``message`` which describes the cause of the termination,
        ``nfev`` the total number of objective function evaluations including
        the sampling calls,
        ``nlfev`` the total number of objective function evaluations
        culminating from all local search optimizations,
        ``nit`` the number of iterations performed by the global routine.


    Notes
    -----
    Global optimization using simplicial homology global optimization [1]_.
    Appropriate for solving general purpose NLP and blackbox optimization
    problems to global optimality (low-dimensional problems).

    In general, the optimization problems are of the form::

        minimize f(x) subject to

        g_i(x) >= 0, i = 1,...,m
        h_j(x) = 0, j = 1,...,p

    where x is a vector of one or more variables. ``f(x)`` is the objective
    function ``R^n -> R``, ``g_i(x)`` are the inequality constraints, and
    ``h_j(x)`` are the equality constraints.

    Optionally, the lower and upper bounds for each element in x can also be
    specified using the `bounds` argument.

    While most of the theoretical advantages of SHGO are only proven for when
    ``f(x)`` is a Lipschitz smooth function, the algorithm is also proven to
    converge to the global optimum for the more general case where ``f(x)`` is
    non-continuous, non-convex and non-smooth, if the default sampling method
    is used [1]_.

    The local search method may be specified using the ``minimizer_kwargs``
    parameter which is passed on to ``scipy.optimize.minimize``. By default,
    the ``SLSQP`` method is used. In general, it is recommended to use the
    ``SLSQP`` or ``COBYLA`` local minimization if inequality constraints
    are defined for the problem since the other methods do not use constraints.

    The ``sobol`` method points are generated using the Sobol (1967) [2]_
    sequence. The primitive polynomials and various sets of initial direction
    numbers for generating Sobol sequences are provided by [3]_ by Frances Kuo
    and Stephen Joe. The original program sobol.cc (MIT) is available and
    described at https://web.maths.unsw.edu.au/~fkuo/sobol/ and was
    translated to Python 3 by Carl Sandrock 2016-03-31.

    References
    ----------
    .. [1] Endres, SC, Sandrock, C, Focke, WW (2018) "A simplicial homology
           algorithm for Lipschitz optimisation", Journal of Global
           Optimization.
    .. [2] Sobol, IM (1967) "The distribution of points in a cube and the
           approximate evaluation of integrals", USSR Comput. Math. Math.
           Phys. 7, 86-112.
    .. [3] Joe, SW and Kuo, FY (2008) "Constructing Sobol sequences with
           better two-dimensional projections", SIAM J. Sci. Comput. 30,
           2635-2654.
    .. [4] Hock, W and Schittkowski, K (1981) "Test examples for nonlinear
           programming codes", Lecture Notes in Economics and Mathematical
           Systems, 187. Springer-Verlag, New York.
           http://www.ai7.uni-bayreuth.de/test_problem_coll.pdf
    .. [5] Wales, DJ (2015) "Perspective: Insight into reaction coordinates
           and dynamics from the potential energy landscape",
           Journal of Chemical Physics, 142(13), 2015.


    Examples
    --------
    First consider the problem of minimizing the Rosenbrock function, `rosen`:

    >>> from scipy.optimize import rosen, shgo
    >>> bounds = [(0,2), (0, 2), (0, 2), (0, 2), (0, 2)]
    >>> result = shgo(rosen, bounds)
    >>> result.x, result.fun
    (array([ 1.,  1.,  1.,  1.,  1.]), 2.9203923741900809e-18)

    Note that bounds determine the dimensionality of the objective function
    and are therefore a required input, however you can specify empty bounds
    using ``None`` or objects like ``np.inf`` which will be converted to
    large float numbers.

    >>> bounds = [(None, None), ]*4
    >>> result = shgo(rosen, bounds)
    >>> result.x
    array([ 0.99999851,  0.99999704,  0.99999411,  0.9999882 ])

    Next, we consider the Eggholder function, a problem with several local
    minima and one global minimum. We will demonstrate the use of arguments
    and the capabilities of `shgo`.
    (https://en.wikipedia.org/wiki/Test_functions_for_optimization)

    >>> def eggholder(x):
    ...     return (-(x[1] + 47.0)
    ...             * np.sin(np.sqrt(abs(x[0]/2.0 + (x[1] + 47.0))))
    ...             - x[0] * np.sin(np.sqrt(abs(x[0] - (x[1] + 47.0))))
    ...             )
    ...
    >>> bounds = [(-512, 512), (-512, 512)]

    `shgo` has two built-in low discrepancy sampling sequences. First, we will
    input 30 initial sampling points of the Sobol sequence:

    >>> result = shgo(eggholder, bounds, n=30, sampling_method='sobol')
    >>> result.x, result.fun
    (array([ 512.        ,  404.23180542]), -959.64066272085051)

    `shgo` also returns any other local minima that were found; these can be
    accessed using:

    >>> result.xl
    array([[ 512.        ,  404.23180542],
           [ 283.07593402, -487.12566542],
           [-294.66820039, -462.01964031],
           [-105.87688985,  423.15324143],
           [-242.97923629,  274.38032063],
           [-506.25823477,    6.3131022 ],
           [-408.71981195, -156.10117154],
           [ 150.23210485,  301.31378508],
           [  91.00922754, -391.28375925],
           [ 202.8966344 , -269.38042147],
           [ 361.66625957, -106.96490692],
           [-219.40615102, -244.06022436],
           [ 151.59603137, -100.61082677]])

    >>> result.funl
    array([-959.64066272, -718.16745962, -704.80659592, -565.99778097,
           -559.78685655, -557.36868733, -507.87385942, -493.9605115 ,
           -426.48799655, -421.15571437, -419.31194957, -410.98477763,
           -202.53912972])

    These results are useful in applications where there are many global
    minima and the values of other global minima are desired or where the
    local minima can provide insight into the system (for example morphologies
    in physical chemistry [5]_).

    If we want to find a larger number of local minima, we can increase the
    number of sampling points or the number of iterations. We'll increase the
    number of sampling points to 60 and the number of iterations from the
    default of 1 to 5. This gives us 60 x 5 = 300 initial sampling points.

    >>> result_2 = shgo(eggholder, bounds, n=60, iters=5, sampling_method='sobol')
    >>> len(result.xl), len(result_2.xl)
    (13, 39)

    Note the difference between, e.g., ``n=180, iters=1`` and ``n=60,
    iters=3``. In the first case the promising points contained in the
    minimiser pool are processed only once. In the latter case they are
    processed every 60 sampling points for a total of 3 times.

    To demonstrate solving problems with non-linear constraints consider the
    following example from Hock and Schittkowski problem 73 (cattle-feed)
    [4]_::

        minimize: f = 24.55 * x_1 + 26.75 * x_2 + 39 * x_3 + 40.50 * x_4

        subject to: 2.3 * x_1 + 5.6 * x_2 + 11.1 * x_3 + 1.3 * x_4 - 5 >= 0,

                    12 * x_1 + 11.9 * x_2 + 41.8 * x_3 + 52.1 * x_4 - 21
                        - 1.645 * sqrt(0.28 * x_1**2 + 0.19 * x_2**2 +
                                       20.5 * x_3**2 + 0.62 * x_4**2) >= 0,

                    x_1 + x_2 + x_3 + x_4 - 1 == 0,

                    1 >= x_i >= 0 for all i

    The approximate answer given in [4]_ is::

        f([0.6355216, -0.12e-11, 0.3127019, 0.05177655]) = 29.894378

    >>> def f(x):  # (cattle-feed)
    ...     return 24.55*x[0] + 26.75*x[1] + 39*x[2] + 40.50*x[3]
    ...
    >>> def g1(x):
    ...     return 2.3*x[0] + 5.6*x[1] + 11.1*x[2] + 1.3*x[3] - 5  # >=0
    ...
    >>> def g2(x):
    ...     return (12*x[0] + 11.9*x[1] + 41.8*x[2] + 52.1*x[3] - 21
    ...             - 1.645 * np.sqrt(0.28*x[0]**2 + 0.19*x[1]**2
    ...                               + 20.5*x[2]**2 + 0.62*x[3]**2)
    ...             )  # >=0
    ...
    >>> def h1(x):
    ...     return x[0] + x[1] + x[2] + x[3] - 1  # == 0
    ...
    >>> cons = ({'type': 'ineq', 'fun': g1},
    ...         {'type': 'ineq', 'fun': g2},
    ...         {'type': 'eq', 'fun': h1})
    >>> bounds = [(0, 1.0),]*4
    >>> res = shgo(f, bounds, iters=3, constraints=cons)
    >>> res
         fun: 29.894378159142136
        funl: array([29.89437816])
     message: 'Optimization terminated successfully.'
        nfev: 114
         nit: 3
       nlfev: 35
       nlhev: 0
       nljev: 5
     success: True
           x: array([6.35521569e-01, 1.13700270e-13, 3.12701881e-01, 5.17765506e-02])
          xl: array([[6.35521569e-01, 1.13700270e-13, 3.12701881e-01, 5.17765506e-02]])

    >>> g1(res.x), g2(res.x), h1(res.x)
    (-5.0626169922907138e-14, -2.9594104944408173e-12, 0.0)

    """

    # Initiate SHGO class
    shc = SHGO(func, bounds, args=args, constraints=constraints, n=n,
               iters=iters, callback=callback,
               minimizer_kwargs=minimizer_kwargs,
               options=options, sampling_method=sampling_method)

    # Run the algorithm, process results and test success
    shc.construct_complex()

    if not shc.break_routine:
        if shc.disp:
            print("Successfully completed construction of complex.")

    # Test post iterations success
    if len(shc.LMC.xl_maps) == 0:
        # If sampling failed to find pool, return lowest sampled point
        # with a warning
        shc.find_lowest_vertex()
        shc.break_routine = True
        shc.fail_routine(mes="Failed to find a feasible minimizer point. "
                             "Lowest sampling point = {}".format(shc.f_lowest))
        shc.res.fun = shc.f_lowest
        shc.res.x = shc.x_lowest
        shc.res.nfev = shc.fn

    # Confirm the routine ran successfully
    if not shc.break_routine:
        shc.res.message = 'Optimization terminated successfully.'
        shc.res.success = True

    # Return the final results
    return shc.res


class SHGO(object):
    def __init__(self, func, bounds, args=(), constraints=None, n=None,
                 iters=None, callback=None, minimizer_kwargs=None,
                 options=None, sampling_method='sobol'):

        # Input checks
        methods = ['sobol', 'simplicial']
        if isinstance(sampling_method, str) and sampling_method not in methods:
            raise ValueError(("Unknown sampling_method specified."
                              " Valid methods: {}").format(', '.join(methods)))

        # Initiate class
        self.func = func
        self.bounds = bounds
        self.args = args
        self.callback = callback

        # Bounds
        abound = np.array(bounds, float)
        self.dim = np.shape(abound)[0]  # Dimensionality of problem

        # Set non-finite values to large floats
        infind = ~np.isfinite(abound)
        abound[infind[:, 0], 0] = -1e50
        abound[infind[:, 1], 1] = 1e50

        # Check if bounds are correctly specified
        bnderr = abound[:, 0] > abound[:, 1]
        if bnderr.any():
            raise ValueError('Error: lb > ub in bounds {}.'
                             .format(', '.join(str(b) for b in bnderr)))

        self.bounds = abound

        # Constraints
        # Process constraint dict sequence:
        if constraints is not None:
            self.min_cons = constraints
            self.g_cons = []
            self.g_args = []
            if not isinstance(constraints, (tuple, list)):
                constraints = (constraints,)

            for cons in constraints:
                if cons['type'] == 'ineq':
                    self.g_cons.append(cons['fun'])
                    try:
                        self.g_args.append(cons['args'])
                    except KeyError:
                        self.g_args.append(())
            self.g_cons = tuple(self.g_cons)
            self.g_args = tuple(self.g_args)
        else:
            self.g_cons = None
            self.g_args = None

        # Define local minimization keyword arguments
        # Start with defaults
        self.minimizer_kwargs = {'args': self.args,
                                 'method': 'SLSQP',
                                 'bounds': self.bounds,
                                 'options': {},
                                 'callback': self.callback
                                 }
        if minimizer_kwargs is not None:
            # Overwrite with supplied values
            self.minimizer_kwargs.update(minimizer_kwargs)

        else:
            self.minimizer_kwargs['options'] = {'ftol': 1e-12}

        if (self.minimizer_kwargs['method'] in ('SLSQP', 'COBYLA') and
                (minimizer_kwargs is not None and
                 'constraints' not in minimizer_kwargs and
                 constraints is not None) or
                (self.g_cons is not None)):
            self.minimizer_kwargs['constraints'] = self.min_cons


        # Process options dict
        if options is not None:
            self.init_options(options)
        else:  # Default settings:
            self.f_min_true = None
            self.minimize_every_iter = False

            # Algorithm limits
            self.maxiter = None
            self.maxfev = None
            self.maxev = None
            self.maxtime = None
            self.f_min_true = None
            self.minhgrd = None

            # Objective function knowledge
            self.symmetry = False

            # Algorithm functionality
            self.local_iter = False
            self.infty_cons_sampl = True

            # Feedback
            self.disp = False


        # Remove unknown arguments in self.minimizer_kwargs
        # Start with arguments all the solvers have in common
        self.min_solver_args = ['fun', 'x0', 'args',
                                'callback', 'options', 'method']
        # then add the ones unique to specific solvers
        solver_args = {
            '_custom': ['jac', 'hess', 'hessp', 'bounds', 'constraints'],
            'nelder-mead': [],
            'powell': [],
            'cg': ['jac'],
            'bfgs': ['jac'],
            'newton-cg': ['jac', 'hess', 'hessp'],
            'l-bfgs-b': ['jac', 'bounds'],
            'tnc': ['jac', 'bounds'],
            'cobyla': ['constraints'],
            'slsqp': ['jac', 'bounds', 'constraints'],
            'dogleg': ['jac', 'hess'],
            'trust-ncg': ['jac', 'hess', 'hessp'],
            'trust-krylov': ['jac', 'hess', 'hessp'],
            'trust-exact': ['jac', 'hess'],
        }
        method = self.minimizer_kwargs['method']
        self.min_solver_args += solver_args[method.lower()]

        # Only retain the known arguments
        def _restrict_to_keys(dictionary, goodkeys):
            """Remove keys from dictionary if not in goodkeys - inplace"""
            existingkeys = set(dictionary)
            for key in existingkeys - set(goodkeys):
                dictionary.pop(key, None)

        _restrict_to_keys(self.minimizer_kwargs, self.min_solver_args)
        _restrict_to_keys(self.minimizer_kwargs['options'],
                          self.min_solver_args + ['ftol'])


        # Algorithm controls
        # Global controls
        self.stop_global = False  # Used in the stopping_criteria method
        self.break_routine = False  # Break the algorithm globally
        self.iters = iters  # Number of iterations to run
        self.iters_done = 0  # Number of iterations completed
        self.n = n  # Sampling points per iteration
        self.nc = n  # Sampling points to sample in current iteration
        self.n_prc = 0  # Processed points (used to track Delaunay iters)
        self.n_sampled = 0  # To track number of sampling points already generated
        self.fn = 0  # Number of feasible sampling points evaluations performed
        self.hgr = 0  # Homology group rank

        # Default settings if no sampling criteria.
        if self.iters is None:
            self.iters = 1
        if self.n is None:
            self.n = 100
            self.nc = self.n

        if not ((self.maxiter is None) and (self.maxfev is None) and (
                self.maxev is None)
                and (self.minhgrd is None) and (self.f_min_true is None)):
            self.iters = None


        # Set complex construction mode based on a provided stopping criteria:
        # Choose complex constructor
        if sampling_method == 'simplicial':
            self.iterate_complex = self.iterate_hypercube
            self.minimizers = self.simplex_minimizers
            self.sampling_method = sampling_method

        elif sampling_method == 'sobol' or not isinstance(sampling_method, str):
            self.iterate_complex = self.iterate_delaunay
            self.minimizers = self.delaunay_complex_minimisers
            # Sampling method used
            if sampling_method == 'sobol':
                self.sampling_method = sampling_method
                self.sampling = self.sampling_sobol
                self.Sobol = sobol_seq.Sobol()  # Init Sobol class
                if self.dim < 40:
                    self.sobol_points = self.sobol_points_40
                else:
                    self.sobol_points = self.sobol_points_10k
            else:
                # A user defined sampling method:
                # self.sampling_points = sampling_method
                self.sampling = self.sampling_custom
                self.sampling_function = sampling_method  # F(n, d)
                self.sampling_method = 'custom'

        # Local controls
        self.stop_l_iter = False  # Local minimisation iterations
        self.stop_complex_iter = False  # Sampling iterations

        # Initiate storage objects used in algorithm classes
        self.minimizer_pool = []

        # Cache of local minimizers mapped
        self.LMC = LMapCache()

        # Initialize return object
        self.res = OptimizeResult()  # scipy.optimize.OptimizeResult object
        self.res.nfev = 0  # Includes each sampling point as func evaluation
        self.res.nlfev = 0  # Local function evals for all minimisers
        self.res.nljev = 0  # Local Jacobian evals for all minimisers
        self.res.nlhev = 0  # Local Hessian evals for all minimisers


    # Initiation aids
    def init_options(self, options):
        """
        Initiates the options.

        Can also be useful to change parameters after class initiation.

        Parameters
        ----------
        options : dict

        Returns
        -------
        None

        """
        self.minimizer_kwargs['options'].update(options)
        # Default settings:
        self.minimize_every_iter = options.get('minimize_every_iter', False)

        # Algorithm limits
        # Maximum number of iterations to perform.
        self.maxiter = options.get('maxiter', None)
        # Maximum number of function evaluations in the feasible domain
        self.maxfev = options.get('maxfev', None)
        # Maximum number of sampling evaluations (includes searching in
        # infeasible points)
        self.maxev = options.get('maxev', None)
        # Maximum processing runtime allowed
        self.init = time.time()
        self.maxtime = options.get('maxtime', None)
        if 'f_min' in options:
            # Specify the minimum objective function value, if it is known.
            self.f_min_true = options['f_min']
            self.f_tol = options.get('f_tol', 1e-4)
        else:
            self.f_min_true = None

        self.minhgrd = options.get('minhgrd', None)

        # Objective function knowledge
        self.symmetry = 'symmetry' in options

        # Algorithm functionality
        # Only evaluate a few of the best candidates
        self.local_iter = options.get('local_iter', False)

        self.infty_cons_sampl = options.get('infty_constraints', True)

        # Feedback
        self.disp = options.get('disp', False)


    # Iteration properties
    # Main construction loop:
    def construct_complex(self):
        """
        Construct for `iters` iterations.

        If uniform sampling is used, every iteration adds 'n' sampling points.

        Iterations stop early if a stopping criterion (e.g., sampling points
        or processing time) has been met.

        """
        if self.disp:
            print('Splitting first generation')

        while not self.stop_global:
            if self.break_routine:
                break
            # Iterate complex, process minimisers
            self.iterate()
            self.stopping_criteria()

        # Build minimiser pool
        # Final iteration only needed if pools weren't minimised every iteration
        if not self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()

        self.res.nit = self.iters_done + 1


    def find_minima(self):
        """
        Construct the minimizer pool, map the minimizers to local minima
        and sort the results into a global return object.
        """
        self.minimizers()
        if len(self.X_min) != 0:
            # Minimize the pool of minimizers with local minimization methods
            # Note that if Options['local_iter'] is an `int` instead of default
            # value False then only that number of candidates will be minimized
            self.minimise_pool(self.local_iter)
            # Sort results and build the global return object
            self.sort_result()

            # Lowest values used to report in case of failures
            self.f_lowest = self.res.fun
            self.x_lowest = self.res.x
        else:
            self.find_lowest_vertex()


    def find_lowest_vertex(self):
        # Find the lowest objective function value on one of
        # the vertices of the simplicial complex
        if self.sampling_method == 'simplicial':
            self.f_lowest = np.inf
            for x in self.HC.V.cache:
                if self.HC.V[x].f < self.f_lowest:
                    self.f_lowest = self.HC.V[x].f
                    self.x_lowest = self.HC.V[x].x_a
            if self.f_lowest == np.inf:  # no feasible point
                self.f_lowest = None
                self.x_lowest = None
        else:
            if self.fn == 0:
                self.f_lowest = None
                self.x_lowest = None
            else:
                self.f_I = np.argsort(self.F, axis=-1)
                self.f_lowest = self.F[self.f_I[0]]
                self.x_lowest = self.C[self.f_I[0]]


    # Stopping criteria functions:
    def finite_iterations(self):
        if self.iters is not None:
            if self.iters_done >= (self.iters - 1):
                self.stop_global = True

        if self.maxiter is not None:  # Stop for infeasible sampling
            if self.iters_done >= (self.maxiter - 1):
                self.stop_global = True
        return self.stop_global

    def finite_fev(self):
        # Finite function evals in the feasible domain
        if self.fn >= self.maxfev:
            self.stop_global = True
        return self.stop_global

    def finite_ev(self):
        # Finite evaluations including infeasible sampling points
        if self.n_sampled >= self.maxev:
            self.stop_global = True

    def finite_time(self):
        if (time.time() - self.init) >= self.maxtime:
            self.stop_global = True


    def finite_precision(self):
        """
        Stop the algorithm if the final function value is known

        Specify in options (with ``self.f_min_true = options['f_min']``)
        and the tolerance with ``f_tol = options['f_tol']``
        """
        # If no minimizer has been found use the lowest sampling value
        if len(self.LMC.xl_maps) == 0:
            self.find_lowest_vertex()

        # Function to stop algorithm at specified percentage error:
        if self.f_lowest == 0.0:
            if self.f_min_true == 0.0:
                if self.f_lowest <= self.f_tol:
                    self.stop_global = True
        else:
            pe = (self.f_lowest - self.f_min_true) / abs(self.f_min_true)
            if self.f_lowest <= self.f_min_true:
                self.stop_global = True
                # if (pe - self.f_tol) <= abs(1.0 / abs(self.f_min_true)):
                if abs(pe) >= 2 * self.f_tol:
                    warnings.warn("A much lower value than the expected "
                                  "f* = {} was found: "
                                  "f_lowest = {}".format(self.f_min_true,
                                                         self.f_lowest))
            if pe <= self.f_tol:
                self.stop_global = True

        return self.stop_global


    def finite_homology_growth(self):
        if self.LMC.size == 0:
            return  # no local minima found yet; no reason to stop
        self.hgrd = self.LMC.size - self.hgr

        self.hgr = self.LMC.size
        if self.hgrd <= self.minhgrd:
            self.stop_global = True
        return self.stop_global


    def stopping_criteria(self):
        """
        Various stopping criteria run every iteration

        Returns
        -------
        stop : bool
        """
        if self.maxiter is not None:
            self.finite_iterations()
        if self.iters is not None:
            self.finite_iterations()
        if self.maxfev is not None:
            self.finite_fev()
        if self.maxev is not None:
            self.finite_ev()
        if self.maxtime is not None:
            self.finite_time()
        if self.f_min_true is not None:
            self.finite_precision()
        if self.minhgrd is not None:
            self.finite_homology_growth()


    def iterate(self):
        self.iterate_complex()

        # Build minimizer pool
        if self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()  # Process minimizer pool

        # Algorithm updates
        self.iters_done += 1


    def iterate_hypercube(self):
        """
        Iterate a subdivision of the complex

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        # Iterate the complex
        if self.n_sampled == 0:
            # Initial triangulation of the hyper-rectangle
            self.HC = Complex(self.dim, self.func, self.args,
                              self.symmetry, self.bounds, self.g_cons,
                              self.g_args)
        else:
            self.HC.split_generation()

        # feasible sampling points counted by the triangulation.py routines
        self.fn = self.HC.V.nfev
        self.n_sampled = self.HC.V.size  # nevs counted in triangulation.py
        return

    def iterate_delaunay(self):
        """
        Build a complex of Delaunay triangulated points

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        self.nc += self.n
        self.sampled_surface(infty_cons_sampl=self.infty_cons_sampl)
        self.n_sampled = self.nc
        return


    # Hypercube minimizers
    def simplex_minimizers(self):
        """
        Returns the indexes of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for x in self.HC.V.cache:
            if self.HC.V[x].minimiser():
                if self.disp:
                    logging.info('=' * 60)
                    logging.info(
                        'v.x = {} is minimizer'.format(self.HC.V[x].x_a))
                    logging.info('v.f = {} is minimizer'.format(self.HC.V[x].f))
                    logging.info('=' * 30)

                if self.HC.V[x] not in self.minimizer_pool:
                    self.minimizer_pool.append(self.HC.V[x])

                if self.disp:
                    logging.info('Neighbors:')
                    logging.info('=' * 30)
                    for vn in self.HC.V[x].nn:
                        logging.info('x = {} || f = {}'.format(vn.x, vn.f))

                    logging.info('=' * 60)

        self.minimizer_pool_F = []
        self.X_min = []
        # normalized tuple in the Vertex cache
        self.X_min_cache = {}  # Cache used in hypercube sampling

        for v in self.minimizer_pool:
            self.X_min.append(v.x_a)
            self.minimizer_pool_F.append(v.f)
            self.X_min_cache[tuple(v.x_a)] = v.x

        self.minimizer_pool_F = np.array(self.minimizer_pool_F)
        self.X_min = np.array(self.X_min)

        # TODO: Only do this if global mode
        self.sort_min_pool()

        return self.X_min


    # Local minimization
    # Minimizer pool processing
    def minimise_pool(self, force_iter=False):
        """
        This processing method can optionally minimise only the best candidate
        solutions in the minimizer pool

        Parameters
        ----------
        force_iter : int
            Number of starting minimizers to process (can be specified
            globally or locally)

        """
        # Find first local minimum
        # NOTE: Since we always minimize this value regardless it is a waste to
        # build the topograph first before minimizing
        lres_f_min = self.minimize(self.X_min[0], ind=self.minimizer_pool[0])

        # Trim minimized point from current minimizer set
        self.trim_min_pool(0)

        # Force processing to only the given number of candidates
        if force_iter:
            self.local_iter = force_iter

        while not self.stop_l_iter:
            # Global stopping criteria:
            if self.f_min_true is not None:
                if (lres_f_min.fun - self.f_min_true) / abs(
                        self.f_min_true) <= self.f_tol:
                    self.stop_l_iter = True
                    break
            # Note first iteration is outside loop:
            if self.local_iter is not None:
                if self.disp:
                    logging.info(
                        'SHGO.iters in function minimise_pool = {}'.format(
                            self.local_iter))
                self.local_iter -= 1
                if self.local_iter == 0:
                    self.stop_l_iter = True
                    break

            if np.shape(self.X_min)[0] == 0:
                self.stop_l_iter = True
                break

            # Construct topograph from current minimizer set
            # (NOTE: This is a very small topograph using only the minimizer
            # pool; it might be worth using some graph theory tools instead.)
            self.g_topograph(lres_f_min.x, self.X_min)

            # Find local minimum at the minimiser with the greatest Euclidean
            # distance from the current solution
            ind_xmin_l = self.Z[:, -1]
            lres_f_min = self.minimize(self.Ss[-1, :], self.minimizer_pool[-1])

            # Trim minimised point from current minimizer set
            self.trim_min_pool(ind_xmin_l)

        # Reset controls
        self.stop_l_iter = False
        return


    def sort_min_pool(self):
        # Sort to find minimum func value in min_pool
        self.ind_f_min = np.argsort(self.minimizer_pool_F)
        self.minimizer_pool = np.array(self.minimizer_pool)[self.ind_f_min]
        self.minimizer_pool_F = np.array(self.minimizer_pool_F)[
            self.ind_f_min]
        return

    def trim_min_pool(self, trim_ind):
        self.X_min = np.delete(self.X_min, trim_ind, axis=0)
        self.minimizer_pool_F = np.delete(self.minimizer_pool_F, trim_ind)
        self.minimizer_pool = np.delete(self.minimizer_pool, trim_ind)
        return


    def g_topograph(self, x_min, X_min):
        """
        Returns the topographical vector stemming from the specified value
        ``x_min`` for the current feasible set ``X_min``, sorted by
        increasing Euclidean distance from ``x_min``.

        """
        x_min = np.array([x_min])
        self.Y = spatial.distance.cdist(x_min, X_min, 'euclidean')
        # Find sorted indexes of spatial distances:
        self.Z = np.argsort(self.Y, axis=-1)

        self.Ss = X_min[self.Z][0]
        self.minimizer_pool = self.minimizer_pool[self.Z]
        self.minimizer_pool = self.minimizer_pool[0]
        return self.Ss

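    # A worked sketch of ``g_topograph`` (hypothetical values): with
    # x_min = [0, 0] and X_min = [[3, 0], [1, 0], [2, 0]], the sorted set is
    # self.Ss = [[1, 0], [2, 0], [3, 0]], so ``self.Ss[-1, :]`` is the
    # candidate minimiser furthest from the current solution.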

    # Local bound functions
    def construct_lcb_simplicial(self, v_min):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimizer vertex

        Returns
        -------
        cbounds : list of lists
            List of length ``dim`` with a ``[lower, upper]`` pair of bounds
            for each dimension

        """
        cbounds = [[x_b_i[0], x_b_i[1]] for x_b_i in self.bounds]
        # Loop over all bounds
        for vn in v_min.nn:
            for i, x_i in enumerate(vn.x_a):
                # Lower bound
                if (x_i < v_min.x_a[i]) and (x_i > cbounds[i][0]):
                    cbounds[i][0] = x_i

                # Upper bound
                if (x_i > v_min.x_a[i]) and (x_i < cbounds[i][1]):
                    cbounds[i][1] = x_i

        if self.disp:
            logging.info('cbounds found for v_min.x_a = {}'.format(v_min.x_a))
            logging.info('cbounds = {}'.format(cbounds))

        return cbounds

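    # Sketch of the narrowing above (hypothetical values): in 1-D with global
    # bounds [0, 10], a minimiser vertex at x = 4 whose neighbours lie at
    # x = 3 and x = 6 yields cbounds = [[3, 6]]; the local search is boxed
    # in by the nearest neighbouring vertices on each side.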

    def construct_lcb_delaunay(self, v_min, ind=None):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimizer vertex

        Returns
        -------
        cbounds : list of lists
            List of length ``dim`` with a ``[lower, upper]`` pair of bounds
            for each dimension
        """
        # Note: the Delaunay mode currently falls back to the global bounds.
        cbounds = [[x_b_i[0], x_b_i[1]] for x_b_i in self.bounds]

        return cbounds


    # Minimize a starting point locally
    def minimize(self, x_min, ind=None):
        """
        This function is used to calculate the local minima using the specified
        sampling point as a starting value.

        Parameters
        ----------
        x_min : vector of floats
            Current starting point to minimize.

        Returns
        -------
        lres : OptimizeResult
            The local optimization result represented as an `OptimizeResult`
            object.
        """
        # Use minima maps if vertex was already run
        if self.disp:
            logging.info('Vertex minimiser maps = {}'.format(self.LMC.v_maps))

        if self.LMC[x_min].lres is not None:
            return self.LMC[x_min].lres

        # TODO: Check discarded bound rules

        if self.callback is not None:
            print('Callback for '
                  'minimizer starting at {}:'.format(x_min))

        if self.disp:
            print('Starting '
                  'minimization at {}...'.format(x_min))

        if self.sampling_method == 'simplicial':
            x_min_t = tuple(x_min)
            # Find the normalized tuple in the Vertex cache:
            x_min_t_norm = self.X_min_cache[tuple(x_min_t)]

            x_min_t_norm = tuple(x_min_t_norm)

            g_bounds = self.construct_lcb_simplicial(self.HC.V[x_min_t_norm])
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

        else:
            g_bounds = self.construct_lcb_delaunay(x_min, ind=ind)
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

        if self.disp and 'bounds' in self.minimizer_kwargs:
            print('bounds in kwarg:')
            print(self.minimizer_kwargs['bounds'])

        # Local minimization using scipy.optimize.minimize:
        lres = minimize(self.func, x_min, **self.minimizer_kwargs)

        if self.disp:
            print('lres = {}'.format(lres))

        # Local function evals for all minimizers
        self.res.nlfev += lres.nfev
        if 'njev' in lres:
            self.res.nljev += lres.njev
        if 'nhev' in lres:
            self.res.nlhev += lres.nhev

        try:  # Needed to unwrap 1x1 NumPy arrays
            lres.fun = lres.fun[0]
        except (IndexError, TypeError):
            pass

        # Append minima maps
        self.LMC[x_min]  # Ensure a cache entry exists (created on access)
        self.LMC.add_res(x_min, lres, bounds=g_bounds)

        return lres


    # Post local minimization processing
    def sort_result(self):
        """
        Sort results and build the global return object
        """
        # Sort results in local minima cache
        results = self.LMC.sort_cache_result()
        self.res.xl = results['xl']
        self.res.funl = results['funl']
        self.res.x = results['x']
        self.res.fun = results['fun']

        # Add local func evals to sampling func evals
        # Count the number of feasible vertices and add to local func evals:
        self.res.nfev = self.fn + self.res.nlfev
        return self.res


    # Algorithm controls
    def fail_routine(self, mes=("Failed to converge")):
        self.break_routine = True
        self.res.success = False
        self.X_min = [None]
        self.res.message = mes


    def sampled_surface(self, infty_cons_sampl=False):
        """
        Sample the function surface.

        There are 2 modes. If ``infty_cons_sampl`` is True then the sampled
        points that are generated outside the feasible domain will be
        assigned an ``inf`` value in accordance with SHGO rules.
        This guarantees convergence and usually requires fewer objective
        function evaluations at the computational cost of more Delaunay
        triangulation points.

        If ``infty_cons_sampl`` is False, then the infeasible points are
        discarded and only a subspace of the sampled points is used. This
        comes at the cost of the loss of guaranteed convergence and usually
        requires more objective function evaluations.
        """
        # Generate sampling points
        if self.disp:
            print('Generating sampling points')
        self.sampling(self.nc, self.dim)

        if not infty_cons_sampl:
            # Find subspace of feasible points
            if self.g_cons is not None:
                self.sampling_subspace()

        # Sort remaining samples
        self.sorted_samples()

        # Find objective function references
        self.fun_ref()

        self.n_sampled = self.nc


    def delaunay_complex_minimisers(self):
        # Construct complex minimizers on the current sampling set.
        # if self.fn >= (self.dim + 1):
        if self.fn >= (self.dim + 2):
            # TODO: Check on strange Qhull error where the number of vertices
            # required for an initial simplex is higher than n + 1?
            if self.dim < 2:  # Scalar objective functions
                if self.disp:
                    print('Constructing 1-D minimizer pool')

                self.ax_subspace()
                self.surface_topo_ref()
                self.minimizers_1D()

            else:  # Multivariate functions.
                if self.disp:
                    print('Constructing Gabriel graph and minimizer pool')

                if self.iters == 1:
                    self.delaunay_triangulation(grow=False)
                else:
                    self.delaunay_triangulation(grow=True, n_prc=self.n_prc)
                    self.n_prc = self.C.shape[0]

                if self.disp:
                    print('Triangulation completed, building minimizer pool')

                self.delaunay_minimizers()

            if self.disp:
                logging.info(
                    "Minimizer pool = SHGO.X_min = {}".format(self.X_min))
        else:
            if self.disp:
                print(
                    'Not enough sampling points found in the feasible domain.')
            self.minimizer_pool = [None]
            try:
                self.X_min
            except AttributeError:
                self.X_min = []


    def sobol_points_40(self, n, d, skip=0):
        """
        Wrapper for ``sobol_seq.i4_sobol_generate``

        Generate N sampling points in D dimensions
        """
        # Pass ``skip`` through so previously generated points are not reused
        points = self.Sobol.i4_sobol_generate(d, n, skip=skip)

        return points


    def sobol_points_10k(self, N, D):
        """
        sobol.cc by Frances Kuo and Stephen Joe translated to Python 3 by
        Carl Sandrock 2016-03-31

        The original program is available and described at
        https://web.maths.unsw.edu.au/~fkuo/sobol/
        """
        import gzip
        import os
        path = os.path.join(os.path.dirname(__file__), '_shgo_lib',
                            'sobol_vec.gz')
        f = gzip.open(path, 'rb')
        unsigned = "uint64"
        # swallow header
        next(f)

        L = int(np.log(N) // np.log(2.0)) + 1

        C = np.ones(N, dtype=unsigned)
        for i in range(1, N):
            value = i
            while value & 1:
                value >>= 1
                C[i] += 1

        points = np.zeros((N, D), dtype='double')

        # XXX: This appears not to set the first element of V
        V = np.empty(L + 1, dtype=unsigned)
        for i in range(1, L + 1):
            V[i] = 1 << (32 - i)

        X = np.empty(N, dtype=unsigned)
        X[0] = 0
        for i in range(1, N):
            X[i] = X[i - 1] ^ V[C[i - 1]]
            points[i, 0] = X[i] / 2 ** 32

        for j in range(1, D):
            F_int = [int(item) for item in next(f).strip().split()]
            (_, s, a), m = F_int[:3], [0] + F_int[3:]

            if L <= s:
                for i in range(1, L + 1):
                    V[i] = m[i] << (32 - i)
            else:
                for i in range(1, s + 1):
                    V[i] = m[i] << (32 - i)
                for i in range(s + 1, L + 1):
                    V[i] = V[i - s] ^ (
                            V[i - s] >> np.array(s, dtype=unsigned))
                    for k in range(1, s):
                        V[i] ^= np.array(
                            (((a >> (s - 1 - k)) & 1) * V[i - k]),
                            dtype=unsigned)

            X[0] = 0
            for i in range(1, N):
                X[i] = X[i - 1] ^ V[C[i - 1]]
                points[i, j] = X[i] / 2 ** 32  # *** the actual points

        f.close()
        return points


    def sampling_sobol(self, n, dim):
        """
        Generates uniform sampling points in a hypercube and scales the points
        to the bound limits.
        """
        # Generate sampling points.
        # Generate uniform sample points in [0, 1]^m \subset R^m
        if self.n_sampled == 0:
            self.C = self.sobol_points(n, dim)
        else:
            self.C = self.sobol_points(n, dim, skip=self.n_sampled)
        # Distribute over bounds
        for i in range(len(self.bounds)):
            self.C[:, i] = (self.C[:, i] *
                            (self.bounds[i][1] - self.bounds[i][0])
                            + self.bounds[i][0])
        return self.C

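    # Sketch of the affine rescaling above (hypothetical values): with
    # bounds [(0, 2), (-1, 1)], the unit-cube point (0.5, 0.25) maps to
    # (0.5 * 2 + 0, 0.25 * 2 + (-1)) = (1.0, -0.5).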

    def sampling_custom(self, n, dim):
        """
        Generates uniform sampling points in a hypercube and scales the points
        to the bound limits.
        """
        # Generate sampling points.
        # Generate uniform sample points in [0, 1]^m \subset R^m
        self.C = self.sampling_function(n, dim)
        # Distribute over bounds
        for i in range(len(self.bounds)):
            self.C[:, i] = (self.C[:, i] *
                            (self.bounds[i][1] - self.bounds[i][0])
                            + self.bounds[i][0])
        return self.C


    def sampling_subspace(self):
        """Find subspace of feasible points from g_func definition"""
        # Subspace of feasible points.
        for ind, g in enumerate(self.g_cons):
            self.C = self.C[g(self.C.T, *self.g_args[ind]) >= 0.0]
            if self.C.size == 0:
                self.res.message = ('No sampling point found within the '
                                    + 'feasible set. Increasing sampling '
                                    + 'size.')
                # sampling correctly for both 1-D and >1-D cases
                if self.disp:
                    print(self.res.message)


    def sorted_samples(self):  # Validated
        """Find indexes of the sorted sampling points"""
        self.Ind_sorted = np.argsort(self.C, axis=0)
        self.Xs = self.C[self.Ind_sorted]
        return self.Ind_sorted, self.Xs

    def ax_subspace(self):  # Validated
        """
        Finds the subspace vectors along each component axis.
        """
        self.Ci = []
        self.Xs_i = []
        self.Ii = []
        for i in range(self.dim):
            self.Ci.append(self.C[:, i])
            self.Ii.append(self.Ind_sorted[:, i])
            self.Xs_i.append(self.Xs[:, i])


    def fun_ref(self):
        """
        Find the objective function output reference table
        """
        # TODO: Replace with cached wrapper

        # Note: This process can be pooled easily
        # Obj. function returns to be used as reference table:
        f_cache_bool = False
        if self.fn > 0:  # Store old function evaluations
            Ftemp = self.F
            fn_old = self.fn
            f_cache_bool = True

        self.F = np.zeros(np.shape(self.C)[0])
        # NOTE: It might be easier to replace this with a cached
        # objective function
        for i in range(self.fn, np.shape(self.C)[0]):
            eval_f = True
            if self.g_cons is not None:
                for g in self.g_cons:
                    if g(self.C[i, :], *self.args) < 0.0:
                        eval_f = False
                        break  # Breaks the g loop

            if eval_f:
                self.F[i] = self.func(self.C[i, :], *self.args)
                self.fn += 1
            elif self.infty_cons_sampl:
                self.F[i] = np.inf
                self.fn += 1
        if f_cache_bool:
            if fn_old > 0:  # Restore saved function evaluations
                self.F[0:fn_old] = Ftemp

        return self.F


    def surface_topo_ref(self):  # Validated
        """
        Find the BD and FD finite differences along each component vector.
        """
        # Replace numpy inf, -inf and nan objects with floating point numbers
        # nan --> float
        self.F[np.isnan(self.F)] = np.inf
        # inf, -inf --> floats
        self.F = np.nan_to_num(self.F)

        self.Ft = self.F[self.Ind_sorted]
        self.Ftp = np.diff(self.Ft, axis=0)  # FD
        self.Ftm = np.diff(self.Ft[::-1], axis=0)[::-1]  # BD


    def sample_topo(self, ind):
        # Find the position of the sample in the component axial directions
        self.Xi_ind_pos = []
        self.Xi_ind_topo_i = []

        for i in range(self.dim):
            for x, I_ind in zip(self.Ii[i], range(len(self.Ii[i]))):
                if x == ind:
                    self.Xi_ind_pos.append(I_ind)

            # Use the topo reference tables to find if point is a minimizer on
            # the current axis

            # First check if index is on the boundary of the sampling points:
            if self.Xi_ind_pos[i] == 0:
                # if boundary is in basin
                self.Xi_ind_topo_i.append(self.Ftp[:, i][0] > 0)

            elif self.Xi_ind_pos[i] == self.fn - 1:
                # Largest value at sample size
                self.Xi_ind_topo_i.append(self.Ftp[:, i][self.fn - 2] < 0)

            # Find axial reference for other points
            else:
                Xi_ind_top_p = self.Ftp[:, i][self.Xi_ind_pos[i]] > 0
                Xi_ind_top_m = self.Ftm[:, i][self.Xi_ind_pos[i] - 1] > 0
                self.Xi_ind_topo_i.append(Xi_ind_top_p and Xi_ind_top_m)

        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo

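    # Sketch of the 1-D test above (hypothetical values): for sorted samples
    # with f = [3, 1, 2], the middle point has a positive forward difference
    # (Ftp) and a positive backward difference (Ftm), so ``sample_topo``
    # flags it as a minimiser of the sampled surface.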

    def minimizers_1D(self):
        """
        Returns the indices of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for ind in range(self.fn):
            min_bool = self.sample_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()
        if not len(self.minimizer_pool) == 0:
            self.X_min = self.C[self.minimizer_pool]
            # If function is called again and pool is found unbreak:
        else:
            self.X_min = []

        return self.X_min


    def delaunay_triangulation(self, grow=False, n_prc=0):
        if not grow:
            self.Tri = spatial.Delaunay(self.C)
        else:
            if hasattr(self, 'Tri'):
                self.Tri.add_points(self.C[n_prc:, :])
            else:
                self.Tri = spatial.Delaunay(self.C, incremental=True)

        return self.Tri


    @staticmethod
    def find_neighbors_delaunay(pindex, triang):
        """
        Returns the indices of points connected to ``pindex`` on the Gabriel
        chain subgraph of the Delaunay triangulation.
        """
        return triang.vertex_neighbor_vertices[1][
               triang.vertex_neighbor_vertices[0][pindex]:
               triang.vertex_neighbor_vertices[0][pindex + 1]]

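    # Usage sketch: for a scipy.spatial.Delaunay triangulation ``tri``,
    # ``find_neighbors_delaunay(0, tri)`` slices the CSR-style
    # ``vertex_neighbor_vertices`` (indptr, indices) pair to return the
    # indices of every vertex sharing a simplex with point 0.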

    def sample_delaunay_topo(self, ind):
        self.Xi_ind_topo_i = []

        # Find the position of the sample in the component Gabriel chain
        G_ind = self.find_neighbors_delaunay(ind, self.Tri)

        # Find the finite difference between each point
        for g_i in G_ind:
            rel_topo_bool = self.F[ind] < self.F[g_i]
            self.Xi_ind_topo_i.append(rel_topo_bool)

        # Check if minimizer
        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo


    def delaunay_minimizers(self):
        """
        Returns the indices of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can easily be parallelized
        if self.disp:
            logging.info('self.fn = {}'.format(self.fn))
            logging.info('self.nc = {}'.format(self.nc))
            logging.info('np.shape(self.C)'
                         ' = {}'.format(np.shape(self.C)))
        for ind in range(self.fn):
            min_bool = self.sample_delaunay_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()
        if self.disp:
            logging.info('self.minimizer_pool = {}'.format(self.minimizer_pool))
        if not len(self.minimizer_pool) == 0:
            self.X_min = self.C[self.minimizer_pool]
        else:
            self.X_min = []  # Empty pool breaks main routine
        return self.X_min


class LMap:
    def __init__(self, v):
        self.v = v
        self.x_l = None
        self.lres = None
        self.f_min = None
        self.lbounds = []


class LMapCache:
    def __init__(self):
        self.cache = {}

        # Lists for search queries
        self.v_maps = []
        self.xl_maps = []
        self.f_maps = []
        self.lbound_maps = []
        self.size = 0

    def __getitem__(self, v):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        try:
            return self.cache[v]
        except KeyError:
            xval = LMap(v)
            self.cache[v] = xval

            return self.cache[v]

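    # Sketch of the cache-on-miss behaviour above (hypothetical values):
    # ``LMC[np.array([0.5, 0.5])]`` returns the existing LMap keyed on the
    # tuple (0.5, 0.5) if present; otherwise it creates, stores and returns
    # a fresh LMap for that starting point.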

    def add_res(self, v, lres, bounds=None):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        self.cache[v].x_l = lres.x
        self.cache[v].lres = lres
        self.cache[v].f_min = lres.fun
        self.cache[v].lbounds = bounds

        # Update cache size
        self.size += 1

        # Cache lists for search queries
        self.v_maps.append(v)
        self.xl_maps.append(lres.x)
        self.f_maps.append(lres.fun)
        self.lbound_maps.append(bounds)


    def sort_cache_result(self):
        """
        Sort results and build the global return object
        """
        results = {}
        # Sort results and save
        self.xl_maps = np.array(self.xl_maps)
        self.f_maps = np.array(self.f_maps)

        # Sorted indexes in Func_min
        ind_sorted = np.argsort(self.f_maps)

        # Save ordered list of minima
        results['xl'] = self.xl_maps[ind_sorted]  # Ordered x vals
        results['funl'] = self.f_maps[ind_sorted]
        results['funl'] = results['funl'].T

        # Find global of all minimizers
        results['x'] = self.xl_maps[ind_sorted[0]]  # Save global minima
        results['fun'] = self.f_maps[ind_sorted[0]]  # Save global fun value

        self.xl_maps = np.ndarray.tolist(self.xl_maps)
        self.f_maps = np.ndarray.tolist(self.f_maps)
        return results