import numpy as np
import pytest
from scipy.linalg import block_diag
from scipy.sparse import csc_matrix
from numpy.testing import (TestCase, assert_array_almost_equal,
                           assert_array_less, assert_, assert_allclose,
                           suppress_warnings)
from pytest import raises
from scipy.optimize import (NonlinearConstraint,
                            LinearConstraint,
                            Bounds,
                            minimize,
                            BFGS,
                            SR1)
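
# The classes below bundle constrained test problems: each exposes an
# objective (``fun``), analytic derivatives (``grad``, ``hess``), a starting
# point ``x0``, optional ``bounds``, and a ``constr`` property returning the
# problem's constraints.  The tests feed them to ``minimize`` roughly like
# this (a minimal sketch of the pattern used in TestTrustRegionConstr below):
#
#     prob = Maratos()
#     res = minimize(prob.fun, prob.x0, method='trust-constr',
#                    jac=prob.grad, hess=prob.hess,
#                    bounds=prob.bounds, constraints=prob.constr)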


class Maratos:
    """Problem 15.4 from Nocedal and Wright

    The following optimization problem:
        minimize 2*(x[0]**2 + x[1]**2 - 1) - x[0]
        Subject to: x[0]**2 + x[1]**2 - 1 = 0
    """

    def __init__(self, degrees=60, constr_jac=None, constr_hess=None):
        rads = degrees/180*np.pi
        self.x0 = [np.cos(rads), np.sin(rads)]
        self.x_opt = np.array([1.0, 0.0])
        self.constr_jac = constr_jac
        self.constr_hess = constr_hess
        self.bounds = None

    def fun(self, x):
        return 2*(x[0]**2 + x[1]**2 - 1) - x[0]

    def grad(self, x):
        return np.array([4*x[0]-1, 4*x[1]])

    def hess(self, x):
        return 4*np.eye(2)

    @property
    def constr(self):
        def fun(x):
            return x[0]**2 + x[1]**2

        if self.constr_jac is None:
            def jac(x):
                return [[2*x[0], 2*x[1]]]
        else:
            jac = self.constr_jac

        if self.constr_hess is None:
            def hess(x, v):
                return 2*v[0]*np.eye(2)
        else:
            hess = self.constr_hess

        return NonlinearConstraint(fun, 1, 1, jac, hess)
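
# Note: the unit-circle equality constraint above is expressed as
# NonlinearConstraint(fun, 1, 1, ...), i.e. lb == ub == 1, and the known
# solution is x_opt = (1, 0).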


class MaratosTestArgs:
    """Problem 15.4 from Nocedal and Wright

    The following optimization problem:
        minimize 2*(x[0]**2 + x[1]**2 - 1) - x[0]
        Subject to: x[0]**2 + x[1]**2 - 1 = 0
    """

    def __init__(self, a, b, degrees=60, constr_jac=None, constr_hess=None):
        rads = degrees/180*np.pi
        self.x0 = [np.cos(rads), np.sin(rads)]
        self.x_opt = np.array([1.0, 0.0])
        self.constr_jac = constr_jac
        self.constr_hess = constr_hess
        self.a = a
        self.b = b
        self.bounds = None

    def _test_args(self, a, b):
        if self.a != a or self.b != b:
            raise ValueError()

    def fun(self, x, a, b):
        self._test_args(a, b)
        return 2*(x[0]**2 + x[1]**2 - 1) - x[0]

    def grad(self, x, a, b):
        self._test_args(a, b)
        return np.array([4*x[0]-1, 4*x[1]])

    def hess(self, x, a, b):
        self._test_args(a, b)
        return 4*np.eye(2)

    @property
    def constr(self):
        def fun(x):
            return x[0]**2 + x[1]**2

        if self.constr_jac is None:
            def jac(x):
                return [[4*x[0], 4*x[1]]]
        else:
            jac = self.constr_jac

        if self.constr_hess is None:
            def hess(x, v):
                return 2*v[0]*np.eye(2)
        else:
            hess = self.constr_hess

        return NonlinearConstraint(fun, 1, 1, jac, hess)
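
# MaratosTestArgs verifies that extra ``args`` passed to ``minimize`` are
# forwarded to fun/grad/hess: ``_test_args`` raises ValueError if the
# received (a, b) do not match the values given at construction.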


class MaratosGradInFunc:
    """Problem 15.4 from Nocedal and Wright

    The following optimization problem:
        minimize 2*(x[0]**2 + x[1]**2 - 1) - x[0]
        Subject to: x[0]**2 + x[1]**2 - 1 = 0
    """

    def __init__(self, degrees=60, constr_jac=None, constr_hess=None):
        rads = degrees/180*np.pi
        self.x0 = [np.cos(rads), np.sin(rads)]
        self.x_opt = np.array([1.0, 0.0])
        self.constr_jac = constr_jac
        self.constr_hess = constr_hess
        self.bounds = None

    def fun(self, x):
        return (2*(x[0]**2 + x[1]**2 - 1) - x[0],
                np.array([4*x[0]-1, 4*x[1]]))

    @property
    def grad(self):
        return True

    def hess(self, x):
        return 4*np.eye(2)

    @property
    def constr(self):
        def fun(x):
            return x[0]**2 + x[1]**2

        if self.constr_jac is None:
            def jac(x):
                return [[4*x[0], 4*x[1]]]
        else:
            jac = self.constr_jac

        if self.constr_hess is None:
            def hess(x, v):
                return 2*v[0]*np.eye(2)
        else:
            hess = self.constr_hess

        return NonlinearConstraint(fun, 1, 1, jac, hess)
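
# In MaratosGradInFunc, ``fun`` returns the pair (objective, gradient) and the
# ``grad`` property returns True, so the tests can pass ``jac=True`` to
# ``minimize`` (via ``jac=prob.grad``).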


class HyperbolicIneq:
    """Problem 15.1 from Nocedal and Wright

    The following optimization problem:
        minimize 1/2*(x[0] - 2)**2 + 1/2*(x[1] - 1/2)**2
        Subject to: 1/(x[0] + 1) - x[1] >= 1/4
                    x[0] >= 0
                    x[1] >= 0
    """

    def __init__(self, constr_jac=None, constr_hess=None):
        self.x0 = [0, 0]
        self.x_opt = [1.952823, 0.088659]
        self.constr_jac = constr_jac
        self.constr_hess = constr_hess
        self.bounds = Bounds(0, np.inf)

    def fun(self, x):
        return 1/2*(x[0] - 2)**2 + 1/2*(x[1] - 1/2)**2

    def grad(self, x):
        return [x[0] - 2, x[1] - 1/2]

    def hess(self, x):
        return np.eye(2)

    @property
    def constr(self):
        def fun(x):
            return 1/(x[0] + 1) - x[1]

        if self.constr_jac is None:
            def jac(x):
                return [[-1/(x[0] + 1)**2, -1]]
        else:
            jac = self.constr_jac

        if self.constr_hess is None:
            def hess(x, v):
                return 2*v[0]*np.array([[1/(x[0] + 1)**3, 0],
                                        [0, 0]])
        else:
            hess = self.constr_hess

        return NonlinearConstraint(fun, 0.25, np.inf, jac, hess)
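
# HyperbolicIneq combines simple bounds (x >= 0 via Bounds(0, np.inf)) with
# the nonlinear inequality 1/(x[0] + 1) - x[1] >= 1/4, encoded as a
# NonlinearConstraint with lb=0.25 and ub=np.inf.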


class Rosenbrock:
    """Rosenbrock function.

    The following optimization problem:
        minimize sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0)
    """

    def __init__(self, n=2, random_state=0):
        rng = np.random.RandomState(random_state)
        self.x0 = rng.uniform(-1, 1, n)
        self.x_opt = np.ones(n)
        self.bounds = None

    def fun(self, x):
        x = np.asarray(x)
        r = np.sum(100.0 * (x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0,
                   axis=0)
        return r

    def grad(self, x):
        x = np.asarray(x)
        xm = x[1:-1]
        xm_m1 = x[:-2]
        xm_p1 = x[2:]
        der = np.zeros_like(x)
        der[1:-1] = (200 * (xm - xm_m1**2) -
                     400 * (xm_p1 - xm**2) * xm - 2 * (1 - xm))
        der[0] = -400 * x[0] * (x[1] - x[0]**2) - 2 * (1 - x[0])
        der[-1] = 200 * (x[-1] - x[-2]**2)
        return der

    def hess(self, x):
        x = np.atleast_1d(x)
        H = np.diag(-400 * x[:-1], 1) - np.diag(400 * x[:-1], -1)
        diagonal = np.zeros(len(x), dtype=x.dtype)
        diagonal[0] = 1200 * x[0]**2 - 400 * x[1] + 2
        diagonal[-1] = 200
        diagonal[1:-1] = 202 + 1200 * x[1:-1]**2 - 400 * x[2:]
        H = H + np.diag(diagonal)
        return H

    @property
    def constr(self):
        return ()
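
# The unconstrained Rosenbrock problem returns an empty tuple from ``constr``,
# which ``minimize`` accepts as "no constraints".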


class IneqRosenbrock(Rosenbrock):
    """Rosenbrock subject to inequality constraints.

    The following optimization problem:
        minimize sum(100.0*(x[1] - x[0]**2)**2.0 + (1 - x[0])**2)
        subject to: x[0] + 2 x[1] <= 1

    Taken from matlab ``fmincon`` documentation.
    """

    def __init__(self, random_state=0):
        Rosenbrock.__init__(self, 2, random_state)
        self.x0 = [-1, -0.5]
        self.x_opt = [0.5022, 0.2489]
        self.bounds = None

    @property
    def constr(self):
        A = [[1, 2]]
        b = 1
        return LinearConstraint(A, -np.inf, b)


class BoundedRosenbrock(Rosenbrock):
    """Rosenbrock subject to bound constraints.

    The following optimization problem:
        minimize sum(100.0*(x[1] - x[0]**2)**2.0 + (1 - x[0])**2)
        subject to: -2 <= x[0] <= 0
                     0 <= x[1] <= 2

    Taken from matlab ``fmincon`` documentation.
    """

    def __init__(self, random_state=0):
        Rosenbrock.__init__(self, 2, random_state)
        self.x0 = [-0.2, 0.2]
        self.x_opt = None
        self.bounds = Bounds([-2, 0], [0, 2])


class EqIneqRosenbrock(Rosenbrock):
    """Rosenbrock subject to equality and inequality constraints.

    The following optimization problem:
        minimize sum(100.0*(x[1] - x[0]**2)**2.0 + (1 - x[0])**2)
        subject to: x[0] + 2 x[1] <= 1
                    2 x[0] + x[1] = 1

    Taken from matlab ``fmincon`` documentation.
    """

    def __init__(self, random_state=0):
        Rosenbrock.__init__(self, 2, random_state)
        self.x0 = [-1, -0.5]
        self.x_opt = [0.41494, 0.17011]
        self.bounds = None

    @property
    def constr(self):
        A_ineq = [[1, 2]]
        b_ineq = 1
        A_eq = [[2, 1]]
        b_eq = 1
        return (LinearConstraint(A_ineq, -np.inf, b_ineq),
                LinearConstraint(A_eq, b_eq, b_eq))
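
# EqIneqRosenbrock returns a tuple of LinearConstraint objects; the equality
# 2*x[0] + x[1] = 1 is written as a LinearConstraint with lb == ub == b_eq.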


class Elec:
    """Distribution of electrons on a sphere.

    Problem no 2 from COPS collection [1]_. Find
    the equilibrium state distribution (of minimal
    potential) of the electrons positioned on a
    conducting sphere.

    References
    ----------
    .. [1] E. D. Dolan, J. J. Moré, and T. S. Munson,
           "Benchmarking optimization software with COPS 3.0.",
           Argonne National Lab., Argonne, IL (US), 2004.
    """

    def __init__(self, n_electrons=200, random_state=0,
                 constr_jac=None, constr_hess=None):
        self.n_electrons = n_electrons
        self.rng = np.random.RandomState(random_state)
        # Initial Guess
        phi = self.rng.uniform(0, 2 * np.pi, self.n_electrons)
        theta = self.rng.uniform(-np.pi, np.pi, self.n_electrons)
        x = np.cos(theta) * np.cos(phi)
        y = np.cos(theta) * np.sin(phi)
        z = np.sin(theta)
        self.x0 = np.hstack((x, y, z))
        self.x_opt = None
        self.constr_jac = constr_jac
        self.constr_hess = constr_hess
        self.bounds = None

    def _get_coordinates(self, x):
        x_coord = x[:self.n_electrons]
        y_coord = x[self.n_electrons:2 * self.n_electrons]
        z_coord = x[2 * self.n_electrons:]
        return x_coord, y_coord, z_coord

    def _compute_coordinate_deltas(self, x):
        x_coord, y_coord, z_coord = self._get_coordinates(x)
        dx = x_coord[:, None] - x_coord
        dy = y_coord[:, None] - y_coord
        dz = z_coord[:, None] - z_coord
        return dx, dy, dz

    def fun(self, x):
        dx, dy, dz = self._compute_coordinate_deltas(x)
        with np.errstate(divide='ignore'):
            dm1 = (dx**2 + dy**2 + dz**2) ** -0.5
        dm1[np.diag_indices_from(dm1)] = 0
        return 0.5 * np.sum(dm1)

    def grad(self, x):
        dx, dy, dz = self._compute_coordinate_deltas(x)

        with np.errstate(divide='ignore'):
            dm3 = (dx**2 + dy**2 + dz**2) ** -1.5
        dm3[np.diag_indices_from(dm3)] = 0

        grad_x = -np.sum(dx * dm3, axis=1)
        grad_y = -np.sum(dy * dm3, axis=1)
        grad_z = -np.sum(dz * dm3, axis=1)

        return np.hstack((grad_x, grad_y, grad_z))

    def hess(self, x):
        dx, dy, dz = self._compute_coordinate_deltas(x)
        d = (dx**2 + dy**2 + dz**2) ** 0.5

        with np.errstate(divide='ignore'):
            dm3 = d ** -3
            dm5 = d ** -5

        i = np.arange(self.n_electrons)
        dm3[i, i] = 0
        dm5[i, i] = 0

        Hxx = dm3 - 3 * dx**2 * dm5
        Hxx[i, i] = -np.sum(Hxx, axis=1)

        Hxy = -3 * dx * dy * dm5
        Hxy[i, i] = -np.sum(Hxy, axis=1)

        Hxz = -3 * dx * dz * dm5
        Hxz[i, i] = -np.sum(Hxz, axis=1)

        Hyy = dm3 - 3 * dy**2 * dm5
        Hyy[i, i] = -np.sum(Hyy, axis=1)

        Hyz = -3 * dy * dz * dm5
        Hyz[i, i] = -np.sum(Hyz, axis=1)

        Hzz = dm3 - 3 * dz**2 * dm5
        Hzz[i, i] = -np.sum(Hzz, axis=1)

        H = np.vstack((
            np.hstack((Hxx, Hxy, Hxz)),
            np.hstack((Hxy, Hyy, Hyz)),
            np.hstack((Hxz, Hyz, Hzz))
        ))

        return H

    @property
    def constr(self):
        def fun(x):
            x_coord, y_coord, z_coord = self._get_coordinates(x)
            return x_coord**2 + y_coord**2 + z_coord**2 - 1

        if self.constr_jac is None:
            def jac(x):
                x_coord, y_coord, z_coord = self._get_coordinates(x)
                Jx = 2 * np.diag(x_coord)
                Jy = 2 * np.diag(y_coord)
                Jz = 2 * np.diag(z_coord)
                return csc_matrix(np.hstack((Jx, Jy, Jz)))
        else:
            jac = self.constr_jac

        if self.constr_hess is None:
            def hess(x, v):
                D = 2 * np.diag(v)
                return block_diag(D, D, D)
        else:
            hess = self.constr_hess

        return NonlinearConstraint(fun, -np.inf, 0, jac, hess)
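
# As coded here, each electron must lie on or inside the unit sphere:
# x**2 + y**2 + z**2 - 1 <= 0, one constraint per electron.  The constraint
# Jacobian is returned as a sparse csc_matrix, and its Hessian contribution is
# the block-diagonal matrix block_diag(D, D, D) with D = 2*diag(v).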


class TestTrustRegionConstr(TestCase):

    @pytest.mark.slow
    def test_list_of_problems(self):
        list_of_problems = [Maratos(),
                            Maratos(constr_hess='2-point'),
                            Maratos(constr_hess=SR1()),
                            Maratos(constr_jac='2-point', constr_hess=SR1()),
                            MaratosGradInFunc(),
                            HyperbolicIneq(),
                            HyperbolicIneq(constr_hess='3-point'),
                            HyperbolicIneq(constr_hess=BFGS()),
                            HyperbolicIneq(constr_jac='3-point',
                                           constr_hess=BFGS()),
                            Rosenbrock(),
                            IneqRosenbrock(),
                            EqIneqRosenbrock(),
                            BoundedRosenbrock(),
                            Elec(n_electrons=2),
                            Elec(n_electrons=2, constr_hess='2-point'),
                            Elec(n_electrons=2, constr_hess=SR1()),
                            Elec(n_electrons=2, constr_jac='3-point',
                                 constr_hess=SR1())]

        for prob in list_of_problems:
            for grad in (prob.grad, '3-point', False):
                for hess in (prob.hess,
                             '3-point',
                             SR1(),
                             BFGS(exception_strategy='damp_update'),
                             BFGS(exception_strategy='skip_update')):
                    # Remove exceptions
                    if grad in ('2-point', '3-point', 'cs', False) and \
                       hess in ('2-point', '3-point', 'cs'):
                        continue
                    if prob.grad is True and grad in ('3-point', False):
                        continue
                    with suppress_warnings() as sup:
                        sup.filter(UserWarning, "delta_grad == 0.0")
                        result = minimize(prob.fun, prob.x0,
                                          method='trust-constr',
                                          jac=grad, hess=hess,
                                          bounds=prob.bounds,
                                          constraints=prob.constr)

                    if prob.x_opt is not None:
                        assert_array_almost_equal(result.x, prob.x_opt,
                                                  decimal=5)

                    # gtol
                    if result.status == 1:
                        assert_array_less(result.optimality, 1e-8)
                    # xtol
                    if result.status == 2:
                        assert_array_less(result.tr_radius, 1e-8)

                        if result.method == "tr_interior_point":
                            assert_array_less(result.barrier_parameter, 1e-8)
                    # max iter
                    if result.status in (0, 3):
                        raise RuntimeError("Invalid termination condition.")

    def test_default_jac_and_hess(self):
        def fun(x):
            return (x - 1) ** 2
        bounds = [(-2, 2)]
        res = minimize(fun, x0=[-1.5], bounds=bounds, method='trust-constr')
        assert_array_almost_equal(res.x, 1, decimal=5)

    def test_default_hess(self):
        def fun(x):
            return (x - 1) ** 2
        bounds = [(-2, 2)]
        res = minimize(fun, x0=[-1.5], bounds=bounds, method='trust-constr',
                       jac='2-point')
        assert_array_almost_equal(res.x, 1, decimal=5)

    def test_no_constraints(self):
        prob = Rosenbrock()
        result = minimize(prob.fun, prob.x0,
                          method='trust-constr',
                          jac=prob.grad, hess=prob.hess)
        result1 = minimize(prob.fun, prob.x0,
                           method='L-BFGS-B',
                           jac='2-point')
        result2 = minimize(prob.fun, prob.x0,
                           method='L-BFGS-B',
                           jac='3-point')
        assert_array_almost_equal(result.x, prob.x_opt, decimal=5)
        assert_array_almost_equal(result1.x, prob.x_opt, decimal=5)
        assert_array_almost_equal(result2.x, prob.x_opt, decimal=5)

    def test_hessp(self):
        prob = Maratos()

        def hessp(x, p):
            H = prob.hess(x)
            return H.dot(p)

        result = minimize(prob.fun, prob.x0,
                          method='trust-constr',
                          jac=prob.grad, hessp=hessp,
                          bounds=prob.bounds,
                          constraints=prob.constr)
        if prob.x_opt is not None:
            assert_array_almost_equal(result.x, prob.x_opt, decimal=2)

        # gtol
        if result.status == 1:
            assert_array_less(result.optimality, 1e-8)
        # xtol
        if result.status == 2:
            assert_array_less(result.tr_radius, 1e-8)

            if result.method == "tr_interior_point":
                assert_array_less(result.barrier_parameter, 1e-8)
        # max iter
        if result.status in (0, 3):
            raise RuntimeError("Invalid termination condition.")

    def test_args(self):
        prob = MaratosTestArgs("a", 234)

        result = minimize(prob.fun, prob.x0, ("a", 234),
                          method='trust-constr',
                          jac=prob.grad, hess=prob.hess,
                          bounds=prob.bounds,
                          constraints=prob.constr)
        if prob.x_opt is not None:
            assert_array_almost_equal(result.x, prob.x_opt, decimal=2)

        # gtol
        if result.status == 1:
            assert_array_less(result.optimality, 1e-8)
        # xtol
        if result.status == 2:
            assert_array_less(result.tr_radius, 1e-8)

            if result.method == "tr_interior_point":
                assert_array_less(result.barrier_parameter, 1e-8)
        # max iter
        if result.status in (0, 3):
            raise RuntimeError("Invalid termination condition.")

    def test_raise_exception(self):
        prob = Maratos()
        raises(ValueError, minimize, prob.fun, prob.x0, method='trust-constr',
               jac='2-point', hess='2-point', constraints=prob.constr)
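
    # The test above checks that trust-constr rejects finite-difference
    # approximations for the gradient and the Hessian at the same time
    # (jac='2-point' together with hess='2-point' raises ValueError).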

    def test_issue_9044(self):
        # https://github.com/scipy/scipy/issues/9044
        # Test the returned `OptimizeResult` contains keys consistent with
        # other solvers.

        def callback(x, info):
            assert_('nit' in info)
            assert_('niter' in info)

        result = minimize(lambda x: x**2, [0], jac=lambda x: 2*x,
                          hess=lambda x: 2, callback=callback,
                          method='trust-constr')
        assert_(result.get('success'))
        assert_(result.get('nit', -1) == 1)

        # Also check existence of the 'niter' attribute, for backward
        # compatibility
        assert_(result.get('niter', -1) == 1)


class TestEmptyConstraint(TestCase):
    """
    Here we minimize x^2+y^2 subject to x^2-y^2>1.
    The actual minimum is at (0, 0) which fails the constraint.
    Therefore we will find a minimum on the boundary at (+/-1, 0).

    When minimizing on the boundary, optimize uses a set of
    constraints that removes the constraint that sets that
    boundary.  In our case, there's only one constraint, so
    the result is an empty constraint.

    This tests that the empty constraint works.
    """

    def test_empty_constraint(self):

        def function(x):
            return x[0]**2 + x[1]**2

        def functionjacobian(x):
            return np.array([2.*x[0], 2.*x[1]])

        def functionhvp(x, v):
            return 2.*v

        def constraint(x):
            return np.array([x[0]**2 - x[1]**2])

        def constraintjacobian(x):
            return np.array([[2*x[0], -2*x[1]]])

        def constraintlcoh(x, v):
            return np.array([[2., 0.], [0., -2.]]) * v[0]

        constraint = NonlinearConstraint(constraint, 1., np.inf,
                                         constraintjacobian, constraintlcoh)

        startpoint = [1., 2.]

        bounds = Bounds([-np.inf, -np.inf], [np.inf, np.inf])

        result = minimize(
            function,
            startpoint,
            method='trust-constr',
            jac=functionjacobian,
            hessp=functionhvp,
            constraints=[constraint],
            bounds=bounds,
        )

        assert_array_almost_equal(abs(result.x), np.array([1, 0]), decimal=4)


def test_bug_11886():
    def opt(x):
        return x[0]**2 + x[1]**2

    with np.testing.suppress_warnings() as sup:
        sup.filter(PendingDeprecationWarning)
        A = np.matrix(np.diag([1, 1]))
    lin_cons = LinearConstraint(A, -1, np.inf)
    # just checking that there are no errors
    minimize(opt, 2*[1], constraints=lin_cons)
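
# Regression test above (gh-11886): a LinearConstraint built from an np.matrix
# should be accepted by minimize without raising.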


# Remove xfail when gh-11649 is resolved
@pytest.mark.xfail(reason="Known bug in trust-constr; see gh-11649.",
                   strict=True)
def test_gh11649():
    bnds = Bounds(lb=[-1, -1], ub=[1, 1], keep_feasible=True)

    def assert_inbounds(x):
        assert np.all(x >= bnds.lb)
        assert np.all(x <= bnds.ub)

    def obj(x):
        assert_inbounds(x)
        return np.exp(x[0])*(4*x[0]**2 + 2*x[1]**2 + 4*x[0]*x[1] + 2*x[1] + 1)

    def nce(x):
        assert_inbounds(x)
        return x[0]**2 + x[1]

    def nci(x):
        assert_inbounds(x)
        return x[0]*x[1]

    x0 = np.array((0.99, -0.99))
    nlcs = [NonlinearConstraint(nci, -10, np.inf),
            NonlinearConstraint(nce, 1, 1)]

    res = minimize(fun=obj, x0=x0, method='trust-constr',
                   bounds=bnds, constraints=nlcs)
    assert res.success
    assert_inbounds(res.x)
    assert nlcs[0].lb < nlcs[0].fun(res.x) < nlcs[0].ub
    assert_allclose(nce(res.x), nlcs[1].ub)

    ref = minimize(fun=obj, x0=x0, method='slsqp',
                   bounds=bnds, constraints=nlcs)
    assert_allclose(res.fun, ref.fun)


class TestBoundedNelderMead:

    @pytest.mark.parametrize('bounds, x_opt',
                             [(Bounds(-np.inf, np.inf), Rosenbrock().x_opt),
                              (Bounds(-np.inf, -0.8), [-0.8, -0.8]),
                              (Bounds(3.0, np.inf), [3.0, 9.0]),
                              (Bounds([3.0, 1.0], [4.0, 5.0]), [3., 5.]),
                              ])
    def test_rosen_brock_with_bounds(self, bounds, x_opt):
        prob = Rosenbrock()
        with suppress_warnings() as sup:
            sup.filter(UserWarning, "Initial guess is not within "
                                    "the specified bounds")
            result = minimize(prob.fun, [-10, -10],
                              method='Nelder-Mead',
                              bounds=bounds)
            assert np.less_equal(bounds.lb, result.x).all()
            assert np.less_equal(result.x, bounds.ub).all()
            assert np.allclose(prob.fun(result.x), result.fun)
            assert np.allclose(result.x, x_opt, atol=1.e-3)

    def test_equal_all_bounds(self):
        prob = Rosenbrock()
        bounds = Bounds([4.0, 5.0], [4.0, 5.0])
        with suppress_warnings() as sup:
            sup.filter(UserWarning, "Initial guess is not within "
                                    "the specified bounds")
            result = minimize(prob.fun, [-10, 8],
                              method='Nelder-Mead',
                              bounds=bounds)
            assert np.allclose(result.x, [4.0, 5.0])

    def test_equal_one_bounds(self):
        prob = Rosenbrock()
        bounds = Bounds([4.0, 5.0], [4.0, 20.0])
        with suppress_warnings() as sup:
            sup.filter(UserWarning, "Initial guess is not within "
                                    "the specified bounds")
            result = minimize(prob.fun, [-10, 8],
                              method='Nelder-Mead',
                              bounds=bounds)
            assert np.allclose(result.x, [4.0, 16.0])

    def test_invalid_bounds(self):
        prob = Rosenbrock()
        with raises(ValueError, match=r"one of the lower bounds is greater "
                                      r"than an upper bound."):
            bounds = Bounds([-np.inf, 1.0], [4.0, -5.0])
            minimize(prob.fun, [-10, 3],
                     method='Nelder-Mead',
                     bounds=bounds)

    @pytest.mark.xfail(reason="Failing on Azure Linux and macOS builds, "
                              "see gh-13846")
    def test_outside_bounds_warning(self):
        prob = Rosenbrock()
        with raises(UserWarning, match=r"Initial guess is not within "
                                       r"the specified bounds"):
            bounds = Bounds([-np.inf, 1.0], [4.0, 5.0])
            minimize(prob.fun, [-10, 8],
                     method='Nelder-Mead',
                     bounds=bounds)