# Tests for scipy.spatial._qhull (Delaunay, ConvexHull, Voronoi).
  1. import os
  2. import copy
  3. import numpy as np
  4. from numpy.testing import (assert_equal, assert_almost_equal,
  5. assert_, assert_allclose, assert_array_equal)
  6. import pytest
  7. from pytest import raises as assert_raises
  8. import scipy.spatial._qhull as qhull
  9. from scipy.spatial import cKDTree as KDTree
  10. from scipy.spatial import Voronoi
  11. import itertools
  12. def sorted_tuple(x):
  13. return tuple(sorted(x))
  14. def sorted_unique_tuple(x):
  15. return tuple(np.unique(x))
  16. def assert_unordered_tuple_list_equal(a, b, tpl=tuple):
  17. if isinstance(a, np.ndarray):
  18. a = a.tolist()
  19. if isinstance(b, np.ndarray):
  20. b = b.tolist()
  21. a = list(map(tpl, a))
  22. a.sort()
  23. b = list(map(tpl, b))
  24. b.sort()
  25. assert_equal(a, b)
  26. np.random.seed(1234)
  27. points = [(0,0), (0,1), (1,0), (1,1), (0.5, 0.5), (0.5, 1.5)]
  28. pathological_data_1 = np.array([
  29. [-3.14,-3.14], [-3.14,-2.36], [-3.14,-1.57], [-3.14,-0.79],
  30. [-3.14,0.0], [-3.14,0.79], [-3.14,1.57], [-3.14,2.36],
  31. [-3.14,3.14], [-2.36,-3.14], [-2.36,-2.36], [-2.36,-1.57],
  32. [-2.36,-0.79], [-2.36,0.0], [-2.36,0.79], [-2.36,1.57],
  33. [-2.36,2.36], [-2.36,3.14], [-1.57,-0.79], [-1.57,0.79],
  34. [-1.57,-1.57], [-1.57,0.0], [-1.57,1.57], [-1.57,-3.14],
  35. [-1.57,-2.36], [-1.57,2.36], [-1.57,3.14], [-0.79,-1.57],
  36. [-0.79,1.57], [-0.79,-3.14], [-0.79,-2.36], [-0.79,-0.79],
  37. [-0.79,0.0], [-0.79,0.79], [-0.79,2.36], [-0.79,3.14],
  38. [0.0,-3.14], [0.0,-2.36], [0.0,-1.57], [0.0,-0.79], [0.0,0.0],
  39. [0.0,0.79], [0.0,1.57], [0.0,2.36], [0.0,3.14], [0.79,-3.14],
  40. [0.79,-2.36], [0.79,-0.79], [0.79,0.0], [0.79,0.79],
  41. [0.79,2.36], [0.79,3.14], [0.79,-1.57], [0.79,1.57],
  42. [1.57,-3.14], [1.57,-2.36], [1.57,2.36], [1.57,3.14],
  43. [1.57,-1.57], [1.57,0.0], [1.57,1.57], [1.57,-0.79],
  44. [1.57,0.79], [2.36,-3.14], [2.36,-2.36], [2.36,-1.57],
  45. [2.36,-0.79], [2.36,0.0], [2.36,0.79], [2.36,1.57],
  46. [2.36,2.36], [2.36,3.14], [3.14,-3.14], [3.14,-2.36],
  47. [3.14,-1.57], [3.14,-0.79], [3.14,0.0], [3.14,0.79],
  48. [3.14,1.57], [3.14,2.36], [3.14,3.14],
  49. ])
  50. pathological_data_2 = np.array([
  51. [-1, -1], [-1, 0], [-1, 1],
  52. [0, -1], [0, 0], [0, 1],
  53. [1, -1 - np.finfo(np.float_).eps], [1, 0], [1, 1],
  54. ])
  55. bug_2850_chunks = [np.random.rand(10, 2),
  56. np.array([[0,0], [0,1], [1,0], [1,1]]) # add corners
  57. ]
  58. # same with some additional chunks
  59. bug_2850_chunks_2 = (bug_2850_chunks +
  60. [np.random.rand(10, 2),
  61. 0.25 + np.array([[0,0], [0,1], [1,0], [1,1]])])
  62. DATASETS = {
  63. 'some-points': np.asarray(points),
  64. 'random-2d': np.random.rand(30, 2),
  65. 'random-3d': np.random.rand(30, 3),
  66. 'random-4d': np.random.rand(30, 4),
  67. 'random-5d': np.random.rand(30, 5),
  68. 'random-6d': np.random.rand(10, 6),
  69. 'random-7d': np.random.rand(10, 7),
  70. 'random-8d': np.random.rand(10, 8),
  71. 'pathological-1': pathological_data_1,
  72. 'pathological-2': pathological_data_2
  73. }
  74. INCREMENTAL_DATASETS = {
  75. 'bug-2850': (bug_2850_chunks, None),
  76. 'bug-2850-2': (bug_2850_chunks_2, None),
  77. }
  78. def _add_inc_data(name, chunksize):
  79. """
  80. Generate incremental datasets from basic data sets
  81. """
  82. points = DATASETS[name]
  83. ndim = points.shape[1]
  84. opts = None
  85. nmin = ndim + 2
  86. if name == 'some-points':
  87. # since Qz is not allowed, use QJ
  88. opts = 'QJ Pp'
  89. elif name == 'pathological-1':
  90. # include enough points so that we get different x-coordinates
  91. nmin = 12
  92. chunks = [points[:nmin]]
  93. for j in range(nmin, len(points), chunksize):
  94. chunks.append(points[j:j+chunksize])
  95. new_name = "%s-chunk-%d" % (name, chunksize)
  96. assert new_name not in INCREMENTAL_DATASETS
  97. INCREMENTAL_DATASETS[new_name] = (chunks, opts)
  98. for name in DATASETS:
  99. for chunksize in 1, 4, 16:
  100. _add_inc_data(name, chunksize)
  101. class Test_Qhull:
  102. def test_swapping(self):
  103. # Check that Qhull state swapping works
  104. x = qhull._Qhull(b'v',
  105. np.array([[0,0],[0,1],[1,0],[1,1.],[0.5,0.5]]),
  106. b'Qz')
  107. xd = copy.deepcopy(x.get_voronoi_diagram())
  108. y = qhull._Qhull(b'v',
  109. np.array([[0,0],[0,1],[1,0],[1,2.]]),
  110. b'Qz')
  111. yd = copy.deepcopy(y.get_voronoi_diagram())
  112. xd2 = copy.deepcopy(x.get_voronoi_diagram())
  113. x.close()
  114. yd2 = copy.deepcopy(y.get_voronoi_diagram())
  115. y.close()
  116. assert_raises(RuntimeError, x.get_voronoi_diagram)
  117. assert_raises(RuntimeError, y.get_voronoi_diagram)
  118. assert_allclose(xd[0], xd2[0])
  119. assert_unordered_tuple_list_equal(xd[1], xd2[1], tpl=sorted_tuple)
  120. assert_unordered_tuple_list_equal(xd[2], xd2[2], tpl=sorted_tuple)
  121. assert_unordered_tuple_list_equal(xd[3], xd2[3], tpl=sorted_tuple)
  122. assert_array_equal(xd[4], xd2[4])
  123. assert_allclose(yd[0], yd2[0])
  124. assert_unordered_tuple_list_equal(yd[1], yd2[1], tpl=sorted_tuple)
  125. assert_unordered_tuple_list_equal(yd[2], yd2[2], tpl=sorted_tuple)
  126. assert_unordered_tuple_list_equal(yd[3], yd2[3], tpl=sorted_tuple)
  127. assert_array_equal(yd[4], yd2[4])
  128. x.close()
  129. assert_raises(RuntimeError, x.get_voronoi_diagram)
  130. y.close()
  131. assert_raises(RuntimeError, y.get_voronoi_diagram)
  132. def test_issue_8051(self):
  133. points = np.array([[0, 0], [0, 1], [0, 2], [1, 0], [1, 1], [1, 2],[2, 0], [2, 1], [2, 2]])
  134. Voronoi(points)
class TestUtilities:
    """
    Check that utility functions work.
    """

    def test_find_simplex(self):
        # Simple check that simplex finding works
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        # +---+
        # |\ 0|
        # | \ |
        # |1 \|
        # +---+

        assert_equal(tri.simplices, [[1, 3, 2], [3, 1, 0]])

        # each tuple is (x, y, expected simplex index)
        for p in [(0.25, 0.25, 1),
                  (0.75, 0.75, 0),
                  (0.3, 0.2, 1)]:
            i = tri.find_simplex(p[:2])
            assert_equal(i, p[2], err_msg='%r' % (p,))
            # tsearch is the module-level alias; must agree with the method
            j = qhull.tsearch(tri, p[:2])
            assert_equal(i, j)

    def test_plane_distance(self):
        # Compare plane distance from hyperplane equations obtained from Qhull
        # to manually computed plane equations
        x = np.array([(0,0), (1, 1), (1, 0), (0.99189033, 0.37674127),
                      (0.99440079, 0.45182168)], dtype=np.double)
        p = np.array([0.99966555, 0.15685619], dtype=np.double)

        tri = qhull.Delaunay(x)

        # lift to the paraboloid used by the Delaunay construction
        z = tri.lift_points(x)
        pz = tri.lift_points(p)

        dist = tri.plane_distance(p)

        for j, v in enumerate(tri.simplices):
            x1 = z[v[0]]
            x2 = z[v[1]]
            x3 = z[v[2]]

            # unit normal of the lifted facet, oriented downwards
            n = np.cross(x1 - x3, x2 - x3)
            n /= np.sqrt(np.dot(n, n))
            n *= -np.sign(n[2])

            d = np.dot(n, pz - x3)

            assert_almost_equal(dist[j], d)

    def test_convex_hull(self):
        # Simple check that the convex hull seems to works
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        # +---+
        # |\ 0|
        # | \ |
        # |1 \|
        # +---+

        assert_equal(tri.convex_hull, [[3, 2], [1, 2], [1, 0], [3, 0]])

    def test_volume_area(self):
        # Basic check that we get back the correct volume and area for a cube
        points = np.array([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 0),
                           (0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
        hull = qhull.ConvexHull(points)
        assert_allclose(hull.volume, 1., rtol=1e-14,
                        err_msg="Volume of cube is incorrect")
        assert_allclose(hull.area, 6., rtol=1e-14,
                        err_msg="Area of cube is incorrect")

    def test_random_volume_area(self):
        # Test that the results for a random 10-point convex are
        # coherent with the output of qconvex Qt s FA
        points = np.array([(0.362568364506, 0.472712355305, 0.347003084477),
                           (0.733731893414, 0.634480295684, 0.950513180209),
                           (0.511239955611, 0.876839441267, 0.418047827863),
                           (0.0765906233393, 0.527373281342, 0.6509863541),
                           (0.146694972056, 0.596725793348, 0.894860986685),
                           (0.513808585741, 0.069576205858, 0.530890338876),
                           (0.512343805118, 0.663537132612, 0.037689295973),
                           (0.47282965018, 0.462176697655, 0.14061843691),
                           (0.240584597123, 0.778660020591, 0.722913476339),
                           (0.951271745935, 0.967000673944, 0.890661319684)])
        hull = qhull.ConvexHull(points)
        assert_allclose(hull.volume, 0.14562013, rtol=1e-07,
                        err_msg="Volume of random polyhedron is incorrect")
        assert_allclose(hull.area, 1.6670425, rtol=1e-07,
                        err_msg="Area of random polyhedron is incorrect")

    def test_incremental_volume_area_random_input(self):
        """Test that incremental mode gives the same volume/area as
        non-incremental mode and incremental mode with restart"""
        nr_points = 20
        dim = 3
        points = np.random.random((nr_points, dim))
        inc_hull = qhull.ConvexHull(points[:dim+1, :], incremental=True)
        inc_restart_hull = qhull.ConvexHull(points[:dim+1, :], incremental=True)
        for i in range(dim+1, nr_points):
            # compare a fresh hull against both incremental variants
            hull = qhull.ConvexHull(points[:i+1, :])
            inc_hull.add_points(points[i:i+1, :])
            inc_restart_hull.add_points(points[i:i+1, :], restart=True)
            assert_allclose(hull.volume, inc_hull.volume, rtol=1e-7)
            assert_allclose(hull.volume, inc_restart_hull.volume, rtol=1e-7)
            assert_allclose(hull.area, inc_hull.area, rtol=1e-7)
            assert_allclose(hull.area, inc_restart_hull.area, rtol=1e-7)

    def _check_barycentric_transforms(self, tri, err_msg="",
                                      unit_cube=False,
                                      unit_cube_tol=0):
        """Check that a triangulation has reasonable barycentric transforms"""
        vertices = tri.points[tri.simplices]
        sc = 1/(tri.ndim + 1.0)
        centroids = vertices.sum(axis=1) * sc

        # Either: (i) the simplex has a `nan` barycentric transform,
        # or, (ii) the centroid is in the simplex

        def barycentric_transform(tr, x):
            # tr packs [Tinv | r] per simplex; map x to barycentric coords
            r = tr[:,-1,:]
            Tinv = tr[:,:-1,:]
            return np.einsum('ijk,ik->ij', Tinv, x - r)

        eps = np.finfo(float).eps

        c = barycentric_transform(tri.transform, centroids)
        with np.errstate(invalid="ignore"):
            ok = np.isnan(c).all(axis=1) | (abs(c - sc)/sc < 0.1).all(axis=1)

        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        # Invalid simplices must be (nearly) zero volume
        q = vertices[:,:-1,:] - vertices[:,-1,None,:]
        volume = np.array([np.linalg.det(q[k,:,:])
                           for k in range(tri.nsimplex)])
        ok = np.isfinite(tri.transform[:,0,0]) | (volume < np.sqrt(eps))
        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        # Also, find_simplex for the centroid should end up in some
        # simplex for the non-degenerate cases
        j = tri.find_simplex(centroids)
        ok = (j != -1) | np.isnan(tri.transform[:,0,0])
        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        if unit_cube:
            # If in unit cube, no interior point should be marked out of hull
            at_boundary = (centroids <= unit_cube_tol).any(axis=1)
            at_boundary |= (centroids >= 1 - unit_cube_tol).any(axis=1)

            ok = (j != -1) | at_boundary
            assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

    def test_degenerate_barycentric_transforms(self):
        # The triangulation should not produce invalid barycentric
        # transforms that stump the simplex finding
        data = np.load(os.path.join(os.path.dirname(__file__), 'data',
                                    'degenerate_pointset.npz'))
        points = data['c']
        data.close()

        tri = qhull.Delaunay(points)

        # Check that there are not too many invalid simplices
        bad_count = np.isnan(tri.transform[:,0,0]).sum()
        assert_(bad_count < 23, bad_count)

        # Check the transforms
        self._check_barycentric_transforms(tri)

    @pytest.mark.slow
    def test_more_barycentric_transforms(self):
        # Triangulate some "nasty" grids

        eps = np.finfo(float).eps

        # grid resolution per dimension, chosen to keep runtime bounded
        npoints = {2: 70, 3: 11, 4: 5, 5: 3}

        for ndim in range(2, 6):
            # Generate an uniform grid in n-d unit cube
            x = np.linspace(0, 1, npoints[ndim])
            grid = np.c_[list(map(np.ravel, np.broadcast_arrays(*np.ix_(*([x]*ndim)))))].T

            err_msg = "ndim=%d" % ndim

            # Check using regular grid
            tri = qhull.Delaunay(grid)
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True)

            # Check with eps-perturbations
            np.random.seed(1234)
            m = (np.random.rand(grid.shape[0]) < 0.2)
            grid[m,:] += 2*eps*(np.random.rand(*grid[m,:].shape) - 0.5)

            tri = qhull.Delaunay(grid)
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True,
                                               unit_cube_tol=2*eps)

            # Check with duplicated data
            tri = qhull.Delaunay(np.r_[grid, grid])
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True,
                                               unit_cube_tol=2*eps)
  303. class TestVertexNeighborVertices:
  304. def _check(self, tri):
  305. expected = [set() for j in range(tri.points.shape[0])]
  306. for s in tri.simplices:
  307. for a in s:
  308. for b in s:
  309. if a != b:
  310. expected[a].add(b)
  311. indptr, indices = tri.vertex_neighbor_vertices
  312. got = [set(map(int, indices[indptr[j]:indptr[j+1]]))
  313. for j in range(tri.points.shape[0])]
  314. assert_equal(got, expected, err_msg="%r != %r" % (got, expected))
  315. def test_triangle(self):
  316. points = np.array([(0,0), (0,1), (1,0)], dtype=np.double)
  317. tri = qhull.Delaunay(points)
  318. self._check(tri)
  319. def test_rectangle(self):
  320. points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
  321. tri = qhull.Delaunay(points)
  322. self._check(tri)
  323. def test_complicated(self):
  324. points = np.array([(0,0), (0,1), (1,1), (1,0),
  325. (0.5, 0.5), (0.9, 0.5)], dtype=np.double)
  326. tri = qhull.Delaunay(points)
  327. self._check(tri)
class TestDelaunay:
    """
    Check that triangulation works.
    """
    def test_masked_array_fails(self):
        # masked input must be rejected with ValueError
        masked_array = np.ma.masked_all(1)
        assert_raises(ValueError, qhull.Delaunay, masked_array)

    def test_array_with_nans_fails(self):
        # NaNs in the point set must be rejected with ValueError
        points_with_nan = np.array([(0,0), (0,1), (1,1), (1,np.nan)], dtype=np.double)
        assert_raises(ValueError, qhull.Delaunay, points_with_nan)

    def test_nd_simplex(self):
        # simple smoke test: triangulate a n-dimensional simplex
        for nd in range(2, 8):
            points = np.zeros((nd+1, nd))
            for j in range(nd):
                points[j,j] = 1.0
            points[-1,:] = 1.0

            tri = qhull.Delaunay(points)

            tri.simplices.sort()

            # one simplex containing all vertices, no neighbours
            assert_equal(tri.simplices, np.arange(nd+1, dtype=int)[None, :])
            assert_equal(tri.neighbors, -1 + np.zeros((nd+1), dtype=int)[None,:])

    def test_2d_square(self):
        # simple smoke test: 2d square
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        assert_equal(tri.simplices, [[1, 3, 2], [3, 1, 0]])
        assert_equal(tri.neighbors, [[-1, -1, 1], [-1, -1, 0]])

    def test_duplicate_points(self):
        x = np.array([0, 1, 0, 1], dtype=np.float64)
        y = np.array([0, 0, 1, 1], dtype=np.float64)

        xp = np.r_[x, x]
        yp = np.r_[y, y]

        # shouldn't fail on duplicate points
        qhull.Delaunay(np.c_[x, y])
        qhull.Delaunay(np.c_[xp, yp])

    def test_pathological(self):
        # both should succeed
        points = DATASETS['pathological-1']
        tri = qhull.Delaunay(points)
        assert_equal(tri.points[tri.simplices].max(), points.max())
        assert_equal(tri.points[tri.simplices].min(), points.min())

        points = DATASETS['pathological-2']
        tri = qhull.Delaunay(points)
        assert_equal(tri.points[tri.simplices].max(), points.max())
        assert_equal(tri.points[tri.simplices].min(), points.min())

    def test_joggle(self):
        # Check that the option QJ indeed guarantees that all input points
        # occur as vertices of the triangulation
        points = np.random.rand(10, 2)
        points = np.r_[points, points]  # duplicate input data

        tri = qhull.Delaunay(points, qhull_options="QJ Qbb Pp")
        assert_array_equal(np.unique(tri.simplices.ravel()),
                           np.arange(len(points)))

    def test_coplanar(self):
        # Check that the coplanar point output option indeed works
        points = np.random.rand(10, 2)
        points = np.r_[points, points]  # duplicate input data

        tri = qhull.Delaunay(points)

        # exactly the duplicates should be reported as coplanar
        assert_(len(np.unique(tri.simplices.ravel())) == len(points)//2)
        assert_(len(tri.coplanar) == len(points)//2)

        assert_(len(np.unique(tri.coplanar[:,2])) == len(points)//2)

        assert_(np.all(tri.vertex_to_simplex >= 0))

    def test_furthest_site(self):
        points = [(0, 0), (0, 1), (1, 0), (0.5, 0.5), (1.1, 1.1)]
        tri = qhull.Delaunay(points, furthest_site=True)

        expected = np.array([(1, 4, 0), (4, 2, 0)])  # from Qhull
        assert_array_equal(tri.simplices, expected)

    @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
    def test_incremental(self, name):
        # Test incremental construction of the triangulation

        chunks, opts = INCREMENTAL_DATASETS[name]
        points = np.concatenate(chunks, axis=0)

        obj = qhull.Delaunay(chunks[0], incremental=True,
                             qhull_options=opts)
        for chunk in chunks[1:]:
            obj.add_points(chunk)

        obj2 = qhull.Delaunay(points)

        obj3 = qhull.Delaunay(chunks[0], incremental=True,
                              qhull_options=opts)
        if len(chunks) > 1:
            obj3.add_points(np.concatenate(chunks[1:], axis=0),
                            restart=True)

        # Check that the incremental mode agrees with upfront mode
        if name.startswith('pathological'):
            # XXX: These produce valid but different triangulations.
            #      They look OK when plotted, but how to check them?

            assert_array_equal(np.unique(obj.simplices.ravel()),
                               np.arange(points.shape[0]))
            assert_array_equal(np.unique(obj2.simplices.ravel()),
                               np.arange(points.shape[0]))
        else:
            assert_unordered_tuple_list_equal(obj.simplices, obj2.simplices,
                                              tpl=sorted_tuple)
            assert_unordered_tuple_list_equal(obj2.simplices, obj3.simplices,
                                              tpl=sorted_tuple)

    def test_vertices_deprecation(self):
        # accessing the old `vertices` alias must emit DeprecationWarning
        tri = qhull.Delaunay([(0, 0), (0, 1), (1, 0)])
        msg = ("Delaunay attribute 'vertices' is deprecated in favour of "
               "'simplices' and will be removed in Scipy 1.11.0.")
        with pytest.warns(DeprecationWarning, match=msg):
            tri.vertices
def assert_hulls_equal(points, facets_1, facets_2):
    """Assert that two convex hulls built from the same points agree.

    In 2-D, a near-miss is accepted when every segment of *facets_1* lies
    (within tolerance) on some segment of *facets_2*.
    """
    # Check that two convex hulls constructed from the same point set
    # are equal

    facets_1 = set(map(sorted_tuple, facets_1))
    facets_2 = set(map(sorted_tuple, facets_2))

    if facets_1 != facets_2 and points.shape[1] == 2:
        # The direct check fails for the pathological cases
        # --- then the convex hull from Delaunay differs (due
        # to rounding error etc.) from the hull computed
        # otherwise, by the question whether (tricoplanar)
        # points that lie almost exactly on the hull are
        # included as vertices of the hull or not.
        #
        # So we check the result, and accept it if the Delaunay
        # hull line segments are a subset of the usual hull.

        eps = 1000 * np.finfo(float).eps

        for a, b in facets_1:
            for ap, bp in facets_2:
                t = points[bp] - points[ap]
                t /= np.linalg.norm(t)       # tangent
                n = np.array([-t[1], t[0]])  # normal

                # check that the two line segments are parallel
                # to the same line
                c1 = np.dot(n, points[b] - points[ap])
                c2 = np.dot(n, points[a] - points[ap])
                if not np.allclose(np.dot(c1, n), 0):
                    continue
                if not np.allclose(np.dot(c2, n), 0):
                    continue

                # Check that the segment (a, b) is contained in (ap, bp)
                c1 = np.dot(t, points[a] - points[ap])
                c2 = np.dot(t, points[b] - points[ap])
                c3 = np.dot(t, points[bp] - points[ap])
                if c1 < -eps or c1 > c3 + eps:
                    continue
                if c2 < -eps or c2 > c3 + eps:
                    continue

                # OK: this segment of facets_1 lies on a facets_2 segment
                break
            else:
                # no matching segment found in facets_2
                raise AssertionError("comparison fails")

        # it was OK
        return

    assert_equal(facets_1, facets_2)
class TestConvexHull:
    """Tests of scipy.spatial ConvexHull construction and attributes."""

    def test_masked_array_fails(self):
        # masked input must be rejected with ValueError
        masked_array = np.ma.masked_all(1)
        assert_raises(ValueError, qhull.ConvexHull, masked_array)

    def test_array_with_nans_fails(self):
        # NaNs in the point set must be rejected with ValueError
        points_with_nan = np.array([(0,0), (1,1), (2,np.nan)], dtype=np.double)
        assert_raises(ValueError, qhull.ConvexHull, points_with_nan)

    @pytest.mark.parametrize("name", sorted(DATASETS))
    def test_hull_consistency_tri(self, name):
        # Check that a convex hull returned by qhull in ndim
        # and the hull constructed from ndim delaunay agree
        points = DATASETS[name]

        tri = qhull.Delaunay(points)
        hull = qhull.ConvexHull(points)

        assert_hulls_equal(points, tri.convex_hull, hull.simplices)

        # Check that the hull extremes are as expected
        if points.shape[1] == 2:
            # in 2-D, vertices are in CCW order, so sort before comparing
            assert_equal(np.unique(hull.simplices), np.sort(hull.vertices))
        else:
            assert_equal(np.unique(hull.simplices), hull.vertices)

    @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
    def test_incremental(self, name):
        # Test incremental construction of the convex hull
        chunks, _ = INCREMENTAL_DATASETS[name]
        points = np.concatenate(chunks, axis=0)

        obj = qhull.ConvexHull(chunks[0], incremental=True)
        for chunk in chunks[1:]:
            obj.add_points(chunk)

        obj2 = qhull.ConvexHull(points)

        obj3 = qhull.ConvexHull(chunks[0], incremental=True)
        if len(chunks) > 1:
            obj3.add_points(np.concatenate(chunks[1:], axis=0),
                            restart=True)

        # Check that the incremental mode agrees with upfront mode
        assert_hulls_equal(points, obj.simplices, obj2.simplices)
        assert_hulls_equal(points, obj.simplices, obj3.simplices)

    def test_vertices_2d(self):
        # The vertices should be in counterclockwise order in 2-D
        np.random.seed(1234)
        points = np.random.rand(30, 2)

        hull = qhull.ConvexHull(points)
        assert_equal(np.unique(hull.simplices), np.sort(hull.vertices))

        # Check counterclockwiseness
        x, y = hull.points[hull.vertices].T
        angle = np.arctan2(y - y.mean(), x - x.mean())
        assert_(np.all(np.diff(np.unwrap(angle)) > 0))

    def test_volume_area(self):
        # Basic check that we get back the correct volume and area for a cube
        points = np.array([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 0),
                           (0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
        tri = qhull.ConvexHull(points)
        assert_allclose(tri.volume, 1., rtol=1e-14)
        assert_allclose(tri.area, 6., rtol=1e-14)

    @pytest.mark.parametrize("incremental", [False, True])
    def test_good2d(self, incremental):
        # Make sure the QGn option gives the correct value of "good".
        points = np.array([[0.2, 0.2],
                           [0.2, 0.4],
                           [0.4, 0.4],
                           [0.4, 0.2],
                           [0.3, 0.6]])
        hull = qhull.ConvexHull(points=points,
                                incremental=incremental,
                                qhull_options='QG4')
        # only the facet visible from point 4 is "good"
        expected = np.array([False, True, False, False], dtype=bool)
        actual = hull.good
        assert_equal(actual, expected)

    @pytest.mark.parametrize("visibility", [
        "QG4",   # visible=True
        "QG-4",  # visible=False
    ])
    @pytest.mark.parametrize("new_gen, expected", [
        # add generator that places QG4 inside hull
        # so all facets are invisible
        (np.array([[0.3, 0.7]]),
         np.array([False, False, False, False, False], dtype=bool)),
        # adding a generator on the opposite side of the square
        # should preserve the single visible facet & add one invisible
        # facet
        (np.array([[0.3, -0.7]]),
         np.array([False, True, False, False, False], dtype=bool)),
        # split the visible facet on top of the square into two
        # visible facets, with visibility at the end of the array
        # because add_points concatenates
        (np.array([[0.3, 0.41]]),
         np.array([False, False, False, True, True], dtype=bool)),
        # with our current Qhull options, coplanarity will not count
        # for visibility; this case shifts one visible & one invisible
        # facet & adds a coplanar facet
        # simplex at index position 2 is the shifted visible facet
        # the final simplex is the coplanar facet
        (np.array([[0.5, 0.6], [0.6, 0.6]]),
         np.array([False, False, True, False, False], dtype=bool)),
        # place the new generator such that it envelops the query
        # point within the convex hull, but only just barely within
        # the double precision limit
        # NOTE: testing exact degeneracy is less predictable than this
        # scenario, perhaps because of the default Qt option we have
        # enabled for Qhull to handle precision matters
        (np.array([[0.3, 0.6 + 1e-16]]),
         np.array([False, False, False, False, False], dtype=bool)),
        ])
    def test_good2d_incremental_changes(self, new_gen, expected,
                                        visibility):
        # use the usual square convex hull
        # generators from test_good2d
        points = np.array([[0.2, 0.2],
                           [0.2, 0.4],
                           [0.4, 0.4],
                           [0.4, 0.2],
                           [0.3, 0.6]])
        hull = qhull.ConvexHull(points=points,
                                incremental=True,
                                qhull_options=visibility)
        hull.add_points(new_gen)
        actual = hull.good
        if '-' in visibility:
            # QG-n reports the complementary (invisible) facet set
            expected = np.invert(expected)
        assert_equal(actual, expected)

    @pytest.mark.parametrize("incremental", [False, True])
    def test_good2d_no_option(self, incremental):
        # handle case where good attribute doesn't exist
        # because Qgn or Qg-n wasn't specified
        points = np.array([[0.2, 0.2],
                           [0.2, 0.4],
                           [0.4, 0.4],
                           [0.4, 0.2],
                           [0.3, 0.6]])
        hull = qhull.ConvexHull(points=points,
                                incremental=incremental)
        actual = hull.good
        assert actual is None

        # preserve None after incremental addition
        if incremental:
            hull.add_points(np.zeros((1, 2)))
            actual = hull.good
            assert actual is None

    @pytest.mark.parametrize("incremental", [False, True])
    def test_good2d_inside(self, incremental):
        # Make sure the QGn option gives the correct value of "good".
        # When point n is inside the convex hull of the rest, good is
        # all False.
        points = np.array([[0.2, 0.2],
                           [0.2, 0.4],
                           [0.4, 0.4],
                           [0.4, 0.2],
                           [0.3, 0.3]])
        hull = qhull.ConvexHull(points=points,
                                incremental=incremental,
                                qhull_options='QG4')
        expected = np.array([False, False, False, False], dtype=bool)
        actual = hull.good
        assert_equal(actual, expected)

    @pytest.mark.parametrize("incremental", [False, True])
    def test_good3d(self, incremental):
        # Make sure the QGn option gives the correct value of "good"
        # for a 3d figure
        points = np.array([[0.0, 0.0, 0.0],
                           [0.90029516, -0.39187448, 0.18948093],
                           [0.48676420, -0.72627633, 0.48536925],
                           [0.57651530, -0.81179274, -0.09285832],
                           [0.67846893, -0.71119562, 0.18406710]])
        hull = qhull.ConvexHull(points=points,
                                incremental=incremental,
                                qhull_options='QG0')
        expected = np.array([True, False, False, False], dtype=bool)
        assert_equal(hull.good, expected)
class TestVoronoi:
    """Tests of scipy.spatial Voronoi diagram construction."""

    @pytest.mark.parametrize("qhull_opts, extra_pts", [
        # option Qz (default for SciPy) will add
        # an extra point at infinity
        ("Qbb Qc Qz", 1),
        ("Qbb Qc", 0),
    ])
    @pytest.mark.parametrize("n_pts", [50, 100])
    @pytest.mark.parametrize("ndim", [2, 3])
    def test_point_region_structure(self,
                                    qhull_opts,
                                    n_pts,
                                    extra_pts,
                                    ndim):
        # see gh-16773
        rng = np.random.default_rng(7790)
        points = rng.random((n_pts, ndim))
        vor = Voronoi(points, qhull_options=qhull_opts)
        pt_region = vor.point_region
        # region indices must account for the Qz point at infinity
        assert pt_region.max() == n_pts - 1 + extra_pts
        assert pt_region.size == len(vor.regions) - extra_pts
        assert len(vor.regions) == n_pts + extra_pts
        assert vor.points.shape[0] == n_pts
        # if there is an empty sublist in the Voronoi
        # regions data structure, it should never be
        # indexed because it corresponds to an internally
        # added point at infinity and is not a member of the
        # generators (input points)
        if extra_pts:
            sublens = [len(x) for x in vor.regions]
            # only one point at infinity (empty region)
            # is allowed
            assert sublens.count(0) == 1
            assert sublens.index(0) not in pt_region
  674. def test_masked_array_fails(self):
  675. masked_array = np.ma.masked_all(1)
  676. assert_raises(ValueError, qhull.Voronoi, masked_array)
  677. def test_simple(self):
  678. # Simple case with known Voronoi diagram
  679. points = [(0, 0), (0, 1), (0, 2),
  680. (1, 0), (1, 1), (1, 2),
  681. (2, 0), (2, 1), (2, 2)]
  682. # qhull v o Fv Qbb Qc Qz < dat
  683. output = """
  684. 2
  685. 5 10 1
  686. -10.101 -10.101
  687. 0.5 0.5
  688. 0.5 1.5
  689. 1.5 0.5
  690. 1.5 1.5
  691. 2 0 1
  692. 3 2 0 1
  693. 2 0 2
  694. 3 3 0 1
  695. 4 1 2 4 3
  696. 3 4 0 2
  697. 2 0 3
  698. 3 4 0 3
  699. 2 0 4
  700. 0
  701. 12
  702. 4 0 3 0 1
  703. 4 0 1 0 1
  704. 4 1 4 1 2
  705. 4 1 2 0 2
  706. 4 2 5 0 2
  707. 4 3 4 1 3
  708. 4 3 6 0 3
  709. 4 4 5 2 4
  710. 4 4 7 3 4
  711. 4 5 8 0 4
  712. 4 6 7 0 3
  713. 4 7 8 0 4
  714. """
  715. self._compare_qvoronoi(points, output)
  716. def _compare_qvoronoi(self, points, output, **kw):
  717. """Compare to output from 'qvoronoi o Fv < data' to Voronoi()"""
  718. # Parse output
  719. output = [list(map(float, x.split())) for x in output.strip().splitlines()]
  720. nvertex = int(output[1][0])
  721. vertices = list(map(tuple, output[3:2+nvertex])) # exclude inf
  722. nregion = int(output[1][1])
  723. regions = [[int(y)-1 for y in x[1:]]
  724. for x in output[2+nvertex:2+nvertex+nregion]]
  725. ridge_points = [[int(y) for y in x[1:3]]
  726. for x in output[3+nvertex+nregion:]]
  727. ridge_vertices = [[int(y)-1 for y in x[3:]]
  728. for x in output[3+nvertex+nregion:]]
  729. # Compare results
  730. vor = qhull.Voronoi(points, **kw)
  731. def sorttuple(x):
  732. return tuple(sorted(x))
  733. assert_allclose(vor.vertices, vertices)
  734. assert_equal(set(map(tuple, vor.regions)),
  735. set(map(tuple, regions)))
  736. p1 = list(zip(list(map(sorttuple, ridge_points)), list(map(sorttuple, ridge_vertices))))
  737. p2 = list(zip(list(map(sorttuple, vor.ridge_points.tolist())),
  738. list(map(sorttuple, vor.ridge_vertices))))
  739. p1.sort()
  740. p2.sort()
  741. assert_equal(p1, p2)
  742. @pytest.mark.parametrize("name", sorted(DATASETS))
  743. def test_ridges(self, name):
  744. # Check that the ridges computed by Voronoi indeed separate
  745. # the regions of nearest neighborhood, by comparing the result
  746. # to KDTree.
  747. points = DATASETS[name]
  748. tree = KDTree(points)
  749. vor = qhull.Voronoi(points)
  750. for p, v in vor.ridge_dict.items():
  751. # consider only finite ridges
  752. if not np.all(np.asarray(v) >= 0):
  753. continue
  754. ridge_midpoint = vor.vertices[v].mean(axis=0)
  755. d = 1e-6 * (points[p[0]] - ridge_midpoint)
  756. dist, k = tree.query(ridge_midpoint + d, k=1)
  757. assert_equal(k, p[0])
  758. dist, k = tree.query(ridge_midpoint - d, k=1)
  759. assert_equal(k, p[1])
  760. def test_furthest_site(self):
  761. points = [(0, 0), (0, 1), (1, 0), (0.5, 0.5), (1.1, 1.1)]
  762. # qhull v o Fv Qbb Qc Qu < dat
  763. output = """
  764. 2
  765. 3 5 1
  766. -10.101 -10.101
  767. 0.6000000000000001 0.5
  768. 0.5 0.6000000000000001
  769. 3 0 2 1
  770. 2 0 1
  771. 2 0 2
  772. 0
  773. 3 0 2 1
  774. 5
  775. 4 0 2 0 2
  776. 4 0 4 1 2
  777. 4 0 1 0 1
  778. 4 1 4 0 1
  779. 4 2 4 0 2
  780. """
  781. self._compare_qvoronoi(points, output, furthest_site=True)
  782. def test_furthest_site_flag(self):
  783. points = [(0, 0), (0, 1), (1, 0), (0.5, 0.5), (1.1, 1.1)]
  784. vor = Voronoi(points)
  785. assert_equal(vor.furthest_site,False)
  786. vor = Voronoi(points,furthest_site=True)
  787. assert_equal(vor.furthest_site,True)
  788. @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
  789. def test_incremental(self, name):
  790. # Test incremental construction of the triangulation
  791. if INCREMENTAL_DATASETS[name][0][0].shape[1] > 3:
  792. # too slow (testing of the result --- qhull is still fast)
  793. return
  794. chunks, opts = INCREMENTAL_DATASETS[name]
  795. points = np.concatenate(chunks, axis=0)
  796. obj = qhull.Voronoi(chunks[0], incremental=True,
  797. qhull_options=opts)
  798. for chunk in chunks[1:]:
  799. obj.add_points(chunk)
  800. obj2 = qhull.Voronoi(points)
  801. obj3 = qhull.Voronoi(chunks[0], incremental=True,
  802. qhull_options=opts)
  803. if len(chunks) > 1:
  804. obj3.add_points(np.concatenate(chunks[1:], axis=0),
  805. restart=True)
  806. # -- Check that the incremental mode agrees with upfront mode
  807. assert_equal(len(obj.point_region), len(obj2.point_region))
  808. assert_equal(len(obj.point_region), len(obj3.point_region))
  809. # The vertices may be in different order or duplicated in
  810. # the incremental map
  811. for objx in obj, obj3:
  812. vertex_map = {-1: -1}
  813. for i, v in enumerate(objx.vertices):
  814. for j, v2 in enumerate(obj2.vertices):
  815. if np.allclose(v, v2):
  816. vertex_map[i] = j
  817. def remap(x):
  818. if hasattr(x, '__len__'):
  819. return tuple(set([remap(y) for y in x]))
  820. try:
  821. return vertex_map[x]
  822. except KeyError as e:
  823. raise AssertionError("incremental result has spurious vertex at %r"
  824. % (objx.vertices[x],)) from e
  825. def simplified(x):
  826. items = set(map(sorted_tuple, x))
  827. if () in items:
  828. items.remove(())
  829. items = [x for x in items if len(x) > 1]
  830. items.sort()
  831. return items
  832. assert_equal(
  833. simplified(remap(objx.regions)),
  834. simplified(obj2.regions)
  835. )
  836. assert_equal(
  837. simplified(remap(objx.ridge_vertices)),
  838. simplified(obj2.ridge_vertices)
  839. )
  840. # XXX: compare ridge_points --- not clear exactly how to do this
  841. class Test_HalfspaceIntersection:
  842. def assert_unordered_allclose(self, arr1, arr2, rtol=1e-7):
  843. """Check that every line in arr1 is only once in arr2"""
  844. assert_equal(arr1.shape, arr2.shape)
  845. truths = np.zeros((arr1.shape[0],), dtype=bool)
  846. for l1 in arr1:
  847. indexes = np.nonzero((abs(arr2 - l1) < rtol).all(axis=1))[0]
  848. assert_equal(indexes.shape, (1,))
  849. truths[indexes[0]] = True
  850. assert_(truths.all())
  851. @pytest.mark.parametrize("dt", [np.float64, int])
  852. def test_cube_halfspace_intersection(self, dt):
  853. halfspaces = np.array([[-1, 0, 0],
  854. [0, -1, 0],
  855. [1, 0, -2],
  856. [0, 1, -2]], dtype=dt)
  857. feasible_point = np.array([1, 1], dtype=dt)
  858. points = np.array([[0.0, 0.0], [2.0, 0.0], [0.0, 2.0], [2.0, 2.0]])
  859. hull = qhull.HalfspaceIntersection(halfspaces, feasible_point)
  860. assert_allclose(hull.intersections, points)
  861. def test_self_dual_polytope_intersection(self):
  862. fname = os.path.join(os.path.dirname(__file__), 'data',
  863. 'selfdual-4d-polytope.txt')
  864. ineqs = np.genfromtxt(fname)
  865. halfspaces = -np.hstack((ineqs[:, 1:], ineqs[:, :1]))
  866. feas_point = np.array([0., 0., 0., 0.])
  867. hs = qhull.HalfspaceIntersection(halfspaces, feas_point)
  868. assert_equal(hs.intersections.shape, (24, 4))
  869. assert_almost_equal(hs.dual_volume, 32.0)
  870. assert_equal(len(hs.dual_facets), 24)
  871. for facet in hs.dual_facets:
  872. assert_equal(len(facet), 6)
  873. dists = halfspaces[:, -1] + halfspaces[:, :-1].dot(feas_point)
  874. self.assert_unordered_allclose((halfspaces[:, :-1].T/dists).T, hs.dual_points)
  875. points = itertools.permutations([0., 0., 0.5, -0.5])
  876. for point in points:
  877. assert_equal(np.sum((hs.intersections == point).all(axis=1)), 1)
  878. def test_wrong_feasible_point(self):
  879. halfspaces = np.array([[-1.0, 0.0, 0.0],
  880. [0.0, -1.0, 0.0],
  881. [1.0, 0.0, -1.0],
  882. [0.0, 1.0, -1.0]])
  883. feasible_point = np.array([0.5, 0.5, 0.5])
  884. #Feasible point is (ndim,) instead of (ndim-1,)
  885. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  886. feasible_point = np.array([[0.5], [0.5]])
  887. #Feasible point is (ndim-1, 1) instead of (ndim-1,)
  888. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  889. feasible_point = np.array([[0.5, 0.5]])
  890. #Feasible point is (1, ndim-1) instead of (ndim-1,)
  891. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  892. feasible_point = np.array([-0.5, -0.5])
  893. #Feasible point is outside feasible region
  894. assert_raises(qhull.QhullError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  895. def test_incremental(self):
  896. #Cube
  897. halfspaces = np.array([[0., 0., -1., -0.5],
  898. [0., -1., 0., -0.5],
  899. [-1., 0., 0., -0.5],
  900. [1., 0., 0., -0.5],
  901. [0., 1., 0., -0.5],
  902. [0., 0., 1., -0.5]])
  903. #Cut each summit
  904. extra_normals = np.array([[1., 1., 1.],
  905. [1., 1., -1.],
  906. [1., -1., 1.],
  907. [1, -1., -1.]])
  908. offsets = np.array([[-1.]]*8)
  909. extra_halfspaces = np.hstack((np.vstack((extra_normals, -extra_normals)),
  910. offsets))
  911. feas_point = np.array([0., 0., 0.])
  912. inc_hs = qhull.HalfspaceIntersection(halfspaces, feas_point, incremental=True)
  913. inc_res_hs = qhull.HalfspaceIntersection(halfspaces, feas_point, incremental=True)
  914. for i, ehs in enumerate(extra_halfspaces):
  915. inc_hs.add_halfspaces(ehs[np.newaxis, :])
  916. inc_res_hs.add_halfspaces(ehs[np.newaxis, :], restart=True)
  917. total = np.vstack((halfspaces, extra_halfspaces[:i+1, :]))
  918. hs = qhull.HalfspaceIntersection(total, feas_point)
  919. assert_allclose(inc_hs.halfspaces, inc_res_hs.halfspaces)
  920. assert_allclose(inc_hs.halfspaces, hs.halfspaces)
  921. #Direct computation and restart should have points in same order
  922. assert_allclose(hs.intersections, inc_res_hs.intersections)
  923. #Incremental will have points in different order than direct computation
  924. self.assert_unordered_allclose(inc_hs.intersections, hs.intersections)
  925. inc_hs.close()
  926. def test_cube(self):
  927. # Halfspaces of the cube:
  928. halfspaces = np.array([[-1., 0., 0., 0.], # x >= 0
  929. [1., 0., 0., -1.], # x <= 1
  930. [0., -1., 0., 0.], # y >= 0
  931. [0., 1., 0., -1.], # y <= 1
  932. [0., 0., -1., 0.], # z >= 0
  933. [0., 0., 1., -1.]]) # z <= 1
  934. point = np.array([0.5, 0.5, 0.5])
  935. hs = qhull.HalfspaceIntersection(halfspaces, point)
  936. # qhalf H0.5,0.5,0.5 o < input.txt
  937. qhalf_points = np.array([
  938. [-2, 0, 0],
  939. [2, 0, 0],
  940. [0, -2, 0],
  941. [0, 2, 0],
  942. [0, 0, -2],
  943. [0, 0, 2]])
  944. qhalf_facets = [
  945. [2, 4, 0],
  946. [4, 2, 1],
  947. [5, 2, 0],
  948. [2, 5, 1],
  949. [3, 4, 1],
  950. [4, 3, 0],
  951. [5, 3, 1],
  952. [3, 5, 0]]
  953. assert len(qhalf_facets) == len(hs.dual_facets)
  954. for a, b in zip(qhalf_facets, hs.dual_facets):
  955. assert set(a) == set(b) # facet orientation can differ
  956. assert_allclose(hs.dual_points, qhalf_points)