# test_qhull.py -- tests for scipy.spatial.qhull (Delaunay, ConvexHull,
# Voronoi, HalfspaceIntersection)
  1. from __future__ import division, print_function, absolute_import
  2. import os
  3. import copy
  4. import pytest
  5. import numpy as np
  6. from numpy.testing import (assert_equal, assert_almost_equal,
  7. assert_, assert_allclose, assert_array_equal)
  8. import pytest
  9. from pytest import raises as assert_raises
  10. from scipy._lib.six import xrange
  11. import scipy.spatial.qhull as qhull
  12. from scipy.spatial import cKDTree as KDTree
  13. from scipy.spatial import Voronoi
  14. import itertools
  15. def sorted_tuple(x):
  16. return tuple(sorted(x))
  17. def sorted_unique_tuple(x):
  18. return tuple(np.unique(x))
  19. def assert_unordered_tuple_list_equal(a, b, tpl=tuple):
  20. if isinstance(a, np.ndarray):
  21. a = a.tolist()
  22. if isinstance(b, np.ndarray):
  23. b = b.tolist()
  24. a = list(map(tpl, a))
  25. a.sort()
  26. b = list(map(tpl, b))
  27. b.sort()
  28. assert_equal(a, b)
  29. np.random.seed(1234)
  30. points = [(0,0), (0,1), (1,0), (1,1), (0.5, 0.5), (0.5, 1.5)]
  31. pathological_data_1 = np.array([
  32. [-3.14,-3.14], [-3.14,-2.36], [-3.14,-1.57], [-3.14,-0.79],
  33. [-3.14,0.0], [-3.14,0.79], [-3.14,1.57], [-3.14,2.36],
  34. [-3.14,3.14], [-2.36,-3.14], [-2.36,-2.36], [-2.36,-1.57],
  35. [-2.36,-0.79], [-2.36,0.0], [-2.36,0.79], [-2.36,1.57],
  36. [-2.36,2.36], [-2.36,3.14], [-1.57,-0.79], [-1.57,0.79],
  37. [-1.57,-1.57], [-1.57,0.0], [-1.57,1.57], [-1.57,-3.14],
  38. [-1.57,-2.36], [-1.57,2.36], [-1.57,3.14], [-0.79,-1.57],
  39. [-0.79,1.57], [-0.79,-3.14], [-0.79,-2.36], [-0.79,-0.79],
  40. [-0.79,0.0], [-0.79,0.79], [-0.79,2.36], [-0.79,3.14],
  41. [0.0,-3.14], [0.0,-2.36], [0.0,-1.57], [0.0,-0.79], [0.0,0.0],
  42. [0.0,0.79], [0.0,1.57], [0.0,2.36], [0.0,3.14], [0.79,-3.14],
  43. [0.79,-2.36], [0.79,-0.79], [0.79,0.0], [0.79,0.79],
  44. [0.79,2.36], [0.79,3.14], [0.79,-1.57], [0.79,1.57],
  45. [1.57,-3.14], [1.57,-2.36], [1.57,2.36], [1.57,3.14],
  46. [1.57,-1.57], [1.57,0.0], [1.57,1.57], [1.57,-0.79],
  47. [1.57,0.79], [2.36,-3.14], [2.36,-2.36], [2.36,-1.57],
  48. [2.36,-0.79], [2.36,0.0], [2.36,0.79], [2.36,1.57],
  49. [2.36,2.36], [2.36,3.14], [3.14,-3.14], [3.14,-2.36],
  50. [3.14,-1.57], [3.14,-0.79], [3.14,0.0], [3.14,0.79],
  51. [3.14,1.57], [3.14,2.36], [3.14,3.14],
  52. ])
  53. pathological_data_2 = np.array([
  54. [-1, -1], [-1, 0], [-1, 1],
  55. [0, -1], [0, 0], [0, 1],
  56. [1, -1 - np.finfo(np.float_).eps], [1, 0], [1, 1],
  57. ])
  58. bug_2850_chunks = [np.random.rand(10, 2),
  59. np.array([[0,0], [0,1], [1,0], [1,1]]) # add corners
  60. ]
  61. # same with some additional chunks
  62. bug_2850_chunks_2 = (bug_2850_chunks +
  63. [np.random.rand(10, 2),
  64. 0.25 + np.array([[0,0], [0,1], [1,0], [1,1]])])
  65. DATASETS = {
  66. 'some-points': np.asarray(points),
  67. 'random-2d': np.random.rand(30, 2),
  68. 'random-3d': np.random.rand(30, 3),
  69. 'random-4d': np.random.rand(30, 4),
  70. 'random-5d': np.random.rand(30, 5),
  71. 'random-6d': np.random.rand(10, 6),
  72. 'random-7d': np.random.rand(10, 7),
  73. 'random-8d': np.random.rand(10, 8),
  74. 'pathological-1': pathological_data_1,
  75. 'pathological-2': pathological_data_2
  76. }
  77. INCREMENTAL_DATASETS = {
  78. 'bug-2850': (bug_2850_chunks, None),
  79. 'bug-2850-2': (bug_2850_chunks_2, None),
  80. }
def _add_inc_data(name, chunksize):
    """
    Generate incremental datasets from basic data sets

    Splits DATASETS[name] into an initial chunk of at least ndim+2 points
    followed by chunks of `chunksize` points each, and registers the result
    in INCREMENTAL_DATASETS under the key "<name>-chunk-<chunksize>".
    """
    points = DATASETS[name]
    ndim = points.shape[1]

    opts = None
    nmin = ndim + 2

    if name == 'some-points':
        # since Qz is not allowed, use QJ
        opts = 'QJ Pp'
    elif name == 'pathological-1':
        # include enough points so that we get different x-coordinates
        nmin = 12

    chunks = [points[:nmin]]
    for j in xrange(nmin, len(points), chunksize):
        chunks.append(points[j:j+chunksize])

    new_name = "%s-chunk-%d" % (name, chunksize)
    # Guard against registering the same chunked variant twice.
    assert new_name not in INCREMENTAL_DATASETS
    INCREMENTAL_DATASETS[new_name] = (chunks, opts)
# Populate INCREMENTAL_DATASETS with chunked variants of every basic
# dataset, at several chunk sizes.
for name in DATASETS:
    for chunksize in 1, 4, 16:
        _add_inc_data(name, chunksize)
class Test_Qhull(object):
    def test_swapping(self):
        # Check that Qhull state swapping works

        x = qhull._Qhull(b'v',
                         np.array([[0,0],[0,1],[1,0],[1,1.],[0.5,0.5]]),
                         b'Qz')
        xd = copy.deepcopy(x.get_voronoi_diagram())

        y = qhull._Qhull(b'v',
                         np.array([[0,0],[0,1],[1,0],[1,2.]]),
                         b'Qz')
        yd = copy.deepcopy(y.get_voronoi_diagram())

        # Re-query x after y was computed in between; the result must be
        # unchanged (deep copies are compared below).
        xd2 = copy.deepcopy(x.get_voronoi_diagram())
        x.close()
        yd2 = copy.deepcopy(y.get_voronoi_diagram())
        y.close()

        # A closed _Qhull must refuse further queries.
        assert_raises(RuntimeError, x.get_voronoi_diagram)
        assert_raises(RuntimeError, y.get_voronoi_diagram)

        assert_allclose(xd[0], xd2[0])
        assert_unordered_tuple_list_equal(xd[1], xd2[1], tpl=sorted_tuple)
        assert_unordered_tuple_list_equal(xd[2], xd2[2], tpl=sorted_tuple)
        assert_unordered_tuple_list_equal(xd[3], xd2[3], tpl=sorted_tuple)
        assert_array_equal(xd[4], xd2[4])

        assert_allclose(yd[0], yd2[0])
        assert_unordered_tuple_list_equal(yd[1], yd2[1], tpl=sorted_tuple)
        assert_unordered_tuple_list_equal(yd[2], yd2[2], tpl=sorted_tuple)
        assert_unordered_tuple_list_equal(yd[3], yd2[3], tpl=sorted_tuple)
        assert_array_equal(yd[4], yd2[4])

        # Closing twice is allowed; the object stays unusable afterwards.
        x.close()
        assert_raises(RuntimeError, x.get_voronoi_diagram)
        y.close()
        assert_raises(RuntimeError, y.get_voronoi_diagram)

    def test_issue_8051(self):
        # Regression test for gh-8051: Voronoi on a regular 3x3 grid
        # should construct without raising.
        points = np.array([[0, 0], [0, 1], [0, 2], [1, 0], [1, 1], [1, 2],[2, 0], [2, 1], [2, 2]])
        Voronoi(points)
class TestUtilities(object):
    """
    Check that utility functions work.

    """

    def test_find_simplex(self):
        # Simple check that simplex finding works
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        # +---+
        # |\ 0|
        # | \ |
        # |1 \|
        # +---+

        assert_equal(tri.vertices, [[1, 3, 2], [3, 1, 0]])

        # Each probe point is (x, y, expected simplex index); find_simplex
        # and the module-level tsearch must agree.
        for p in [(0.25, 0.25, 1),
                  (0.75, 0.75, 0),
                  (0.3, 0.2, 1)]:
            i = tri.find_simplex(p[:2])
            assert_equal(i, p[2], err_msg='%r' % (p,))
            j = qhull.tsearch(tri, p[:2])
            assert_equal(i, j)

    def test_plane_distance(self):
        # Compare plane distance from hyperplane equations obtained from Qhull
        # to manually computed plane equations
        x = np.array([(0,0), (1, 1), (1, 0), (0.99189033, 0.37674127),
                      (0.99440079, 0.45182168)], dtype=np.double)
        p = np.array([0.99966555, 0.15685619], dtype=np.double)

        tri = qhull.Delaunay(x)

        z = tri.lift_points(x)
        pz = tri.lift_points(p)

        dist = tri.plane_distance(p)

        for j, v in enumerate(tri.vertices):
            x1 = z[v[0]]
            x2 = z[v[1]]
            x3 = z[v[2]]

            # Unit normal of the lifted facet, oriented downwards in z.
            n = np.cross(x1 - x3, x2 - x3)
            n /= np.sqrt(np.dot(n, n))
            n *= -np.sign(n[2])

            d = np.dot(n, pz - x3)

            assert_almost_equal(dist[j], d)

    def test_convex_hull(self):
        # Simple check that the convex hull seems to works
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        # +---+
        # |\ 0|
        # | \ |
        # |1 \|
        # +---+

        assert_equal(tri.convex_hull, [[3, 2], [1, 2], [1, 0], [3, 0]])

    def test_volume_area(self):
        #Basic check that we get back the correct volume and area for a cube
        points = np.array([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 0),
                           (0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
        hull = qhull.ConvexHull(points)
        assert_allclose(hull.volume, 1., rtol=1e-14,
                        err_msg="Volume of cube is incorrect")
        assert_allclose(hull.area, 6., rtol=1e-14,
                        err_msg="Area of cube is incorrect")

    def test_random_volume_area(self):
        #Test that the results for a random 10-point convex are
        #coherent with the output of qconvex Qt s FA
        points = np.array([(0.362568364506, 0.472712355305, 0.347003084477),
                           (0.733731893414, 0.634480295684, 0.950513180209),
                           (0.511239955611, 0.876839441267, 0.418047827863),
                           (0.0765906233393, 0.527373281342, 0.6509863541),
                           (0.146694972056, 0.596725793348, 0.894860986685),
                           (0.513808585741, 0.069576205858, 0.530890338876),
                           (0.512343805118, 0.663537132612, 0.037689295973),
                           (0.47282965018, 0.462176697655, 0.14061843691),
                           (0.240584597123, 0.778660020591, 0.722913476339),
                           (0.951271745935, 0.967000673944, 0.890661319684)])
        hull = qhull.ConvexHull(points)
        assert_allclose(hull.volume, 0.14562013, rtol=1e-07,
                        err_msg="Volume of random polyhedron is incorrect")
        assert_allclose(hull.area, 1.6670425, rtol=1e-07,
                        err_msg="Area of random polyhedron is incorrect")

    def test_incremental_volume_area_random_input(self):
        """Test that incremental mode gives the same volume/area as
        non-incremental mode and incremental mode with restart"""
        nr_points = 20
        dim = 3
        points = np.random.random((nr_points, dim))
        inc_hull = qhull.ConvexHull(points[:dim+1, :], incremental=True)
        inc_restart_hull = qhull.ConvexHull(points[:dim+1, :], incremental=True)
        for i in range(dim+1, nr_points):
            hull = qhull.ConvexHull(points[:i+1, :])
            inc_hull.add_points(points[i:i+1, :])
            inc_restart_hull.add_points(points[i:i+1, :], restart=True)
            assert_allclose(hull.volume, inc_hull.volume, rtol=1e-7)
            assert_allclose(hull.volume, inc_restart_hull.volume, rtol=1e-7)
            assert_allclose(hull.area, inc_hull.area, rtol=1e-7)
            assert_allclose(hull.area, inc_restart_hull.area, rtol=1e-7)

    def _check_barycentric_transforms(self, tri, err_msg="",
                                      unit_cube=False,
                                      unit_cube_tol=0):
        """Check that a triangulation has reasonable barycentric transforms"""
        vertices = tri.points[tri.vertices]
        sc = 1/(tri.ndim + 1.0)
        centroids = vertices.sum(axis=1) * sc

        # Either: (i) the simplex has a `nan` barycentric transform,
        # or, (ii) the centroid is in the simplex

        def barycentric_transform(tr, x):
            # Apply the stored affine maps: c = Tinv @ (x - r), per simplex.
            ndim = tr.shape[1]
            r = tr[:,-1,:]
            Tinv = tr[:,:-1,:]
            return np.einsum('ijk,ik->ij', Tinv, x - r)

        eps = np.finfo(float).eps

        c = barycentric_transform(tri.transform, centroids)
        olderr = np.seterr(invalid="ignore")
        try:
            # nan rows (case i) pass; otherwise the centroid's barycentric
            # coordinates must all be close to 1/(ndim+1) (case ii).
            ok = np.isnan(c).all(axis=1) | (abs(c - sc)/sc < 0.1).all(axis=1)
        finally:
            np.seterr(**olderr)

        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        # Invalid simplices must be (nearly) zero volume
        q = vertices[:,:-1,:] - vertices[:,-1,None,:]
        volume = np.array([np.linalg.det(q[k,:,:])
                           for k in range(tri.nsimplex)])
        ok = np.isfinite(tri.transform[:,0,0]) | (volume < np.sqrt(eps))
        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        # Also, find_simplex for the centroid should end up in some
        # simplex for the non-degenerate cases
        j = tri.find_simplex(centroids)
        ok = (j != -1) | np.isnan(tri.transform[:,0,0])
        assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

        if unit_cube:
            # If in unit cube, no interior point should be marked out of hull
            at_boundary = (centroids <= unit_cube_tol).any(axis=1)
            at_boundary |= (centroids >= 1 - unit_cube_tol).any(axis=1)

            ok = (j != -1) | at_boundary
            assert_(ok.all(), "%s %s" % (err_msg, np.nonzero(~ok)))

    def test_degenerate_barycentric_transforms(self):
        # The triangulation should not produce invalid barycentric
        # transforms that stump the simplex finding
        data = np.load(os.path.join(os.path.dirname(__file__), 'data',
                                    'degenerate_pointset.npz'))
        points = data['c']
        data.close()

        tri = qhull.Delaunay(points)

        # Check that there are not too many invalid simplices
        bad_count = np.isnan(tri.transform[:,0,0]).sum()
        assert_(bad_count < 21, bad_count)

        # Check the transforms
        self._check_barycentric_transforms(tri)

    @pytest.mark.slow
    def test_more_barycentric_transforms(self):
        # Triangulate some "nasty" grids

        eps = np.finfo(float).eps

        # number of grid points per axis, keyed by dimension
        npoints = {2: 70, 3: 11, 4: 5, 5: 3}

        _is_32bit_platform = np.intp(0).itemsize < 8
        for ndim in xrange(2, 6):
            # Generate an uniform grid in n-d unit cube
            x = np.linspace(0, 1, npoints[ndim])
            grid = np.c_[list(map(np.ravel, np.broadcast_arrays(*np.ix_(*([x]*ndim)))))].T

            err_msg = "ndim=%d" % ndim

            # Check using regular grid
            tri = qhull.Delaunay(grid)
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True)

            # Check with eps-perturbations
            np.random.seed(1234)
            m = (np.random.rand(grid.shape[0]) < 0.2)
            grid[m,:] += 2*eps*(np.random.rand(*grid[m,:].shape) - 0.5)

            tri = qhull.Delaunay(grid)
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True,
                                               unit_cube_tol=2*eps)

            # Check with duplicated data
            tri = qhull.Delaunay(np.r_[grid, grid])
            self._check_barycentric_transforms(tri, err_msg=err_msg,
                                               unit_cube=True,
                                               unit_cube_tol=2*eps)

            if not _is_32bit_platform:
                # test numerically unstable, and reported to fail on 32-bit
                # installs

                # Check with larger perturbations
                np.random.seed(4321)
                m = (np.random.rand(grid.shape[0]) < 0.2)
                grid[m,:] += 1000*eps*(np.random.rand(*grid[m,:].shape) - 0.5)

                tri = qhull.Delaunay(grid)
                self._check_barycentric_transforms(tri, err_msg=err_msg,
                                                   unit_cube=True,
                                                   unit_cube_tol=1500*eps)

                # Check with yet larger perturbations
                np.random.seed(4321)
                m = (np.random.rand(grid.shape[0]) < 0.2)
                grid[m,:] += 1e6*eps*(np.random.rand(*grid[m,:].shape) - 0.5)

                tri = qhull.Delaunay(grid)
                self._check_barycentric_transforms(tri, err_msg=err_msg,
                                                   unit_cube=True,
                                                   unit_cube_tol=1e7*eps)
  330. class TestVertexNeighborVertices(object):
  331. def _check(self, tri):
  332. expected = [set() for j in range(tri.points.shape[0])]
  333. for s in tri.simplices:
  334. for a in s:
  335. for b in s:
  336. if a != b:
  337. expected[a].add(b)
  338. indptr, indices = tri.vertex_neighbor_vertices
  339. got = []
  340. for j in range(tri.points.shape[0]):
  341. got.append(set(map(int, indices[indptr[j]:indptr[j+1]])))
  342. assert_equal(got, expected, err_msg="%r != %r" % (got, expected))
  343. def test_triangle(self):
  344. points = np.array([(0,0), (0,1), (1,0)], dtype=np.double)
  345. tri = qhull.Delaunay(points)
  346. self._check(tri)
  347. def test_rectangle(self):
  348. points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
  349. tri = qhull.Delaunay(points)
  350. self._check(tri)
  351. def test_complicated(self):
  352. points = np.array([(0,0), (0,1), (1,1), (1,0),
  353. (0.5, 0.5), (0.9, 0.5)], dtype=np.double)
  354. tri = qhull.Delaunay(points)
  355. self._check(tri)
class TestDelaunay(object):
    """
    Check that triangulation works.

    """
    def test_masked_array_fails(self):
        masked_array = np.ma.masked_all(1)
        assert_raises(ValueError, qhull.Delaunay, masked_array)

    def test_array_with_nans_fails(self):
        points_with_nan = np.array([(0,0), (0,1), (1,1), (1,np.nan)], dtype=np.double)
        assert_raises(ValueError, qhull.Delaunay, points_with_nan)

    def test_nd_simplex(self):
        # simple smoke test: triangulate a n-dimensional simplex
        for nd in xrange(2, 8):
            # unit-simplex vertices plus the all-ones corner
            points = np.zeros((nd+1, nd))
            for j in xrange(nd):
                points[j,j] = 1.0
            points[-1,:] = 1.0

            tri = qhull.Delaunay(points)

            tri.vertices.sort()

            # A single simplex containing all points, with no neighbors.
            assert_equal(tri.vertices, np.arange(nd+1, dtype=int)[None,:])
            assert_equal(tri.neighbors, -1 + np.zeros((nd+1), dtype=int)[None,:])

    def test_2d_square(self):
        # simple smoke test: 2d square
        points = np.array([(0,0), (0,1), (1,1), (1,0)], dtype=np.double)
        tri = qhull.Delaunay(points)

        assert_equal(tri.vertices, [[1, 3, 2], [3, 1, 0]])
        assert_equal(tri.neighbors, [[-1, -1, 1], [-1, -1, 0]])

    def test_duplicate_points(self):
        x = np.array([0, 1, 0, 1], dtype=np.float64)
        y = np.array([0, 0, 1, 1], dtype=np.float64)

        xp = np.r_[x, x]
        yp = np.r_[y, y]

        # shouldn't fail on duplicate points
        tri = qhull.Delaunay(np.c_[x, y])
        tri2 = qhull.Delaunay(np.c_[xp, yp])

    def test_pathological(self):
        # both should succeed
        points = DATASETS['pathological-1']
        tri = qhull.Delaunay(points)
        assert_equal(tri.points[tri.vertices].max(), points.max())
        assert_equal(tri.points[tri.vertices].min(), points.min())

        points = DATASETS['pathological-2']
        tri = qhull.Delaunay(points)
        assert_equal(tri.points[tri.vertices].max(), points.max())
        assert_equal(tri.points[tri.vertices].min(), points.min())

    def test_joggle(self):
        # Check that the option QJ indeed guarantees that all input points
        # occur as vertices of the triangulation
        points = np.random.rand(10, 2)
        points = np.r_[points, points]  # duplicate input data

        tri = qhull.Delaunay(points, qhull_options="QJ Qbb Pp")
        assert_array_equal(np.unique(tri.simplices.ravel()),
                           np.arange(len(points)))

    def test_coplanar(self):
        # Check that the coplanar point output option indeed works
        points = np.random.rand(10, 2)
        points = np.r_[points, points]  # duplicate input data

        tri = qhull.Delaunay(points)

        # Only one copy of each duplicated point appears as a vertex; the
        # other copies are reported via tri.coplanar.
        assert_(len(np.unique(tri.simplices.ravel())) == len(points)//2)
        assert_(len(tri.coplanar) == len(points)//2)

        assert_(len(np.unique(tri.coplanar[:,2])) == len(points)//2)

        assert_(np.all(tri.vertex_to_simplex >= 0))

    def test_furthest_site(self):
        points = [(0, 0), (0, 1), (1, 0), (0.5, 0.5), (1.1, 1.1)]
        tri = qhull.Delaunay(points, furthest_site=True)

        expected = np.array([(1, 4, 0), (4, 2, 0)])  # from Qhull
        assert_array_equal(tri.simplices, expected)

    @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
    def test_incremental(self, name):
        # Test incremental construction of the triangulation

        chunks, opts = INCREMENTAL_DATASETS[name]
        points = np.concatenate(chunks, axis=0)

        obj = qhull.Delaunay(chunks[0], incremental=True,
                             qhull_options=opts)
        for chunk in chunks[1:]:
            obj.add_points(chunk)

        obj2 = qhull.Delaunay(points)

        obj3 = qhull.Delaunay(chunks[0], incremental=True,
                              qhull_options=opts)
        if len(chunks) > 1:
            obj3.add_points(np.concatenate(chunks[1:], axis=0),
                            restart=True)

        # Check that the incremental mode agrees with upfront mode
        if name.startswith('pathological'):
            # XXX: These produce valid but different triangulations.
            #      They look OK when plotted, but how to check them?

            assert_array_equal(np.unique(obj.simplices.ravel()),
                               np.arange(points.shape[0]))
            assert_array_equal(np.unique(obj2.simplices.ravel()),
                               np.arange(points.shape[0]))
        else:
            assert_unordered_tuple_list_equal(obj.simplices, obj2.simplices,
                                              tpl=sorted_tuple)
            assert_unordered_tuple_list_equal(obj2.simplices, obj3.simplices,
                                              tpl=sorted_tuple)
def assert_hulls_equal(points, facets_1, facets_2):
    # Check that two convex hulls constructed from the same point set
    # are equal
    facets_1 = set(map(sorted_tuple, facets_1))
    facets_2 = set(map(sorted_tuple, facets_2))

    if facets_1 != facets_2 and points.shape[1] == 2:
        # The direct check fails for the pathological cases
        # --- then the convex hull from Delaunay differs (due
        # to rounding error etc.) from the hull computed
        # otherwise, by the question whether (tricoplanar)
        # points that lie almost exactly on the hull are
        # included as vertices of the hull or not.
        #
        # So we check the result, and accept it if the Delaunay
        # hull line segments are a subset of the usual hull.

        eps = 1000 * np.finfo(float).eps

        for a, b in facets_1:
            # Accept facet (a, b) if it lies on some facet (ap, bp) of the
            # other hull; the for/else raises when no match is found.
            for ap, bp in facets_2:
                t = points[bp] - points[ap]
                t /= np.linalg.norm(t)       # tangent
                n = np.array([-t[1], t[0]])  # normal

                # check that the two line segments are parallel
                # to the same line
                c1 = np.dot(n, points[b] - points[ap])
                c2 = np.dot(n, points[a] - points[ap])
                if not np.allclose(np.dot(c1, n), 0):
                    continue
                if not np.allclose(np.dot(c2, n), 0):
                    continue

                # Check that the segment (a, b) is contained in (ap, bp)
                c1 = np.dot(t, points[a] - points[ap])
                c2 = np.dot(t, points[b] - points[ap])
                c3 = np.dot(t, points[bp] - points[ap])
                if c1 < -eps or c1 > c3 + eps:
                    continue
                if c2 < -eps or c2 > c3 + eps:
                    continue

                # OK:
                break
            else:
                raise AssertionError("comparison fails")

        # it was OK
        return

    assert_equal(facets_1, facets_2)
class TestConvexHull:
    def test_masked_array_fails(self):
        masked_array = np.ma.masked_all(1)
        assert_raises(ValueError, qhull.ConvexHull, masked_array)

    def test_array_with_nans_fails(self):
        points_with_nan = np.array([(0,0), (1,1), (2,np.nan)], dtype=np.double)
        assert_raises(ValueError, qhull.ConvexHull, points_with_nan)

    @pytest.mark.parametrize("name", sorted(DATASETS))
    def test_hull_consistency_tri(self, name):
        # Check that a convex hull returned by qhull in ndim
        # and the hull constructed from ndim delaunay agree
        points = DATASETS[name]

        tri = qhull.Delaunay(points)
        hull = qhull.ConvexHull(points)

        assert_hulls_equal(points, tri.convex_hull, hull.simplices)

        # Check that the hull extremes are as expected
        if points.shape[1] == 2:
            assert_equal(np.unique(hull.simplices), np.sort(hull.vertices))
        else:
            assert_equal(np.unique(hull.simplices), hull.vertices)

    @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
    def test_incremental(self, name):
        # Test incremental construction of the convex hull
        chunks, _ = INCREMENTAL_DATASETS[name]
        points = np.concatenate(chunks, axis=0)

        obj = qhull.ConvexHull(chunks[0], incremental=True)
        for chunk in chunks[1:]:
            obj.add_points(chunk)

        obj2 = qhull.ConvexHull(points)

        obj3 = qhull.ConvexHull(chunks[0], incremental=True)
        if len(chunks) > 1:
            obj3.add_points(np.concatenate(chunks[1:], axis=0),
                            restart=True)

        # Check that the incremental mode agrees with upfront mode
        assert_hulls_equal(points, obj.simplices, obj2.simplices)
        assert_hulls_equal(points, obj.simplices, obj3.simplices)

    def test_vertices_2d(self):
        # The vertices should be in counterclockwise order in 2-D
        np.random.seed(1234)
        points = np.random.rand(30, 2)

        hull = qhull.ConvexHull(points)
        assert_equal(np.unique(hull.simplices), np.sort(hull.vertices))

        # Check counterclockwiseness
        x, y = hull.points[hull.vertices].T
        angle = np.arctan2(y - y.mean(), x - x.mean())
        assert_(np.all(np.diff(np.unwrap(angle)) > 0))

    def test_volume_area(self):
        # Basic check that we get back the correct volume and area for a cube
        points = np.array([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 0),
                           (0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
        tri = qhull.ConvexHull(points)
        assert_allclose(tri.volume, 1., rtol=1e-14)
        assert_allclose(tri.area, 6., rtol=1e-14)
class TestVoronoi:
    def test_masked_array_fails(self):
        masked_array = np.ma.masked_all(1)
        assert_raises(ValueError, qhull.Voronoi, masked_array)

    def test_simple(self):
        # Simple case with known Voronoi diagram
        points = [(0, 0), (0, 1), (0, 2),
                  (1, 0), (1, 1), (1, 2),
                  (2, 0), (2, 1), (2, 2)]

        # qhull v o Fv Qbb Qc Qz < dat
        output = """
        2
        5 10 1
        -10.101 -10.101
        0.5 0.5
        1.5 0.5
        0.5 1.5
        1.5 1.5
        2 0 1
        3 3 0 1
        2 0 3
        3 2 0 1
        4 4 3 1 2
        3 4 0 3
        2 0 2
        3 4 0 2
        2 0 4
        0
        12
        4 0 3 0 1
        4 0 1 0 1
        4 1 4 1 3
        4 1 2 0 3
        4 2 5 0 3
        4 3 4 1 2
        4 3 6 0 2
        4 4 5 3 4
        4 4 7 2 4
        4 5 8 0 4
        4 6 7 0 2
        4 7 8 0 4
        """
        self._compare_qvoronoi(points, output)

    def _compare_qvoronoi(self, points, output, **kw):
        """Compare to output from 'qvoronoi o Fv < data' to Voronoi()"""
        # Parse output
        output = [list(map(float, x.split())) for x in output.strip().splitlines()]
        nvertex = int(output[1][0])
        vertices = list(map(tuple, output[3:2+nvertex]))  # exclude inf
        nregion = int(output[1][1])
        # Regions use 1-based vertex indices in qvoronoi output; shift to
        # 0-based (the point-at-infinity becomes -1).
        regions = [[int(y)-1 for y in x[1:]]
                   for x in output[2+nvertex:2+nvertex+nregion]]
        nridge = int(output[2+nvertex+nregion][0])
        ridge_points = [[int(y) for y in x[1:3]]
                        for x in output[3+nvertex+nregion:]]
        ridge_vertices = [[int(y)-1 for y in x[3:]]
                          for x in output[3+nvertex+nregion:]]

        # Compare results
        vor = qhull.Voronoi(points, **kw)

        def sorttuple(x):
            return tuple(sorted(x))

        assert_allclose(vor.vertices, vertices)
        assert_equal(set(map(tuple, vor.regions)),
                     set(map(tuple, regions)))

        p1 = list(zip(list(map(sorttuple, ridge_points)), list(map(sorttuple, ridge_vertices))))
        p2 = list(zip(list(map(sorttuple, vor.ridge_points.tolist())),
                      list(map(sorttuple, vor.ridge_vertices))))
        p1.sort()
        p2.sort()

        assert_equal(p1, p2)

    @pytest.mark.parametrize("name", sorted(DATASETS))
    def test_ridges(self, name):
        # Check that the ridges computed by Voronoi indeed separate
        # the regions of nearest neighborhood, by comparing the result
        # to KDTree.

        points = DATASETS[name]

        tree = KDTree(points)
        vor = qhull.Voronoi(points)

        for p, v in vor.ridge_dict.items():
            # consider only finite ridges
            if not np.all(np.asarray(v) >= 0):
                continue

            ridge_midpoint = vor.vertices[v].mean(axis=0)
            d = 1e-6 * (points[p[0]] - ridge_midpoint)

            # Just inside either side of the ridge, the nearest input
            # point must be the corresponding generator.
            dist, k = tree.query(ridge_midpoint + d, k=1)
            assert_equal(k, p[0])

            dist, k = tree.query(ridge_midpoint - d, k=1)
            assert_equal(k, p[1])

    def test_furthest_site(self):
        points = [(0, 0), (0, 1), (1, 0), (0.5, 0.5), (1.1, 1.1)]

        # qhull v o Fv Qbb Qc Qu < dat
        output = """
        2
        3 5 1
        -10.101 -10.101
        0.6000000000000001 0.5
        0.5 0.6000000000000001
        3 0 1 2
        2 0 1
        2 0 2
        0
        3 0 1 2
        5
        4 0 2 0 2
        4 0 1 0 1
        4 0 4 1 2
        4 1 4 0 1
        4 2 4 0 2
        """
        self._compare_qvoronoi(points, output, furthest_site=True)

    @pytest.mark.parametrize("name", sorted(INCREMENTAL_DATASETS))
    def test_incremental(self, name):
        # Test incremental construction of the triangulation

        if INCREMENTAL_DATASETS[name][0][0].shape[1] > 3:
            # too slow (testing of the result --- qhull is still fast)
            return

        chunks, opts = INCREMENTAL_DATASETS[name]
        points = np.concatenate(chunks, axis=0)

        obj = qhull.Voronoi(chunks[0], incremental=True,
                            qhull_options=opts)
        for chunk in chunks[1:]:
            obj.add_points(chunk)

        obj2 = qhull.Voronoi(points)

        obj3 = qhull.Voronoi(chunks[0], incremental=True,
                             qhull_options=opts)
        if len(chunks) > 1:
            obj3.add_points(np.concatenate(chunks[1:], axis=0),
                            restart=True)

        # -- Check that the incremental mode agrees with upfront mode
        assert_equal(len(obj.point_region), len(obj2.point_region))
        assert_equal(len(obj.point_region), len(obj3.point_region))

        # The vertices may be in different order or duplicated in
        # the incremental map
        for objx in obj, obj3:
            # Map incremental-result vertex indices to upfront-result
            # indices by coordinate matching; -1 (infinity) maps to itself.
            vertex_map = {-1: -1}
            for i, v in enumerate(objx.vertices):
                for j, v2 in enumerate(obj2.vertices):
                    if np.allclose(v, v2):
                        vertex_map[i] = j

            def remap(x):
                if hasattr(x, '__len__'):
                    return tuple(set([remap(y) for y in x]))
                try:
                    return vertex_map[x]
                except KeyError:
                    raise AssertionError("incremental result has spurious vertex at %r"
                                         % (objx.vertices[x],))

            def simplified(x):
                # Canonicalise a list of index lists: drop empty/singleton
                # entries and sort, so region/ridge order doesn't matter.
                items = set(map(sorted_tuple, x))
                if () in items:
                    items.remove(())
                items = [x for x in items if len(x) > 1]
                items.sort()
                return items

            assert_equal(
                simplified(remap(objx.regions)),
                simplified(obj2.regions)
                )
            assert_equal(
                simplified(remap(objx.ridge_vertices)),
                simplified(obj2.ridge_vertices)
                )

            # XXX: compare ridge_points --- not clear exactly how to do this
  711. class Test_HalfspaceIntersection(object):
  712. def assert_unordered_allclose(self, arr1, arr2, rtol=1e-7):
  713. """Check that every line in arr1 is only once in arr2"""
  714. assert_equal(arr1.shape, arr2.shape)
  715. truths = np.zeros((arr1.shape[0],), dtype=bool)
  716. for l1 in arr1:
  717. indexes = np.nonzero((abs(arr2 - l1) < rtol).all(axis=1))[0]
  718. assert_equal(indexes.shape, (1,))
  719. truths[indexes[0]] = True
  720. assert_(truths.all())
  721. def test_cube_halfspace_intersection(self):
  722. halfspaces = np.array([[-1.0, 0.0, 0.0],
  723. [0.0, -1.0, 0.0],
  724. [1.0, 0.0, -1.0],
  725. [0.0, 1.0, -1.0]])
  726. feasible_point = np.array([0.5, 0.5])
  727. points = np.array([[0.0, 1.0], [1.0, 1.0], [0.0, 0.0], [1.0, 0.0]])
  728. hull = qhull.HalfspaceIntersection(halfspaces, feasible_point)
  729. assert_allclose(points, hull.intersections)
  730. def test_self_dual_polytope_intersection(self):
  731. fname = os.path.join(os.path.dirname(__file__), 'data',
  732. 'selfdual-4d-polytope.txt')
  733. ineqs = np.genfromtxt(fname)
  734. halfspaces = -np.hstack((ineqs[:, 1:], ineqs[:, :1]))
  735. feas_point = np.array([0., 0., 0., 0.])
  736. hs = qhull.HalfspaceIntersection(halfspaces, feas_point)
  737. assert_equal(hs.intersections.shape, (24, 4))
  738. assert_almost_equal(hs.dual_volume, 32.0)
  739. assert_equal(len(hs.dual_facets), 24)
  740. for facet in hs.dual_facets:
  741. assert_equal(len(facet), 6)
  742. dists = halfspaces[:, -1] + halfspaces[:, :-1].dot(feas_point)
  743. self.assert_unordered_allclose((halfspaces[:, :-1].T/dists).T, hs.dual_points)
  744. points = itertools.permutations([0., 0., 0.5, -0.5])
  745. for point in points:
  746. assert_equal(np.sum((hs.intersections == point).all(axis=1)), 1)
  747. def test_wrong_feasible_point(self):
  748. halfspaces = np.array([[-1.0, 0.0, 0.0],
  749. [0.0, -1.0, 0.0],
  750. [1.0, 0.0, -1.0],
  751. [0.0, 1.0, -1.0]])
  752. feasible_point = np.array([0.5, 0.5, 0.5])
  753. #Feasible point is (ndim,) instead of (ndim-1,)
  754. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  755. feasible_point = np.array([[0.5], [0.5]])
  756. #Feasible point is (ndim-1, 1) instead of (ndim-1,)
  757. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  758. feasible_point = np.array([[0.5, 0.5]])
  759. #Feasible point is (1, ndim-1) instead of (ndim-1,)
  760. assert_raises(ValueError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  761. feasible_point = np.array([-0.5, -0.5])
  762. #Feasible point is outside feasible region
  763. assert_raises(qhull.QhullError, qhull.HalfspaceIntersection, halfspaces, feasible_point)
  764. def test_incremental(self):
  765. #Cube
  766. halfspaces = np.array([[0., 0., -1., -0.5],
  767. [0., -1., 0., -0.5],
  768. [-1., 0., 0., -0.5],
  769. [1., 0., 0., -0.5],
  770. [0., 1., 0., -0.5],
  771. [0., 0., 1., -0.5]])
  772. #Cut each summit
  773. extra_normals = np.array([[1., 1., 1.],
  774. [1., 1., -1.],
  775. [1., -1., 1.],
  776. [1, -1., -1.]])
  777. offsets = np.array([[-1.]]*8)
  778. extra_halfspaces = np.hstack((np.vstack((extra_normals, -extra_normals)),
  779. offsets))
  780. feas_point = np.array([0., 0., 0.])
  781. inc_hs = qhull.HalfspaceIntersection(halfspaces, feas_point, incremental=True)
  782. inc_res_hs = qhull.HalfspaceIntersection(halfspaces, feas_point, incremental=True)
  783. for i, ehs in enumerate(extra_halfspaces):
  784. inc_hs.add_halfspaces(ehs[np.newaxis, :])
  785. inc_res_hs.add_halfspaces(ehs[np.newaxis, :], restart=True)
  786. total = np.vstack((halfspaces, extra_halfspaces[:i+1, :]))
  787. hs = qhull.HalfspaceIntersection(total, feas_point)
  788. assert_allclose(inc_hs.halfspaces, inc_res_hs.halfspaces)
  789. assert_allclose(inc_hs.halfspaces, hs.halfspaces)
  790. #Direct computation and restart should have points in same order
  791. assert_allclose(hs.intersections, inc_res_hs.intersections)
  792. #Incremental will have points in different order than direct computation
  793. self.assert_unordered_allclose(inc_hs.intersections, hs.intersections)
  794. inc_hs.close()