# Copyright Anne M. Archibald 2008
# Released under the scipy license
import numpy as np
from ._ckdtree import cKDTree, cKDTreeNode

__all__ = ['minkowski_distance_p', 'minkowski_distance',
           'distance_matrix',
           'Rectangle', 'KDTree']


def minkowski_distance_p(x, y, p=2):
    """Compute the pth power of the L**p distance between two arrays.

    For efficiency, this function computes the L**p distance but does
    not extract the pth root. If `p` is 1 or infinity, this is equal to
    the actual L**p distance.

    The last dimensions of `x` and `y` must be the same length. Any
    other dimensions must be compatible for broadcasting.

    Parameters
    ----------
    x : (..., K) array_like
        Input array.
    y : (..., K) array_like
        Input array.
    p : float, 1 <= p <= infinity
        Which Minkowski p-norm to use.

    Returns
    -------
    dist : ndarray
        pth power of the distance between the input arrays.

    Examples
    --------
    >>> from scipy.spatial import minkowski_distance_p
    >>> minkowski_distance_p([[0, 0], [0, 0]], [[1, 1], [0, 1]])
    array([2., 1.])

    """
    x = np.asarray(x)
    y = np.asarray(y)

    # Find smallest common datatype with float64 (return type of this
    # function) - addresses #10262.
    # Don't just cast to float64 for complex input case.
    common_datatype = np.promote_types(np.promote_types(x.dtype, y.dtype),
                                       'float64')

    # Make sure x and y are NumPy arrays of correct datatype.
    x = x.astype(common_datatype)
    y = y.astype(common_datatype)

    if p == np.inf:
        return np.amax(np.abs(y-x), axis=-1)
    elif p == 1:
        return np.sum(np.abs(y-x), axis=-1)
    else:
        return np.sum(np.abs(y-x)**p, axis=-1)
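

# Hedged sketch (hypothetical `_demo_minkowski_broadcasting` function, added
# only for exposition): illustrates the broadcasting rule described above and
# the p = 1 / p = inf cases, where no root extraction is needed.
def _demo_minkowski_broadcasting():
    x = np.zeros((3, 1, 2))           # 3 points, broadcast axis, K = 2
    y = np.ones((4, 2))               # 4 points, K = 2
    d = minkowski_distance_p(x, y)    # leading dims broadcast to (3, 4)
    assert d.shape == (3, 4)
    # For p = 1 and p = inf the "pth power" already equals the true distance.
    assert minkowski_distance_p([0, 0], [3, 4], p=1) == 7.0
    assert minkowski_distance_p([0, 0], [3, 4], p=np.inf) == 4.0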


def minkowski_distance(x, y, p=2):
    """Compute the L**p distance between two arrays.

    The last dimensions of `x` and `y` must be the same length. Any
    other dimensions must be compatible for broadcasting.

    Parameters
    ----------
    x : (..., K) array_like
        Input array.
    y : (..., K) array_like
        Input array.
    p : float, 1 <= p <= infinity
        Which Minkowski p-norm to use.

    Returns
    -------
    dist : ndarray
        Distance between the input arrays.

    Examples
    --------
    >>> from scipy.spatial import minkowski_distance
    >>> minkowski_distance([[0, 0], [0, 0]], [[1, 1], [0, 1]])
    array([ 1.41421356,  1.        ])

    """
    x = np.asarray(x)
    y = np.asarray(y)
    if p == np.inf or p == 1:
        return minkowski_distance_p(x, y, p)
    else:
        return minkowski_distance_p(x, y, p)**(1./p)


class Rectangle:
    """Hyperrectangle class.

    Represents a Cartesian product of intervals.
    """

    def __init__(self, maxes, mins):
        """Construct a hyperrectangle."""
        self.maxes = np.maximum(maxes, mins).astype(float)
        self.mins = np.minimum(maxes, mins).astype(float)
        self.m, = self.maxes.shape

    def __repr__(self):
        return "<Rectangle %s>" % list(zip(self.mins, self.maxes))

    def volume(self):
        """Total volume."""
        return np.prod(self.maxes - self.mins)

    def split(self, d, split):
        """Produce two hyperrectangles by splitting.

        In general, if you need to compute maximum and minimum
        distances to the children, it can be done more efficiently
        by updating the maximum and minimum distances to the parent.

        Parameters
        ----------
        d : int
            Axis to split hyperrectangle along.
        split : float
            Position along axis `d` to split at.
        """
        mid = np.copy(self.maxes)
        mid[d] = split
        less = Rectangle(self.mins, mid)
        mid = np.copy(self.mins)
        mid[d] = split
        greater = Rectangle(mid, self.maxes)
        return less, greater
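
    # Hedged sketch (hypothetical `_demo_split` helper, added only for
    # exposition): splitting the unit square along axis 0 at 0.3 yields two
    # children whose volumes sum to the parent volume.
    @staticmethod
    def _demo_split():
        rect = Rectangle([1., 1.], [0., 0.])   # the unit square [0,1] x [0,1]
        less, greater = rect.split(0, 0.3)     # cut along axis 0 at x = 0.3
        # less covers [0, 0.3] x [0, 1]; greater covers [0.3, 1] x [0, 1].
        assert np.isclose(less.volume() + greater.volume(), rect.volume())
        return less, greater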

    def min_distance_point(self, x, p=2.):
        """
        Return the minimum distance between input and points in the
        hyperrectangle.

        Parameters
        ----------
        x : array_like
            Input point.
        p : float, optional
            Which Minkowski p-norm to use.
        """
        return minkowski_distance(
            0, np.maximum(0, np.maximum(self.mins-x, x-self.maxes)),
            p
        )

    def max_distance_point(self, x, p=2.):
        """
        Return the maximum distance between input and points in the hyperrectangle.

        Parameters
        ----------
        x : array_like
            Input point.
        p : float, optional
            Which Minkowski p-norm to use.
        """
        return minkowski_distance(0, np.maximum(self.maxes-x, x-self.mins), p)
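
    # Hedged sketch (hypothetical `_demo_point_distances` helper, added only
    # for exposition): for the unit square and the outside point (2, 0.5),
    # the closest box point is (1, 0.5) and the farthest is the corner (0, 0)
    # (or (0, 1)).
    @staticmethod
    def _demo_point_distances():
        rect = Rectangle([1., 1.], [0., 0.])
        assert np.isclose(rect.min_distance_point([2., 0.5]), 1.0)
        assert np.isclose(rect.max_distance_point([2., 0.5]),
                          np.hypot(2.0, 0.5))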

    def min_distance_rectangle(self, other, p=2.):
        """
        Compute the minimum distance between points in the two hyperrectangles.

        Parameters
        ----------
        other : hyperrectangle
            Input rectangle.
        p : float, optional
            Which Minkowski p-norm to use.
        """
        return minkowski_distance(
            0,
            np.maximum(0, np.maximum(self.mins-other.maxes,
                                     other.mins-self.maxes)),
            p
        )

    def max_distance_rectangle(self, other, p=2.):
        """
        Compute the maximum distance between points in the two hyperrectangles.

        Parameters
        ----------
        other : hyperrectangle
            Input rectangle.
        p : float, optional
            Which Minkowski p-norm to use.
        """
        return minkowski_distance(
            0, np.maximum(self.maxes-other.mins, other.maxes-self.mins), p)
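

# Hedged sketch (hypothetical `_demo_rectangle_distances` function, added only
# for exposition): minimum/maximum distances between two disjoint unit squares.
def _demo_rectangle_distances():
    a = Rectangle([1., 1.], [0., 0.])   # [0, 1] x [0, 1]
    b = Rectangle([4., 1.], [3., 0.])   # [3, 4] x [0, 1]
    # The squares are separated by a gap of 2 along x and overlap fully in y.
    assert np.isclose(a.min_distance_rectangle(b), 2.0)
    # The farthest pair of points are opposite corners, e.g. (0, 0) and (4, 1).
    assert np.isclose(a.max_distance_rectangle(b), np.hypot(4.0, 1.0))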


class KDTree(cKDTree):
    """kd-tree for quick nearest-neighbor lookup.

    This class provides an index into a set of k-dimensional points
    which can be used to rapidly look up the nearest neighbors of any
    point.

    Parameters
    ----------
    data : array_like, shape (n,m)
        The n data points of dimension m to be indexed. This array is
        not copied unless this is necessary to produce a contiguous
        array of doubles, and so modifying this data will result in
        bogus results. The data are also copied if the kd-tree is built
        with copy_data=True.
    leafsize : positive int, optional
        The number of points at which the algorithm switches over to
        brute-force. Default: 10.
    compact_nodes : bool, optional
        If True, the kd-tree is built to shrink the hyperrectangles to
        the actual data range. This usually gives a more compact tree that
        is robust against degenerated input data and gives faster queries
        at the expense of longer build time. Default: True.
    copy_data : bool, optional
        If True the data is always copied to protect the kd-tree against
        data corruption. Default: False.
    balanced_tree : bool, optional
        If True, the median is used to split the hyperrectangles instead of
        the midpoint. This usually gives a more compact tree and
        faster queries at the expense of longer build time. Default: True.
    boxsize : array_like or scalar, optional
        Apply an m-d toroidal topology to the KDTree. The topology is
        generated by :math:`x_i + n_i L_i` where :math:`n_i` are integers and
        :math:`L_i` is the boxsize along the i-th dimension. The input data
        shall be wrapped into :math:`[0, L_i)`. A ValueError is raised if any
        of the data is outside of this bound.

    Notes
    -----
    The algorithm used is described in Maneewongvatana and Mount 1999.
    The general idea is that the kd-tree is a binary tree, each of whose
    nodes represents an axis-aligned hyperrectangle. Each node specifies
    an axis and splits the set of points based on whether their coordinate
    along that axis is greater than or less than a particular value.

    During construction, the axis and splitting point are chosen by the
    "sliding midpoint" rule, which ensures that the cells do not all
    become long and thin.

    The tree can be queried for the r closest neighbors of any given point
    (optionally returning only those within some maximum distance of the
    point). It can also be queried, with a substantial gain in efficiency,
    for the r approximate closest neighbors.

    For large dimensions (20 is already large) do not expect this to run
    significantly faster than brute force. High-dimensional nearest-neighbor
    queries are a substantial open problem in computer science.

    Attributes
    ----------
    data : ndarray, shape (n,m)
        The n data points of dimension m to be indexed. This array is
        not copied unless this is necessary to produce a contiguous
        array of doubles. The data are also copied if the kd-tree is built
        with `copy_data=True`.
    leafsize : positive int
        The number of points at which the algorithm switches over to
        brute-force.
    m : int
        The dimension of a single data-point.
    n : int
        The number of data points.
    maxes : ndarray, shape (m,)
        The maximum value in each dimension of the n data points.
    mins : ndarray, shape (m,)
        The minimum value in each dimension of the n data points.
    size : int
        The number of nodes in the tree.

    """

    class node:
        @staticmethod
        def _create(ckdtree_node=None):
            """Create either an inner or leaf node, wrapping a cKDTreeNode instance"""
            if ckdtree_node is None:
                return KDTree.node(ckdtree_node)
            elif ckdtree_node.split_dim == -1:
                return KDTree.leafnode(ckdtree_node)
            else:
                return KDTree.innernode(ckdtree_node)

        def __init__(self, ckdtree_node=None):
            if ckdtree_node is None:
                ckdtree_node = cKDTreeNode()
            self._node = ckdtree_node

        def __lt__(self, other):
            return id(self) < id(other)

        def __gt__(self, other):
            return id(self) > id(other)

        def __le__(self, other):
            return id(self) <= id(other)

        def __ge__(self, other):
            return id(self) >= id(other)

        def __eq__(self, other):
            return id(self) == id(other)

    class leafnode(node):
        @property
        def idx(self):
            return self._node.indices

        @property
        def children(self):
            return self._node.children

    class innernode(node):
        def __init__(self, ckdtreenode):
            assert isinstance(ckdtreenode, cKDTreeNode)
            super().__init__(ckdtreenode)
            self.less = KDTree.node._create(ckdtreenode.lesser)
            self.greater = KDTree.node._create(ckdtreenode.greater)

        @property
        def split_dim(self):
            return self._node.split_dim

        @property
        def split(self):
            return self._node.split

        @property
        def children(self):
            return self._node.children

    @property
    def tree(self):
        if not hasattr(self, "_tree"):
            self._tree = KDTree.node._create(super().tree)

        return self._tree
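
    # Hedged sketch (hypothetical `_demo_tree_walk` helper, added only for
    # exposition): the `tree` attribute exposes the built tree as nested
    # `innernode`/`leafnode` wrappers that can be walked recursively.
    @staticmethod
    def _demo_tree_walk():
        rng = np.random.default_rng()
        tree = KDTree(rng.random((40, 2)), leafsize=5)

        def count_points(nd):
            if isinstance(nd, KDTree.leafnode):
                return len(nd.idx)
            return count_points(nd.less) + count_points(nd.greater)

        # Every data point lands in exactly one leaf.
        assert count_points(tree.tree) == tree.n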

    def __init__(self, data, leafsize=10, compact_nodes=True, copy_data=False,
                 balanced_tree=True, boxsize=None):
        data = np.asarray(data)
        if data.dtype.kind == 'c':
            raise TypeError("KDTree does not work with complex data")

        # Note KDTree has different default leafsize from cKDTree
        super().__init__(data, leafsize, compact_nodes, copy_data,
                         balanced_tree, boxsize)

    def query(
            self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf, workers=1):
        r"""Query the kd-tree for nearest neighbors.

        Parameters
        ----------
        x : array_like, last dimension self.m
            An array of points to query.
        k : int or Sequence[int], optional
            Either the number of nearest neighbors to return, or a list of the
            k-th nearest neighbors to return, starting from 1.
        eps : nonnegative float, optional
            Return approximate nearest neighbors; the kth returned value
            is guaranteed to be no further than (1+eps) times the
            distance to the real kth nearest neighbor.
        p : float, 1<=p<=infinity, optional
            Which Minkowski p-norm to use.
            1 is the sum-of-absolute-values distance ("Manhattan" distance).
            2 is the usual Euclidean distance.
            infinity is the maximum-coordinate-difference distance.
            A large, finite p may cause a ValueError if overflow can occur.
        distance_upper_bound : nonnegative float, optional
            Return only neighbors within this distance. This is used to prune
            tree searches, so if you are doing a series of nearest-neighbor
            queries, it may help to supply the distance to the nearest neighbor
            of the most recent point.
        workers : int, optional
            Number of workers to use for parallel processing. If -1 is given
            all CPU threads are used. Default: 1.

            .. versionadded:: 1.6.0

        Returns
        -------
        d : float or array of floats
            The distances to the nearest neighbors.
            If ``x`` has shape ``tuple+(self.m,)``, then ``d`` has shape
            ``tuple+(k,)``.
            When k == 1, the last dimension of the output is squeezed.
            Missing neighbors are indicated with infinite distances.
            Hits are sorted by distance (nearest first).

            .. versionchanged:: 1.9.0
               Previously if ``k=None``, then `d` was an object array of
               shape ``tuple``, containing lists of distances. This behavior
               has been removed, use `query_ball_point` instead.

        i : integer or array of integers
            The index of each neighbor in ``self.data``.
            ``i`` is the same shape as d.
            Missing neighbors are indicated with ``self.n``.

        Examples
        --------
        >>> import numpy as np
        >>> from scipy.spatial import KDTree
        >>> x, y = np.mgrid[0:5, 2:8]
        >>> tree = KDTree(np.c_[x.ravel(), y.ravel()])

        To query the nearest neighbours and return squeezed result, use

        >>> dd, ii = tree.query([[0, 0], [2.2, 2.9]], k=1)
        >>> print(dd, ii, sep='\n')
        [2.        0.2236068]
        [ 0 13]

        To query the nearest neighbours and return unsqueezed result, use

        >>> dd, ii = tree.query([[0, 0], [2.2, 2.9]], k=[1])
        >>> print(dd, ii, sep='\n')
        [[2.       ]
         [0.2236068]]
        [[ 0]
         [13]]

        To query the second nearest neighbours and return unsqueezed result,
        use

        >>> dd, ii = tree.query([[0, 0], [2.2, 2.9]], k=[2])
        >>> print(dd, ii, sep='\n')
        [[2.23606798]
         [0.80622577]]
        [[ 6]
         [19]]

        To query the first and second nearest neighbours, use

        >>> dd, ii = tree.query([[0, 0], [2.2, 2.9]], k=2)
        >>> print(dd, ii, sep='\n')
        [[2.         2.23606798]
         [0.2236068  0.80622577]]
        [[ 0  6]
         [13 19]]

        or, be more specific

        >>> dd, ii = tree.query([[0, 0], [2.2, 2.9]], k=[1, 2])
        >>> print(dd, ii, sep='\n')
        [[2.         2.23606798]
         [0.2236068  0.80622577]]
        [[ 0  6]
         [13 19]]

        """
        x = np.asarray(x)
        if x.dtype.kind == 'c':
            raise TypeError("KDTree does not work with complex data")
        if k is None:
            raise ValueError("k must be an integer or a sequence of integers")
        d, i = super().query(x, k, eps, p, distance_upper_bound, workers)
        if isinstance(i, int):
            i = np.intp(i)
        return d, i

    def query_ball_point(self, x, r, p=2., eps=0, workers=1,
                         return_sorted=None, return_length=False):
        """Find all points within distance r of point(s) x.

        Parameters
        ----------
        x : array_like, shape tuple + (self.m,)
            The point or points to search for neighbors of.
        r : array_like, float
            The radius of points to return, must broadcast to the length of x.
        p : float, optional
            Which Minkowski p-norm to use. Should be in the range [1, inf].
            A finite large p may cause a ValueError if overflow can occur.
        eps : nonnegative float, optional
            Approximate search. Branches of the tree are not explored if their
            nearest points are further than ``r / (1 + eps)``, and branches are
            added in bulk if their furthest points are nearer than
            ``r * (1 + eps)``.
        workers : int, optional
            Number of jobs to schedule for parallel processing. If -1 is given
            all processors are used. Default: 1.

            .. versionadded:: 1.6.0
        return_sorted : bool, optional
            Sorts returned indices if True and does not sort them if False. If
            None, does not sort single point queries, but does sort
            multi-point queries which was the behavior before this option
            was added.

            .. versionadded:: 1.6.0
        return_length : bool, optional
            Return the number of points inside the radius instead of a list
            of the indices.

            .. versionadded:: 1.6.0

        Returns
        -------
        results : list or array of lists
            If `x` is a single point, returns a list of the indices of the
            neighbors of `x`. If `x` is an array of points, returns an object
            array of shape tuple containing lists of neighbors.

        Notes
        -----
        If you have many points whose neighbors you want to find, you may save
        substantial amounts of time by putting them in a KDTree and using
        query_ball_tree.

        Examples
        --------
        >>> import numpy as np
        >>> from scipy import spatial
        >>> x, y = np.mgrid[0:5, 0:5]
        >>> points = np.c_[x.ravel(), y.ravel()]
        >>> tree = spatial.KDTree(points)
        >>> sorted(tree.query_ball_point([2, 0], 1))
        [5, 10, 11, 15]

        Query multiple points and plot the results:

        >>> import matplotlib.pyplot as plt
        >>> points = np.asarray(points)
        >>> plt.plot(points[:,0], points[:,1], '.')
        >>> for results in tree.query_ball_point(([2, 0], [3, 3]), 1):
        ...     nearby_points = points[results]
        ...     plt.plot(nearby_points[:,0], nearby_points[:,1], 'o')
        >>> plt.margins(0.1, 0.1)
        >>> plt.show()

        """
        x = np.asarray(x)
        if x.dtype.kind == 'c':
            raise TypeError("KDTree does not work with complex data")
        return super().query_ball_point(
            x, r, p, eps, workers, return_sorted, return_length)

    def query_ball_tree(self, other, r, p=2., eps=0):
        """
        Find all pairs of points between `self` and `other` whose distance is
        at most r.

        Parameters
        ----------
        other : KDTree instance
            The tree containing points to search against.
        r : float
            The maximum distance, has to be positive.
        p : float, optional
            Which Minkowski norm to use. `p` has to meet the condition
            ``1 <= p <= infinity``.
        eps : float, optional
            Approximate search. Branches of the tree are not explored
            if their nearest points are further than ``r/(1+eps)``, and
            branches are added in bulk if their furthest points are nearer
            than ``r * (1+eps)``. `eps` has to be non-negative.

        Returns
        -------
        results : list of lists
            For each element ``self.data[i]`` of this tree, ``results[i]`` is a
            list of the indices of its neighbors in ``other.data``.

        Examples
        --------
        You can search all pairs of points between two kd-trees within a distance:

        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> from scipy.spatial import KDTree
        >>> rng = np.random.default_rng()
        >>> points1 = rng.random((15, 2))
        >>> points2 = rng.random((15, 2))
        >>> plt.figure(figsize=(6, 6))
        >>> plt.plot(points1[:, 0], points1[:, 1], "xk", markersize=14)
        >>> plt.plot(points2[:, 0], points2[:, 1], "og", markersize=14)
        >>> kd_tree1 = KDTree(points1)
        >>> kd_tree2 = KDTree(points2)
        >>> indexes = kd_tree1.query_ball_tree(kd_tree2, r=0.2)
        >>> for i in range(len(indexes)):
        ...     for j in indexes[i]:
        ...         plt.plot([points1[i, 0], points2[j, 0]],
        ...                  [points1[i, 1], points2[j, 1]], "-r")
        >>> plt.show()

        """
        return super().query_ball_tree(other, r, p, eps)

    def query_pairs(self, r, p=2., eps=0, output_type='set'):
        """Find all pairs of points in `self` whose distance is at most r.

        Parameters
        ----------
        r : positive float
            The maximum distance.
        p : float, optional
            Which Minkowski norm to use. `p` has to meet the condition
            ``1 <= p <= infinity``.
        eps : float, optional
            Approximate search. Branches of the tree are not explored
            if their nearest points are further than ``r/(1+eps)``, and
            branches are added in bulk if their furthest points are nearer
            than ``r * (1+eps)``. `eps` has to be non-negative.
        output_type : string, optional
            Choose the output container, 'set' or 'ndarray'. Default: 'set'

            .. versionadded:: 1.6.0

        Returns
        -------
        results : set or ndarray
            Set of pairs ``(i,j)``, with ``i < j``, for which the corresponding
            positions are close. If output_type is 'ndarray', an ndarray is
            returned instead of a set.

        Examples
        --------
        You can search all pairs of points in a kd-tree within a distance:

        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> from scipy.spatial import KDTree
        >>> rng = np.random.default_rng()
        >>> points = rng.random((20, 2))
        >>> plt.figure(figsize=(6, 6))
        >>> plt.plot(points[:, 0], points[:, 1], "xk", markersize=14)
        >>> kd_tree = KDTree(points)
        >>> pairs = kd_tree.query_pairs(r=0.2)
        >>> for (i, j) in pairs:
        ...     plt.plot([points[i, 0], points[j, 0]],
        ...              [points[i, 1], points[j, 1]], "-r")
        >>> plt.show()

        """
        return super().query_pairs(r, p, eps, output_type)

    def count_neighbors(self, other, r, p=2., weights=None, cumulative=True):
        """Count how many nearby pairs can be formed.

        Count the number of pairs ``(x1,x2)`` that can be formed, with ``x1``
        drawn from ``self`` and ``x2`` drawn from ``other``, and where
        ``distance(x1, x2, p) <= r``.

        Data points on ``self`` and ``other`` are optionally weighted by the
        ``weights`` argument. (See below)

        This is adapted from the "two-point correlation" algorithm described by
        Gray and Moore [1]_. See notes for further discussion.

        Parameters
        ----------
        other : KDTree
            The other tree to draw points from, can be the same tree as self.
        r : float or one-dimensional array of floats
            The radius to produce a count for. Multiple radii are searched with
            a single tree traversal.
            If the count is non-cumulative (``cumulative=False``), ``r`` defines
            the edges of the bins, and must be non-decreasing.
        p : float, optional
            1<=p<=infinity.
            Which Minkowski p-norm to use.
            Default 2.0.
            A finite large p may cause a ValueError if overflow can occur.
        weights : tuple, array_like, or None, optional
            If None, the pair-counting is unweighted.
            If given as a tuple, weights[0] is the weights of points in
            ``self``, and weights[1] is the weights of points in ``other``;
            either can be None to indicate the points are unweighted.
            If given as an array_like, weights is the weights of points in
            ``self`` and ``other``. For this to make sense, ``self`` and
            ``other`` must be the same tree. If ``self`` and ``other`` are two
            different trees, a ``ValueError`` is raised.
            Default: None

            .. versionadded:: 1.6.0
        cumulative : bool, optional
            Whether the returned counts are cumulative. When cumulative is set
            to ``False`` the algorithm is optimized to work with a large number
            of bins (>10) specified by ``r``. When ``cumulative`` is set to
            True, the algorithm is optimized to work with a small number of
            ``r``. Default: True

            .. versionadded:: 1.6.0

        Returns
        -------
        result : scalar or 1-D array
            The number of pairs. For unweighted counts, the result is integer.
            For weighted counts, the result is float.
            If cumulative is False, ``result[i]`` contains the counts with
            ``(-inf if i == 0 else r[i-1]) < R <= r[i]``

        Notes
        -----
        Pair-counting is the basic operation used to calculate the two point
        correlation functions from a data set composed of position of objects.

        The two point correlation function measures the clustering of objects
        and is widely used in cosmology to quantify the large scale structure
        in our Universe, but it may be useful for data analysis in other fields
        where self-similar assembly of objects also occurs.

        The Landy-Szalay estimator for the two point correlation function of
        ``D`` measures the clustering signal in ``D``. [2]_

        For example, given the position of two sets of objects,

        - objects ``D`` (data) that contain the clustering signal, and

        - objects ``R`` (random) that contain no signal,

        .. math::

             \\xi(r) = \\frac{<D, D> - 2 f <D, R> + f^2<R, R>}{f^2<R, R>},

        where the brackets represent counting pairs between two data sets
        in a finite bin around ``r`` (distance), corresponding to setting
        `cumulative=False`, and ``f = float(len(D)) / float(len(R))`` is the
        ratio between number of objects from data and random.

        The algorithm implemented here is loosely based on the dual-tree
        algorithm described in [1]_. We switch between two different
        pair-cumulation schemes depending on the setting of ``cumulative``.
        The computing time of the method we use for ``cumulative == False``
        does not scale with the total number of bins. The algorithm for
        ``cumulative == True`` scales linearly with the number of bins,
        though it is slightly faster when only 1 or 2 bins are used. [5]_.

        As an extension to the naive pair-counting,
        weighted pair-counting counts the product of weights instead
        of number of pairs.
        Weighted pair-counting is used to estimate marked correlation functions
        ([3]_, section 2.2),
        or to properly calculate the average of data per distance bin
        (e.g. [4]_, section 2.1 on redshift).

        .. [1] Gray and Moore,
               "N-body problems in statistical learning",
               Mining the sky, 2000,
               https://arxiv.org/abs/astro-ph/0012333

        .. [2] Landy and Szalay,
               "Bias and variance of angular correlation functions",
               The Astrophysical Journal, 1993,
               http://adsabs.harvard.edu/abs/1993ApJ...412...64L

        .. [3] Sheth, Connolly and Skibba,
               "Marked correlations in galaxy formation models",
               Arxiv e-print, 2005,
               https://arxiv.org/abs/astro-ph/0511773

        .. [4] Hawkins, et al.,
               "The 2dF Galaxy Redshift Survey: correlation functions,
               peculiar velocities and the matter density of the Universe",
               Monthly Notices of the Royal Astronomical Society, 2002,
               http://adsabs.harvard.edu/abs/2003MNRAS.346...78H

        .. [5] https://github.com/scipy/scipy/pull/5647#issuecomment-168474926

        Examples
        --------
        You can count the number of neighbor pairs between two kd-trees within
        a distance:

        >>> import numpy as np
        >>> from scipy.spatial import KDTree
        >>> rng = np.random.default_rng()
        >>> points1 = rng.random((5, 2))
        >>> points2 = rng.random((5, 2))
        >>> kd_tree1 = KDTree(points1)
        >>> kd_tree2 = KDTree(points2)
        >>> kd_tree1.count_neighbors(kd_tree2, 0.2)
        1

        This number is same as the total pair number calculated by
        `query_ball_tree`:

        >>> indexes = kd_tree1.query_ball_tree(kd_tree2, r=0.2)
        >>> sum([len(i) for i in indexes])
        1

        """
        return super().count_neighbors(other, r, p, weights, cumulative)
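
    # Hedged sketch of the Landy-Szalay-style pair counting described in the
    # notes above (hypothetical `_demo_two_point_estimate` helper; the bin
    # edges and sample sizes are arbitrary, chosen only for exposition, and
    # the <D,D> counts here include the trivial self-pairs at distance 0).
    @staticmethod
    def _demo_two_point_estimate():
        rng = np.random.default_rng()
        D = KDTree(rng.random((100, 2)))   # "data" points
        R = KDTree(rng.random((200, 2)))   # "random" points with no signal
        r = np.linspace(0.05, 0.5, 10)     # non-decreasing bin edges
        f = 100.0 / 200.0                  # f = len(D) / len(R)
        DD = D.count_neighbors(D, r, cumulative=False)
        DR = D.count_neighbors(R, r, cumulative=False)
        RR = R.count_neighbors(R, r, cumulative=False)
        # \xi(r) = (<D,D> - 2 f <D,R> + f^2 <R,R>) / (f^2 <R,R>), per bin.
        return (DD - 2 * f * DR + f**2 * RR) / (f**2 * RR)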

    def sparse_distance_matrix(
            self, other, max_distance, p=2., output_type='dok_matrix'):
        """Compute a sparse distance matrix.

        Computes a distance matrix between two KDTrees, leaving as zero
        any distance greater than max_distance.

        Parameters
        ----------
        other : KDTree
            The other tree to compute distances against.
        max_distance : positive float
            Distances greater than this value are left out of the result.
        p : float, 1<=p<=infinity
            Which Minkowski p-norm to use.
            A finite large p may cause a ValueError if overflow can occur.
        output_type : string, optional
            Which container to use for output data. Options: 'dok_matrix',
            'coo_matrix', 'dict', or 'ndarray'. Default: 'dok_matrix'.

            .. versionadded:: 1.6.0

        Returns
        -------
        result : dok_matrix, coo_matrix, dict or ndarray
            Sparse matrix representing the results in "dictionary of keys"
            format. If a dict is returned the keys are (i,j) tuples of indices.
            If output_type is 'ndarray' a record array with fields 'i', 'j',
            and 'v' is returned.

        Examples
        --------
        You can compute a sparse distance matrix between two kd-trees:

        >>> import numpy as np
        >>> from scipy.spatial import KDTree
        >>> rng = np.random.default_rng()
        >>> points1 = rng.random((5, 2))
        >>> points2 = rng.random((5, 2))
        >>> kd_tree1 = KDTree(points1)
        >>> kd_tree2 = KDTree(points2)
        >>> sdm = kd_tree1.sparse_distance_matrix(kd_tree2, 0.3)
        >>> sdm.toarray()
        array([[0.        , 0.        , 0.12295571, 0.        , 0.        ],
               [0.        , 0.        , 0.        , 0.        , 0.        ],
               [0.28942611, 0.        , 0.        , 0.2333084 , 0.        ],
               [0.        , 0.        , 0.        , 0.        , 0.        ],
               [0.24617575, 0.29571802, 0.26836782, 0.        , 0.        ]])

        You can check distances above the `max_distance` are zeros:

        >>> from scipy.spatial import distance_matrix
        >>> distance_matrix(points1, points2)
        array([[0.56906522, 0.39923701, 0.12295571, 0.8658745 , 0.79428925],
               [0.37327919, 0.7225693 , 0.87665969, 0.32580855, 0.75679479],
               [0.28942611, 0.30088013, 0.6395831 , 0.2333084 , 0.33630734],
               [0.31994999, 0.72658602, 0.71124834, 0.55396483, 0.90785663],
               [0.24617575, 0.29571802, 0.26836782, 0.57714465, 0.6473269 ]])

        """
        return super().sparse_distance_matrix(
            other, max_distance, p, output_type)


def distance_matrix(x, y, p=2, threshold=1000000):
    """Compute the distance matrix.

    Returns the matrix of all pair-wise distances.

    Parameters
    ----------
    x : (M, K) array_like
        Matrix of M vectors in K dimensions.
    y : (N, K) array_like
        Matrix of N vectors in K dimensions.
    p : float, 1 <= p <= infinity
        Which Minkowski p-norm to use.
    threshold : positive int
        If ``M * N * K`` > `threshold`, algorithm uses a Python loop instead
        of large temporary arrays.

    Returns
    -------
    result : (M, N) ndarray
        Matrix containing the distance from every vector in `x` to every vector
        in `y`.

    Examples
    --------
    >>> from scipy.spatial import distance_matrix
    >>> distance_matrix([[0,0],[0,1]], [[1,0],[1,1]])
    array([[ 1.        ,  1.41421356],
           [ 1.41421356,  1.        ]])

    """
    x = np.asarray(x)
    m, k = x.shape
    y = np.asarray(y)
    n, kk = y.shape

    if k != kk:
        raise ValueError(f"x contains {k}-dimensional vectors but y contains "
                         f"{kk}-dimensional vectors")

    if m*n*k <= threshold:
        return minkowski_distance(x[:,np.newaxis,:], y[np.newaxis,:,:], p)
    else:
        result = np.empty((m,n), dtype=float)  # FIXME: figure out the best dtype
        if m < n:
            for i in range(m):
                result[i,:] = minkowski_distance(x[i], y, p)
        else:
            for j in range(n):
                result[:,j] = minkowski_distance(x, y[j], p)
        return result
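

# Hedged sketch (hypothetical `_demo_distance_matrix_threshold` function,
# added only for exposition): both code paths above, the vectorized one and
# the `threshold`-triggered Python loop, produce the same matrix.
def _demo_distance_matrix_threshold():
    rng = np.random.default_rng()
    x = rng.random((8, 3))
    y = rng.random((5, 3))
    vectorized = distance_matrix(x, y)           # 8*5*3 <= default threshold
    looped = distance_matrix(x, y, threshold=1)  # forces the row-wise loop
    assert np.allclose(vectorized, looped)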