# test_kdeoth.py -- tests for scipy.stats.gaussian_kde
  1. from __future__ import division, print_function, absolute_import
  2. from scipy import stats
  3. import numpy as np
  4. from numpy.testing import (assert_almost_equal, assert_,
  5. assert_array_almost_equal, assert_array_almost_equal_nulp, assert_allclose)
  6. import pytest
  7. from pytest import raises as assert_raises
  8. def test_kde_1d():
  9. #some basic tests comparing to normal distribution
  10. np.random.seed(8765678)
  11. n_basesample = 500
  12. xn = np.random.randn(n_basesample)
  13. xnmean = xn.mean()
  14. xnstd = xn.std(ddof=1)
  15. # get kde for original sample
  16. gkde = stats.gaussian_kde(xn)
  17. # evaluate the density function for the kde for some points
  18. xs = np.linspace(-7,7,501)
  19. kdepdf = gkde.evaluate(xs)
  20. normpdf = stats.norm.pdf(xs, loc=xnmean, scale=xnstd)
  21. intervall = xs[1] - xs[0]
  22. assert_(np.sum((kdepdf - normpdf)**2)*intervall < 0.01)
  23. prob1 = gkde.integrate_box_1d(xnmean, np.inf)
  24. prob2 = gkde.integrate_box_1d(-np.inf, xnmean)
  25. assert_almost_equal(prob1, 0.5, decimal=1)
  26. assert_almost_equal(prob2, 0.5, decimal=1)
  27. assert_almost_equal(gkde.integrate_box(xnmean, np.inf), prob1, decimal=13)
  28. assert_almost_equal(gkde.integrate_box(-np.inf, xnmean), prob2, decimal=13)
  29. assert_almost_equal(gkde.integrate_kde(gkde),
  30. (kdepdf**2).sum()*intervall, decimal=2)
  31. assert_almost_equal(gkde.integrate_gaussian(xnmean, xnstd**2),
  32. (kdepdf*normpdf).sum()*intervall, decimal=2)
  33. def test_kde_1d_weighted():
  34. #some basic tests comparing to normal distribution
  35. np.random.seed(8765678)
  36. n_basesample = 500
  37. xn = np.random.randn(n_basesample)
  38. wn = np.random.rand(n_basesample)
  39. xnmean = np.average(xn, weights=wn)
  40. xnstd = np.sqrt(np.average((xn-xnmean)**2, weights=wn))
  41. # get kde for original sample
  42. gkde = stats.gaussian_kde(xn, weights=wn)
  43. # evaluate the density function for the kde for some points
  44. xs = np.linspace(-7,7,501)
  45. kdepdf = gkde.evaluate(xs)
  46. normpdf = stats.norm.pdf(xs, loc=xnmean, scale=xnstd)
  47. intervall = xs[1] - xs[0]
  48. assert_(np.sum((kdepdf - normpdf)**2)*intervall < 0.01)
  49. prob1 = gkde.integrate_box_1d(xnmean, np.inf)
  50. prob2 = gkde.integrate_box_1d(-np.inf, xnmean)
  51. assert_almost_equal(prob1, 0.5, decimal=1)
  52. assert_almost_equal(prob2, 0.5, decimal=1)
  53. assert_almost_equal(gkde.integrate_box(xnmean, np.inf), prob1, decimal=13)
  54. assert_almost_equal(gkde.integrate_box(-np.inf, xnmean), prob2, decimal=13)
  55. assert_almost_equal(gkde.integrate_kde(gkde),
  56. (kdepdf**2).sum()*intervall, decimal=2)
  57. assert_almost_equal(gkde.integrate_gaussian(xnmean, xnstd**2),
  58. (kdepdf*normpdf).sum()*intervall, decimal=2)
  59. @pytest.mark.slow
  60. def test_kde_2d():
  61. #some basic tests comparing to normal distribution
  62. np.random.seed(8765678)
  63. n_basesample = 500
  64. mean = np.array([1.0, 3.0])
  65. covariance = np.array([[1.0, 2.0], [2.0, 6.0]])
  66. # Need transpose (shape (2, 500)) for kde
  67. xn = np.random.multivariate_normal(mean, covariance, size=n_basesample).T
  68. # get kde for original sample
  69. gkde = stats.gaussian_kde(xn)
  70. # evaluate the density function for the kde for some points
  71. x, y = np.mgrid[-7:7:500j, -7:7:500j]
  72. grid_coords = np.vstack([x.ravel(), y.ravel()])
  73. kdepdf = gkde.evaluate(grid_coords)
  74. kdepdf = kdepdf.reshape(500, 500)
  75. normpdf = stats.multivariate_normal.pdf(np.dstack([x, y]), mean=mean, cov=covariance)
  76. intervall = y.ravel()[1] - y.ravel()[0]
  77. assert_(np.sum((kdepdf - normpdf)**2) * (intervall**2) < 0.01)
  78. small = -1e100
  79. large = 1e100
  80. prob1 = gkde.integrate_box([small, mean[1]], [large, large])
  81. prob2 = gkde.integrate_box([small, small], [large, mean[1]])
  82. assert_almost_equal(prob1, 0.5, decimal=1)
  83. assert_almost_equal(prob2, 0.5, decimal=1)
  84. assert_almost_equal(gkde.integrate_kde(gkde),
  85. (kdepdf**2).sum()*(intervall**2), decimal=2)
  86. assert_almost_equal(gkde.integrate_gaussian(mean, covariance),
  87. (kdepdf*normpdf).sum()*(intervall**2), decimal=2)
  88. @pytest.mark.slow
  89. def test_kde_2d_weighted():
  90. #some basic tests comparing to normal distribution
  91. np.random.seed(8765678)
  92. n_basesample = 500
  93. mean = np.array([1.0, 3.0])
  94. covariance = np.array([[1.0, 2.0], [2.0, 6.0]])
  95. # Need transpose (shape (2, 500)) for kde
  96. xn = np.random.multivariate_normal(mean, covariance, size=n_basesample).T
  97. wn = np.random.rand(n_basesample)
  98. # get kde for original sample
  99. gkde = stats.gaussian_kde(xn, weights=wn)
  100. # evaluate the density function for the kde for some points
  101. x, y = np.mgrid[-7:7:500j, -7:7:500j]
  102. grid_coords = np.vstack([x.ravel(), y.ravel()])
  103. kdepdf = gkde.evaluate(grid_coords)
  104. kdepdf = kdepdf.reshape(500, 500)
  105. normpdf = stats.multivariate_normal.pdf(np.dstack([x, y]), mean=mean, cov=covariance)
  106. intervall = y.ravel()[1] - y.ravel()[0]
  107. assert_(np.sum((kdepdf - normpdf)**2) * (intervall**2) < 0.01)
  108. small = -1e100
  109. large = 1e100
  110. prob1 = gkde.integrate_box([small, mean[1]], [large, large])
  111. prob2 = gkde.integrate_box([small, small], [large, mean[1]])
  112. assert_almost_equal(prob1, 0.5, decimal=1)
  113. assert_almost_equal(prob2, 0.5, decimal=1)
  114. assert_almost_equal(gkde.integrate_kde(gkde),
  115. (kdepdf**2).sum()*(intervall**2), decimal=2)
  116. assert_almost_equal(gkde.integrate_gaussian(mean, covariance),
  117. (kdepdf*normpdf).sum()*(intervall**2), decimal=2)
  118. def test_kde_bandwidth_method():
  119. def scotts_factor(kde_obj):
  120. """Same as default, just check that it works."""
  121. return np.power(kde_obj.n, -1./(kde_obj.d+4))
  122. np.random.seed(8765678)
  123. n_basesample = 50
  124. xn = np.random.randn(n_basesample)
  125. # Default
  126. gkde = stats.gaussian_kde(xn)
  127. # Supply a callable
  128. gkde2 = stats.gaussian_kde(xn, bw_method=scotts_factor)
  129. # Supply a scalar
  130. gkde3 = stats.gaussian_kde(xn, bw_method=gkde.factor)
  131. xs = np.linspace(-7,7,51)
  132. kdepdf = gkde.evaluate(xs)
  133. kdepdf2 = gkde2.evaluate(xs)
  134. assert_almost_equal(kdepdf, kdepdf2)
  135. kdepdf3 = gkde3.evaluate(xs)
  136. assert_almost_equal(kdepdf, kdepdf3)
  137. assert_raises(ValueError, stats.gaussian_kde, xn, bw_method='wrongstring')
  138. def test_kde_bandwidth_method_weighted():
  139. def scotts_factor(kde_obj):
  140. """Same as default, just check that it works."""
  141. return np.power(kde_obj.neff, -1./(kde_obj.d+4))
  142. np.random.seed(8765678)
  143. n_basesample = 50
  144. xn = np.random.randn(n_basesample)
  145. # Default
  146. gkde = stats.gaussian_kde(xn)
  147. # Supply a callable
  148. gkde2 = stats.gaussian_kde(xn, bw_method=scotts_factor)
  149. # Supply a scalar
  150. gkde3 = stats.gaussian_kde(xn, bw_method=gkde.factor)
  151. xs = np.linspace(-7,7,51)
  152. kdepdf = gkde.evaluate(xs)
  153. kdepdf2 = gkde2.evaluate(xs)
  154. assert_almost_equal(kdepdf, kdepdf2)
  155. kdepdf3 = gkde3.evaluate(xs)
  156. assert_almost_equal(kdepdf, kdepdf3)
  157. assert_raises(ValueError, stats.gaussian_kde, xn, bw_method='wrongstring')
# Subclasses that should stay working (extracted from various sources).
# Unfortunately the earlier design of gaussian_kde made it necessary for users
# to create these kinds of subclasses, or call _compute_covariance() directly.
  161. class _kde_subclass1(stats.gaussian_kde):
  162. def __init__(self, dataset):
  163. self.dataset = np.atleast_2d(dataset)
  164. self.d, self.n = self.dataset.shape
  165. self.covariance_factor = self.scotts_factor
  166. self._compute_covariance()
  167. class _kde_subclass2(stats.gaussian_kde):
  168. def __init__(self, dataset):
  169. self.covariance_factor = self.scotts_factor
  170. super(_kde_subclass2, self).__init__(dataset)
  171. class _kde_subclass3(stats.gaussian_kde):
  172. def __init__(self, dataset, covariance):
  173. self.covariance = covariance
  174. stats.gaussian_kde.__init__(self, dataset)
  175. def _compute_covariance(self):
  176. self.inv_cov = np.linalg.inv(self.covariance)
  177. self._norm_factor = np.sqrt(np.linalg.det(2*np.pi * self.covariance)) \
  178. * self.n
  179. class _kde_subclass4(stats.gaussian_kde):
  180. def covariance_factor(self):
  181. return 0.5 * self.silverman_factor()
  182. def test_gaussian_kde_subclassing():
  183. x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
  184. xs = np.linspace(-10, 10, num=50)
  185. # gaussian_kde itself
  186. kde = stats.gaussian_kde(x1)
  187. ys = kde(xs)
  188. # subclass 1
  189. kde1 = _kde_subclass1(x1)
  190. y1 = kde1(xs)
  191. assert_array_almost_equal_nulp(ys, y1, nulp=10)
  192. # subclass 2
  193. kde2 = _kde_subclass2(x1)
  194. y2 = kde2(xs)
  195. assert_array_almost_equal_nulp(ys, y2, nulp=10)
  196. # subclass 3
  197. kde3 = _kde_subclass3(x1, kde.covariance)
  198. y3 = kde3(xs)
  199. assert_array_almost_equal_nulp(ys, y3, nulp=10)
  200. # subclass 4
  201. kde4 = _kde_subclass4(x1)
  202. y4 = kde4(x1)
  203. y_expected = [0.06292987, 0.06346938, 0.05860291, 0.08657652, 0.07904017]
  204. assert_array_almost_equal(y_expected, y4, decimal=6)
  205. # Not a subclass, but check for use of _compute_covariance()
  206. kde5 = kde
  207. kde5.covariance_factor = lambda: kde.factor
  208. kde5._compute_covariance()
  209. y5 = kde5(xs)
  210. assert_array_almost_equal_nulp(ys, y5, nulp=10)
  211. def test_gaussian_kde_covariance_caching():
  212. x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
  213. xs = np.linspace(-10, 10, num=5)
  214. # These expected values are from scipy 0.10, before some changes to
  215. # gaussian_kde. They were not compared with any external reference.
  216. y_expected = [0.02463386, 0.04689208, 0.05395444, 0.05337754, 0.01664475]
  217. # Set the bandwidth, then reset it to the default.
  218. kde = stats.gaussian_kde(x1)
  219. kde.set_bandwidth(bw_method=0.5)
  220. kde.set_bandwidth(bw_method='scott')
  221. y2 = kde(xs)
  222. assert_array_almost_equal(y_expected, y2, decimal=7)
  223. def test_gaussian_kde_monkeypatch():
  224. """Ugly, but people may rely on this. See scipy pull request 123,
  225. specifically the linked ML thread "Width of the Gaussian in stats.kde".
  226. If it is necessary to break this later on, that is to be discussed on ML.
  227. """
  228. x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
  229. xs = np.linspace(-10, 10, num=50)
  230. # The old monkeypatched version to get at Silverman's Rule.
  231. kde = stats.gaussian_kde(x1)
  232. kde.covariance_factor = kde.silverman_factor
  233. kde._compute_covariance()
  234. y1 = kde(xs)
  235. # The new saner version.
  236. kde2 = stats.gaussian_kde(x1, bw_method='silverman')
  237. y2 = kde2(xs)
  238. assert_array_almost_equal_nulp(y1, y2, nulp=10)
  239. def test_kde_integer_input():
  240. """Regression test for #1181."""
  241. x1 = np.arange(5)
  242. kde = stats.gaussian_kde(x1)
  243. y_expected = [0.13480721, 0.18222869, 0.19514935, 0.18222869, 0.13480721]
  244. assert_array_almost_equal(kde(x1), y_expected, decimal=6)
  245. def test_pdf_logpdf():
  246. np.random.seed(1)
  247. n_basesample = 50
  248. xn = np.random.randn(n_basesample)
  249. # Default
  250. gkde = stats.gaussian_kde(xn)
  251. xs = np.linspace(-15, 12, 25)
  252. pdf = gkde.evaluate(xs)
  253. pdf2 = gkde.pdf(xs)
  254. assert_almost_equal(pdf, pdf2, decimal=12)
  255. logpdf = np.log(pdf)
  256. logpdf2 = gkde.logpdf(xs)
  257. assert_almost_equal(logpdf, logpdf2, decimal=12)
  258. # There are more points than data
  259. gkde = stats.gaussian_kde(xs)
  260. pdf = np.log(gkde.evaluate(xn))
  261. pdf2 = gkde.logpdf(xn)
  262. assert_almost_equal(pdf, pdf2, decimal=12)
  263. def test_pdf_logpdf_weighted():
  264. np.random.seed(1)
  265. n_basesample = 50
  266. xn = np.random.randn(n_basesample)
  267. wn = np.random.rand(n_basesample)
  268. # Default
  269. gkde = stats.gaussian_kde(xn, weights=wn)
  270. xs = np.linspace(-15, 12, 25)
  271. pdf = gkde.evaluate(xs)
  272. pdf2 = gkde.pdf(xs)
  273. assert_almost_equal(pdf, pdf2, decimal=12)
  274. logpdf = np.log(pdf)
  275. logpdf2 = gkde.logpdf(xs)
  276. assert_almost_equal(logpdf, logpdf2, decimal=12)
  277. # There are more points than data
  278. gkde = stats.gaussian_kde(xs)
  279. pdf = np.log(gkde.evaluate(xn))
  280. pdf2 = gkde.logpdf(xn)
  281. assert_almost_equal(pdf, pdf2, decimal=12)
  282. def test_weights_intact():
  283. # regression test for gh-9709: weights are not modified
  284. np.random.seed(12345)
  285. vals = np.random.lognormal(size=100)
  286. weights = np.random.choice([1.0, 10.0, 100], size=vals.size)
  287. orig_weights = weights.copy()
  288. stats.gaussian_kde(np.log10(vals), weights=weights)
  289. assert_allclose(weights, orig_weights, atol=1e-14, rtol=1e-14)
  290. def test_weights_integer():
  291. # integer weights are OK, cf gh-9709 (comment)
  292. np.random.seed(12345)
  293. values = [0.2, 13.5, 21.0, 75.0, 99.0]
  294. weights = [1, 2, 4, 8, 16] # a list of integers
  295. pdf_i = stats.gaussian_kde(values, weights=weights)
  296. pdf_f = stats.gaussian_kde(values, weights=np.float64(weights))
  297. xn = [0.3, 11, 88]
  298. assert_allclose(pdf_i.evaluate(xn),
  299. pdf_f.evaluate(xn), atol=1e-14, rtol=1e-14)