Skip to content

Commit 918b07a

Browse files
authored
Deprecating SampleDist in favor of GaussianKDE (#284)
Deprecating SampleDist in favor of GaussianKDE
1 parent 5f40d1b commit 918b07a

File tree

13 files changed

+131
-205
lines changed

13 files changed

+131
-205
lines changed

.gitignore

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
.coverage
2+
__pycache__
3+
*.egg-info
4+
*.pyc
5+
.ipynb_checkpoints
6+
.mypy_cache
7+
.envrc
8+
docs/.build

CHANGELOG.rst

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,19 @@
11
Master Branch
22
=============
33

4+
Version 4.0.1 (2020-10-26)
5+
==========================
6+
7+
Release!
8+
49
ADDED:
510
* Gaussian Mixture Model: `GaussianMixture`.
611
* Tutorial for how to use `scikit-learn` mixture models to fit a model, and
712
`chaospy` to generate quasi-random samples and orthogonal polynomials.
813
CHANGED:
914
* `chaospy.Trunc` updated to take both `lower` and `upper` at the same time.
15+
REMOVED:
16+
* `chaospy.SampleDist` removed in favor of `chaospy.GaussianKDE`.
1017

1118
Version 4.0-beta3 (2020-10-22)
1219
==============================

README.rst

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,8 +141,9 @@ Also a few shout-outs:
141141
| `orthopy`_ | Thanks to `Nico Schlömer`_ for providing the implementation |
142142
| `quadpy`_ | for several of the quadrature integration methods. |
143143
+--------------+--------------------------------------------------------------+
144-
| ``UQRF`` | Thanks to `Florian Künzner`_ for providing the |
145-
| | implementation for `sample distribution`_. |
144+
| ``UQRF`` | Thanks to `Florian Künzner`_ for providing the initial |
145+
| | implementation of kernel density estimation and |
146+
| | quantity-of-interest distribution. |
146147
+--------------+--------------------------------------------------------------+
147148

148149
.. _OpenTURNS: http://openturns.github.io/openturns/latest

chaospy/descriptives/quantity_of_interest.py

Lines changed: 9 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,7 @@
33
from functools import reduce
44
from operator import mul
55
import numpy
6-
7-
from .. import distributions
8-
from ..external import SampleDist
6+
import chaospy
97

108

119
def QoI_Dist(poly, dist, sample=10000, **kws):
@@ -28,7 +26,7 @@ def QoI_Dist(poly, dist, sample=10000, **kws):
2826
Number of samples used in estimation to construct the KDE.
2927
3028
Returns:
31-
(numpy.ndarray):
29+
(Distribution):
3230
The constructed quantity of interest (QoI) distributions, where
3331
``qoi_dists.shape==poly.shape``.
3432
@@ -37,44 +35,16 @@ def QoI_Dist(poly, dist, sample=10000, **kws):
3735
>>> x = chaospy.variable(1)
3836
>>> poly = chaospy.polynomial([x])
3937
>>> qoi_dist = chaospy.QoI_Dist(poly, dist)
40-
>>> values = qoi_dist[0].pdf([-0.75, 0., 0.75])
38+
>>> values = qoi_dist.pdf([-0.75, 0., 0.75])
4139
>>> values.round(8)
42-
array([0.29143037, 0.39931708, 0.29536329])
40+
array([0.29143989, 0.39939823, 0.29531414])
41+
4342
"""
4443
shape = poly.shape
4544
poly = poly.flatten()
4645
dim = len(dist)
4746

48-
#sample from the inumpyut dist
49-
samples = dist.sample(sample, **kws)
50-
51-
qoi_dists = []
52-
for i in range(0, len(poly)):
53-
#sample the polynomial solution
54-
if dim == 1:
55-
dataset = poly[i](samples)
56-
else:
57-
dataset = poly[i](*samples)
58-
59-
lo = dataset.min()
60-
up = dataset.max()
61-
62-
#creates qoi_dist
63-
qoi_dist = SampleDist(dataset, lo, up)
64-
qoi_dists.append(qoi_dist)
65-
66-
# reshape the qoi_dists to match the shape of the input poly
67-
if shape:
68-
def reshape(lst, shape):
69-
if len(shape) == 1:
70-
return lst
71-
n = reduce(mul, shape[1:])
72-
return [reshape(lst[i*n:(i+1)*n], shape[1:]) for i in range(len(lst)//n)]
73-
qoi_dists = reshape(qoi_dists, shape)
74-
else:
75-
qoi_dists = qoi_dists[0]
76-
77-
if not shape:
78-
qoi_dists = qoi_dists.item()
79-
80-
return qoi_dists
47+
#sample from the input dist
48+
samples = numpy.atleast_2d(dist.sample(sample, **kws))
49+
qoi_dist = chaospy.GaussianKDE(poly(*samples))
50+
return qoi_dist

chaospy/distributions/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@
7373
from .operators import *
7474
from .constructor import construct
7575
from .approximation import *
76-
from .kernel import GaussianKDE, GaussianMixture
76+
from .kernel import *
7777

7878
from . import (
7979
baseclass, sampler, approximation,
Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,34 @@
1-
"""Kernel density estimation."""
1+
"""
2+
In some cases a constructed distribution is first and foremost data
3+
driven. In such scenarios it makes sense to make use of
4+
`kernel density estimation`_ (KDE). In ``chaospy`` KDE can be accessed through
5+
the :func:`GaussianKDE` constructor.
6+
7+
Basic usage of the :func:`GaussianKDE` constructor involves just passing the
8+
data as input argument::
9+
10+
>>> data = [3, 4, 5, 5]
11+
>>> distribution = chaospy.GaussianKDE(data)
12+
13+
This distribution can be used as any other distributions::
14+
15+
>>> distribution.cdf([3, 3.5, 4, 4.5, 5]).round(4)
16+
array([0.1393, 0.2542, 0.3889, 0.5512, 0.7359])
17+
>>> distribution.mom(1).round(4)
18+
4.25
19+
>>> distribution.sample(4).round(4)
20+
array([4.7784, 2.8769, 5.8109, 4.2995])
21+
22+
In addition, multivariate distributions are supported::
23+
24+
>>> data = [[1, 2, 2, 3], [5, 5, 4, 3]]
25+
>>> distribution = chaospy.GaussianKDE(data)
26+
>>> distribution.sample(4).round(4)
27+
array([[2.081 , 3.0304, 3.0882, 0.4872],
28+
[3.2878, 2.5473, 2.2699, 5.3412]])
29+
30+
.. _kernel density estimation: \
31+
https://en.wikipedia.org/wiki/Kernel_density_estimation
32+
"""
233
from .gaussian import GaussianKDE
334
from .mixture import GaussianMixture

chaospy/distributions/kernel/gaussian.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,8 +83,12 @@ def _mom(self, k_loc, cache):
8383

8484
def _lower(self, idx, dim, cache):
8585
"""Lower bounds."""
86+
del dim
87+
del cache
8688
return (self.samples[idx]-10*numpy.sqrt(self.h_mat[:, idx, idx]).T).min(-1)
8789

8890
def _upper(self, idx, dim, cache):
8991
"""Upper bounds."""
92+
del dim
93+
del cache
9094
return (self.samples[idx]+10*numpy.sqrt(self.h_mat[:, idx, idx]).T).max(-1)

chaospy/external/__init__.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,3 @@
77
"""
88
from .openturns_ import openturns_dist, OpenTURNSDist
99
from .scipy_stats import ScipyStatsDist
10-
from .samples import sample_dist, SampleDist

chaospy/external/samples.py

Lines changed: 0 additions & 157 deletions
This file was deleted.

docs/tutorials/advanced/gaussian_mixture_model.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@
7777
"`chaospy` supports Gaussian mixture model representation, but does not provide an automatic method for constructing them from data.\n",
7878
"However, this is something for example `scikit-learn` supports.\n",
7979
"It is possible to use `scikit-learn` to fit a model, and use the generated parameters in the `chaospy` implementation.\n",
80-
"For example, let us consider the (Iris example from scikit-learn's documentation)[https://scikit-learn.org/stable/auto_examples/mixture/plot_gmm_covariances.html] and its \"full\" implementation and the 2-dimensional representation:"
80+
"For example, let us consider the [Iris example from scikit-learn's documentation](https://scikit-learn.org/stable/auto_examples/mixture/plot_gmm_covariances.html) (\"full\" implementation in 2-dimensional representation):"
8181
]
8282
},
8383
{

0 commit comments

Comments
 (0)