1 change: 1 addition & 0 deletions docs/conf.py
@@ -31,6 +31,7 @@
     "sphinx.ext.mathjax",
     "sphinx.ext.intersphinx",
     "IPython.sphinxext.ipython_console_highlighting",
+    "matplotlib.sphinxext.plot_directive",
 ]
 
 nb_execution_mode = "off"
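For context: the newly enabled `matplotlib.sphinxext.plot_directive` extension renders `.. plot::` blocks in reST pages and docstrings into figures in the built docs. A minimal sketch of the kind of docstring this enables (the function below is hypothetical, not part of this diff):

```python
def toy_plot_demo():
    """Hypothetical docstring showing the matplotlib plot directive.

    .. plot::

        import numpy as np
        import matplotlib.pyplot as plt

        t = np.linspace(0, 2 * np.pi, 100)
        plt.plot(t, np.sin(t))  # rendered as an inline figure by Sphinx
    """
```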
1,640 changes: 881 additions & 759 deletions examples/1_feature_overview/example.ipynb

Large diffs are not rendered by default.

86 changes: 3 additions & 83 deletions examples/1_feature_overview/example.py
@@ -114,7 +114,7 @@ def ignore_specific_warnings():
 x_dot_test_predicted = model.predict(x_test)
 
 # Compute derivatives with a finite difference method, for comparison
-x_dot_test_computed = model.differentiate(x_test, t=dt)
+x_dot_test_computed = model.differentiation_method(x_test, t=dt)
 
 fig, axs = plt.subplots(x_test.shape[1], 1, sharex=True, figsize=(7, 9))
 for i in range(x_test.shape[1]):
@@ -149,30 +149,6 @@ def ignore_specific_warnings():
 
 fig.show()
 
-# %% [markdown]
-# ## Discrete time dynamical system (map)
-
-# %%
-
-
-def f(x):
-    return 3.6 * x * (1 - x)
-
-
-if __name__ != "testing":
-    n_steps = 1000
-else:
-    n_steps = 10
-eps = 0.001  # Noise level
-x_train_map = np.zeros((n_steps))
-x_train_map[0] = 0.5
-for i in range(1, n_steps):
-    x_train_map[i] = f(x_train_map[i - 1]) + eps * np.random.randn()
-model = ps.SINDy(discrete_time=True)
-model.fit(x_train_map, t=1)
-
-model.print()
-
 # %% [markdown]
 # ## Optimization options
 # In this section we provide examples of different parameters accepted by the built-in sparse regression optimizers `STLSQ`, `SR3`, `ConstrainedSR3`, `MIOSR`, `SSR`, and `FROLS`. The `Trapping` optimizer is not straightforward to use; please check out Example 8 for some examples. We also show how to use a scikit-learn sparse regressor with PySINDy.
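For orientation, a minimal sketch of how an optimizer is swapped in via the `optimizer` argument (Lorenz setup condensed from earlier in this example file; the threshold value is illustrative, not a recommendation):

```python
import numpy as np
import pysindy as ps
from scipy.integrate import solve_ivp


# Lorenz training data, condensed from the setup earlier in example.py
def lorenz(t, x, sigma=10.0, beta=8.0 / 3.0, rho=28.0):
    return [
        sigma * (x[1] - x[0]),
        x[0] * (rho - x[2]) - x[1],
        x[0] * x[1] - beta * x[2],
    ]


dt = 0.002
t_train = np.arange(0.0, 10.0, dt)
x_train = solve_ivp(
    lorenz, (t_train[0], t_train[-1]), [-8.0, 8.0, 27.0], t_eval=t_train, rtol=1e-8
).y.T

# Any of the optimizers named above can be dropped in here
model = ps.SINDy(optimizer=ps.STLSQ(threshold=0.1))
model.fit(x_train, t=dt)
model.print()
```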
@@ -782,7 +758,7 @@ def f(x):
 x_dot_test_predicted = model.predict(x_test)
 
 # Compute derivatives with a finite difference method, for comparison
-x_dot_test_computed = model.differentiate(x_test, t=dt)
+x_dot_test_computed = model.differentiation_method(x_test, t=dt)
 
 fig, axs = plt.subplots(x_test.shape[1], 1, sharex=True, figsize=(7, 9))
 for i in range(x_test.shape[1]):
@@ -905,7 +881,7 @@ def u_fun(t):
 x_dot_test_predicted = model.predict(x_test, u=u_test)
 
 # Compute derivatives with a finite difference method, for comparison
-x_dot_test_computed = model.differentiate(x_test, t=dt)
+x_dot_test_computed = model.differentiation_method(x_test, t=dt)
 
 fig, axs = plt.subplots(x_test.shape[1], 1, sharex=True, figsize=(7, 9))
 for i in range(x_test.shape[1]):
@@ -1053,62 +1029,6 @@ def u_fun(t):
 model.fit(x_train, t=t)
 model.print()
 
-# %% [markdown]
-# ## SINDy with control parameters (SINDyCP)
-# The control input in PySINDy can be used to discover equations parameterized by control parameters in conjunction with the `ParameterizedLibrary`. We demonstrate on the logistic map
-# $$ x_{n+1} = r x_n(1-x_n)$$
-# which depends on a single parameter $r$.
-
-# %%
-# Iterate the map and drop the initial 500-step transient. The behavior is chaotic for r > 3.6.
-if __name__ != "testing":
-    num = 1000
-    N = 1000
-    N_drop = 500
-else:
-    num = 20
-    N = 20
-    N_drop = 10
-r0 = 3.5
-rs = r0 + np.arange(num) / num * (4 - r0)
-xss = []
-for r in rs:
-    xs = []
-    x = 0.5
-    for n in range(N + N_drop):
-        if n >= N_drop:
-            xs = xs + [x]
-        x = r * x * (1 - x)
-    xss = xss + [xs]
-
-plt.figure(figsize=(4, 4), dpi=100)
-for ind in range(num):
-    plt.plot(np.ones(N) * rs[ind], xss[ind], ",", alpha=0.1, c="black", rasterized=True)
-plt.xlabel("$r$")
-plt.ylabel("$x_n$")
-plt.show()
-
-# %% [markdown]
-# We construct a `parameter_library` and a `feature_library` to act on the input data `x` and the control input `u` independently. The `ParameterizedLibrary` is composed of products of the two libraries' output features. This enables fine control over the library features, which is especially useful in the case of PDEs like those arising in pattern-formation modeling. See this [notebook](https://github.com/dynamicslab/pysindy/blob/master/examples/17_parameterized_pattern_formation/17_parameterized_pattern_formation.ipynb) for examples.
-
-# %%
-# Use four parameter values as training data
-rs_train = [3.6, 3.7, 3.8, 3.9]
-xs_train = [np.array(xss[np.where(np.array(rs) == r)[0][0]]) for r in rs_train]
-
-feature_lib = ps.PolynomialLibrary(degree=3, include_bias=True)
-parameter_lib = ps.PolynomialLibrary(degree=1, include_bias=True)
-lib = ps.ParameterizedLibrary(
-    feature_library=feature_lib,
-    parameter_library=parameter_lib,
-    num_features=1,
-    num_parameters=1,
-)
-opt = ps.STLSQ(threshold=1e-1, normalize_columns=False)
-model = ps.SINDy(feature_library=lib, optimizer=opt, discrete_time=True)
-model.fit(xs_train, u=rs_train, t=1, feature_names=["x", "r"])
-model.print()
-
 # %% [markdown]
 # ## PDEFIND Feature Overview
 # PySINDy now supports SINDy for PDE identification (PDE-FIND; Rudy, Samuel H., Steven L. Brunton, Joshua L. Proctor, and J. Nathan Kutz. "Data-driven discovery of partial differential equations." Science Advances 3, no. 4 (2017): e1602614). We illustrate a basic example on Burgers' equation:
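Since the PDE-FIND hunk is truncated above, here is a rough sketch of the usual `PDELibrary` workflow for orientation (placeholder data; the real example fits Burgers' equation data loaded from a .mat file, and the exact calls may differ in this PR):

```python
import numpy as np
import pysindy as ps

# Placeholder field: the real example uses Burgers' equation data.
# For PDEs, the input must be shaped (n_x, n_t, n_features).
x = np.linspace(-8, 8, 128)
t = np.linspace(0, 10, 100)
X, T = np.meshgrid(x, t, indexing="ij")
u = np.exp(-((X - T / 2) ** 2))[:, :, np.newaxis]

# Candidate library: u, u^2, and spatial derivatives up to second order
pde_lib = ps.PDELibrary(
    library_functions=[lambda u: u, lambda u: u * u],
    function_names=[lambda u: u, lambda u: u + u],
    derivative_order=2,
    spatial_grid=x,
)
model = ps.SINDy(feature_library=pde_lib, optimizer=ps.STLSQ(threshold=0.5))
model.fit(u, t=t)
model.print()
```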
1 change: 1 addition & 0 deletions pyproject.toml
@@ -60,6 +60,7 @@ docs = [
     "sphinx==8.2.3",
     "pyyaml",
     "sphinxcontrib-apidoc",
+    "matplotlib"
 ]
 miosr = [
     "gurobipy>=9.5.1,!=10.0.0"
3 changes: 3 additions & 0 deletions pysindy/__init__.py
@@ -11,7 +11,9 @@
 from . import optimizers
 from . import deeptime
 from . import utils
 
 from ._core import SINDy
+from ._core import DiscreteSINDy
 from ._core import AxesArray
 from .differentiation import BaseDifferentiation
 from .differentiation import FiniteDifference
@@ -65,6 +67,7 @@
 
 __all__ = [
     "SINDy",
+    "DiscreteSINDy",
     "differentiation",
     "feature_library",
     "optimizers",
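`DiscreteSINDy` itself is not shown in this diff, so its signature is unknown here. A purely hypothetical sketch, assuming it takes over the removed `ps.SINDy(discrete_time=True)` workflow from example.py:

```python
import numpy as np
import pysindy as ps

# Hypothetical usage: DiscreteSINDy's actual API is not shown in this
# diff. This assumes it mirrors the removed discrete-time example,
# which fit the logistic map x_{n+1} = 3.6 x_n (1 - x_n).
x = np.zeros(1000)
x[0] = 0.5
for i in range(1, 1000):
    x[i] = 3.6 * x[i - 1] * (1 - x[i - 1])

model = ps.DiscreteSINDy()  # assumed constructor
model.fit(x)  # assumed to accept a single trajectory of map iterates
model.print()
```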