
Commit f0c27b8

feat: add weighted_minimal initializer
2 parents baab41e + 36ead39 commit f0c27b8

4 files changed (+124, -6 lines)

docs/src/api/inits.md

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
 ```@docs
 scaled_rand
 weighted_init
+weighted_minimal
 informed_init
 minimal_init
 chebyshev_mapping

src/ReservoirComputing.jl

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ export NLADefault, NLAT1, NLAT2, NLAT3, PartialSquare, ExtendedSquare
 export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates
 export StandardRidge
 export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping,
-    logistic_mapping, modified_lm
+    logistic_mapping, modified_lm, weighted_minimal
 export rand_sparse, delay_line, delay_line_backward, cycle_jumps,
     simple_cycle, pseudo_svd, chaotic_init, low_connectivity, double_cycle,
     selfloop_cycle, selfloop_feedback_cycle, selfloop_delayline_backward,

src/esn/esn_inits.jl

Lines changed: 121 additions & 5 deletions
@@ -108,6 +108,120 @@ function weighted_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
     return return_init_as(Val(return_sparse), layer_matrix)
 end
 
+"""
+    weighted_minimal([rng], [T], dims...;
+        weight=0.1, return_sparse=false,
+        sampling_type=:no_sample)
+
+Create and return a minimal weighted input layer matrix.
+This initializer generates a weighted input matrix with equal, deterministic
+elements in the same construction as [`weighted_init`](@ref),
+inspired by [^lu2017].
+
+Please note that this initializer computes its own reservoir size! If
+the computed reservoir size is different from the provided one, it will raise a
+warning.
+
+# Arguments
+
+  - `rng`: Random number generator. Default is `Utils.default_rng()`
+    from WeightInitializers.
+  - `T`: Type of the elements in the reservoir matrix.
+    Default is `Float32`.
+  - `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.
+
+# Keyword arguments
+
+  - `weight`: The value for all the weights in the input matrix.
+    Defaults to `0.1`.
+  - `return_sparse`: flag for returning a `sparse` matrix.
+    Default is `false`.
+  - `sampling_type`: Sampling that decides the distribution of negative signs
+    among the `weight` entries. If set to `:no_sample` the sign is unchanged.
+    If set to `:bernoulli_sample!` then each `weight` is positive with a
+    probability set by `positive_prob`. If set to `:irrational_sample!` the
+    `weight` is negative if the corresponding decimal digit of the chosen
+    irrational number is odd. Default is `:no_sample`.
+  - `positive_prob`: probability of the `weight` being positive when `sampling_type` is
+    set to `:bernoulli_sample!`. Default is 0.5.
+  - `irrational`: Irrational number whose decimals decide the sign of `weight`.
+    Default is `pi`.
+  - `start`: Position after the decimal point at which the digit counting for the
+    `irrational` sign sampling starts. Default is 1.
+
+# Examples
+
+```jldoctest
+julia> res_input = weighted_minimal(8, 3)
+┌ Warning: Reservoir size has changed!
+│
+│ Computed reservoir size (6) does not equal the provided reservoir size (8).
+│
+│ Using computed value (6). Make sure to modify the reservoir initializer accordingly.
+│
+└ @ ReservoirComputing ~/.julia/dev/ReservoirComputing/src/esn/esn_inits.jl:159
+6×3 Matrix{Float32}:
+ 0.1  0.0  0.0
+ 0.1  0.0  0.0
+ 0.0  0.1  0.0
+ 0.0  0.1  0.0
+ 0.0  0.0  0.1
+ 0.0  0.0  0.1
+
+julia> res_input = weighted_minimal(9, 3; weight=0.99)
+9×3 Matrix{Float32}:
+ 0.99  0.0   0.0
+ 0.99  0.0   0.0
+ 0.99  0.0   0.0
+ 0.0   0.99  0.0
+ 0.0   0.99  0.0
+ 0.0   0.99  0.0
+ 0.0   0.0   0.99
+ 0.0   0.0   0.99
+ 0.0   0.0   0.99
+
+julia> res_input = weighted_minimal(9, 3; sampling_type=:bernoulli_sample!)
+9×3 Matrix{Float32}:
+  0.1  -0.0  -0.0
+ -0.1  -0.0  -0.0
+  0.1  -0.0   0.0
+ -0.0   0.1   0.0
+  0.0   0.1  -0.0
+  0.0   0.1   0.0
+ -0.0  -0.0  -0.1
+ -0.0  -0.0   0.1
+  0.0  -0.0   0.1
+```
+
+[^lu2017]: Lu, Zhixin, et al.
+    "Reservoir observers: Model-free inference of unmeasured variables in
+    chaotic systems."
+    Chaos: An Interdisciplinary Journal of Nonlinear Science 27.4 (2017): 041102.
+"""
+function weighted_minimal(rng::AbstractRNG, ::Type{T}, dims::Integer...;
+        weight::Number=T(0.1), return_sparse::Bool=false,
+        sampling_type=:no_sample, kwargs...) where {T <: Number}
+    throw_sparse_error(return_sparse)
+    approx_res_size, in_size = dims
+    res_size = Int(floor(approx_res_size / in_size) * in_size)
+    if res_size != approx_res_size
+        @warn """Reservoir size has changed!\n
+          Computed reservoir size ($res_size) does not equal the \
+          provided reservoir size ($approx_res_size). \n
+          Using computed value ($res_size). Make sure to modify the \
+          reservoir initializer accordingly. \n
+          """
+    end
+    layer_matrix = DeviceAgnostic.zeros(rng, T, res_size, in_size)
+    q = floor(Int, res_size / in_size)
+
+    for idx in 1:in_size
+        layer_matrix[((idx - 1) * q + 1):((idx) * q), idx] = T(weight) .* ones(T, q)
+    end
+    f_sample = getfield(@__MODULE__, sampling_type)
+    f_sample(rng, layer_matrix; kwargs...)
+    return return_init_as(Val(return_sparse), layer_matrix)
+end
+
 """
     informed_init([rng], [T], dims...;
         scaling=0.1, model_in_size, gamma=0.5)
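
The `sampling_type` keyword above only looks up a sign-sampling helper via `getfield(@__MODULE__, sampling_type)`; the helpers themselves (`bernoulli_sample!`, `irrational_sample!`) are defined elsewhere and are not part of this diff. As a rough illustration of the Bernoulli variant described in the docstring, here is a minimal, self-contained sketch; `bernoulli_signs!` is a hypothetical stand-in, not the package's actual `bernoulli_sample!`, and may differ from it in naming and keyword handling:

```julia
using Random

# Hypothetical stand-in for the sign sampler: flips each entry's sign with
# probability 1 - positive_prob, leaving magnitudes untouched. Zeros become
# signed zeros, which matches the -0.0 entries in the doctest output above.
function bernoulli_signs!(rng::AbstractRNG, layer_matrix::AbstractMatrix;
        positive_prob::Real=0.5)
    for idx in eachindex(layer_matrix)
        if rand(rng) > positive_prob
            layer_matrix[idx] = -layer_matrix[idx]
        end
    end
    return layer_matrix
end

# Deterministic block layout like weighted_minimal(6, 3): q = 2 rows per input.
W = zeros(Float32, 6, 3)
for idx in 1:3
    W[((idx - 1) * 2 + 1):(idx * 2), idx] .= 0.1f0
end
bernoulli_signs!(Xoshiro(42), W)   # only the signs become random
```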
@@ -174,7 +288,8 @@ end
 
 """
     minimal_init([rng], [T], dims...;
-        sampling_type=:bernoulli, weight=0.1, irrational=pi, start=1, p=0.5)
+        sampling_type=:bernoulli_sample!, weight=0.1, irrational=pi,
+        start=1, p=0.5)
 
 Create a layer matrix with uniform weights determined by `weight` [^rodan2010].
 The sign difference is randomly determined by the `sampling` chosen.
@@ -191,7 +306,7 @@ The sign difference is randomly determined by the `sampling` chosen.
 
   - `weight`: The weight used to fill the layer matrix. Default is 0.1.
   - `sampling_type`: The sampling parameters used to generate the input matrix.
-    Default is `:bernoulli`.
+    Default is `:bernoulli_sample!`.
   - `irrational`: Irrational number chosen for sampling if `sampling_type=:irrational`.
    Default is `pi`.
   - `start`: Starting value for the irrational sample. Default is 1
@@ -1737,9 +1852,10 @@ end
 #fallbacks for initializers #eventually to remove once migrated to WeightInitializers.jl
 for initializer in (:rand_sparse, :delay_line, :delay_line_backward, :cycle_jumps,
     :simple_cycle, :pseudo_svd, :chaotic_init, :scaled_rand, :weighted_init,
-    :informed_init, :minimal_init, :chebyshev_mapping, :logistic_mapping, :modified_lm,
-    :low_connectivity, :double_cycle, :selfloop_cycle, :selfloop_feedback_cycle,
-    :selfloop_delayline_backward, :selfloop_forward_connection, :forward_connection)
+    :weighted_minimal, :informed_init, :minimal_init, :chebyshev_mapping,
+    :logistic_mapping, :modified_lm, :low_connectivity, :double_cycle, :selfloop_cycle,
+    :selfloop_feedback_cycle, :selfloop_delayline_backward, :selfloop_forward_connection,
+    :forward_connection)
     @eval begin
         function ($initializer)(dims::Integer...; kwargs...)
             return $initializer(Utils.default_rng(), Float32, dims...; kwargs...)
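
Because `weighted_minimal` rounds the reservoir size down to the nearest multiple of `in_size`, it can be convenient to mirror that rounding up front so the warning never fires and the reservoir matrix can be sized to match. A short usage sketch, assuming this commit is checked out so `weighted_minimal` is exported; variable names such as `requested_res_size` are illustrative:

```julia
using ReservoirComputing

in_size = 3
requested_res_size = 8

# Same rounding as inside weighted_minimal: the largest multiple of in_size
# that does not exceed the requested reservoir size.
res_size = (requested_res_size ÷ in_size) * in_size    # 6

input_layer = weighted_minimal(res_size, in_size; weight=0.1)  # 6×3, no warning
reservoir = rand_sparse(res_size, res_size)                    # exported reservoir initializer, default keywords

size(input_layer), size(reservoir)   # ((6, 3), (6, 6))
```

Sizing the reservoir from the rounded value keeps the input layer and the reservoir dimensionally consistent.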

test/esn/test_inits.jl

Lines changed: 1 addition & 0 deletions
@@ -37,6 +37,7 @@ reservoir_inits = [
 input_inits = [
     scaled_rand,
     weighted_init,
+    weighted_minimal,
     minimal_init,
     minimal_init(; sampling_type=:irrational_sample!),
     chebyshev_mapping,

0 commit comments