2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ReservoirComputing"
uuid = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294"
authors = ["Francesco Martinuzzi"]
version = "0.10.8"
version = "0.10.9"

[deps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
2 changes: 2 additions & 0 deletions docs/src/api/inits.md
@@ -7,6 +7,7 @@
weighted_init
informed_init
minimal_init
chebyshev_mapping
```

## Reservoirs
@@ -18,4 +19,5 @@
cycle_jumps
simple_cycle
pseudo_svd
chaotic_init
```
5 changes: 3 additions & 2 deletions src/ReservoirComputing.jl
@@ -38,8 +38,9 @@ include("reca/reca_input_encodings.jl")
export NLADefault, NLAT1, NLAT2, NLAT3
export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates
export StandardRidge
export scaled_rand, weighted_init, informed_init, minimal_init
export rand_sparse, delay_line, delay_line_backward, cycle_jumps, simple_cycle, pseudo_svd
export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping
export rand_sparse, delay_line, delay_line_backward, cycle_jumps,
simple_cycle, pseudo_svd, chaotic_init
export RNN, MRNN, GRU, GRUParams, FullyGated, Minimal
export train
export ESN, HybridESN, KnowledgeModel, DeepESN
191 changes: 189 additions & 2 deletions src/esn/esn_inits.jl
@@ -282,6 +282,86 @@ function _create_irrational(irrational::Irrational, start::Int, res_size::Int,
return T.(input_matrix)
end

"""
chebyshev_mapping([rng], [T], dims...;
amplitude=one(T), sine_divisor=one(T),
chebyshev_parameter=one(T), return_sparse=false)

Generate a Chebyshev-mapped matrix [^xie2024]. The first row is initialized
using a sine function and subsequent rows are iteratively generated
via the Chebyshev mapping. The first row is defined as:

```math
w(1, j) = \text{amplitude} \cdot \sin\left(\frac{j \pi}{\text{sine\_divisor} \cdot n_{\text{cols}}}\right)
```

for j = 1, 2, …, n_cols (with n_cols typically equal to K+1, where K is the number of input layer neurons).
Subsequent rows are generated by applying the mapping:

```math
w(i+1, j) = \cos\left(\text{chebyshev\_parameter} \cdot \arccos(w(i, j))\right)
```

# Arguments

- `rng`: Random number generator. Default is `Utils.default_rng()`
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`. Here, res_size is assumed to be K+1.

# Keyword arguments

- `amplitude`: Scaling factor used to initialize the first row.
This parameter adjusts the amplitude of the sine function. Default value is one.
- `sine_divisor`: Divisor applied in the sine function's phase. Default value is one.
- `chebyshev_parameter`: Control parameter for the Chebyshev mapping in
subsequent rows. This parameter influences the distribution of the
matrix elements. Default is one.
- `return_sparse`: If `true`, the function returns the matrix as a sparse matrix. Default is `false`.

# Examples

```jldoctest
julia> input_matrix = chebyshev_mapping(10, 3)
10×3 Matrix{Float32}:
0.866025 0.866025 1.22465f-16
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
0.866025 0.866025 -4.37114f-8
```

[^xie2024]: Xie, Minzhi, Qianxue Wang, and Simin Yu.
"Time Series Prediction of ESN Based on Chebyshev Mapping and Strongly
Connected Topology."
Neural Processing Letters 56.1 (2024): 30.
"""
function chebyshev_mapping(rng::AbstractRNG, ::Type{T}, dims::Integer...;
amplitude::AbstractFloat=one(T), sine_divisor::AbstractFloat=one(T),
chebyshev_parameter::AbstractFloat=one(T),
return_sparse::Bool=false) where {T <: Number}
input_matrix = DeviceAgnostic.zeros(rng, T, dims...)
n_rows, n_cols = dims[1], dims[2]

for idx_cols in 1:n_cols
input_matrix[1, idx_cols] = amplitude * sin(idx_cols * pi / (sine_divisor * n_cols))
end
for idx_rows in 2:n_rows
for idx_cols in 1:n_cols
input_matrix[idx_rows, idx_cols] = cos(chebyshev_parameter * acos(input_matrix[
idx_rows - 1, idx_cols]))
end
end

return return_sparse ? sparse(input_matrix) : input_matrix
end
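
For reference (not part of the diff), the recurrence documented above can be reproduced in a few lines. The function name `chebyshev_sketch` below is just an illustrative stand-in for a standalone check, not package API:

```julia
# Standalone sketch of the same recurrence (illustrative; not the package code).
function chebyshev_sketch(n_rows::Int, n_cols::Int;
        amplitude=1.0f0, sine_divisor=1.0f0, chebyshev_parameter=1.0f0)
    W = zeros(Float32, n_rows, n_cols)
    # First row: sine-based initialization.
    for j in 1:n_cols
        W[1, j] = amplitude * sin(j * pi / (sine_divisor * n_cols))
    end
    # Remaining rows: w(i+1, j) = cos(chebyshev_parameter * acos(w(i, j))).
    for i in 2:n_rows, j in 1:n_cols
        W[i, j] = cos(chebyshev_parameter * acos(W[i - 1, j]))
    end
    return W
end

chebyshev_sketch(10, 3)  # should reproduce the values in the jldoctest above
```

Note that no random numbers are drawn inside `chebyshev_mapping` itself; the result is fully determined by the dimensions and keyword arguments.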

### reservoirs

"""
@@ -672,11 +752,118 @@ function get_sparsity(M, dim)
return size(M[M .!= 0], 1) / (dim * dim - size(M[M .!= 0], 1)) #nonzero/zero elements
end

function digital_chaotic_adjacency(rng::AbstractRNG, bit_precision::Integer;
extra_edge_probability::AbstractFloat=0.1)
matrix_order = 2^(2 * bit_precision)
adjacency_matrix = zeros(Int, matrix_order, matrix_order)
for row_index in 1:(matrix_order - 1)
adjacency_matrix[row_index, row_index + 1] = 1
end
adjacency_matrix[matrix_order, 1] = 1
for row_index in 1:matrix_order, column_index in 1:matrix_order
if row_index != column_index && rand(rng) < extra_edge_probability
adjacency_matrix[row_index, column_index] = 1
end
end

return adjacency_matrix
end
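
A note on the helper above (not part of the diff): the deterministic part of the adjacency is a directed ring over `2^(2 * bit_precision)` nodes, which already makes the graph strongly connected; `extra_edge_probability` only adds random shortcuts on top. Since the helper is internal, the sketch below reaches it through the module name and uses an arbitrary seeded RNG:

```julia
using Random, ReservoirComputing

# With extra_edge_probability = 0.0 only the ring survives.
# For bit_precision = 1 (order 4) the result is:
#   0 1 0 0
#   0 0 1 0
#   0 0 0 1
#   1 0 0 0
ring = ReservoirComputing.digital_chaotic_adjacency(Xoshiro(0), 1;
    extra_edge_probability=0.0)
```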

"""
chaotic_init([rng], [T], dims...;
extra_edge_probability=T(0.1), spectral_radius=one(T),
return_sparse_matrix=true)

Construct a chaotic reservoir matrix using a digital chaotic system [^xie2024].

The matrix topology is derived from a strongly connected adjacency
matrix based on a digital chaotic system operating at finite precision.
If the requested matrix order is not a valid order for the digital chaotic
system (a power of four, `2^(2 * bit_precision)`), the closest valid order is used.

# Arguments

- `rng`: Random number generator. Default is `Utils.default_rng()`
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the reservoir matrix.

# Keyword arguments

- `extra_edge_probability`: Probability of adding extra random edges in
the adjacency matrix to enhance connectivity. Default is 0.1.
- `spectral_radius`: The target spectral radius for the
reservoir matrix. Default is one.
- `return_sparse_matrix`: If `true`, the function returns the
reservoir matrix as a sparse matrix. Default is `true`.

# Examples

```jldoctest
julia> res_matrix = chaotic_init(8, 8)
┌ Warning:
│ Adjusting reservoir matrix order:
│ from 8 (requested) to 4
│ based on computed bit precision = 1.
└ @ ReservoirComputing ~/.julia/dev/ReservoirComputing/src/esn/esn_inits.jl:805
4×4 SparseArrays.SparseMatrixCSC{Float32, Int64} with 6 stored entries:
⋅ -0.600945 ⋅ ⋅
⋅ ⋅ 0.132667 2.21354
⋅ -2.60383 ⋅ -2.90391
-0.578156 ⋅ ⋅ ⋅
```

[^xie2024]: Xie, Minzhi, Qianxue Wang, and Simin Yu.
"Time Series Prediction of ESN Based on Chebyshev Mapping and Strongly
Connected Topology."
Neural Processing Letters 56.1 (2024): 30.
"""
function chaotic_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
extra_edge_probability::AbstractFloat=T(0.1), spectral_radius::AbstractFloat=one(T),
return_sparse_matrix::Bool=true) where {T <: Number}
requested_order = first(dims)
if length(dims) > 1 && dims[2] != requested_order
@warn """\n
Using dims[1] = $requested_order for the chaotic reservoir matrix order.\n
"""
end
d_estimate = log2(requested_order) / 2
d_floor = max(floor(Int, d_estimate), 1)
d_ceil = ceil(Int, d_estimate)
candidate_order_floor = 2^(2 * d_floor)
candidate_order_ceil = 2^(2 * d_ceil)
chosen_bit_precision = abs(candidate_order_floor - requested_order) <=
abs(candidate_order_ceil - requested_order) ? d_floor : d_ceil
actual_matrix_order = 2^(2 * chosen_bit_precision)
if actual_matrix_order != requested_order
@warn """\n
Adjusting reservoir matrix order:
from $requested_order (requested) to $actual_matrix_order
based on computed bit precision = $chosen_bit_precision. \n
"""
end

random_weight_matrix = T(2) * rand(rng, T, actual_matrix_order, actual_matrix_order) .-
T(1)
adjacency_matrix = digital_chaotic_adjacency(
rng, chosen_bit_precision; extra_edge_probability=extra_edge_probability)
reservoir_matrix = random_weight_matrix .* adjacency_matrix
current_spectral_radius = maximum(abs, eigvals(reservoir_matrix))
if current_spectral_radius != 0
reservoir_matrix .*= spectral_radius / current_spectral_radius
end

return return_sparse_matrix ? sparse(reservoir_matrix) : reservoir_matrix
end
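
A small usage sketch (again, not part of the diff): 16 = 2^(2·2) is already a valid order, so no adjustment warning fires, and the spectral-radius rescaling at the end of `chaotic_init` can be checked directly. The seed and keyword values are arbitrary:

```julia
using ReservoirComputing, LinearAlgebra, Random

# Dense output so eigvals can be applied directly.
W = chaotic_init(Xoshiro(1), Float32, 16, 16;
    spectral_radius=0.9f0, return_sparse_matrix=false)

maximum(abs, eigvals(W))  # ≈ 0.9, the requested spectral radius
```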

### fallbacks
#fallbacks for initializers #eventually to remove once migrated to WeightInitializers.jl
for initializer in (:rand_sparse, :delay_line, :delay_line_backward, :cycle_jumps,
:simple_cycle, :pseudo_svd,
:scaled_rand, :weighted_init, :informed_init, :minimal_init)
:simple_cycle, :pseudo_svd, :chaotic_init,
:scaled_rand, :weighted_init, :informed_init, :minimal_init, :chebyshev_mapping)
@eval begin
function ($initializer)(dims::Integer...; kwargs...)
return $initializer(Utils.default_rng(), Float32, dims...; kwargs...)
10 changes: 6 additions & 4 deletions test/esn/test_inits.jl
@@ -1,7 +1,7 @@
using ReservoirComputing, LinearAlgebra, Random, SparseArrays

const res_size = 30
const in_size = 3
const res_size = 16
const in_size = 4
const radius = 1.0
const sparsity = 0.1
const weight = 0.2
@@ -24,13 +24,15 @@ reservoir_inits = [
delay_line_backward,
cycle_jumps,
simple_cycle,
pseudo_svd
pseudo_svd,
chaotic_init
]
input_inits = [
scaled_rand,
weighted_init,
minimal_init,
minimal_init(; sampling_type=:irrational)
minimal_init(; sampling_type=:irrational),
chebyshev_mapping
]

@testset "Reservoir Initializers" begin