# ## input layers
22"""
3- scaled_rand([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
3+ scaled_rand([rng], [T], dims...;
44 scaling=0.1)
55
66Create and return a matrix with random values, uniformly distributed within
@@ -41,8 +41,8 @@ function scaled_rand(rng::AbstractRNG, ::Type{T}, dims::Integer...;
4141end
4242
4343"""
44- weighted_init([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
45- scaling=0.1)
44+ weighted_init([rng], [T], dims...;
45+ scaling=0.1, return_sparse=true )
4646
4747Create and return a matrix representing a weighted input layer.
4848This initializer generates a weighted input matrix with random non-zero
@@ -57,6 +57,8 @@ elements distributed uniformly within the range [-`scaling`, `scaling`] [^Lu2017
5757 - `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.
5858 - `scaling`: The scaling factor for the weight distribution.
5959 Defaults to `0.1`.
60+ - `return_sparse`: flag for returning a `sparse` matrix.
61+ Default is `true`.
6062
6163# Examples
6264
@@ -77,7 +79,7 @@ julia> res_input = weighted_init(8, 3)
7779 Chaos: An Interdisciplinary Journal of Nonlinear Science 27.4 (2017): 041102.
7880"""
7981function weighted_init (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
80- scaling= T (0.1 )) where {T <: Number }
82+ scaling= T (0.1 ), return_sparse :: Bool = true ) where {T <: Number }
8183 approx_res_size, in_size = dims
8284 res_size = Int (floor (approx_res_size / in_size) * in_size)
8385 layer_matrix = DeviceAgnostic. zeros (rng, T, res_size, in_size)
@@ -88,11 +90,11 @@ function weighted_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
8890 T (0.5 )) .* (T (2 ) * scaling)
8991 end
9092
91- return layer_matrix
93+ return return_sparse ? sparse (layer_matrix) : layer_matrix
9294end
9395
9496"""
95- informed_init([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
97+ informed_init([rng], [T], dims...;
9698 scaling=0.1, model_in_size, gamma=0.5)
9799
98100Create an input layer for informed echo state networks [^Pathak2018].
@@ -152,7 +154,7 @@ function informed_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
152154end
153155
154156"""
155- minimal_init([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
157+ minimal_init([rng], [T], dims...;
156158 sampling_type=:bernoulli, weight=0.1, irrational=pi, start=1, p=0.5)
157159
158160Create a layer matrix with uniform weights determined by `weight`. The sign difference
# ## reservoirs
284286
285287"""
286- rand_sparse([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
287- radius=1.0, sparsity=0.1, std=1.0)
288+ rand_sparse([rng], [T], dims...;
289+ radius=1.0, sparsity=0.1, std=1.0, return_sparse=true )
288290
289291Create and return a random sparse reservoir matrix.
290292The matrix will be of size specified by `dims`, with specified `sparsity`
@@ -301,6 +303,8 @@ and scaled spectral radius according to `radius`.
301303 Defaults to 1.0.
302304 - `sparsity`: The sparsity level of the reservoir matrix,
303305 controlling the fraction of zero elements. Defaults to 0.1.
306+ - `return_sparse`: flag for returning a `sparse` matrix.
307+ Default is `true`.
304308
305309# Examples
306310
@@ -316,7 +320,7 @@ julia> res_matrix = rand_sparse(5, 5; sparsity=0.5)
316320"""
317321function rand_sparse (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
318322 radius= T (1.0 ), sparsity= T (0.1 ), std= T (1.0 ),
319- return_sparse:: Bool = false ) where {T <: Number }
323+ return_sparse:: Bool = true ) where {T <: Number }
320324 lcl_sparsity = T (1 ) - sparsity # consistency with current implementations
321325 reservoir_matrix = sparse_init (rng, T, dims... ; sparsity= lcl_sparsity, std= std)
322326 rho_w = maximum (abs .(eigvals (reservoir_matrix)))
@@ -329,8 +333,8 @@ function rand_sparse(rng::AbstractRNG, ::Type{T}, dims::Integer...;
329333end
330334
331335"""
332- delay_line([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
333- weight=0.1)
336+ delay_line([rng], [T], dims...;
337+ weight=0.1, return_sparse=true )
334338
335339Create and return a delay line reservoir matrix [^Rodan2010].
336340
@@ -343,6 +347,8 @@ Create and return a delay line reservoir matrix [^Rodan2010].
343347 - `dims`: Dimensions of the reservoir matrix.
344348 - `weight`: Determines the value of all connections in the reservoir.
345349 Default is 0.1.
350+ - `return_sparse`: flag for returning a `sparse` matrix.
351+ Default is `true`.
346352
347353# Examples
348354
@@ -368,7 +374,7 @@ julia> res_matrix = delay_line(5, 5; weight=1)
368374 IEEE transactions on neural networks 22.1 (2010): 131-144.
369375"""
370376function delay_line (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
371- weight= T (0.1 ), return_sparse:: Bool = false ) where {T <: Number }
377+ weight= T (0.1 ), return_sparse:: Bool = true ) where {T <: Number }
372378 reservoir_matrix = DeviceAgnostic. zeros (rng, T, dims... )
373379 @assert length (dims) == 2 && dims[1 ] == dims[2 ] " The dimensions
374380 must define a square matrix (e.g., (100, 100))"
@@ -381,8 +387,8 @@ function delay_line(rng::AbstractRNG, ::Type{T}, dims::Integer...;
381387end
382388
383389"""
384- delay_line_backward([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
385- weight = 0.1, fb_weight = 0.2)
390+ delay_line_backward([rng], [T], dims...;
391+ weight = 0.1, fb_weight = 0.2, return_sparse=true )
386392
387393Create a delay line backward reservoir with the specified by `dims` and weights.
388394Creates a matrix with backward connections as described in [^Rodan2010].
@@ -398,6 +404,8 @@ Creates a matrix with backward connections as described in [^Rodan2010].
398404 forward connections in the reservoir. Default is 0.1
399405 - `fb_weight`: Determines the absolute value of backward connections
400406 in the reservoir. Default is 0.2
407+ - `return_sparse`: flag for returning a `sparse` matrix.
408+ Default is `true`.
401409
402410# Examples
403411
@@ -423,7 +431,7 @@ julia> res_matrix = delay_line_backward(Float16, 5, 5)
423431 IEEE transactions on neural networks 22.1 (2010): 131-144.
424432"""
425433function delay_line_backward (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
426- weight= T (0.1 ), fb_weight= T (0.2 ), return_sparse:: Bool = false ) where {T <: Number }
434+ weight= T (0.1 ), fb_weight= T (0.2 ), return_sparse:: Bool = true ) where {T <: Number }
427435 res_size = first (dims)
428436 reservoir_matrix = DeviceAgnostic. zeros (rng, T, dims... )
429437
@@ -436,8 +444,8 @@ function delay_line_backward(rng::AbstractRNG, ::Type{T}, dims::Integer...;
436444end
437445
438446"""
439- cycle_jumps([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
440- cycle_weight = 0.1, jump_weight = 0.1, jump_size = 3)
447+ cycle_jumps([rng], [T], dims...;
448+ cycle_weight = 0.1, jump_weight = 0.1, jump_size = 3, return_sparse=true )
441449
442450Create a cycle jumps reservoir with the specified dimensions,
443451cycle weight, jump weight, and jump size.
@@ -455,6 +463,8 @@ cycle weight, jump weight, and jump size.
455463 Default is 0.1.
456464 - `jump_size`: The number of steps between jump connections.
457465 Default is 3.
466+ - `return_sparse`: flag for returning a `sparse` matrix.
467+ Default is `true`.
458468
459469# Examples
460470
@@ -481,7 +491,7 @@ julia> res_matrix = cycle_jumps(5, 5; jump_size=2)
481491"""
482492function cycle_jumps (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
483493 cycle_weight:: Number = T (0.1 ), jump_weight:: Number = T (0.1 ),
484- jump_size:: Int = 3 , return_sparse:: Bool = false ) where {T <: Number }
494+ jump_size:: Int = 3 , return_sparse:: Bool = true ) where {T <: Number }
485495 res_size = first (dims)
486496 reservoir_matrix = DeviceAgnostic. zeros (rng, T, dims... )
487497
@@ -504,8 +514,8 @@ function cycle_jumps(rng::AbstractRNG, ::Type{T}, dims::Integer...;
504514end
505515
506516"""
507- simple_cycle([rng::AbstractRNG=Utils.default_rng() ], [T=Float32 ], dims...;
508- weight = 0.1)
517+ simple_cycle([rng], [T], dims...;
518+ weight = 0.1, return_sparse=true )
509519
510520Create a simple cycle reservoir with the specified dimensions and weight.
511521
@@ -517,6 +527,8 @@ Create a simple cycle reservoir with the specified dimensions and weight.
517527 - `dims`: Dimensions of the reservoir matrix.
518528 - `weight`: Weight of the connections in the reservoir matrix.
519529 Default is 0.1.
530+ - `return_sparse`: flag for returning a `sparse` matrix.
531+ Default is `true`.
520532
521533# Examples
522534
@@ -542,7 +554,7 @@ julia> res_matrix = simple_cycle(5, 5; weight=11)
542554 IEEE transactions on neural networks 22.1 (2010): 131-144.
543555"""
544556function simple_cycle (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
545- weight= T (0.1 ), return_sparse:: Bool = false ) where {T <: Number }
557+ weight= T (0.1 ), return_sparse:: Bool = true ) where {T <: Number }
546558 reservoir_matrix = DeviceAgnostic. zeros (rng, T, dims... )
547559
548560 for i in 1 : (dims[1 ] - 1 )
@@ -554,8 +566,9 @@ function simple_cycle(rng::AbstractRNG, ::Type{T}, dims::Integer...;
554566end
555567
556568"""
557- pseudo_svd([rng::AbstractRNG=Utils.default_rng()], [T=Float32], dims...;
558- max_value=1.0, sparsity=0.1, sorted = true, reverse_sort = false)
569+ pseudo_svd([rng], [T], dims...;
570+ max_value=1.0, sparsity=0.1, sorted = true, reverse_sort = false,
571+ return_sparse=true)
559572
560573Returns an initializer to build a sparse reservoir matrix with the given
561574`sparsity` by using a pseudo-SVD approach as described in [^yang].
@@ -575,6 +588,8 @@ Returns an initializer to build a sparse reservoir matrix with the given
575588 creating the diagonal matrix. Default is `true`.
576589 - `reverse_sort`: A boolean indicating whether to reverse the sorted
577590 singular values. Default is `false`.
591+ - `return_sparse`: flag for returning a `sparse` matrix.
592+ Default is `true`.
578593
579594# Examples
580595
@@ -592,7 +607,7 @@ julia> res_matrix = pseudo_svd(5, 5)
592607"""
593608function pseudo_svd (rng:: AbstractRNG , :: Type{T} , dims:: Integer... ;
594609 max_value:: Number = T (1.0 ), sparsity:: Number = 0.1 , sorted:: Bool = true ,
595- reverse_sort:: Bool = false , return_sparse:: Bool = false ) where {T <: Number }
610+ reverse_sort:: Bool = false , return_sparse:: Bool = true ) where {T <: Number }
596611 reservoir_matrix = create_diag (rng, T, dims[1 ],
597612 max_value;
598613 sorted= sorted,
# NOTE(review): trailing "0 commit comments" was web-page residue from the commit view, not source content — removed.