diff --git a/Project.toml b/Project.toml
index 7a0d3f84..a8eb16b5 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,11 +1,10 @@
 name = "ReservoirComputing"
 uuid = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294"
 authors = ["Francesco Martinuzzi"]
-version = "0.11.3"
+version = "0.11.4"

 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
-CellularAutomata = "878138dc-5b27-11ea-1a71-cb95d38d6b29"
 Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
@@ -14,11 +13,13 @@ Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
 WeightInitializers = "d49dbf32-c5c2-4618-8acc-27bb2598ef2d"

 [weakdeps]
+CellularAutomata = "878138dc-5b27-11ea-1a71-cb95d38d6b29"
 LIBSVM = "b1bec4e5-fd48-53fe-b0cb-9723c09d164b"
 MLJLinearModels = "6ee0df7b-362f-4a72-a706-9e79364fb692"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"

 [extensions]
+RCCellularAutomataExt = "CellularAutomata"
 RCLIBSVMExt = "LIBSVM"
 RCMLJLinearModelsExt = "MLJLinearModels"
 RCSparseArraysExt = "SparseArrays"
@@ -26,7 +27,7 @@ RCSparseArraysExt = "SparseArrays"
 [compat]
 Adapt = "4.1.1"
 Aqua = "0.8"
-CellularAutomata = "0.0.2"
+CellularAutomata = "0.0.6"
 Compat = "4.16.0"
 DifferentialEquations = "7.16.1"
 LIBSVM = "0.8"
@@ -53,4 +54,4 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
-test = ["Aqua", "Test", "SafeTestsets", "DifferentialEquations", "MLJLinearModels", "LIBSVM", "Statistics", "SparseArrays"]
+test = ["Aqua", "Test", "SafeTestsets", "DifferentialEquations", "MLJLinearModels", "LIBSVM", "Statistics", "SparseArrays", "CellularAutomata"]
diff --git a/docs/Project.toml b/docs/Project.toml
index fbe7b1ed..b8c5aa34 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -12,12 +12,12 @@ ReservoirComputing = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294"
 StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

 [compat]
-CellularAutomata = "0.0.2"
+CellularAutomata = "0.0.6"
 DifferentialEquations = "7.16.1"
 Documenter = "1"
 DocumenterCitations = "1"
 OrdinaryDiffEq = "6"
 Plots = "1"
 PredefinedDynamicalSystems = "1"
-ReservoirComputing = "0.11.0"
+ReservoirComputing = "0.11.4"
 StatsBase = "0.34.4"
diff --git a/docs/src/general/different_training.md b/docs/src/general/different_training.md
index bb92680c..4490e7d6 100644
--- a/docs/src/general/different_training.md
+++ b/docs/src/general/different_training.md
@@ -29,4 +29,4 @@ To change the regularization coefficient in the ridge example, using for example

 ## Support Vector Regression

-Contrary to the `LinearModel`s, no wrappers are needed for support vector regression. By using [LIBSVM.jl](https://github.com/JuliaML/LIBSVM.jl), LIBSVM wrappers in Julia, it is possible to call both `epsilonSVR()` or `nuSVR()` directly in `train()`. For the full range of kernels provided and the parameters to call, we refer the user to the official [documentation](https://www.csie.ntu.edu.tw/%7Ecjlin/libsvm/). Like before, if one intends to use LIBSVM regressors, it is necessary to specify `using LIBSVM`.
+Unlike the `LinearModel`s, no wrappers are needed for support vector regression. By using [LIBSVM.jl](https://github.com/JuliaML/LIBSVM.jl), the Julia wrapper for LIBSVM, it is possible to call either `epsilonSVR()` or `nuSVR()` directly in `train()`. For the full range of kernels provided and the parameters to call, we refer the user to the official documentation. As before, if one intends to use LIBSVM regressors, it is necessary to specify `using LIBSVM`.
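With `CellularAutomata` moved from `[deps]` to `[weakdeps]`, the RECA functionality now lives in the new `RCCellularAutomataExt` extension and is compiled only when the user also loads CellularAutomata.jl. A minimal usage sketch of the intended behavior (not part of the patch; `DCA` is CellularAutomata.jl's discrete cellular automaton constructor, and the data is illustrative):

```julia
using ReservoirComputing    # core ESN functionality loads on its own
using CellularAutomata      # loading this activates RCCellularAutomataExt

input_data = rand(Float32, 2, 100)    # dummy series: 2 features × 100 steps

# Build a RECA model on a rule-90 elementary cellular automaton, using the
# RandomMapping input encoding defined in this patch.
reca = RECA(input_data, DCA(90);
    generations = 16,
    input_encoding = RandomMapping(; permutations = 8, expansion_size = 40))
```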
diff --git a/src/reca/reca_input_encodings.jl b/ext/RCCellularAutomataExt.jl
similarity index 58%
rename from src/reca/reca_input_encodings.jl
rename to ext/RCCellularAutomataExt.jl
index ca31ce23..9b1de800 100644
--- a/src/reca/reca_input_encodings.jl
+++ b/ext/RCCellularAutomataExt.jl
@@ -1,24 +1,49 @@
-abstract type AbstractInputEncoding end
-abstract type AbstractEncodingData end
+module RCCellularAutomataExt
+using ReservoirComputing: RECA, RandomMapping, RandomMaps
+import ReservoirComputing: train, next_state_prediction!, AbstractOutputLayer, NLADefault,
+                           StandardStates, obtain_prediction
+using CellularAutomata
+using Random: randperm
+
+function RECA(train_data,
+        automata;
+        generations = 8,
+        input_encoding = RandomMapping(),
+        nla_type = NLADefault(),
+        states_type = StandardStates())
+    in_size = size(train_data, 1)
+    #res_size = obtain_res_size(input_encoding, generations)
+    state_encoding = create_encoding(input_encoding, train_data, generations)
+    states = reca_create_states(state_encoding, automata, train_data)
+
+    return RECA(train_data, automata, state_encoding, nla_type, states, states_type)
+end

-struct RandomMapping{I, T} <: AbstractInputEncoding
-    permutations::I
-    expansion_size::T
+#training dispatch
+function train(reca::RECA, target_data, training_method = StandardRidge; kwargs...)
+    states_new = reca.states_type(reca.nla_type, reca.states, reca.train_data)
+    return train(training_method, Float32.(states_new), Float32.(target_data); kwargs...)
 end

-"""
-    RandomMapping(permutations, expansion_size)
-    RandomMapping(permutations; expansion_size=40)
-    RandomMapping(;permutations=8, expansion_size=40)
+#predict dispatch
+function (reca::RECA)(prediction,
+        output_layer::AbstractOutputLayer,
+        initial_conditions = output_layer.last_value,
+        last_state = zeros(reca.input_encoding.ca_size))
+    return obtain_prediction(reca, prediction, last_state, output_layer;
+        initial_conditions = initial_conditions)
 end

-Random mapping of the input data directly in the reservoir. The `expansion_size`
-determines the dimension of the single reservoir, and `permutations` determines the
-number of total reservoirs that will be connected, each with a different mapping.
-The detail of this implementation can be found in [1].
+function next_state_prediction!(reca::RECA, x, out, i, args...)
+    rm = reca.input_encoding
+    x = encoding(rm, out, x)
+    ca = CellularAutomaton(reca.automata, x, rm.generations + 1)
+    ca_states = ca.evolution[2:end, :]
+    x_new = reshape(transpose(ca_states), rm.states_size)
+    x = ca.evolution[end, :]
+    return x, x_new
+end

-[1] Nichele, Stefano, and Andreas Molund. “Deep reservoir computing using cellular
-automata.” arXiv preprint arXiv:1703.02806 (2017).
-""" function RandomMapping(; permutations = 8, expansion_size = 40) RandomMapping(permutations, expansion_size) end @@ -27,15 +52,6 @@ function RandomMapping(permutations; expansion_size = 40) RandomMapping(permutations, expansion_size) end -struct RandomMaps{T, E, G, M, S} <: AbstractEncodingData - permutations::T - expansion_size::E - generations::G - maps::M - states_size::S - ca_size::S -end - function create_encoding(rm::RandomMapping, input_data, generations) maps = init_maps(size(input_data, 1), rm.permutations, rm.expansion_size) states_size = generations * rm.expansion_size * rm.permutations @@ -105,3 +121,5 @@ function mapping(input_size, mapped_vector_size) #sample(1:mapped_vector_size, input_size; replace=false) return randperm(mapped_vector_size)[1:input_size] end + +end #module diff --git a/src/ReservoirComputing.jl b/src/ReservoirComputing.jl index 028aad50..ba70527a 100644 --- a/src/ReservoirComputing.jl +++ b/src/ReservoirComputing.jl @@ -1,7 +1,6 @@ module ReservoirComputing using Adapt: adapt -using CellularAutomata: CellularAutomaton using Compat: @compat using LinearAlgebra: eigvals, mul!, I, qr, Diagonal using NNlib: fast_act, sigmoid @@ -15,24 +14,19 @@ abstract type AbstractReservoirComputer end @compat(public, (create_states)) #general -include("states.jl") -include("predict.jl") - -#general training -include("train/linear_regression.jl") - +include("generics/states.jl") +include("generics/predict.jl") +include("generics/linear_regression.jl") +#extensions +include("extensions/reca.jl") #esn -include("esn/inits_components.jl") -include("esn/esn_inits.jl") -include("esn/esn_reservoir_drivers.jl") -include("esn/esn.jl") -include("esn/deepesn.jl") -include("esn/hybridesn.jl") -include("esn/esn_predict.jl") - -#reca -include("reca/reca.jl") -include("reca/reca_input_encodings.jl") +include("inits/inits_components.jl") +include("inits/esn_inits.jl") +include("layers/esn_reservoir_drivers.jl") +include("models/esn.jl") +include("models/deepesn.jl") +include("models/hybridesn.jl") +include("models/esn_predict.jl") export NLADefault, NLAT1, NLAT2, NLAT3, PartialSquare, ExtendedSquare export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates @@ -48,8 +42,9 @@ export add_jumps!, backward_connection!, delay_line!, reverse_simple_cycle!, export RNN, MRNN, GRU, GRUParams, FullyGated, Minimal export train export ESN, HybridESN, KnowledgeModel, DeepESN +export Generative, Predictive, OutputLayer +#reca export RECA export RandomMapping, RandomMaps -export Generative, Predictive, OutputLayer end #module diff --git a/src/extensions/reca.jl b/src/extensions/reca.jl new file mode 100644 index 00000000..15ca89f4 --- /dev/null +++ b/src/extensions/reca.jl @@ -0,0 +1,55 @@ +abstract type AbstractInputEncoding end +abstract type AbstractEncodingData end + +""" + RandomMapping(permutations, expansion_size) + RandomMapping(permutations; expansion_size=40) + RandomMapping(;permutations=8, expansion_size=40) + +Random mapping of the input data directly in the reservoir. The `expansion_size` +determines the dimension of the single reservoir, and `permutations` determines the +number of total reservoirs that will be connected, each with a different mapping. +The detail of this implementation can be found in [1]. + +[1] Nichele, Stefano, and Andreas Molund. “Deep reservoir computing using cellular +automata.” arXiv preprint arXiv:1703.02806 (2017). 
+""" +struct RandomMapping{I, T} <: AbstractInputEncoding + permutations::I + expansion_size::T +end + +struct RandomMaps{T, E, G, M, S} <: AbstractEncodingData + permutations::T + expansion_size::E + generations::G + maps::M + states_size::S + ca_size::S +end + +abstract type AbstractReca <: AbstractReservoirComputer end + +""" + RECA(train_data, + automata; + generations = 8, + input_encoding=RandomMapping(), + nla_type = NLADefault(), + states_type = StandardStates()) + +[1] Yilmaz, Ozgur. “_Reservoir computing using cellular automata._” +arXiv preprint arXiv:1410.0162 (2014). + +[2] Nichele, Stefano, and Andreas Molund. “_Deep reservoir computing using cellular +automata._” arXiv preprint arXiv:1703.02806 (2017). +""" +struct RECA{S, R, E, T, Q} <: AbstractReca + #res_size::I + train_data::S + automata::R + input_encoding::E + nla_type::ReservoirComputing.NonLinearAlgorithm + states::T + states_type::Q +end diff --git a/src/train/linear_regression.jl b/src/generics/linear_regression.jl similarity index 100% rename from src/train/linear_regression.jl rename to src/generics/linear_regression.jl diff --git a/src/predict.jl b/src/generics/predict.jl similarity index 100% rename from src/predict.jl rename to src/generics/predict.jl diff --git a/src/states.jl b/src/generics/states.jl similarity index 100% rename from src/states.jl rename to src/generics/states.jl diff --git a/src/esn/esn_inits.jl b/src/inits/esn_inits.jl similarity index 100% rename from src/esn/esn_inits.jl rename to src/inits/esn_inits.jl diff --git a/src/esn/inits_components.jl b/src/inits/inits_components.jl similarity index 100% rename from src/esn/inits_components.jl rename to src/inits/inits_components.jl diff --git a/src/esn/esn_reservoir_drivers.jl b/src/layers/esn_reservoir_drivers.jl similarity index 100% rename from src/esn/esn_reservoir_drivers.jl rename to src/layers/esn_reservoir_drivers.jl diff --git a/src/esn/deepesn.jl b/src/models/deepesn.jl similarity index 100% rename from src/esn/deepesn.jl rename to src/models/deepesn.jl diff --git a/src/esn/esn.jl b/src/models/esn.jl similarity index 100% rename from src/esn/esn.jl rename to src/models/esn.jl diff --git a/src/esn/esn_predict.jl b/src/models/esn_predict.jl similarity index 100% rename from src/esn/esn_predict.jl rename to src/models/esn_predict.jl diff --git a/src/esn/hybridesn.jl b/src/models/hybridesn.jl similarity index 100% rename from src/esn/hybridesn.jl rename to src/models/hybridesn.jl diff --git a/src/reca/reca.jl b/src/reca/reca.jl deleted file mode 100644 index 8af5b2d3..00000000 --- a/src/reca/reca.jl +++ /dev/null @@ -1,64 +0,0 @@ -abstract type AbstractReca <: AbstractReservoirComputer end - -struct RECA{S, R, E, T, Q} <: AbstractReca - #res_size::I - train_data::S - automata::R - input_encoding::E - nla_type::ReservoirComputing.NonLinearAlgorithm - states::T - states_type::Q -end - -""" - RECA(train_data, - automata; - generations = 8, - input_encoding=RandomMapping(), - nla_type = NLADefault(), - states_type = StandardStates()) - -[1] Yilmaz, Ozgur. “_Reservoir computing using cellular automata._” -arXiv preprint arXiv:1410.0162 (2014). - -[2] Nichele, Stefano, and Andreas Molund. “_Deep reservoir computing using cellular -automata._” arXiv preprint arXiv:1703.02806 (2017). 
-""" -function RECA(train_data, - automata; - generations = 8, - input_encoding = RandomMapping(), - nla_type = NLADefault(), - states_type = StandardStates()) - in_size = size(train_data, 1) - #res_size = obtain_res_size(input_encoding, generations) - state_encoding = create_encoding(input_encoding, train_data, generations) - states = reca_create_states(state_encoding, automata, train_data) - - return RECA(train_data, automata, state_encoding, nla_type, states, states_type) -end - -#training dispatch -function train(reca::AbstractReca, target_data, training_method = StandardRidge; kwargs...) - states_new = reca.states_type(reca.nla_type, reca.states, reca.train_data) - return train(training_method, Float32.(states_new), Float32.(target_data); kwargs...) -end - -#predict dispatch -function (reca::RECA)(prediction, - output_layer::AbstractOutputLayer, - initial_conditions = output_layer.last_value, - last_state = zeros(reca.input_encoding.ca_size)) - return obtain_prediction(reca, prediction, last_state, output_layer; - initial_conditions = initial_conditions) -end - -function next_state_prediction!(reca::RECA, x, out, i, args...) - rm = reca.input_encoding - x = encoding(rm, out, x) - ca = CellularAutomaton(reca.automata, x, rm.generations + 1) - ca_states = ca.evolution[2:end, :] - x_new = reshape(transpose(ca_states), rm.states_size) - x = ca.evolution[end, :] - return x, x_new -end