 
 (c::Chain)(x) = applychain(c.layers, x)
 
-"""
-    applychain(layers, x)
-
-Calculates the forward results of the complete chain provided as a `Tuple`, `AbstractVector`,
-or a `NamedTuple` of layers with `x` as model input. Users are encouraged to call a chain
-instead of using this function directly.
-
-# Examples
-
-```jldoctest; filter = r"[+-]?([0-9]*[.])?[0-9]+"
-julia> using Flux: applychain
-
-julia> c = Chain(Dense(10 => 5, σ), Dense(5 => 2), softmax);
-
-julia> applychain(c.layers, rand(10)) # will output a 2 element vector as our chain has 2 neurons in the final output layer
-2-element Vector{Float64}:
- 0.5101459322326873
- 0.4898540677673126
-
-julia> applychain([Dense(10 => 5, σ), Dense(5 => 2), softmax], rand(10)) # will output a 2 element vector as our chain has 2 neurons in the final output layer
-2-element Vector{Float64}:
- 0.5101459322326873
- 0.4898540677673126
-
-julia> c(rand(10)) # encouraged method
-2-element Vector{Float64}:
- 0.4861734115447846
- 0.5138265884552153
-```
-"""
+# Calculates the forward results of the complete chain provided as a `Tuple`, `AbstractVector`,
+# or a `NamedTuple` of layers with `x` as model input. Users are encouraged to call a chain
+# instead of using this function directly.
 @generated function applychain(layers::Tuple{Vararg{<:Any,N}}, x) where {N}
   symbols = vcat(:x, [gensym() for _ in 1:N])
   calls = [:($(symbols[i+1]) = layers[$i]($(symbols[i]))) for i in 1:N]
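For readers skimming the diff: the behaviour the removed docstring illustrated is unchanged. Calling a `Chain` still dispatches to `applychain`, and the `@generated` method above unrolls the tuple of layers into one assignment per layer. A minimal usage sketch, assuming a Flux version with the `Dense(in => out)` constructor (the layer sizes and random input are illustrative, taken from the removed example rather than anything new in this change):

```julia
using Flux

# Same toy model the removed docstring used.
c = Chain(Dense(10 => 5, σ), Dense(5 => 2), softmax)
x = rand(Float32, 10)

# Calling the chain is the encouraged entry point; internally it runs
# applychain(c.layers, x), whose generated body expands to roughly:
#   y1 = layers[1](x); y2 = layers[2](y1); y3 = layers[3](y2); return y3
y = c(x)

length(y)    # 2, the width of the last Dense layer
sum(y) ≈ 1   # true, because the final layer is softmax
```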
@@ -134,20 +107,7 @@ julia> activations(c, 1)
 """
 activations(c::Chain, input) = extraChain(Tuple(c.layers), input)
 
-"""
-    extraChain(fs::Tuple, x)
-
-Calculates the forward results of each layer provided in a `Tuple` with `x` as model input.
-
-# Examples
-
-```jldoctest; filter = r"[+-]?([0-9]*[.])?[0-9]+"
-julia> using Flux: extraChain
-
-julia> extraChain((Dense(10 => 5, σ), Dense(5 => 2), softmax), rand(10)) # will output a tuple of 3 lists (with length = 5, 2, and 2) as our chain has 3 layers
-([0.3274892431795043, 0.5360197770386552, 0.3447464835514667, 0.5273025865532305, 0.7513168089280781], [-0.3533774181890544, -0.010937055274926138], [0.4152168057978045, 0.5847831942021956])
-```
-"""
+# Calculates the forward results of each layer provided in a `Tuple` with `x` as model input.
 function extraChain(fs::Tuple, x)
   res = first(fs)(x)
   return (res, extraChain(Base.tail(fs), res)...)
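Likewise for `activations` and its helper: `extraChain` applies `first(fs)` to the input, then recurses on `Base.tail(fs)` with the result, so the returned tuple holds every intermediate layer output in order. A hedged usage sketch (the exact values depend on random initialisation; only the shapes matter here):

```julia
using Flux
using Flux: activations   # public entry point; extraChain is the internal helper

c = Chain(Dense(10 => 5, σ), Dense(5 => 2), softmax)
outs = activations(c, rand(Float32, 10))

# One entry per layer: the 5-wide hidden activation, the 2-wide logits,
# and the final softmax probabilities.
length.(outs)   # (5, 2, 2)
```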