@@ -21,8 +21,8 @@ julia> softmax([1,2,3.])
 """
 function softmax(xs::AbstractArray; dims=1)
     max_ = maximum(xs, dims=dims)
-    out = exp.(xs .- max_)
-    out .= out ./ sum!(max_, out)
+    exp_ = exp.(xs .- max_)
+    exp_ ./ sum(exp_, dims=dims)
 end
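For context, the replacement body is the standard numerically stable softmax: subtracting the per-slice maximum before exponentiating leaves the result unchanged (softmax is shift-invariant) but keeps `exp` from overflowing. A minimal standalone sketch of the idea, with illustrative names (`naive`, `stable`) that are not part of this diff:

    # Naive softmax overflows for large scores: exp(1000.0) == Inf.
    naive(xs; dims=1) = exp.(xs) ./ sum(exp.(xs), dims=dims)

    # Stable form, matching the new function body above.
    function stable(xs; dims=1)
        max_ = maximum(xs, dims=dims)
        exp_ = exp.(xs .- max_)
        exp_ ./ sum(exp_, dims=dims)
    end

    xs = [1000.0 1.0; 1001.0 2.0; 1002.0 3.0]
    naive(xs)   # first column is NaN (Inf / Inf)
    stable(xs)  # finite, and each column sums to 1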
 
 function softmax!(out::AbstractVecOrMat{T}, xs::AbstractVecOrMat{T}) where {T}
@@ -58,7 +58,7 @@ function ∇softmax!(out::AbstractVecOrMat, Δ::AbstractVecOrMat, xs::AbstractVe
 end
 function ∇softmax(Δ, xs; dims=1)
     sf = softmax(xs, dims=dims)
-    out = sf .* (Δ .- sum(Δ .* sf, dims=dims))
+    sf .* (Δ .- sum(Δ .* sf, dims=dims))
 end
 ∇softmax!(Δ, xs) = ∇softmax!(Δ, Δ, xs)
 
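The closed form in `∇softmax` follows from the softmax Jacobian: with `s = softmax(x)`, ∂sᵢ/∂xⱼ = sᵢ(δᵢⱼ − sⱼ), so the vector-Jacobian product against a sensitivity Δ is `s .* (Δ .- sum(Δ .* s, dims=dims))`. A quick finite-difference sanity check of that identity (illustrative only; `softmax_`, `grad_`, and the step size are assumptions, not part of the diff):

    softmax_(x) = (e = exp.(x .- maximum(x)); e ./ sum(e))
    grad_(Δ, x) = (s = softmax_(x); s .* (Δ .- sum(Δ .* s)))

    x, Δ, h = randn(5), randn(5), 1e-6
    # Central finite difference of f(x) = sum(Δ .* softmax(x)).
    fd = [(sum(Δ .* softmax_(x .+ h .* (1:5 .== i))) -
           sum(Δ .* softmax_(x .- h .* (1:5 .== i)))) / (2h) for i in 1:5]
    maximum(abs.(fd .- grad_(Δ, x)))  # small, on the order of 1e-10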
@@ -72,9 +72,9 @@ computing cross entropy loss.
 """
 function logsoftmax(xs::AbstractArray; dims=1)
     max_ = maximum(xs, dims=dims)
-    out = exp.(xs .- max_)
-    log_ = log.(sum(out, dims=dims))
-    out .= (xs .- max_) .- log_
+    exp_ = exp.(xs .- max_)
+    log_ = log.(sum(exp_, dims=dims))
+    (xs .- max_) .- log_
 end
 
 function logsoftmax!(out::AbstractVecOrMat, xs::AbstractVecOrMat)
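Likewise, the new `logsoftmax` body returns `(xs .- max_) .- log.(sum(exp.(xs .- max_), dims=dims))`, which equals `log.(softmax(xs))` mathematically but stays finite where the composed form produces `-Inf` or `NaN`. A small illustrative check (the helper name `logsoftmax_` is made up for this note):

    x = [-1000.0, 0.0, 1000.0]

    logsoftmax_(x) = (m = maximum(x); (x .- m) .- log(sum(exp.(x .- m))))

    log.(exp.(x) ./ sum(exp.(x)))  # [-Inf, -Inf, NaN]: exp(1000.0) overflows
    logsoftmax_(x)                 # [-2000.0, -1000.0, 0.0], all finite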