1 parent c514d05 commit 1584f86
src/softmax.jl
@@ -59,7 +59,7 @@ end
 logsoftmax(xs) = log.(exp.(xs) ./ sum(exp.(xs)))
 
 `logsoftmax(xs)` computes the log of `softmax(xs)`, but in a more numerically stable
-way than directly taking the log of the the softmax function, which is commonly used in
+way than directly taking the log of the softmax function, which is commonly used in
 computing cross entropy loss.
 """
 logsoftmax(xs) = logsoftmax!(similar(xs), xs)
@@ -82,4 +82,4 @@ function logsoftmax!(out::AbstractVecOrMat, xs::AbstractVecOrMat)
     return out
 end
 ∇logsoftmax(Δ, xs) = ∇softmax(Δ ./ max.(eps(eltype(xs)),softmax(xs)), xs)
-∇logsoftmax!(Δ, xs) = ∇softmax!(Δ, Δ, xs)
+∇logsoftmax!(Δ, xs) = ∇softmax!(Δ, Δ, xs)
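For context on the docstring touched in the first hunk: the stability claim comes from shifting the inputs by their maximum before exponentiating, so no term overflows. A minimal standalone sketch in Julia (illustrative only; naive_logsoftmax and stable_logsoftmax are hypothetical names, not the package's exports):

# Naive form from the docstring example: exp.(xs) overflows to Inf for large
# inputs, and Inf/Inf gives NaN, so the result is all NaN.
naive_logsoftmax(xs) = log.(exp.(xs) ./ sum(exp.(xs)))

# Max-shift form: subtracting maximum(xs) keeps every exponent <= 0, so the
# sum stays finite and the log is well defined.
function stable_logsoftmax(xs::AbstractVector)
    shifted = xs .- maximum(xs)
    return shifted .- log(sum(exp.(shifted)))
end

xs = [1000.0, 1001.0, 1002.0]
naive_logsoftmax(xs)    # [NaN, NaN, NaN]
stable_logsoftmax(xs)   # ≈ [-2.4076, -1.4076, -0.4076]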
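On the second hunk: the in-place gradient delegates to ∇softmax!, and mathematically the pullback of logsoftmax has the closed form Δ .- softmax(xs) .* sum(Δ). A hedged standalone sketch of that identity (not the package implementation; logsoftmax_vjp is an illustrative name):

function logsoftmax_vjp(Δ::AbstractVector, xs::AbstractVector)
    # softmax via the same max-shift trick, so large inputs do not overflow
    shifted = xs .- maximum(xs)
    s = exp.(shifted) ./ sum(exp.(shifted))
    # the Jacobian of logsoftmax is I minus the outer product of ones and s,
    # so the vector-Jacobian product is Δ minus softmax scaled by sum(Δ)
    return Δ .- s .* sum(Δ)
end

logsoftmax_vjp([1.0, 0.0, 0.0], [1000.0, 1001.0, 1002.0])   # ≈ [0.9100, -0.2447, -0.6652]

Dividing Δ elementwise by softmax(xs) and passing the result through the softmax pullback, as the composed ∇logsoftmax definition in the diff does, reduces to this same expression.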