Skip to content

Commit cd4c59f

Browse files
add some docstrings (#172)
* add some docstrings
* cleanup
1 parent 7ee354d commit cd4c59f

File tree

3 files changed

+56
-17
lines changed

3 files changed

+56
-17
lines changed

src/conv.jl

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -164,18 +164,33 @@ if is_nnpack_available()
164164
end
165165
end
166166

167-
function conv(x, w::AbstractArray{T, N}; stride = 1, pad = 0, dilation = 1, flipped = false) where {T, N}
167+
"""
168+
conv(x, w; stride=1, pad=0, dilation=1, flipped=false)
169+
170+
Apply convolution filter `w` to input `x`. `x` and `w` are 3d/4d/5d tensors
171+
in 1d/2d/3d convolutions respectively.
172+
"""
173+
function conv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N}
168174
stride = expand(Val(N-2), stride)
169175
pad = expand(Val(N-2), pad)
170176
dilation = expand(Val(N-2), dilation)
171-
cdims = DenseConvDims(x, w; stride = stride, padding = pad, dilation = dilation, flipkernel = flipped)
177+
cdims = DenseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped)
172178
return conv(x, w, cdims)
173179
end
174180

175-
function depthwiseconv(x, w::AbstractArray{T, N}; stride = 1, pad = 0, dilation = 1, flipped = false) where {T, N}
181+
182+
183+
184+
"""
185+
depthwiseconv(x, w; stride=1, pad=0, dilation=1, flipped=false)
186+
187+
Depthwise convolution operation with filter `w` on input `x`. `x` and `w`
188+
are 3d/4d/5d tensors in 1d/2d/3d convolutions respectively.
189+
"""
190+
function depthwiseconv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N}
176191
stride = expand(Val(N-2), stride)
177192
pad = expand(Val(N-2), pad)
178193
dilation = expand(Val(N-2), dilation)
179-
cdims = DepthwiseConvDims(x, w; stride = stride, padding = pad, dilation = dilation, flipkernel = flipped)
194+
cdims = DepthwiseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped)
180195
return depthwiseconv(x, w, cdims)
181196
end

src/pooling.jl

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -140,16 +140,28 @@ end
140140
expand(N, i::Tuple) = i
141141
expand(N, i::Integer) = ntuple(_ -> i, N)
142142

143-
function maxpool(x, k::NTuple{N, Integer}; pad = 0, stride = k) where N
143+
144+
"""
145+
maxpool(x, k::NTuple; pad=0, stride=k)
146+
147+
Perform max pool operation with window size `k` on input tensor `x`.
148+
"""
149+
function maxpool(x, k::NTuple{N, Integer}; pad=0, stride=k) where N
144150
pad = expand(Val(N), pad)
145151
stride = expand(Val(N), stride)
146-
pdims = PoolDims(x, k; padding = pad, stride = stride)
152+
pdims = PoolDims(x, k; padding=pad, stride=stride)
147153
return maxpool(x, pdims)
148154
end
149155

150-
function meanpool(x, k::NTuple{N, Integer}; pad = 0, stride = k) where N
156+
157+
"""
158+
meanpool(x, k::NTuple; pad=0, stride=k)
159+
160+
Perform mean pool operation with window size `k` on input tensor `x`.
161+
"""
162+
function meanpool(x, k::NTuple{N, Integer}; pad=0, stride=k) where N
151163
pad = expand(Val(N), pad)
152164
stride = expand(Val(N), stride)
153-
pdims = PoolDims(x, k; padding = pad, stride = stride)
165+
pdims = PoolDims(x, k; padding=pad, stride=stride)
154166
return meanpool(x, pdims)
155167
end

src/softmax.jl

Lines changed: 21 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,22 +2,28 @@ export softmax, softmax!, ∇softmax, ∇softmax!,
22
logsoftmax, logsoftmax!, ∇logsoftmax, ∇logsoftmax!
33

44
"""
5-
softmax(xs) = exp.(xs) ./ sum(exp.(xs))
5+
softmax(x; dims=1)
6+
7+
[Softmax](https://en.wikipedia.org/wiki/Softmax_function) turns input array `x`
8+
into probability distributions that sum to 1 along the dimensions specified by `dims`.
9+
It is semantically equivalent to the following:
610
7-
[Softmax](https://en.wikipedia.org/wiki/Softmax_function) takes
8-
log-probabilities (any real vector) and returns a probability distribution that
9-
sums to 1.
11+
softmax(x; dims=1) = exp.(x) ./ sum(exp.(x), dims=dims)
1012
11-
If given a matrix it will by default (`dims=1`) treat it as a batch of vectors,
12-
with each column independent. Keyword `dims=2` will instead treat rows independently, etc.
13+
with additional manipulations enhancing numerical stability.
1314
14-
```
15-
julia> softmax([1,2,3.])
15+
For a matrix input `x` it will by default (`dims=1`) treat it as a batch of vectors,
16+
with each column independent. Keyword `dims=2` will instead treat rows independently,
17+
etc...
18+
```julia-repl
19+
julia> softmax([1, 2, 3])
1620
3-element Array{Float64,1}:
1721
0.0900306
1822
0.244728
1923
0.665241
2024
```
25+
26+
See also [`logsoftmax`](@ref).
2127
"""
2228
function softmax(xs::AbstractArray; dims=1)
2329
max_ = maximum(xs, dims=dims)
@@ -64,11 +70,17 @@ end
6470

6571

6672
"""
67-
logsoftmax(xs) = log.(exp.(xs) ./ sum(exp.(xs)))
73+
logsoftmax(x; dims=1)
6874
6975
Computes the log of softmax in a more numerically stable
7076
way than directly taking `log.(softmax(xs))`. Commonly used in
7177
computing cross entropy loss.
78+
79+
It is semantically equivalent to the following:
80+
81+
logsoftmax(x; dims=1) = x .- log.(sum(exp.(x), dims=dims))
82+
83+
See also [`softmax`](@ref).
7284
"""
7385
function logsoftmax(xs::AbstractArray; dims=1)
7486
max_ = maximum(xs, dims=dims)

0 commit comments

Comments (0)