Skip to content

Commit 2e24d96

Browse files
committed
Resolve conflicts
2 parents d67b443 + f97688e commit 2e24d96

File tree

7 files changed

+40
-8
lines changed

7 files changed

+40
-8
lines changed

.github/workflows/clean_preview.yml

Lines changed: 27 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,27 @@
1+
# from https://github.com/CliMA/ClimaTimeSteppers.jl
2+
name: Doc Preview Cleanup
3+
4+
on:
5+
pull_request:
6+
types: [closed]
7+
8+
jobs:
9+
doc-preview-cleanup:
10+
runs-on: ubuntu-latest
11+
steps:
12+
- name: Checkout gh-pages branch
13+
uses: actions/checkout@v2
14+
with:
15+
ref: gh-pages
16+
- name: Delete preview and history + push changes
17+
run: |
18+
if [ -d "previews/PR$PRNUM" ]; then
19+
git config user.name "Documenter.jl"
20+
git config user.email "[email protected]"
21+
git rm -rf "previews/PR$PRNUM"
22+
git commit -m "delete preview"
23+
git branch gh-pages-new $(echo "delete history" | git commit-tree HEAD^{tree})
24+
git push --force origin gh-pages-new:gh-pages
25+
fi
26+
env:
27+
PRNUM: ${{ github.event.number }}

.github/workflows/pr_comment.yml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -7,7 +7,7 @@ jobs:
77
runs-on: ubuntu-latest
88
steps:
99
- name: Create PR comment
10-
if: github.event_name == 'pull_request' && github.repository == github.event.pull_request.head.repo.full_name && ${{ github.event.label.name == 'documentation' }} # if this is a pull request build AND the pull request is NOT made from a fork
10+
if: github.event_name == 'pull_request' && github.repository == github.event.pull_request.head.repo.full_name && github.event.label.name == 'documentation' # if this is a pull request build AND the pull request is NOT made from a fork
1111
uses: thollander/actions-comment-pull-request@71efef56b184328c7ef1f213577c3a90edaa4aff
1212
with:
1313
message: 'Once the build has completed, you can preview any updated documentation at this URL: https://fluxml.ai/Flux.jl/previews/PR${{ github.event.number }}/ in ~20 minutes'

docs/src/saving.md

Lines changed: 3 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -55,7 +55,9 @@ versions of Flux).
5555
Saving models this way will only store the trainable parameters which
5656
will result in incorrect behavior for layers like `BatchNorm`.
5757

58-
```jldoctest saving
58+
```julia
59+
julia> using Flux
60+
5961
julia> model = Chain(Dense(10 => 5,relu),Dense(5 => 2),softmax)
6062
Chain(
6163
Dense(10 => 5, relu), # 55 parameters
@@ -134,4 +136,3 @@ exactly where you left off. BSON is smart enough to [cache values](https://githu
134136
opt = ADAM()
135137
@save "model-$(now()).bson" model opt
136138
```
137-

src/layers/basic.jl

Lines changed: 1 addition & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -44,7 +44,7 @@ function Chain(; kw...)
4444
end
4545

4646
@forward Chain.layers Base.getindex, Base.length, Base.first, Base.last,
47-
Base.iterate, Base.lastindex, Base.keys
47+
Base.iterate, Base.lastindex, Base.keys, Base.firstindex
4848

4949
@functor Chain
5050

@@ -68,7 +68,6 @@ end
6868
Base.getindex(c::Chain, i::AbstractArray) = Chain(c.layers[i])
6969
Base.getindex(c::Chain{<:NamedTuple}, i::AbstractArray) =
7070
Chain(NamedTuple{Base.keys(c)[i]}(Tuple(c.layers)[i]))
71-
7271
function Base.show(io::IO, c::Chain)
7372
print(io, "Chain(")
7473
_show_layers(io, c.layers)

src/layers/recurrent.jl

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -83,7 +83,7 @@ rnn.state = hidden(rnn.cell)
8383
reset!(m::Recur) = (m.state = m.cell.state0)
8484
reset!(m) = foreach(reset!, functor(m)[1])
8585

86-
flip(f, xs) = reverse(f.(reverse(xs)))
86+
flip(f, xs) = reverse([f(x) for x in reverse(xs)])
8787

8888
function (m::Recur)(x::AbstractArray{T, 3}) where T
8989
h = [m(x_t) for x_t in eachslice(x, dims=3)]

src/optimise/train.jl

Lines changed: 3 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -112,7 +112,8 @@ Multiple callbacks can be passed to `cb` as array.
112112
"""
113113
function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
114114
cb = runall(cb)
115-
n = (Base.IteratorSize(typeof(data)) == Base.HasLength()) ? length(data) : 0
115+
itrsz = Base.IteratorSize(typeof(data))
116+
n = (itrsz == Base.HasLength()) || (itrsz == Base.HasShape{1}()) ? length(data) : 0
116117
@withprogress for (i, d) in enumerate(data)
117118
try
118119
gs = gradient(ps) do
@@ -129,7 +130,7 @@ function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
129130
rethrow(ex)
130131
end
131132
end
132-
@logprogress i / n
133+
@logprogress iszero(n) ? nothing : i / n
133134
end
134135
end
135136

test/layers/basic.jl

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -31,6 +31,10 @@ import Flux: activations
3131
@test_throws ArgumentError Chain(layers = Dense(10, 10), two = identity) # reserved name
3232

3333
@test_nowarn Chain([Dense(10, 5, σ), Dense(5, 2)])(randn(Float32, 10)) # vector of layers
34+
35+
c = Chain(Dense(10, 5, σ), Dense(5, 2), Dense(2, 1, relu))
36+
@test c[1] == c[begin]
37+
@test c[3] == c[end]
3438
end
3539

3640
@testset "Activations" begin

0 commit comments

Comments (0)