Skip to content

Commit f2d41b9

Browse files
authored
Always call callback (#1184)
* Always call callback * Update callbacks.jl
1 parent 07faeee commit f2d41b9

File tree

3 files changed

+30
-36
lines changed

3 files changed

+30
-36
lines changed

docs/src/user/config.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ In addition to the solver, you can alter the behavior of the Optim package by us
5151
* `successive_f_tol`: Determines the number of times the objective is allowed to increase across iterations. Defaults to 1.
5252
* `iterations`: How many iterations will run before the algorithm gives up? Defaults to `1_000`.
5353
* `time_limit`: A soft upper limit on the total run time. Defaults to `NaN` (unlimited).
54-
* `callback`: A function to be called during tracing. The return value should be a boolean, where `true` will stop the `optimize` call early. The callback function is called every `show_every`th iteration. If `store_trace` is false, the argument to the callback is of the type [`OptimizationState`](https://github.com/JuliaNLSolvers/Optim.jl/blob/a1035134ca1f3ebe855f1cde034e32683178225a/src/types.jl#L155), describing the state of the current iteration. If `store_trace` is true, the argument is a list of all the states from the first iteration to the current.
54+
* `callback`: A function to be called during tracing. The return value should be a boolean, where `true` will stop the `optimize` call early. The callback function is called every iteration. If `store_trace` is false, the argument to the callback is of the type [`OptimizationState`](https://github.com/JuliaNLSolvers/Optim.jl/blob/a1035134ca1f3ebe855f1cde034e32683178225a/src/types.jl#L155), describing the state of the current iteration. If `store_trace` is true, the argument is a list of all the states from the first iteration to the current.
5555

5656
!!! tip "Disabling a termination criterion"
5757
If the `x_abstol`, `x_reltol`, `f_abstol`, `f_reltol`, `g_tol`, or `time_limit` tolerances are set to `NaN` all comparisons will be false internally, and this fact can be used to turn off the check. For example, `x_reltol` defaults to `0`. This does not mean that the check is turned off; it only means that we stop at exactly zero change. However, if we set it to `NaN` specifically, the check of the termination criterion is always false, and as such we will never stop due to any value of the infinity norm of the vector of relative changes.

src/utilities/update.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ function update!(
2020
flush(stdout)
2121
end
2222
end
23-
if callback !== nothing && (iteration % show_every == 0)
23+
if callback !== nothing
2424
if store_trace
2525
stopped = callback(tr)
2626
else

test/general/callbacks.jl

Lines changed: 28 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -9,76 +9,70 @@
99
d3 = TwiceDifferentiable(f, g!, h!, initial_x)
1010

1111
for method in (NelderMead(), SimulatedAnnealing())
12-
ot_run = false
12+
ot_count = 0
1313
cb = tr -> begin
14-
@test tr[end].iteration % 3 == 0
15-
ot_run = true
14+
ot_count += 1
1615
false
1716
end
1817
options = Optim.Options(callback = cb, show_every = 3, store_trace = true)
19-
optimize(f, initial_x, method, options)
20-
@test ot_run
18+
res1 = optimize(f, initial_x, method, options)
19+
@test ot_count == 1+res1.iterations
2120

22-
os_run = false
21+
os_count_2 = 0
2322
cb = os -> begin
24-
@test os.iteration % 3 == 0
25-
os_run = true
23+
os_count_2 += 1
2624
false
2725
end
2826
options = Optim.Options(callback = cb, show_every = 3)
29-
optimize(f, initial_x, method, options)
30-
@test os_run
27+
res2 = optimize(f, initial_x, method, options)
28+
@test os_count_2 == 1+res2.iterations
3129

3230
# Test early stopping by callbacks
3331
options = Optim.Options(callback = x -> x.iteration == 5 ? true : false)
34-
optimize(f, zeros(2), NelderMead(), options)
32+
res3 = optimize(f, zeros(2), NelderMead(), options)
33+
@test res3.iterations == 5
3534
end
3635

37-
for method in
38-
(BFGS(), ConjugateGradient(), GradientDescent(), MomentumGradientDescent())
39-
ot_run = false
36+
for method in (BFGS(), ConjugateGradient(), GradientDescent(), MomentumGradientDescent())
37+
ot_count = 0
4038
cb = tr -> begin
41-
@test tr[end].iteration % 3 == 0
42-
ot_run = true
39+
ot_count += 1
4340
false
4441
end
4542
options = Optim.Options(callback = cb, show_every = 3, store_trace = true)
43+
res1 = optimize(d2, initial_x, method, options)
44+
@test ot_count == 1+res1.iterations
4645

47-
optimize(d2, initial_x, method, options)
48-
@test ot_run
49-
50-
os_run = false
46+
os_count = 0
5147
cb = os -> begin
52-
@test os.iteration % 3 == 0
53-
os_run = true
48+
os_count += 1
5449
false
5550
end
5651
options = Optim.Options(callback = cb, show_every = 3)
57-
optimize(d2, initial_x, method, options)
58-
@test os_run
52+
res2 = optimize(d2, initial_x, method, options)
53+
@test os_count == 1+res2.iterations
5954
end
6055

6156
for method in (Newton(),)
62-
ot_run = false
57+
ot_count = 0
6358
cb = tr -> begin
64-
@test tr[end].iteration % 3 == 0
65-
ot_run = true
59+
ot_count += 1
6660
false
6761
end
6862
options = Optim.Options(callback = cb, show_every = 3, store_trace = true)
69-
optimize(d3, initial_x, method, options)
70-
@test ot_run
63+
res1 = optimize(d3, initial_x, method, options)
64+
@test ot_count == 1+res1.iterations
7165

72-
os_run = false
66+
os_count = 0
7367
cb = os -> begin
74-
@test os.iteration % 3 == 0
75-
os_run = true
68+
os_count += 1
7669
false
7770
end
7871
options = Optim.Options(callback = cb, show_every = 3)
79-
optimize(d3, initial_x, method, options)
80-
@test os_run
72+
res2 = optimize(d3, initial_x, method, options)
73+
@test os_count == 1+res2.iterations
8174
end
75+
8276
res = optimize(x -> x^2, -5, 5, callback = _ -> true)
8377
@test res.iterations == 0
8478
end

0 commit comments

Comments
 (0)