This repository was archived by the owner on Aug 25, 2025. It is now read-only.

Commit 58f2fc6

Merge pull request #154 from ChrisRackauckas/fix-formatting
Apply JuliaFormatter to fix code formatting
2 parents (57a75ee, b333cfa) · commit 58f2fc6
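The change itself is a pure formatting pass. As a minimal sketch of how such a pass is typically produced locally (assuming JuliaFormatter.jl is installed; the exact invocation used for this commit is not recorded on this page), SciML repositories usually carry a .JuliaFormatter.toml with style = "sciml", and the formatter is run over the whole tree:

using JuliaFormatter

# Reformat every .jl file under the repository root in place.
# The style is read from .JuliaFormatter.toml when present; passing
# SciMLStyle() explicitly, as sketched below, is an assumption about
# how this particular commit was produced.
format(".")                      # picks up .JuliaFormatter.toml
# format(".", SciMLStyle())      # equivalent when the repo uses the SciML style

Formatting CI for SciML packages typically checks that format(".") leaves the tree unchanged, which is why a dedicated formatting commit like this one contains only whitespace and line-break differences.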

File tree: 5 files changed, +77 additions, −37 deletions

ext/OptimizationEnzymeExt.jl

Lines changed: 13 additions & 7 deletions
@@ -85,7 +85,7 @@ function set_runtime_activity2(
     Enzyme.set_runtime_activity(a, RTA)
 end
 function_annotation(::Nothing) = Nothing
-function_annotation(::AutoEnzyme{<:Any, A}) where A = A
+function_annotation(::AutoEnzyme{<:Any, A}) where {A} = A
 function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
         adtype::AutoEnzyme, p, num_cons = 0;
         g = false, h = false, hv = false, fg = false, fgh = false,
@@ -225,9 +225,12 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
         if func_annot <: Enzyme.Const
             basefunc = Enzyme.Const(basefunc)
         elseif func_annot <: Enzyme.Duplicated || func_annot <: Enzyme.BatchDuplicated
-            basefunc = Enzyme.BatchDuplicated(basefunc, Tuple(make_zero(basefunc) for i in 1:length(x)))
-        elseif func_annot <: Enzyme.DuplicatedNoNeed || func_annot <: Enzyme.BatchDuplicatedNoNeed
-            basefunc = Enzyme.BatchDuplicatedNoNeed(basefunc, Tuple(make_zero(basefunc) for i in 1:length(x)))
+            basefunc = Enzyme.BatchDuplicated(basefunc, Tuple(make_zero(basefunc)
+            for i in 1:length(x)))
+        elseif func_annot <: Enzyme.DuplicatedNoNeed ||
+               func_annot <: Enzyme.BatchDuplicatedNoNeed
+            basefunc = Enzyme.BatchDuplicatedNoNeed(basefunc, Tuple(make_zero(basefunc)
+            for i in 1:length(x)))
         end
         # else
         #     seeds = Enzyme.onehot(zeros(eltype(x), num_cons))
@@ -241,12 +244,14 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
                 Enzyme.make_zero!(jc)
             end
             Enzyme.make_zero!(y)
-            if func_annot <: Enzyme.Duplicated || func_annot <: Enzyme.BatchDuplicated || func_annot <: Enzyme.DuplicatedNoNeed || func_annot <: Enzyme.BatchDuplicatedNoNeed
+            if func_annot <: Enzyme.Duplicated || func_annot <: Enzyme.BatchDuplicated ||
+               func_annot <: Enzyme.DuplicatedNoNeed ||
+               func_annot <: Enzyme.BatchDuplicatedNoNeed
                 for bf in basefunc.dval
                     Enzyme.make_zero!(bf)
                 end
             end
-            Enzyme.autodiff(fmode, basefunc , BatchDuplicated(y, Jaccache),
+            Enzyme.autodiff(fmode, basefunc, BatchDuplicated(y, Jaccache),
                 BatchDuplicated(θ, seeds), Const(p))
             for i in eachindex(θ)
                 if J isa Vector
@@ -575,7 +580,8 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
             for i in eachindex(Jaccache)
                 Enzyme.make_zero!(Jaccache[i])
             end
-            Jaccache, y = Enzyme.autodiff(WithPrimal(fmode), f.cons, Duplicated,
+            Jaccache,
+            y = Enzyme.autodiff(WithPrimal(fmode), f.cons, Duplicated,
                 BatchDuplicated(θ, seeds), Const(p))
             if size(y, 1) == 1
                 return reduce(vcat, Jaccache)

ext/OptimizationZygoteExt.jl

Lines changed: 34 additions & 19 deletions
@@ -30,7 +30,7 @@ function OptimizationBase.instantiate_function(
     adtype, soadtype = OptimizationBase.generate_adtype(adtype)
 
     if g == true && f.grad === nothing
-        prep_grad = prepare_gradient(f.f, adtype, x, Constant(p), strict=Val(false))
+        prep_grad = prepare_gradient(f.f, adtype, x, Constant(p), strict = Val(false))
         function grad(res, θ)
             gradient!(f.f, res, prep_grad, adtype, θ, Constant(p))
         end
@@ -47,7 +47,7 @@ function OptimizationBase.instantiate_function(
 
     if fg == true && f.fg === nothing
         if g == false
-            prep_grad = prepare_gradient(f.f, adtype, x, Constant(p), strict=Val(false))
+            prep_grad = prepare_gradient(f.f, adtype, x, Constant(p), strict = Val(false))
         end
         function fg!(res, θ)
             (y, _) = value_and_gradient!(f.f, res, prep_grad, adtype, θ, Constant(p))
@@ -68,7 +68,7 @@ function OptimizationBase.instantiate_function(
     hess_sparsity = f.hess_prototype
     hess_colors = f.hess_colorvec
     if h == true && f.hess === nothing
-        prep_hess = prepare_hessian(f.f, soadtype, x, Constant(p), strict=Val(false))
+        prep_hess = prepare_hessian(f.f, soadtype, x, Constant(p), strict = Val(false))
         function hess(res, θ)
             hessian!(f.f, res, prep_hess, soadtype, θ, Constant(p))
         end
@@ -85,13 +85,17 @@ function OptimizationBase.instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(G, H, θ)
-            (y, _, _) = value_derivative_and_second_derivative!(
+            (y,
+                _,
+                _) = value_derivative_and_second_derivative!(
                 f.f, G, H, prep_hess, soadtype, θ, Constant(p))
             return y
         end
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(G, H, θ, p)
-                (y, _, _) = value_derivative_and_second_derivative!(
+                (y,
+                    _,
+                    _) = value_derivative_and_second_derivative!(
                     f.f, G, H, prep_hess, soadtype, θ, Constant(p))
                 return y
             end
@@ -143,7 +147,7 @@ function OptimizationBase.instantiate_function(
     cons_jac_prototype = f.cons_jac_prototype
     cons_jac_colorvec = f.cons_jac_colorvec
     if cons !== nothing && cons_j == true && f.cons_j === nothing
-        prep_jac = prepare_jacobian(cons_oop, adtype, x, strict=Val(false))
+        prep_jac = prepare_jacobian(cons_oop, adtype, x, strict = Val(false))
         function cons_j!(J, θ)
             jacobian!(cons_oop, J, prep_jac, adtype, θ)
             if size(J, 1) == 1
@@ -157,7 +161,8 @@ function OptimizationBase.instantiate_function(
     end
 
     if f.cons_vjp === nothing && cons_vjp == true && cons !== nothing
-        prep_pullback = prepare_pullback(cons_oop, adtype, x, (ones(eltype(x), num_cons),), strict=Val(false))
+        prep_pullback = prepare_pullback(
+            cons_oop, adtype, x, (ones(eltype(x), num_cons),), strict = Val(false))
         function cons_vjp!(J, θ, v)
             pullback!(cons_oop, (J,), prep_pullback, adtype, θ, (v,))
         end
@@ -169,7 +174,7 @@ function OptimizationBase.instantiate_function(
 
     if cons !== nothing && f.cons_jvp === nothing && cons_jvp == true
         prep_pushforward = prepare_pushforward(
-            cons_oop, adtype, x, (ones(eltype(x), length(x)),), strict=Val(false))
+            cons_oop, adtype, x, (ones(eltype(x), length(x)),), strict = Val(false))
         function cons_jvp!(J, θ, v)
             pushforward!(cons_oop, (J,), prep_pushforward, adtype, θ, (v,))
         end
@@ -182,7 +187,8 @@ function OptimizationBase.instantiate_function(
     conshess_sparsity = f.cons_hess_prototype
     conshess_colors = f.cons_hess_colorvec
     if cons !== nothing && cons_h == true && f.cons_h === nothing
-        prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i), strict=Val(false))
+        prep_cons_hess = [prepare_hessian(
+                              cons_oop, soadtype, x, Constant(i), strict = Val(false))
                           for i in 1:num_cons]
 
         function cons_h!(H, θ)
@@ -201,7 +207,7 @@ function OptimizationBase.instantiate_function(
     if f.lag_h === nothing && cons !== nothing && lag_h == true
         lag_extras = prepare_hessian(
             lagrangian, soadtype, x, Constant(one(eltype(x))),
-            Constant(ones(eltype(x), num_cons)), Constant(p), strict=Val(false))
+            Constant(ones(eltype(x), num_cons)), Constant(p), strict = Val(false))
         lag_hess_prototype = zeros(Bool, num_cons, length(x))
 
         function lag_h!(H::AbstractMatrix, θ, σ, λ)
@@ -294,7 +300,8 @@ function OptimizationBase.instantiate_function(
     adtype, soadtype = OptimizationBase.generate_sparse_adtype(adtype)
 
     if g == true && f.grad === nothing
-        extras_grad = prepare_gradient(f.f, adtype.dense_ad, x, Constant(p), strict=Val(false))
+        extras_grad = prepare_gradient(
+            f.f, adtype.dense_ad, x, Constant(p), strict = Val(false))
         function grad(res, θ)
             gradient!(f.f, res, extras_grad, adtype.dense_ad, θ, Constant(p))
         end
@@ -311,16 +318,19 @@ function OptimizationBase.instantiate_function(
 
     if fg == true && f.fg === nothing
         if g == false
-            extras_grad = prepare_gradient(f.f, adtype.dense_ad, x, Constant(p), strict=Val(false))
+            extras_grad = prepare_gradient(
+                f.f, adtype.dense_ad, x, Constant(p), strict = Val(false))
         end
         function fg!(res, θ)
-            (y, _) = value_and_gradient!(
+            (y,
+                _) = value_and_gradient!(
                 f.f, res, extras_grad, adtype.dense_ad, θ, Constant(p))
             return y
         end
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fg!(res, θ, p)
-                (y, _) = value_and_gradient!(
+                (y,
+                    _) = value_and_gradient!(
                     f.f, res, extras_grad, adtype.dense_ad, θ, Constant(p))
                 return y
             end
@@ -334,7 +344,7 @@ function OptimizationBase.instantiate_function(
     hess_sparsity = f.hess_prototype
     hess_colors = f.hess_colorvec
     if h == true && f.hess === nothing
-        prep_hess = prepare_hessian(f.f, soadtype, x, Constant(p), strict=Val(false))
+        prep_hess = prepare_hessian(f.f, soadtype, x, Constant(p), strict = Val(false))
         function hess(res, θ)
             hessian!(f.f, res, prep_hess, soadtype, θ, Constant(p))
         end
@@ -354,14 +364,18 @@ function OptimizationBase.instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(G, H, θ)
-            (y, _, _) = value_derivative_and_second_derivative!(
+            (y,
+                _,
+                _) = value_derivative_and_second_derivative!(
                 f.f, G, H, θ, prep_hess, soadtype, Constant(p))
             return y
         end
 
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(G, H, θ, p)
-                (y, _, _) = value_derivative_and_second_derivative!(
+                (y,
+                    _,
+                    _) = value_derivative_and_second_derivative!(
                     f.f, G, H, θ, prep_hess, soadtype, Constant(p))
                 return y
             end
@@ -458,7 +472,8 @@ function OptimizationBase.instantiate_function(
     conshess_sparsity = f.cons_hess_prototype
     conshess_colors = f.cons_hess_colorvec
     if cons !== nothing && f.cons_h === nothing && cons_h == true
-        prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i), strict=Val(false))
+        prep_cons_hess = [prepare_hessian(
+                              cons_oop, soadtype, x, Constant(i), strict = Val(false))
                           for i in 1:num_cons]
         colores = getfield.(prep_cons_hess, :coloring_result)
         conshess_sparsity = getfield.(colores, :A)
@@ -479,7 +494,7 @@ function OptimizationBase.instantiate_function(
     if cons !== nothing && f.lag_h === nothing && lag_h == true
         lag_extras = prepare_hessian(
             lagrangian, soadtype, x, Constant(one(eltype(x))),
-            Constant(ones(eltype(x), num_cons)), Constant(p), strict=Val(false))
+            Constant(ones(eltype(x), num_cons)), Constant(p), strict = Val(false))
         lag_hess_prototype = lag_extras.coloring_result.A
         lag_hess_colors = lag_extras.coloring_result.color
 
src/OptimizationDIExt.jl

Lines changed: 12 additions & 4 deletions
@@ -78,13 +78,17 @@ function instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(G, H, θ)
-            (y, _, _) = value_derivative_and_second_derivative!(
+            (y,
+                _,
+                _) = value_derivative_and_second_derivative!(
                 f.f, G, H, prep_hess, soadtype, θ, Constant(p))
             return y
         end
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(G, H, θ, p)
-                (y, _, _) = value_derivative_and_second_derivative!(
+                (y,
+                    _,
+                    _) = value_derivative_and_second_derivative!(
                     f.f, G, H, prep_hess, soadtype, θ, Constant(p))
                 return y
             end
@@ -338,13 +342,17 @@ function instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(θ)
-            (y, G, H) = value_derivative_and_second_derivative(
+            (y,
+                G,
+                H) = value_derivative_and_second_derivative(
                 f.f, prep_hess, adtype, θ, Constant(p))
             return y, G, H
         end
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(θ, p)
-                (y, G, H) = value_derivative_and_second_derivative(
+                (y,
+                    G,
+                    H) = value_derivative_and_second_derivative(
                     f.f, prep_hess, adtype, θ, Constant(p))
                 return y, G, H
             end

src/OptimizationDISparseExt.jl

Lines changed: 16 additions & 6 deletions
@@ -41,13 +41,15 @@ function instantiate_function(
         prep_grad = prepare_gradient(f.f, adtype.dense_ad, x, Constant(p))
     end
     function fg!(res, θ)
-        (y, _) = value_and_gradient!(
+        (y,
+            _) = value_and_gradient!(
             f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p))
         return y
     end
     if p !== SciMLBase.NullParameters()
         function fg!(res, θ, p)
-            (y, _) = value_and_gradient!(
+            (y,
+                _) = value_and_gradient!(
                 f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p))
             return y
         end
@@ -81,13 +83,17 @@ function instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(G, H, θ)
-            (y, _, _) = value_derivative_and_second_derivative!(
+            (y,
+                _,
+                _) = value_derivative_and_second_derivative!(
                 f.f, G, H, prep_hess, soadtype.dense_ad, θ, Constant(p))
             return y
         end
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(G, H, θ, p)
-                (y, _, _) = value_derivative_and_second_derivative!(
+                (y,
+                    _,
+                    _) = value_derivative_and_second_derivative!(
                     f.f, G, H, prep_hess, soadtype.dense_ad, θ, Constant(p))
                 return y
             end
@@ -336,14 +342,18 @@ function instantiate_function(
 
     if fgh == true && f.fgh === nothing
         function fgh!(θ)
-            (y, G, H) = value_derivative_and_second_derivative(
+            (y,
+                G,
+                H) = value_derivative_and_second_derivative(
                 f.f, prep_hess, soadtype, θ, Constant(p))
             return y, G, H
         end
 
         if p !== SciMLBase.NullParameters() && p !== nothing
             function fgh!(θ, p)
-                (y, G, H) = value_derivative_and_second_derivative(
+                (y,
+                    G,
+                    H) = value_derivative_and_second_derivative(
                     f.f, prep_hess, soadtype, θ, Constant(p))
                 return y, G, H
             end

test/matrixvalued.jl

Lines changed: 2 additions & 1 deletion
@@ -13,7 +13,8 @@ using Test, ReverseDiff
     # 1. Matrix Factorization
     @show adtype
    function matrix_factorization_objective(X, A)
-        U, V = @view(X[1:size(A, 1), 1:Int(size(A, 2) / 2)]),
+        U,
+        V = @view(X[1:size(A, 1), 1:Int(size(A, 2) / 2)]),
         @view(X[1:size(A, 1), (Int(size(A, 2) / 2) + 1):size(A, 2)])
         return norm(A - U * V')
     end
