
Commit 503dd3d

fix Parametric Heteroscedastic Model
1 parent 0525191 commit 503dd3d


2 files changed: 4 additions, 6 deletions

examples/3-parametric-heteroscedastic/Project.toml

Lines changed: 2 additions & 2 deletions
@@ -2,13 +2,14 @@
 AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 AbstractGPsMakie = "7834405d-1089-4985-bd30-732a30b92057"
 CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
+DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 KernelFunctions = "ec8451be-7e33-11e9-00cf-bbf324bd1392"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
+Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 Optim = "429524aa-4258-5aef-a3af-852621145aeb"
 ParameterHandling = "2412ca09-6db7-441c-8e3a-88d5709968c5"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
-Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
 AbstractGPs = "0.5"
@@ -18,4 +19,3 @@ KernelFunctions = "0.10"
 Literate = "2"
 Optim = "1"
 ParameterHandling = "0.4, 0.5"
-Zygote = "0.6, 0.7"

examples/3-parametric-heteroscedastic/script.jl

Lines changed: 2 additions & 4 deletions
@@ -11,7 +11,7 @@
 using AbstractGPs
 using AbstractGPsMakie
 using CairoMakie
-using DifferentiationInterface
+import DifferentiationInterface as DI
 using KernelFunctions
 using Mooncake
 using Optim
@@ -51,11 +51,9 @@ end;
 # We do not derive and implement the gradient function manually here but instead use reverse-mode automatic differentiation with DifferentiationInterface + Mooncake.
 # When computing gradients, the objective function is evaluated as well.
 # We can exploit this and [avoid re-evaluating the objective function](https://julianlsolvers.github.io/Optim.jl/stable/#user/tipsandtricks/#avoid-repeating-computations) in such cases.
-backend = AutoMooncake()
 function objective_and_gradient(F, G, flat_θ)
     if G !== nothing
-        val, grad = value_and_gradient(objective, backend, flat_θ)
-        copyto!(G, only(grad))
+        val, grad = DI.value_and_gradient!(objective, G, AutoMooncake(), flat_θ)
         if F !== nothing
             return val
         end
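For context, the pattern the new call slots into is Optim's only_fg! interface linked above, which lets the objective value and the gradient share a single evaluation. The following is a self-contained sketch rather than the example script: the quadratic objective, x0, LBFGS(), and the qualified DI.AutoMooncake() (which assumes DifferentiationInterface re-exports the ADTypes backend constructors) are stand-ins, not taken from the commit.

import DifferentiationInterface as DI
using Mooncake
using Optim

objective(x) = sum(abs2, x .- 1)  # placeholder for the script's flattened GP objective

function objective_and_gradient(F, G, flat_θ)
    if G !== nothing
        # value_and_gradient! writes the gradient into G in place and returns
        # the objective value, so one evaluation serves both requests.
        val, _ = DI.value_and_gradient!(objective, G, DI.AutoMooncake(), flat_θ)
        if F !== nothing
            return val
        end
    elseif F !== nothing
        return objective(flat_θ)
    end
    return nothing
end

x0 = zeros(3)
result = optimize(Optim.only_fg!(objective_and_gradient), x0, LBFGS())

Writing the gradient directly into Optim's buffer G is what makes the old copyto!(G, only(grad)) line unnecessary; the shared backend binding is simply inlined as AutoMooncake() at the call site.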
