Skip to content

Commit 61b06f6

Browse files
github-actions[bot], CompatHelper Julia, devmotion, yebai
authored
CompatHelper: bump compat for LogDensityProblems to 2, (keep existing compat) (#1917)
* CompatHelper: bump compat for LogDensityProblems to 2, (keep existing compat) * Use LogDensityProblemsAD * Fix tests for Gibbs (#1920) * Update ad.jl * Update ad.jl Co-authored-by: CompatHelper Julia <[email protected]> Co-authored-by: David Widmann <[email protected]> Co-authored-by: Hong Ge <[email protected]>
1 parent 51fddaf commit 61b06f6

File tree

14 files changed

+35
-27
lines changed

14 files changed

+35
-27
lines changed

Project.toml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
name = "Turing"
22
uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
3-
version = "0.23.0"
3+
version = "0.23.1"
44

55
[deps]
66
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
@@ -20,6 +20,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
2020
Libtask = "6f1fad26-d15e-5dc8-ae53-837a1d7b8c9f"
2121
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
2222
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
23+
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
2324
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
2425
NamedArrays = "86f7a689-2022-50b4-a561-43c23ac3c673"
2526
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
@@ -49,8 +50,9 @@ DocStringExtensions = "0.8, 0.9"
4950
DynamicPPL = "0.21"
5051
EllipticalSliceSampling = "0.5, 1"
5152
ForwardDiff = "0.10.3"
52-
LogDensityProblems = "0.12, 1"
5353
Libtask = "0.7, 0.8"
54+
LogDensityProblems = "2"
55+
LogDensityProblemsAD = "1"
5456
MCMCChains = "5"
5557
NamedArrays = "0.9"
5658
Reexport = "0.2, 1"

src/contrib/inference/dynamichmc.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ function gibbs_state(
4444
varinfo::AbstractVarInfo,
4545
)
4646
# Update the log density function and its cached evaluation.
47-
ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(varinfo, model, spl, DynamicPPL.DefaultContext()))
47+
ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(varinfo, model, spl, DynamicPPL.DefaultContext()))
4848
Q = DynamicHMC.evaluate_ℓ(ℓ, varinfo[spl])
4949
return DynamicNUTSState(ℓ, varinfo, Q, state.metric, state.stepsize)
5050
end
@@ -65,7 +65,7 @@ function DynamicPPL.initialstep(
6565
end
6666

6767
# Define log-density function.
68-
ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
68+
ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
6969

7070
# Perform initial step.
7171
results = DynamicHMC.mcmc_keep_warmup(

src/contrib/inference/sghmc.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ function DynamicPPL.initialstep(
6262

6363
# Compute initial sample and state.
6464
sample = Transition(vi)
65-
ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
65+
ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
6666
state = SGHMCState(ℓ, vi, zero(vi[spl]))
6767

6868
return sample, state
@@ -215,7 +215,7 @@ function DynamicPPL.initialstep(
215215

216216
# Create first sample and state.
217217
sample = SGLDTransition(vi, zero(spl.alg.stepsize(0)))
218-
ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
218+
ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
219219
state = SGLDState(ℓ, vi, 1)
220220

221221
return sample, state

src/essential/Essential.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ using StatsFuns: logsumexp, softmax
1616

1717
import AdvancedPS
1818
import LogDensityProblems
19+
import LogDensityProblemsAD
1920

2021
include("container.jl")
2122
include("ad.jl")

src/essential/ad.jl

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -78,11 +78,11 @@ Find the autodifferentiation backend of the algorithm `alg`.
7878
getADbackend(spl::Sampler) = getADbackend(spl.alg)
7979
getADbackend(::SampleFromPrior) = ADBackend()()
8080

81-
function LogDensityProblems.ADgradient(ℓ::Turing.LogDensityFunction)
82-
return LogDensityProblems.ADgradient(getADbackend(ℓ.sampler), ℓ)
81+
function LogDensityProblemsAD.ADgradient(ℓ::Turing.LogDensityFunction)
82+
return LogDensityProblemsAD.ADgradient(getADbackend(ℓ.sampler), ℓ)
8383
end
8484

85-
function LogDensityProblems.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensityFunction)
85+
function LogDensityProblemsAD.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensityFunction)
8686
θ = ℓ.varinfo[ℓ.sampler]
8787
f = Base.Fix1(LogDensityProblems.logdensity, ℓ)
8888

@@ -99,21 +99,21 @@ function LogDensityProblems.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensity
9999
ForwardDiff.GradientConfig(f, θ, ForwardDiff.Chunk(length(θ), chunk_size), tag)
100100
end
101101

102-
return LogDensityProblems.ADgradient(Val(:ForwardDiff), ℓ; gradientconfig=config)
102+
return LogDensityProblemsAD.ADgradient(Val(:ForwardDiff), ℓ; gradientconfig=config)
103103
end
104104

105-
function LogDensityProblems.ADgradient(::TrackerAD, ℓ::Turing.LogDensityFunction)
106-
return LogDensityProblems.ADgradient(Val(:Tracker), ℓ)
105+
function LogDensityProblemsAD.ADgradient(::TrackerAD, ℓ::Turing.LogDensityFunction)
106+
return LogDensityProblemsAD.ADgradient(Val(:Tracker), ℓ)
107107
end
108108

109-
function LogDensityProblems.ADgradient(::ZygoteAD, ℓ::Turing.LogDensityFunction)
110-
return LogDensityProblems.ADgradient(Val(:Zygote), ℓ)
109+
function LogDensityProblemsAD.ADgradient(::ZygoteAD, ℓ::Turing.LogDensityFunction)
110+
return LogDensityProblemsAD.ADgradient(Val(:Zygote), ℓ)
111111
end
112112

113113
for cache in (:true, :false)
114114
@eval begin
115-
function LogDensityProblems.ADgradient(::ReverseDiffAD{$cache}, ℓ::Turing.LogDensityFunction)
116-
return LogDensityProblems.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache))
115+
function LogDensityProblemsAD.ADgradient(::ReverseDiffAD{$cache}, ℓ::Turing.LogDensityFunction)
116+
return LogDensityProblemsAD.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache))
117117
end
118118
end
119119
end

src/inference/Inference.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ import BangBang
3131
import ..Essential: getADbackend
3232
import EllipticalSliceSampling
3333
import LogDensityProblems
34+
import LogDensityProblemsAD
3435
import Random
3536
import MCMCChains
3637
import StatsBase: predict

src/inference/hmc.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ function DynamicPPL.initialstep(
158158
# Create a Hamiltonian.
159159
metricT = getmetricT(spl.alg)
160160
metric = metricT(length(theta))
161-
ℓ = LogDensityProblems.ADgradient(
161+
ℓ = LogDensityProblemsAD.ADgradient(
162162
Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext())
163163
)
164164
logπ = Base.Fix1(LogDensityProblems.logdensity, ℓ)
@@ -264,7 +264,7 @@ end
264264

265265
function get_hamiltonian(model, spl, vi, state, n)
266266
metric = gen_metric(n, spl, state)
267-
ℓ = LogDensityProblems.ADgradient(
267+
ℓ = LogDensityProblemsAD.ADgradient(
268268
Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext())
269269
)
270270
ℓπ = Base.Fix1(LogDensityProblems.logdensity, ℓ)

src/modes/ModeEstimation.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ using DynamicPPL: Model, AbstractContext, VarInfo, VarName,
1212
get_and_set_val!, istrans
1313

1414
import LogDensityProblems
15+
import LogDensityProblemsAD
1516

1617
export constrained_space,
1718
MAP,
@@ -111,7 +112,7 @@ function (f::OptimLogDensity)(F, G, z)
111112
if G !== nothing
112113
# Calculate negative log joint and its gradient.
113114
# TODO: Make OptimLogDensity already an LogDensityProblems.ADgradient? Allow to specify AD?
114-
ℓ = LogDensityProblems.ADgradient(f)
115+
ℓ = LogDensityProblemsAD.ADgradient(f)
115116
neglogp, ∇neglogp = LogDensityProblems.logdensity_and_gradient(ℓ, z)
116117

117118
# Save the gradient to the pre-allocated array.

test/Project.toml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
1212
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
1313
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
1414
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
15+
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
1516
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
1617
NamedArrays = "86f7a689-2022-50b4-a561-43c23ac3c673"
1718
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
@@ -42,7 +43,8 @@ DynamicHMC = "2.1.6, 3.0"
4243
DynamicPPL = "0.21"
4344
FiniteDifferences = "0.10.8, 0.11, 0.12"
4445
ForwardDiff = "0.10.12 - 0.10.32"
45-
LogDensityProblems = "0.12, 1"
46+
LogDensityProblems = "2"
47+
LogDensityProblemsAD = "1"
4648
MCMCChains = "5"
4749
NamedArrays = "0.9.4"
4850
Optim = "0.22, 1.0"

test/essential/ad.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,14 +30,14 @@
3030
ℓ = Turing.LogDensityFunction(vi, ad_test_f, SampleFromPrior(), DynamicPPL.DefaultContext())
3131
x = map(x->Float64(x), vi[SampleFromPrior()])
3232

33-
trackerℓ = LogDensityProblems.ADgradient(TrackerAD(), ℓ)
34-
@test trackerℓ isa LogDensityProblems.TrackerGradientLogDensity
33+
trackerℓ = LogDensityProblemsAD.ADgradient(TrackerAD(), ℓ)
34+
@test trackerℓ isa LogDensityProblemsAD.TrackerGradientLogDensity
3535
@test trackerℓ.ℓ === ℓ
3636
∇E1 = LogDensityProblems.logdensity_and_gradient(trackerℓ, x)[2]
3737
@test sort(∇E1) ≈ grad_FWAD atol=1e-9
3838

39-
zygoteℓ = LogDensityProblems.ADgradient(ZygoteAD(), ℓ)
40-
@test zygoteℓ isa LogDensityProblems.ZygoteGradientLogDensity
39+
zygoteℓ = LogDensityProblemsAD.ADgradient(ZygoteAD(), ℓ)
40+
@test zygoteℓ isa LogDensityProblemsAD.ZygoteGradientLogDensity
4141
@test zygoteℓ.ℓ === ℓ
4242
∇E2 = LogDensityProblems.logdensity_and_gradient(zygoteℓ, x)[2]
4343
@test sort(∇E2) ≈ grad_FWAD atol=1e-9

0 commit comments

Comments
 (0)