4 files changed, +0 −55 lines changed

@@ -6,7 +6,6 @@ Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697"
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
 Combinatorics = "861a8166-3701-5b0c-9a16-15d98fcdc6aa"
-Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
@@ -36,7 +35,6 @@ Accessors = "0.1"
 Aqua = "0.8"
 Bijectors = "0.15.1"
 Combinatorics = "1"
-Compat = "4.3.0"
 DifferentiationInterface = "0.6.41, 0.7"
 Distributions = "0.25"
 DistributionsAD = "0.6.3"
This file was deleted.
@@ -15,7 +15,6 @@ using MCMCChains
 using StableRNGs
 using ReverseDiff
 using Zygote
-using Compat

 using Distributed
 using LinearAlgebra
@@ -79,9 +78,6 @@ include("test_util.jl")
     end

     if GROUP == "All" || GROUP == "Group2"
-        @testset "compat" begin
-            include(joinpath("compat", "ad.jl"))
-        end
         @testset "extensions" begin
             include("ext/DynamicPPLMCMCChainsExt.jl")
             include("ext/DynamicPPLJETExt.jl")
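For reference, a minimal sketch of the GROUP-based gating pattern seen in the hunk above. How GROUP is defined is not shown in this diff; reading it from an environment variable with a default of "All" is an assumption:

    using Test

    # Assumed: the test group is selected via an environment variable.
    const GROUP = get(ENV, "GROUP", "All")

    if GROUP == "All" || GROUP == "Group2"
        @testset "extensions" begin
            include("ext/DynamicPPLMCMCChainsExt.jl")
            include("ext/DynamicPPLJETExt.jl")
        end
    end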
@@ -8,26 +8,6 @@
 end
 const gdemo_default = gdemo_d()

-# TODO(penelopeysm): Remove this (and also test/compat/ad.jl)
-function test_model_ad(model, logp_manual)
-    vi = VarInfo(model)
-    x = vi[:]
-
-    # Log probabilities using the model.
-    ℓ = DynamicPPL.LogDensityFunction(model, vi)
-    logp_model = Base.Fix1(LogDensityProblems.logdensity, ℓ)
-
-    # Check that both functions return the same values.
-    lp = logp_manual(x)
-    @test logp_model(x) ≈ lp
-
-    # Gradients based on the manual implementation.
-    grad = ForwardDiff.gradient(logp_manual, x)
-
-    # Gradients based on the model.
-    @test ForwardDiff.gradient(logp_model, x) ≈ grad
-end
-
 """
     short_varinfo_name(vi::AbstractVarInfo)
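The removed test_model_ad helper compared a model's log density, and its ForwardDiff gradient, against a hand-written log-density function. A minimal sketch of doing the same check inline, without the helper, is below. The demo model and the manual log-density closure are illustrative placeholders, and the LogDensityFunction(model, vi) call simply mirrors the removed code (the constructor signature may differ across DynamicPPL versions):

    using DynamicPPL, Distributions, ForwardDiff, LogDensityProblems, Test

    # Illustrative model (not part of this diff).
    @model function demo()
        s ~ InverseGamma(2, 3)
        m ~ Normal(0, sqrt(s))
    end

    model = demo()
    vi = VarInfo(model)
    x = vi[:]  # parameter values in order of appearance: (s, m)

    # Log density via the model, as in the removed helper.
    ℓ = DynamicPPL.LogDensityFunction(model, vi)
    logp_model = Base.Fix1(LogDensityProblems.logdensity, ℓ)

    # Hand-written log joint for the same model.
    logp_manual(x) = logpdf(InverseGamma(2, 3), x[1]) + logpdf(Normal(0, sqrt(x[1])), x[2])

    # Values and ForwardDiff gradients should agree.
    @test logp_model(x) ≈ logp_manual(x)
    @test ForwardDiff.gradient(logp_model, x) ≈ ForwardDiff.gradient(logp_manual, x)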