@@ -101,20 +101,6 @@ def samples_to_broadcast_to(request, samples_to_broadcast):
    return to_shape, size, samples, broadcast_shape


-@pytest.fixture
-def fixture_model():
-    with pm.Model() as model:
-        n = 5
-        dim = 4
-        with pm.Model():
-            cov = pm.InverseGamma("cov", alpha=1, beta=1)
-            x = pm.Normal("x", mu=np.ones((dim,)), sigma=pm.math.sqrt(cov), shape=(n, dim))
-            eps = pm.HalfNormal("eps", np.ones((n, 1)), shape=(n, dim))
-            mu = pm.Deterministic("mu", at.sum(x + eps, axis=-1))
-            y = pm.Normal("y", mu=mu, sigma=1, shape=(n,))
-    return model, [cov, x, eps, y]
-
-
class TestShapesBroadcasting:
    @pytest.mark.parametrize(
        "bad_input",
@@ -215,16 +201,6 @@ def test_broadcast_dist_samples_to(self, samples_to_broadcast_to):
                broadcast_dist_samples_to(to_shape, samples, size=size)


-@pytest.mark.xfail(reason="InverseGamma was not yet refactored")
-def test_sample_generate_values(fixture_model, fixture_sizes):
-    model, RVs = fixture_model
-    size = to_tuple(fixture_sizes)
-    with model:
-        prior = pm.sample_prior_predictive(samples=fixture_sizes)
-        for rv in RVs:
-            assert prior[rv.name].shape == size + tuple(rv.distribution.shape)
-
-
class TestShapeDimsSize:
    @pytest.mark.parametrize("param_shape", [(), (3,)])
    @pytest.mark.parametrize("batch_shape", [(), (3,)])