import theano.tensor as tt

from ..model import FreeRV
-from ..theanof import gradient
+from ..theanof import gradient, floatX
from . import distribution
from ..math import logit, invlogit
+from .distribution import draw_values
import numpy as np

__all__ = ['transform', 'stick_breaking', 'logodds', 'interval',
@@ -22,6 +23,9 @@ class Transform(object):
    def forward(self, x):
        raise NotImplementedError

+    def forward_val(self, x, point):
+        raise NotImplementedError
+
    def backward(self, z):
        raise NotImplementedError
@@ -55,6 +59,7 @@ def __init__(self, dist, transform, *args, **kwargs):
        arguments to Distribution"""
        forward = transform.forward
        testval = forward(dist.default())
+        forward_val = transform.forward_val

        self.dist = dist
        self.transform_used = transform
@@ -85,6 +90,9 @@ def backward(self, x):
    def forward(self, x):
        return tt.log(x)
+
+    def forward_val(self, x, point=None):
+        return self.forward(x)

    def jacobian_det(self, x):
        return x
@@ -103,6 +111,9 @@ def backward(self, x):
    def forward(self, x):
        return logit(x)
+
+    def forward_val(self, x, point=None):
+        return self.forward(x)

logodds = LogOdds()
@@ -125,6 +136,14 @@ def forward(self, x):
        a, b = self.a, self.b
        return tt.log(x - a) - tt.log(b - x)

+    def forward_val(self, x, point=None):
+        # 2017-06-19
+        # the `self.a-0.` below is important for the testval to propagate
+        # For an explanation see pull/2328#issuecomment-309303811
+        a, b = draw_values([self.a - 0., self.b - 0.],
+                           point=point)
+        return floatX(tt.log(x - a) - tt.log(b - x))
+
    def jacobian_det(self, x):
        s = tt.nnet.softplus(-x)
        return tt.log(self.b - self.a) - 2 * s - x
@@ -147,8 +166,15 @@ def backward(self, x):
    def forward(self, x):
        a = self.a
-        r = tt.log(x - a)
-        return r
+        return tt.log(x - a)
+
+    def forward_val(self, x, point=None):
+        # 2017-06-19
+        # the `self.a-0.` below is important for the testval to propagate
+        # For an explanation see pull/2328#issuecomment-309303811
+        a = draw_values([self.a - 0.],
+                        point=point)[0]
+        return floatX(tt.log(x - a))

    def jacobian_det(self, x):
        return x
@@ -171,8 +197,15 @@ def backward(self, x):
    def forward(self, x):
        b = self.b
-        r = tt.log(b - x)
-        return r
+        return tt.log(b - x)
+
+    def forward_val(self, x, point=None):
+        # 2017-06-19
+        # the `self.b-0.` below is important for the testval to propagate
+        # For an explanation see pull/2328#issuecomment-309303811
+        b = draw_values([self.b - 0.],
+                        point=point)[0]
+        return floatX(tt.log(b - x))

    def jacobian_det(self, x):
        return x
@@ -191,6 +224,9 @@ def backward(self, y):
    def forward(self, x):
        return x[:-1]

+    def forward_val(self, x, point=None):
+        return self.forward(x)
+
    def jacobian_det(self, x):
        return 0
@@ -224,6 +260,9 @@ def forward(self, x_):
        y = logit(z) - eq_share
        return y.T

+    def forward_val(self, x, point=None):
+        return self.forward(x)
+
    def backward(self, y_):
        y = y_.T
        Km1 = y.shape[0]
@@ -262,6 +301,9 @@ def backward(self, y):
    def forward(self, x):
        return tt.as_tensor_variable(x)

+    def forward_val(self, x, point=None):
+        return self.forward(x)
+
    def jacobian_det(self, x):
        return 0
@@ -280,5 +322,8 @@ def backward(self, x):
    def forward(self, y):
        return tt.advanced_set_subtensor1(y, tt.log(y[self.diag_idxs]), self.diag_idxs)

+    def forward_val(self, x, point=None):
+        return self.forward(x)
+
    def jacobian_det(self, y):
        return tt.sum(y[self.diag_idxs])
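The new forward_val methods mirror forward, but for the bounded transforms they first resolve the bounds to numeric values with draw_values(..., point=point), so a distribution's test value can propagate through the transform instead of remaining purely symbolic. A minimal NumPy-only sketch of that idea follows; the ToyInterval class and its point lookup are illustrative assumptions, not part of this patch or of pymc3.

import numpy as np

class ToyInterval:
    """Toy stand-in for the Interval transform: maps (a, b) onto the real line."""

    def __init__(self, a, b):
        self.a, self.b = a, b

    def forward(self, x):
        # In pymc3 this is a symbolic Theano expression; plain arithmetic here.
        return np.log(x - self.a) - np.log(self.b - x)

    def forward_val(self, x, point=None):
        # Resolve the bounds numerically first (pymc3 uses draw_values(point=...)),
        # then apply the same logit-style mapping as forward().
        a = point.get('a', self.a) if point else self.a
        b = point.get('b', self.b) if point else self.b
        return np.log(x - a) - np.log(b - x)

tr = ToyInterval(0., 5.)
print(tr.forward_val(2.5))                            # bounds taken from the transform
print(tr.forward_val(2.5, point={'a': 1., 'b': 6.}))  # bounds resolved from the point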