-'''
-Created on Mar 7, 2011
-
-@author: johnsalvatier
-'''
from point import *
from types import *

from theano import theano, tensor as t, function
from theano.gof.graph import inputs

-import numpy as np
+import numpy as np
from functools import wraps

-__all__ = ['Model', 'compilef', 'gradient', 'hessian', 'modelcontext', 'Point']
+__all__ = ['Model', 'compilef', 'gradient', 'hessian', 'modelcontext', 'Point']



-class Context(object):
-    def __enter__(self):
+class Context(object):
+    def __enter__(self):
        type(self).get_contexts().append(self)
        return self

@@ -28,7 +23,7 @@ def __exit__(self, typ, value, traceback):
    def get_contexts(cls):
        if not hasattr(cls, "contexts"):
            cls.contexts = []
-
+
        return cls.contexts

    @classmethod
@@ -38,45 +33,45 @@ def get_context(cls):
        except IndexError:
            raise TypeError("No context on context stack")

-def modelcontext(model):
+def modelcontext(model):
    if model is None:
-        return Model.get_context()
+        return Model.get_context()
    return model

class Model(Context):
    """
-    Base class for encapsulation of the variables and
+    Base class for encapsulation of the variables and
    likelihood factors of a model.
    """

    def __init__(self):
        self.vars = []
        self.factors = []
-
+
    @property
    def logp(model):
        """
        log-probability of the model
-
+
        Parameters
        ----------
-
-        model : Model
+
+        model : Model

        Returns
        -------

        logp : Theano scalar
-
+
        """
        return t.add(*map(t.sum, model.factors))

    @property
-    def logpc(model):
+    def logpc(model):
        """Compiled log probability density function"""
        return compilef(model.logp)

-    def dlogpc(model, vars=None):
+    def dlogpc(model, vars=None):
        """Compiled log probability density gradient function"""
        return compilef(gradient(model.logp, vars))

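# A sketch (editor's illustration, not part of this commit) of how the
# context machinery above behaves. It uses only names defined in this file:
#
#     with Model() as model:
#         assert modelcontext(None) is model  # found via Context.get_context()
#     # on __exit__ the model is popped; get_context() now raises TypeError
#
# model.logp then folds every factor into a single Theano scalar via
# t.add(*map(t.sum, model.factors)), i.e. the joint log-density is the sum
# of the per-variable log-probability terms.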
@@ -92,7 +87,7 @@ def test_point(self):
    @property
    def cont_vars(model):
        """All the continuous variables in the model"""
-        return typefilter(model.vars, continuous_types)
+        return typefilter(model.vars, continuous_types)

"""
these functions add random variables
@@ -108,8 +103,8 @@ def Var(model, name, dist):
    model.factors.append(dist.logp(var))
    return var

-def TransformedVar(model, name, dist, trans):
-    tvar = model.Var(trans.name + '_' + name, trans.apply(dist))
+def TransformedVar(model, name, dist, trans):
+    tvar = model.Var(trans.name + '_' + name, trans.apply(dist))

    return named(trans.backward(tvar), name), tvar

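# A sketch (editor's illustration, not part of this commit) of the
# TransformedVar contract. `logtrans` is a hypothetical transform object
# with the .name, .apply(dist) and .backward(tvar) interface used above:
#
#     backward_var, tvar = TransformedVar(model, 'tau', dist, logtrans)
#
# This registers a variable named 'log_tau' (trans.name + '_' + name) whose
# logp factor comes from the transformed distribution, and returns both the
# untransformed view (named 'tau' via named()) and the raw variable.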
@@ -118,26 +113,26 @@ def AddPotential(model, potential):


def Point(*args, **kwargs):
-    """
-    Build a point. Uses same args as dict() does.
-    Filters out variables not in the model. All keys are strings.
+    """
+    Build a point. Uses same args as dict() does.
+    Filters out variables not in the model. All keys are strings.

    Parameters
    ----------
-    *args, **kwargs
+    *args, **kwargs
        arguments to build a dict
    """
    if 'model' in kwargs:
        model = kwargs['model']
        del kwargs['model']
-    else:
+    else:
        model = Model.get_context()

    d = dict(*args, **kwargs)
    varnames = map(str, model.vars)
-    return dict((str(k), np.array(v))
-                for (k, v) in d.iteritems()
-                if str(k) in varnames)
+    return dict((str(k), np.array(v))
+                for (k, v) in d.iteritems()
+                if str(k) in varnames)

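# A sketch (editor's illustration, not part of this commit): Point takes the
# same arguments as dict(), drops keys that do not name a model variable,
# and coerces values to numpy arrays:
#
#     Point(x=1.0, junk=5)          # -> {'x': array(1.0)} if 'x' is a model var
#     Point({'x': 1.0}, model=m)    # the model may also be passed explicitly
#
# Note that d.iteritems() pins this code to Python 2; Python 3 would need
# d.items().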

def compilef(outs, mode=None):
@@ -148,42 +143,42 @@ def compilef(outs, mode = None):
    ----------
    outs : Theano variable or iterable of Theano variables
    mode : Theano compilation mode
-
+
    Returns
    -------
    Compiled Theano function
    """
    return PointFunc(
-        function(inputvars(outs), outs,
-                 allow_input_downcast=True,
+        function(inputvars(outs), outs,
+                 allow_input_downcast=True,
                 on_unused_input='ignore',
                 mode=mode)
    )

def named(var, name):
-    var.name = name
+    var.name = name
    return var

def as_iterargs(data):
-    if isinstance(data, tuple):
+    if isinstance(data, tuple):
        return data
    if hasattr(data, 'columns'):  # data frames
-        return [np.asarray(data[c]) for c in data.columns]
+        return [np.asarray(data[c]) for c in data.columns]
    else:
        return [data]

-def makeiter(a):
+def makeiter(a):
    if isinstance(a, (tuple, list)):
        return a
    else:
        return [a]

-def inputvars(a):
+def inputvars(a):
    return [v for v in inputs(makeiter(a)) if isinstance(v, t.TensorVariable)]

"""
-Theano derivative functions
-"""
+Theano derivative functions
+"""


def cont_inputs(f):
    return typefilter(inputvars(f), continuous_types)
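# A sketch (editor's illustration, not part of this commit): inputvars()
# extracts the free TensorVariables of a graph, and compilef() compiles them
# into a Theano function wrapped in PointFunc (imported from `point`; the
# assumption here is that the wrapper accepts a Point-style dict rather than
# positional arrays):
#
#     logp = compilef(model.logp)   # exactly what Model.logpc returns
#     logp(Point(x=0.0))            # evaluate the joint log-density at a point
#
# on_unused_input='ignore' lets a Point carry variables that a particular
# output graph never uses.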
@@ -193,7 +188,7 @@ def gradient1(f, v):
    return t.flatten(t.grad(f, v, disconnected_inputs='warn'))

def gradient(f, vars=None):
-    if not vars:
+    if not vars:
        vars = cont_inputs(f)

    return t.concatenate([gradient1(f, v) for v in vars], axis=0)
@@ -202,14 +197,14 @@ def jacobian1(f, v):
    """jacobian of f wrt v"""
    f = t.flatten(f)
    idx = t.arange(f.shape[0])
-
-    def grad_i(i):
+
+    def grad_i(i):
        return gradient1(f[i], v)

    return theano.map(grad_i, idx)[0]

def jacobian(f, vars=None):
-    if not vars:
+    if not vars:
        vars = cont_inputs(f)

    return t.concatenate([jacobian1(f, v) for v in vars], axis=1)
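# A sketch (editor's illustration, not part of this commit) of the derivative
# helpers. With an explicit vars list nothing depends on continuous_types:
#
#     x = t.dvector('x')
#     x.tag.test_value = np.zeros(3)   # required: compute_test_value is 'raise'
#     e = -(x ** 2).sum() / 2
#     g = function([x], gradient(e, [x]))
#     g(np.ones(3))                    # -> array([-1., -1., -1.])
#
# Note the sign convention in hessian() below: -jacobian(gradient(f)) is minus
# the mathematical Hessian, i.e. the observed information when f is a
# log-density.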
@@ -218,6 +213,6 @@ def hessian(f, vars = None):
    return -jacobian(gradient(f, vars), vars)


-#theano stuff
+#theano stuff
theano.config.warn.sum_div_dimshuffle_bug = False
theano.config.compute_test_value = 'raise'
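# Editor's note: compute_test_value = 'raise' makes Theano evaluate every
# expression on its inputs' tag.test_value as the graph is built, so shape
# and type errors surface at the line that creates them; this is why the
# sketch above attaches a test_value to x.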