```diff
         # raise NotImplementedError("You *need* to reimplement hash, even if it's just python's default. See the documentation for more info.")

+    def _addparam(self, *a, **kw):
+        # Add it here because many don't even have params. This avoids misuse.
+        if not hasattr(self, '_params'):
+            self._params = []
+
+        param = df.Param(*a, **kw)
+        self._params.append(param)
+        return param
+
```
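For context, a layer built on this base class would register its learnable values through `_addparam` rather than assigning bare attributes. Below is a minimal sketch of that usage; the `Linear` layer, the initializer arguments, and passing an initial ndarray straight to `df.Param` are all assumptions for illustration, not the library's documented API:

```python
import numpy as np
import DeepFried2 as df  # assuming the usual import alias for this library

class Linear(df.Module):
    def __init__(self, nin, nout):
        df.Module.__init__(self)
        # _addparam lazily creates self._params on first use, wraps the
        # value in a df.Param, records it, and returns it. The df.Param
        # constructor arguments here are an assumption for illustration.
        self.weight = self._addparam(0.01 * np.random.randn(nin, nout))
        self.bias = self._addparam(np.zeros(nout))
```

The same commit removes the old attribute-based parameter collection: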
```diff
     def zero_grad_parameters(self):
-        _, grads = self.unique_parameters()  # Here, it's just a matter of performance. But even then, not really.
-        for grad in grads:
-            grad.set_value(_np.zeros_like(grad.get_value()))
-
-    def parameters(self):
-        params, grads = [], []
-
-        if hasattr(self, 'weight'):
-            assert hasattr(self, 'grad_weight'), "The layer {} has a `weight` variable but no `grad_weight`, you probably forgot to implement it.".format(df.utils.typename(self))
-            params += [self.weight]
-            grads += [self.grad_weight]
-
-        if hasattr(self, 'bias'):
-            assert hasattr(self, 'grad_bias'), "The layer {} has a `bias` variable but no `grad_bias`, you probably forgot to implement it.".format(df.utils.typename(self))
-            params += [self.bias]
-            grads += [self.grad_bias]
-
-        return params, grads
-
-    def unique_parameters(self):
-        # We actually need to remove duplicates from the list of parameters
-        # (and their corresponding gradients) in order to support reusing
```
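The truncated comment above states the purpose of the removed `unique_parameters`: when a parameter is reused by several layers (e.g. tied weights), it appears once per layer in `parameters()`, so it must be deduplicated or each optimizer step would update it multiple times. A standalone sketch of that deduplication, not the library's actual implementation:

```python
def unique_parameters(params, grads):
    # Drop duplicate parameters (and their matching gradients) while
    # preserving order. Object identity is the right notion of "same"
    # here, since layers sharing a parameter reference the very same
    # object, and shared variables need not be hashable by value.
    seen, uparams, ugrads = set(), [], []
    for param, grad in zip(params, grads):
        if id(param) not in seen:
            seen.add(id(param))
            uparams.append(param)
            ugrads.append(grad)
    return uparams, ugrads
```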