Skip to content

Commit b00bea8

Browse files
committed
Make order and name of init parameters consistent.
1 parent 1b1bce1 commit b00bea8

File tree

3 files changed

+9
-9
lines changed

3 files changed

+9
-9
lines changed

beacon8/layers/Linear.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,16 @@
88

99
class Linear(Module):
1010

11-
def __init__(self, nin, nout, init=xavier(), with_bias=True, init_b=zero):
11+
def __init__(self, nin, nout, with_bias=True, initW=xavier(), initB=zero):
1212
Module.__init__(self)
1313

1414
self.nin = nin
1515
self.nout = nout
1616
self.with_bias = with_bias
1717

18-
self.weight, self.grad_weight = create_param_and_grad((nin, nout), init, fan=(nin, nout), name='Wlin_{}x{}'.format(nin, nout))
18+
self.weight, self.grad_weight = create_param_and_grad((nin, nout), initW, fan=(nin, nout), name='Wlin_{}x{}'.format(nin, nout))
1919
if self.with_bias:
20-
self.bias, self.grad_bias = create_param_and_grad(nout, init_b, name='blin_{}'.format(nout))
20+
self.bias, self.grad_bias = create_param_and_grad(nout, initB, name='blin_{}'.format(nout))
2121

2222
def symb_forward(self, symb_input):
2323
out = _th.tensor.dot(symb_input, self.weight)

beacon8/layers/SpatialConvolution.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88

99
class SpatialConvolution(Module):
10-
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, with_bias=True, border_mode='valid', imshape=None, init=xavier(), init_b=zero):
10+
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, with_bias=True, initW=xavier(), initB=zero, border_mode='valid', imshape=None):
1111
Module.__init__(self)
1212
self.n_input_plane = n_input_plane
1313
self.n_output_plane = n_output_plane
@@ -22,9 +22,9 @@ def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, with_b
2222
self.w_shape = (n_output_plane, n_input_plane, k_h, k_w)
2323
w_fan = (n_input_plane*k_w*k_h, n_output_plane*k_w*k_h)
2424

25-
self.weight, self.grad_weight = create_param_and_grad(self.w_shape, init, fan=w_fan, name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))
25+
self.weight, self.grad_weight = create_param_and_grad(self.w_shape, initW, fan=w_fan, name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))
2626
if self.with_bias:
27-
self.bias, self.grad_bias = create_param_and_grad(n_output_plane, init_b, name='bconv_{}'.format(n_output_plane))
27+
self.bias, self.grad_bias = create_param_and_grad(n_output_plane, initB, name='bconv_{}'.format(n_output_plane))
2828

2929
def symb_forward(self, symb_input):
3030
conv_output = _th.tensor.nnet.conv.conv2d(symb_input, self.weight,

beacon8/layers/SpatialConvolutionCUDNN.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99

1010
class SpatialConvolutionCUDNN(Module):
11-
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, init=xavier(), init_b=zero):
11+
def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=0, pad_h=0, with_bias=True, initW=xavier(), initB=zero):
1212
Module.__init__(self)
1313
self.n_input_plane = n_input_plane
1414
self.n_output_plane = n_output_plane
@@ -23,9 +23,9 @@ def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, pad_w=
2323
w_shape = (n_output_plane, n_input_plane, k_h, k_w)
2424
w_fan = (n_input_plane*k_w*k_h, n_output_plane*k_w*k_h)
2525

26-
self.weight, self.grad_weight = create_param_and_grad(w_shape, init, fan=w_fan, name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))
26+
self.weight, self.grad_weight = create_param_and_grad(w_shape, initW, fan=w_fan, name='Wconv_{},{}@{}x{}'.format(n_input_plane, n_output_plane, k_w, k_h))
2727
if self.with_bias:
28-
self.bias, self.grad_bias = create_param_and_grad(n_output_plane, init_b, name='bconv_{}'.format(n_output_plane))
28+
self.bias, self.grad_bias = create_param_and_grad(n_output_plane, initB, name='bconv_{}'.format(n_output_plane))
2929

3030
def symb_forward(self, symb_input):
3131
conv_output = _dnn.dnn_conv(img=symb_input,

0 commit comments

Comments (0)