Commit 4bf4d00

Add non-cudnn conv and maxpool layers.
1 parent 290a5d7 commit 4bf4d00

File tree

3 files changed: +77 -1 lines changed

beacon8/layers/SpatialConvolution.py
beacon8/layers/SpatialMaxPooling.py
beacon8/layers/__init__.py
beacon8/layers/SpatialConvolution.py

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+from .Module import Module
+
+import theano as _th
+import numpy as _np
+
+
+class SpatialConvolution(Module):
+    def __init__(self, n_input_plane, n_output_plane, k_w, k_h, d_w=1, d_h=1, with_bias=True, border_mode='valid', imshape=None):
+        Module.__init__(self)
+        self.n_input_plane = n_input_plane
+        self.n_output_plane = n_output_plane
+        self.k_w = k_w
+        self.k_h = k_h
+        self.d_w = d_w
+        self.d_h = d_h
+        self.with_bias = with_bias
+        self.border_mode = border_mode
+        self.imshape = imshape
+
+        w_bound = _np.sqrt(4. / ((self.n_input_plane + self.n_output_plane) * self.k_w * self.k_h))
+        W = _np.random.uniform(low=-w_bound, high=w_bound, size=(n_output_plane, n_input_plane, k_h, k_w))
+        self.weight = _th.shared(W.astype(dtype=_th.config.floatX))
+        self.grad_weight = _th.shared((W*0).astype(dtype=_th.config.floatX))
+
+        if self.with_bias:
+            self.bias = _th.shared(_np.zeros(shape=(n_output_plane, ), dtype=_th.config.floatX))
+            self.grad_bias = _th.shared(_np.zeros(shape=(n_output_plane, ), dtype=_th.config.floatX))
+
+    def symb_forward(self, symb_input):
+        conv_output = _th.tensor.nnet.conv.conv2d(symb_input, self.weight,
+            image_shape=(None, self.n_input_plane) + (self.imshape or (None, None)),
+            filter_shape=(self.n_output_plane, self.n_input_plane, self.k_h, self.k_w),
+            border_mode=self.border_mode,
+            subsample=(self.d_h, self.d_w)
+        )
+
+        if self.with_bias:
+            return conv_output + self.bias.dimshuffle('x', 0, 'x', 'x')
+        else:
+            return conv_output
+
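For context, a minimal usage sketch of the new non-cuDNN convolution layer (not part of the commit itself). It assumes only what the diff shows, namely the beacon8.layers package, the constructor arguments, and a symb_forward method taking a 4D (batch, channel, row, column) symbolic input, plus stock Theano and NumPy.

# Usage sketch (assumption, not from this commit): build the layer and compile its forward pass.
import numpy as np
import theano
import theano.tensor as T

from beacon8.layers import SpatialConvolution

conv = SpatialConvolution(n_input_plane=3, n_output_plane=16, k_w=5, k_h=5, imshape=(32, 32))

x = T.tensor4('x')
forward = theano.function([x], conv.symb_forward(x))

images = np.random.randn(8, 3, 32, 32).astype(theano.config.floatX)
print(forward(images).shape)  # (8, 16, 28, 28): 'valid' borders with a 5x5 kernel shrink 32x32 to 28x28

The weight range w_bound used in the layer is sqrt(4 / ((n_input_plane + n_output_plane) * k_w * k_h)), i.e. a Glorot/Xavier-style uniform initialization up to the constant.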
beacon8/layers/SpatialMaxPooling.py

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+from .Module import Module
+
+import theano.tensor as _T
+
+
+class SpatialMaxPooling(Module):
+    def __init__(self, k_w, k_h, d_w=None, d_h=None, pad_w=0, pad_h=0, ignore_border=False):
+        Module.__init__(self)
+        self.k_w = k_w
+        self.k_h = k_h
+        self.ignore_border = ignore_border
+
+        if d_w is None:
+            self.d_w = self.k_w
+        else:
+            self.d_w = d_w
+
+        if d_h is None:
+            self.d_h = self.k_h
+        else:
+            self.d_h = d_h
+
+        self.pad_w = pad_w
+        self.pad_h = pad_h
+
+    def symb_forward(self, symb_input):
+        return _T.signal.downsample.max_pool_2d(
+            symb_input,
+            ds=(self.k_h, self.k_w),
+            ignore_border=self.ignore_border,
+            st=(self.d_h, self.d_w),
+            padding=(self.pad_h, self.pad_w)
+        )
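Likewise, a small sketch of the pooling layer, again assuming only the API visible in the diff: leaving d_w and d_h at None makes the stride default to the kernel size, i.e. non-overlapping pooling.

# Usage sketch (assumption, not from this commit): non-overlapping 2x2 max pooling.
import numpy as np
import theano
import theano.tensor as T

from beacon8.layers import SpatialMaxPooling

pool = SpatialMaxPooling(k_w=2, k_h=2)  # stride defaults to (2, 2), padding to (0, 0)

x = T.tensor4('x')
forward = theano.function([x], pool.symb_forward(x))

maps = np.random.randn(8, 16, 28, 28).astype(theano.config.floatX)
print(forward(maps).shape)  # (8, 16, 14, 14)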

beacon8/layers/__init__.py

Lines changed: 3 additions & 1 deletion
@@ -8,5 +8,7 @@
 from .AddConstant import *
 from .Log import *
 from .Reshape import *
+from .SpatialConvolution import *
+from .SpatialMaxPooling import *
 from .SpatialConvolutionCUDNN import *
-from .SpatialMaxPoolingCUDNN import *
+from .SpatialMaxPoolingCUDNN import *
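With both imports added to beacon8/layers/__init__.py, the CPU layers can be composed directly through their symbolic forward passes; the following chain of a convolution and a pooling layer is again only a sketch built on the API visible in this commit.

# Sketch (assumption): chaining the two new layers symbolically, convolution followed by max pooling.
import theano
import theano.tensor as T

from beacon8.layers import SpatialConvolution, SpatialMaxPooling

conv = SpatialConvolution(n_input_plane=3, n_output_plane=16, k_w=5, k_h=5)
pool = SpatialMaxPooling(k_w=2, k_h=2)

x = T.tensor4('x')
y = pool.symb_forward(conv.symb_forward(x))
net = theano.function([x], y)  # for 32x32 inputs, the output shape is (batch, 16, 14, 14)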

0 commit comments
