This repository was archived by the owner on Aug 31, 2021. It is now read-only.

Commit edc98e6

Merge branch 'master' of github.com:google/skflow
2 parents: 7cdfb0d + 90c86fc

File tree: 3 files changed, +48 -35 lines

  examples/README.md
  examples/resnet.py
  skflow/tests/test_nonlinear.py

examples/README.md

Lines changed: 2 additions & 1 deletion

@@ -24,7 +24,8 @@ Image classification
 Text classification
 -------------------
 
-* [Text Classification Using Recurrent Neural Networks on Words](text_classification.py) (See also [Simplified Version Using Built-in RNN Model](text_classification_builtin_rnn_model.py))
+* [Text Classification Using Recurrent Neural Networks on Words](text_classification.py)
+(See also [Simplified Version Using Built-in RNN Model](text_classification_builtin_rnn_model.py) with easy to use built-in parameters)
 * [Text Classification Using Convolutional Neural Networks on Words](text_classification_cnn.py)
 * [Text Classification Using Recurrent Neural Networks on Characters](text_classification_character_rnn.py)
 * [Text Classification Using Convolutional Neural Networks on Characters](text_classification_character_cnn.py)

examples/resnet.py

Lines changed: 27 additions & 24 deletions

@@ -15,6 +15,9 @@
 """
 This example builds deep residual network for mnist data.
 Reference Paper: http://arxiv.org/pdf/1512.03385.pdf
+
+Note that this is still a work-in-progress. Feel free to submit a PR
+to make this better.
 """
 
 import os
@@ -31,32 +34,30 @@
 
 
 def res_net(x, y, activation=tf.nn.relu):
-    """Builds a residual network.
+    """Builds a residual network. Note that if the input tensor is 2D, it must be
+    square in order to be converted to a 4D tensor.
 
     Borrowed structure from here: https://github.com/pkmital/tensorflow_tutorials/blob/master/10_residual_network.py
 
     Args:
         x: Input of the network
         y: Output of the network
         activation: Activation function to apply after each convolution
-    Raises:
-        ValueError
-            If a 2D tensor is not square, it cannot be converted to a
-            4D tensor.
     """
-    LayerBlock = namedtuple(
-        'LayerBlock', ['num_layers', 'num_filters', 'bottleneck_size'])
-    blocks = [LayerBlock(3, 128, 32),
-              LayerBlock(3, 256, 64),
-              LayerBlock(3, 512, 128),
-              LayerBlock(3, 1024, 256)]
-
-    # Input check
+
+    # Configurations for each bottleneck block
+    BottleneckBlock = namedtuple(
+        'BottleneckBlock', ['num_layers', 'num_filters', 'bottleneck_size'])
+    blocks = [BottleneckBlock(3, 128, 32),
+              BottleneckBlock(3, 256, 64),
+              BottleneckBlock(3, 512, 128),
+              BottleneckBlock(3, 1024, 256)]
+
     input_shape = x.get_shape().as_list()
+
+    # Reshape the input into the right shape if it's 2D tensor
     if len(input_shape) == 2:
         ndim = int(sqrt(input_shape[1]))
-        if ndim * ndim != input_shape[1]:
-            raise ValueError('input_shape should be square')
         x = tf.reshape(x, [-1, ndim, ndim, 1])
 
     # First convolution expands to 64 channels
@@ -74,11 +75,13 @@ def res_net(x, y, activation=tf.nn.relu):
                              [1, 1], [1, 1, 1, 1],
                              padding='VALID', bias=True)
 
-    # Create resnets for each residual block
+    # Create each bottleneck building block for each layer
    for block_i, block in enumerate(blocks):
        for layer_i in range(block.num_layers):
 
            name = 'block_%d/layer_%d' % (block_i, layer_i)
+
+            # 1x1 convolution responsible for reducing dimension
            with tf.variable_scope(name + '/conv_in'):
                conv = skflow.ops.conv2d(net, block.num_filters,
                                         [1, 1], [1, 1, 1, 1],
@@ -95,6 +98,7 @@ def res_net(x, y, activation=tf.nn.relu):
                                         batch_norm=True,
                                         bias=False)
 
+            # 1x1 convolution responsible for restoring dimension
            with tf.variable_scope(name + '/conv_out'):
                conv = skflow.ops.conv2d(conv, block.num_filters,
                                         [1, 1], [1, 1, 1, 1],
@@ -103,6 +107,8 @@ def res_net(x, y, activation=tf.nn.relu):
                                         batch_norm=True,
                                         bias=False)
 
+            # shortcut connections that turn the network into its counterpart
+            # residual function (identity shortcut)
            net = conv + net
 
    try:
@@ -116,16 +122,13 @@ def res_net(x, y, activation=tf.nn.relu):
    except IndexError:
        pass
 
-
+    net_shape = net.get_shape().as_list()
    net = tf.nn.avg_pool(net,
-                         ksize=[1, net.get_shape().as_list()[1],
-                                net.get_shape().as_list()[2], 1],
+                         ksize=[1, net_shape[1], net_shape[2], 1],
                         strides=[1, 1, 1, 1], padding='VALID')
-    net = tf.reshape(
-        net,
-        [-1, net.get_shape().as_list()[1] *
-         net.get_shape().as_list()[2] *
-         net.get_shape().as_list()[3]])
+
+    net_shape = net.get_shape().as_list()
+    net = tf.reshape(net, [-1, net_shape[1] * net_shape[2] * net_shape[3]])
 
    return skflow.models.logistic_regression(net, y)
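To make the ideas behind these resnet.py changes concrete, the following is a minimal NumPy-only sketch, separate from the diff above: the BottleneckBlock configuration the patch introduces, the square 2D-to-4D reshape that replaced the explicit ValueError check, and the identity shortcut the new comments describe. The helper names to_4d and identity_shortcut are illustrative only and do not exist in the repository.

from collections import namedtuple
from math import sqrt

import numpy as np

# Block configuration as in the patch: each entry describes one group of
# bottleneck layers (layer count, output filters, bottleneck width).
BottleneckBlock = namedtuple(
    'BottleneckBlock', ['num_layers', 'num_filters', 'bottleneck_size'])
blocks = [BottleneckBlock(3, 128, 32),
          BottleneckBlock(3, 256, 64),
          BottleneckBlock(3, 512, 128),
          BottleneckBlock(3, 1024, 256)]


def to_4d(batch):
    """Reshape (batch, pixels) to (batch, side, side, 1), as res_net does.

    The patch drops the explicit squareness check, so this sketch likewise
    assumes the input really is square (e.g. flattened 28x28 MNIST digits).
    """
    side = int(sqrt(batch.shape[1]))
    return batch.reshape(-1, side, side, 1)


def identity_shortcut(block_output, block_input):
    """The residual connection the new comments describe: y = F(x) + x."""
    return block_output + block_input


flat = np.zeros((8, 28 * 28), dtype=np.float32)   # 8 flattened MNIST-style images
images = to_4d(flat)
print(images.shape)                                # (8, 28, 28, 1)
print(identity_shortcut(np.ones_like(images), images).shape)
print(blocks[0].num_filters, blocks[0].bottleneck_size)  # 128 32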

skflow/tests/test_nonlinear.py

Lines changed: 19 additions & 10 deletions

@@ -65,31 +65,40 @@ def testRNN(self):
                                [2, 2, 3, 4, 5],
                                [3, 3, 1, 2, 1],
                                [2, 4, 5, 4, 1]]), dtype=np.float32)
+        # labels for classification
         labels = np.array(list([1, 0, 1, 0]), dtype=np.float32)
+        # targets for regression
+        targets = np.array(list([10, 16, 10, 16]), dtype=np.float32)
+        test_data = np.array(list([[1, 3, 3, 2, 1], [2, 3, 4, 5, 6]]))
         def input_fn(X):
             return tf.split(1, 5, X)
 
         # Classification
         classifier = skflow.TensorFlowRNNClassifier(
             rnn_size=2, cell_type='lstm', n_classes=2, input_op_fn=input_fn)
         classifier.fit(data, labels)
-        predictions = classifier.predict(np.array(list([[1, 3, 3, 2, 1],
-                                                        [2, 3, 4, 5, 6]])))
+        classifier.weights_
+        classifier.bias_
+        predictions = classifier.predict(test_data)
         self.assertAllClose(predictions, np.array([1, 0]))
 
-        classifier = skflow.TensorFlowRNNClassifier(
-            rnn_size=2, cell_type='gru', n_classes=2, input_op_fn=input_fn)
         classifier = skflow.TensorFlowRNNClassifier(
             rnn_size=2, cell_type='rnn', n_classes=2,
             input_op_fn=input_fn, num_layers=2)
-
-        # Regression
-        classifier = skflow.TensorFlowRNNRegressor(
-            rnn_size=2, cell_type='lstm', input_op_fn=input_fn)
         classifier.fit(data, labels)
-        predictions = classifier.predict(np.array(list([[1, 3, 3, 2, 1],
-                                                        [2, 3, 4, 5, 6]])))
+        classifier = skflow.TensorFlowRNNClassifier(
+            rnn_size=2, cell_type='invalid_cell_type', n_classes=2,
+            input_op_fn=input_fn, num_layers=2)
+        with self.assertRaises(ValueError):
+            classifier.fit(data, labels)
 
+        # Regression
+        regressor = skflow.TensorFlowRNNRegressor(
+            rnn_size=2, cell_type='gru', input_op_fn=input_fn)
+        regressor.fit(data, targets)
+        regressor.weights_
+        regressor.bias_
+        predictions = regressor.predict(test_data)
 
 if __name__ == "__main__":
     tf.test.main()
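The updated test doubles as a usage summary for the RNN estimators. The condensed sketch below mirrors that pattern using only calls that appear in the diff (the skflow-era API with the old tf.split(split_dim, num_split, value) signature). It needs the archived skflow package and a contemporary TensorFlow to run, and the first row of the training data is a placeholder since it lies outside the hunk.

import numpy as np
import tensorflow as tf
import skflow

# Toy sequence data from the test: 4 examples, 5 time steps of 1 feature each.
# (The first row is not visible in the hunk, so a placeholder value is used.)
data = np.array([[1, 2, 3, 4, 5],
                 [2, 2, 3, 4, 5],
                 [3, 3, 1, 2, 1],
                 [2, 4, 5, 4, 1]], dtype=np.float32)
labels = np.array([1, 0, 1, 0], dtype=np.float32)       # classification labels
targets = np.array([10, 16, 10, 16], dtype=np.float32)  # regression targets
test_data = np.array([[1, 3, 3, 2, 1], [2, 3, 4, 5, 6]], dtype=np.float32)

def input_fn(X):
    # Old-style tf.split(split_dim, num_split, value): one tensor per time step.
    return tf.split(1, 5, X)

# Classification with an LSTM cell.
classifier = skflow.TensorFlowRNNClassifier(
    rnn_size=2, cell_type='lstm', n_classes=2, input_op_fn=input_fn)
classifier.fit(data, labels)
print(classifier.predict(test_data))

# Regression with a GRU cell.
regressor = skflow.TensorFlowRNNRegressor(
    rnn_size=2, cell_type='gru', input_op_fn=input_fn)
regressor.fit(data, targets)
print(regressor.predict(test_data))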
