Skip to content

Commit d6c62e8

Browse files
committed
Rewrite mnist.py and add mnist_test.py
1 parent 91115ab commit d6c62e8

File tree

2 files changed

+78
-23
lines changed

2 files changed

+78
-23
lines changed

python/paddle/v2/dataset/mnist.py

Lines changed: 51 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,39 +1,67 @@
1-
import sklearn.datasets.mldata
2-
import sklearn.model_selection
1+
import gzip
import subprocess

import numpy

import paddle.v2.dataset.common
4-
from common import DATA_HOME
54

6-
__all__ = ['train_creator', 'test_creator']
5+
# Base URL of the canonical MNIST distribution.
URL_PREFIX = 'http://yann.lecun.com/exdb/mnist/'

# Gzipped IDX files for the 10k-sample test split.  The MD5 strings are
# passed to paddle.v2.dataset.common.download alongside each URL —
# presumably to validate the downloaded file (confirm in common.py).
TEST_IMAGE_URL = URL_PREFIX + 't10k-images-idx3-ubyte.gz'
TEST_IMAGE_MD5 = '25e3cc63507ef6e98d5dc541e8672bb6'
TEST_LABEL_URL = URL_PREFIX + 't10k-labels-idx1-ubyte.gz'
TEST_LABEL_MD5 = '4e9511fe019b2189026bd0421ba7b688'

# Gzipped IDX files for the 60k-sample training split, with MD5 strings.
TRAIN_IMAGE_URL = URL_PREFIX + 'train-images-idx3-ubyte.gz'
TRAIN_IMAGE_MD5 = 'f68b3c2dcbeaaa9fbdd348bbdeb94873'
TRAIN_LABEL_URL = URL_PREFIX + 'train-labels-idx1-ubyte.gz'
TRAIN_LABEL_MD5 = 'd53e105ee54ea40749a09fcbcd1e9432'
1618

1719

18-
TEST_SIZE = 10000
20+
def reader_creator(image_filename, label_filename, buffer_size):
    """Stream MNIST samples from a pair of gzipped IDX files.

    Decodes ``buffer_size`` samples at a time and yields them one by one.

    :param image_filename: path to the gzipped IDX image file
                           (16-byte header, then 28x28 ubyte images).
    :param label_filename: path to the gzipped IDX label file
                           (8-byte header, then one ubyte label per sample).
    :param buffer_size: number of samples to decode per chunk.
    :return: a generator yielding (image, label) pairs, where image is a
             float32 ndarray of shape (784,) scaled to [-1.0, 1.0] and
             label is an int in [0, 10).
    """

    def reader():
        # The original used `zcat` subprocesses because numpy.fromfile
        # cannot read from a decompression stream (see
        # http://stackoverflow.com/a/38061619/724872).  Reading raw bytes
        # and decoding them with numpy.frombuffer sidesteps that, so the
        # standard gzip module works and no external binary is needed.
        with gzip.open(image_filename, 'rb') as image_file, \
                gzip.open(label_filename, 'rb') as label_file:
            image_file.read(16)  # skip IDX header: magic, count, rows, cols
            label_file.read(8)   # skip IDX header: magic, count

            while True:
                label_bytes = label_file.read(buffer_size)
                if not label_bytes:
                    break  # end of stream
                labels = numpy.frombuffer(
                    label_bytes, dtype='uint8').astype('int')

                # The final chunk may hold fewer than buffer_size samples;
                # unlike the original code, it is not silently dropped.
                chunk = labels.size
                image_bytes = image_file.read(chunk * 28 * 28)
                images = numpy.frombuffer(
                    image_bytes, dtype='uint8').reshape(
                        (chunk, 28 * 28)).astype('float32')

                # Rescale pixel values from [0, 255] to [-1.0, 1.0].
                images = images / 255.0 * 2.0 - 1.0

                for i in range(chunk):
                    yield images[i, :], labels[i]

    return reader()
3652

53+
def train():
    """Return a reader over the MNIST training split.

    Downloads (and checksum-verifies) the training image and label files,
    then streams (image, label) samples in chunks of 100.
    """
    image_path = paddle.v2.dataset.common.download(
        TRAIN_IMAGE_URL, 'mnist', TRAIN_IMAGE_MD5)
    label_path = paddle.v2.dataset.common.download(
        TRAIN_LABEL_URL, 'mnist', TRAIN_LABEL_MD5)
    return reader_creator(image_path, label_path, 100)
3760

38-
if __name__ == '__main__':
39-
unittest()
61+
def test():
    """Return a reader over the MNIST test split.

    Downloads (and checksum-verifies) the test image and label files,
    then streams (image, label) samples in chunks of 100.
    """
    image_path = paddle.v2.dataset.common.download(
        TEST_IMAGE_URL, 'mnist', TEST_IMAGE_MD5)
    label_path = paddle.v2.dataset.common.download(
        TEST_LABEL_URL, 'mnist', TEST_LABEL_MD5)
    return reader_creator(image_path, label_path, 100)
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
import paddle.v2.dataset.mnist
2+
import unittest
3+
4+
class TestMNIST(unittest.TestCase):
5+
def check_reader(self, reader):
6+
sum = 0
7+
for l in reader:
8+
self.assertEqual(l[0].size, 784)
9+
self.assertEqual(l[1].size, 1)
10+
self.assertLess(l[1], 10)
11+
self.assertGreaterEqual(l[1], 0)
12+
sum += 1
13+
return sum
14+
15+
def test_train(self):
16+
self.assertEqual(
17+
self.check_reader(paddle.v2.dataset.mnist.train()),
18+
60000)
19+
20+
def test_test(self):
21+
self.assertEqual(
22+
self.check_reader(paddle.v2.dataset.mnist.test()),
23+
10000)
24+
25+
26+
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()

0 commit comments

Comments
 (0)