from __future__ import print_function
import warnings
import torch.utils.data as data
from PIL import Image
import os
import os.path
import gzip
import numpy as np
import torch
import codecs
from .utils import download_url, makedir_exist_ok


class MNIST(data.Dataset):
    """`MNIST <http://yann.lecun.com/exdb/mnist/>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``processed/training.pt``
            and  ``processed/test.pt`` exist.
        train (bool, optional): If True, creates dataset from ``training.pt``,
            otherwise from ``test.pt``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in the root directory. If the dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """
    urls = [
        'http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz',
        'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz',
        'http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz',
        'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz',
    ]
    training_file = 'training.pt'
    test_file = 'test.pt'
    classes = ['0 - zero', '1 - one', '2 - two', '3 - three', '4 - four',
               '5 - five', '6 - six', '7 - seven', '8 - eight', '9 - nine']

    @property
    def train_labels(self):
        warnings.warn("train_labels has been renamed targets")
        return self.targets

    @property
    def test_labels(self):
        warnings.warn("test_labels has been renamed targets")
        return self.targets

    @property
    def train_data(self):
        warnings.warn("train_data has been renamed data")
        return self.data

    @property
    def test_data(self):
        warnings.warn("test_data has been renamed data")
        return self.data

    def __init__(self, root, train=True, transform=None, target_transform=None, download=False):
        self.root = os.path.expanduser(root)
        self.transform = transform
        self.target_transform = target_transform
        self.train = train  # training set or test set

        if download:
            self.download()

        if not self._check_exists():
            raise RuntimeError('Dataset not found.' +
                               ' You can use download=True to download it')

        if self.train:
            data_file = self.training_file
        else:
            data_file = self.test_file
        self.data, self.targets = torch.load(os.path.join(self.processed_folder, data_file))

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        img, target = self.data[index], int(self.targets[index])

        # doing this so that it is consistent with all other datasets
        # to return a PIL Image
        img = Image.fromarray(img.numpy(), mode='L')

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self):
        return len(self.data)

    @property
    def raw_folder(self):
        return os.path.join(self.root, self.__class__.__name__, 'raw')

    @property
    def processed_folder(self):
        return os.path.join(self.root, self.__class__.__name__, 'processed')

    @property
    def class_to_idx(self):
        return {_class: i for i, _class in enumerate(self.classes)}

    def _check_exists(self):
        return os.path.exists(os.path.join(self.processed_folder, self.training_file)) and \
            os.path.exists(os.path.join(self.processed_folder, self.test_file))

    @staticmethod
    def extract_gzip(gzip_path, remove_finished=False):
        print('Extracting {}'.format(gzip_path))
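        # Write the decompressed payload next to the archive, with the '.gz' suffix stripped.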
        with open(gzip_path.replace('.gz', ''), 'wb') as out_f, \
                gzip.GzipFile(gzip_path) as zip_f:
            out_f.write(zip_f.read())
        if remove_finished:
            os.unlink(gzip_path)

    def download(self):
        """Download the MNIST data if it doesn't exist in processed_folder already."""

        if self._check_exists():
            return

        makedir_exist_ok(self.raw_folder)
        makedir_exist_ok(self.processed_folder)

        # download files
        for url in self.urls:
            filename = url.rpartition('/')[2]
            file_path = os.path.join(self.raw_folder, filename)
            download_url(url, root=self.raw_folder, filename=filename, md5=None)
            self.extract_gzip(gzip_path=file_path, remove_finished=True)

        # process and save as torch files
        print('Processing...')

        training_set = (
            read_image_file(os.path.join(self.raw_folder, 'train-images-idx3-ubyte')),
            read_label_file(os.path.join(self.raw_folder, 'train-labels-idx1-ubyte'))
        )
        test_set = (
            read_image_file(os.path.join(self.raw_folder, 't10k-images-idx3-ubyte')),
            read_label_file(os.path.join(self.raw_folder, 't10k-labels-idx1-ubyte'))
        )
        with open(os.path.join(self.processed_folder, self.training_file), 'wb') as f:
            torch.save(training_set, f)
        with open(os.path.join(self.processed_folder, self.test_file), 'wb') as f:
            torch.save(test_set, f)

        print('Done!')

    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += '    Number of datapoints: {}\n'.format(self.__len__())
        tmp = 'train' if self.train is True else 'test'
        fmt_str += '    Split: {}\n'.format(tmp)
        fmt_str += '    Root Location: {}\n'.format(self.root)
        tmp = '    Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        tmp = '    Target Transforms (if any): '
        fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str


class FashionMNIST(MNIST):
    """`Fashion-MNIST <https://github.com/zalandoresearch/fashion-mnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``processed/training.pt``
            and  ``processed/test.pt`` exist.
        train (bool, optional): If True, creates dataset from ``training.pt``,
            otherwise from ``test.pt``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in the root directory. If the dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """
    urls = [
        'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz',
        'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz',
        'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz',
        'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz',
    ]
    classes = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat', 'Sandal',
               'Shirt', 'Sneaker', 'Bag', 'Ankle boot']


class KMNIST(MNIST):
    """`Kuzushiji-MNIST <https://github.com/rois-codh/kmnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``processed/training.pt``
            and  ``processed/test.pt`` exist.
        train (bool, optional): If True, creates dataset from ``training.pt``,
            otherwise from ``test.pt``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in the root directory. If the dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """
    urls = [
        'http://codh.rois.ac.jp/kmnist/dataset/kmnist/train-images-idx3-ubyte.gz',
        'http://codh.rois.ac.jp/kmnist/dataset/kmnist/train-labels-idx1-ubyte.gz',
        'http://codh.rois.ac.jp/kmnist/dataset/kmnist/t10k-images-idx3-ubyte.gz',
        'http://codh.rois.ac.jp/kmnist/dataset/kmnist/t10k-labels-idx1-ubyte.gz',
    ]
    classes = ['o', 'ki', 'su', 'tsu', 'na', 'ha', 'ma', 'ya', 're', 'wo']


class EMNIST(MNIST):
    """`EMNIST <https://www.westernsydney.edu.au/bens/home/reproducible_research/emnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``processed/training.pt``
            and  ``processed/test.pt`` exist.
        split (string): The dataset has 6 different splits: ``byclass``, ``bymerge``,
            ``balanced``, ``letters``, ``digits`` and ``mnist``. This argument specifies
            which one to use.
        train (bool, optional): If True, creates dataset from ``training.pt``,
            otherwise from ``test.pt``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in the root directory. If the dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """
    # Updated URL from https://www.westernsydney.edu.au/bens/home/reproducible_research/emnist
    url = 'https://cloudstor.aarnet.edu.au/plus/index.php/s/54h3OuGJhFLwAlQ/download'
    splits = ('byclass', 'bymerge', 'balanced', 'letters', 'digits', 'mnist')

    def __init__(self, root, split, **kwargs):
        if split not in self.splits:
            raise ValueError('Split "{}" not found. Valid splits are: {}'.format(
                split, ', '.join(self.splits),
            ))
        self.split = split
        self.training_file = self._training_file(split)
        self.test_file = self._test_file(split)
        super(EMNIST, self).__init__(root, **kwargs)

    @staticmethod
    def _training_file(split):
        return 'training_{}.pt'.format(split)

    @staticmethod
    def _test_file(split):
        return 'test_{}.pt'.format(split)

    def download(self):
        """Download the EMNIST data if it doesn't exist in processed_folder already."""
        import shutil
        import zipfile

        if self._check_exists():
            return

        makedir_exist_ok(self.raw_folder)
        makedir_exist_ok(self.processed_folder)

        # download files
        filename = self.url.rpartition('/')[2]
        file_path = os.path.join(self.raw_folder, filename)
        download_url(self.url, root=self.raw_folder, filename=filename, md5=None)

        print('Extracting zip archive')
        with zipfile.ZipFile(file_path) as zip_f:
            zip_f.extractall(self.raw_folder)
        os.unlink(file_path)
        gzip_folder = os.path.join(self.raw_folder, 'gzip')
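        # The extracted archive is expected to contain a 'gzip' directory holding
        # one .gz file per split and subset.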
        for gzip_file in os.listdir(gzip_folder):
            if gzip_file.endswith('.gz'):
                self.extract_gzip(gzip_path=os.path.join(gzip_folder, gzip_file))

        # process and save as torch files
        for split in self.splits:
            print('Processing ' + split)
            training_set = (
                read_image_file(os.path.join(gzip_folder, 'emnist-{}-train-images-idx3-ubyte'.format(split))),
                read_label_file(os.path.join(gzip_folder, 'emnist-{}-train-labels-idx1-ubyte'.format(split)))
            )
            test_set = (
                read_image_file(os.path.join(gzip_folder, 'emnist-{}-test-images-idx3-ubyte'.format(split))),
                read_label_file(os.path.join(gzip_folder, 'emnist-{}-test-labels-idx1-ubyte'.format(split)))
            )
            with open(os.path.join(self.processed_folder, self._training_file(split)), 'wb') as f:
                torch.save(training_set, f)
            with open(os.path.join(self.processed_folder, self._test_file(split)), 'wb') as f:
                torch.save(test_set, f)
        shutil.rmtree(gzip_folder)

        print('Done!')


def get_int(b):
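    # Interpret a byte string as a big-endian integer (used for the IDX header fields below).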
    return int(codecs.encode(b, 'hex'), 16)


def read_label_file(path):
    with open(path, 'rb') as f:
        data = f.read()
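        # IDX label-file layout: magic number 2049 (4 bytes), number of labels
        # (4 bytes), then one unsigned byte per label.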
        assert get_int(data[:4]) == 2049
        length = get_int(data[4:8])
        parsed = np.frombuffer(data, dtype=np.uint8, offset=8)
        return torch.from_numpy(parsed).view(length).long()


def read_image_file(path):
    with open(path, 'rb') as f:
        data = f.read()
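        # IDX image-file layout: magic number 2051, image count, row count and
        # column count (4 bytes each), then one unsigned byte per pixel.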
        assert get_int(data[:4]) == 2051
        length = get_int(data[4:8])
        num_rows = get_int(data[8:12])
        num_cols = get_int(data[12:16])
        parsed = np.frombuffer(data, dtype=np.uint8, offset=16)
        return torch.from_numpy(parsed).view(length, num_rows, num_cols)