from __future__ import print_function
from PIL import Image
import os
import os.path
import numpy as np
import sys
if sys.version_info[0] == 2:
    import cPickle as pickle
else:
    import pickle

import torch.utils.data as data
from .utils import download_url, check_integrity


class CIFAR10(data.Dataset):
    """`CIFAR10 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    Args:
        root (string): Root directory of dataset where directory
            ``cifar-10-batches-py`` exists or will be saved to if download is set to True.
        train (bool, optional): If True, creates dataset from training set, otherwise
            creates from test set.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.

    """
    base_folder = 'cifar-10-batches-py'
    url = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
    filename = "cifar-10-python.tar.gz"
    tgz_md5 = 'c58f30108f718f92721af3b95e74349a'
    train_list = [
        ['data_batch_1', 'c99cafc152244af753f735de768cd75f'],
        ['data_batch_2', 'd4bba439e000b95fd0a9bffe97cbabec'],
        ['data_batch_3', '54ebc095f3ab1f0389bbae665268c751'],
        ['data_batch_4', '634d18415352ddfa80567beed471001a'],
        ['data_batch_5', '482c414d41f54cd18b22e5b47cb7c3cb'],
    ]

    test_list = [
        ['test_batch', '40351d587109b95175f43aff81a1287e'],
    ]
    meta = {
        'filename': 'batches.meta',
        'key': 'label_names',
        'md5': '5ff9c542aee3614f3951f8cda6e48888',
    }

    def __init__(self, root, train=True,
                 transform=None, target_transform=None,
                 download=False):
        self.root = os.path.expanduser(root)
        self.transform = transform
        self.target_transform = target_transform
        self.train = train  # training set or test set

        if download:
            self.download()

        if not self._check_integrity():
            raise RuntimeError('Dataset not found or corrupted.' +
                               ' You can use download=True to download it')

        if self.train:
            downloaded_list = self.train_list
        else:
            downloaded_list = self.test_list

        self.data = []
        self.targets = []

        # now load the pickled numpy arrays
        for file_name, checksum in downloaded_list:
            file_path = os.path.join(self.root, self.base_folder, file_name)
            with open(file_path, 'rb') as f:
                if sys.version_info[0] == 2:
                    entry = pickle.load(f)
                else:
                    entry = pickle.load(f, encoding='latin1')
                self.data.append(entry['data'])
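                # CIFAR-10 batch dicts store class indices under 'labels';
                # CIFAR-100 uses 'fine_labels' (see the CIFAR100 subclass below).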
                if 'labels' in entry:
                    self.targets.extend(entry['labels'])
                else:
                    self.targets.extend(entry['fine_labels'])

        self.data = np.vstack(self.data).reshape(-1, 3, 32, 32)
        self.data = self.data.transpose((0, 2, 3, 1))  # convert to HWC

        self._load_meta()

    def _load_meta(self):
        path = os.path.join(self.root, self.base_folder, self.meta['filename'])
        if not check_integrity(path, self.meta['md5']):
            raise RuntimeError('Dataset metadata file not found or corrupted.' +
                               ' You can use download=True to download it')
        with open(path, 'rb') as infile:
            if sys.version_info[0] == 2:
                data = pickle.load(infile)
            else:
                data = pickle.load(infile, encoding='latin1')
            self.classes = data[self.meta['key']]
        self.class_to_idx = {_class: i for i, _class in enumerate(self.classes)}

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        img, target = self.data[index], self.targets[index]

        # doing this so that it is consistent with all other datasets
        # to return a PIL Image
        img = Image.fromarray(img)

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self):
        return len(self.data)

    def _check_integrity(self):
        root = self.root
        for fentry in (self.train_list + self.test_list):
            filename, md5 = fentry[0], fentry[1]
            fpath = os.path.join(root, self.base_folder, filename)
            if not check_integrity(fpath, md5):
                return False
        return True

    def download(self):
        import tarfile

        if self._check_integrity():
            print('Files already downloaded and verified')
            return

        download_url(self.url, self.root, self.filename, self.tgz_md5)

        # extract file
        with tarfile.open(os.path.join(self.root, self.filename), "r:gz") as tar:
            tar.extractall(path=self.root)

    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += '    Number of datapoints: {}\n'.format(self.__len__())
        tmp = 'train' if self.train is True else 'test'
        fmt_str += '    Split: {}\n'.format(tmp)
        fmt_str += '    Root Location: {}\n'.format(self.root)
        tmp = '    Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        tmp = '    Target Transforms (if any): '
        fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str


class CIFAR100(CIFAR10):
    """`CIFAR100 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    This is a subclass of the `CIFAR10` Dataset.
    """
    base_folder = 'cifar-100-python'
    url = "https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz"
    filename = "cifar-100-python.tar.gz"
    tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85'
    train_list = [
        ['train', '16019d7e3df5f24257cddd939b257f8d'],
    ]

    test_list = [
        ['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc'],
    ]
    meta = {
        'filename': 'meta',
        'key': 'fine_label_names',
        'md5': '7973b15100ade9c7d40fb424638fde48',
    }