from __future__ import print_function
from PIL import Image
import os
import os.path
import numpy as np
import sys

if sys.version_info[0] == 2:
    import cPickle as pickle
else:
    import pickle

from .vision import VisionDataset
from .utils import check_integrity, download_and_extract_archive


class CIFAR10(VisionDataset):
    """`CIFAR10 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    Args:
        root (string): Root directory of dataset where directory
            ``cifar-10-batches-py`` exists or will be saved to if download is set to True.
        train (bool, optional): If True, creates dataset from training set, otherwise
            creates from test set.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.

    """
    base_folder = 'cifar-10-batches-py'
    url = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
    filename = "cifar-10-python.tar.gz"
    tgz_md5 = 'c58f30108f718f92721af3b95e74349a'
    train_list = [
        ['data_batch_1', 'c99cafc152244af753f735de768cd75f'],
        ['data_batch_2', 'd4bba439e000b95fd0a9bffe97cbabec'],
        ['data_batch_3', '54ebc095f3ab1f0389bbae665268c751'],
        ['data_batch_4', '634d18415352ddfa80567beed471001a'],
        ['data_batch_5', '482c414d41f54cd18b22e5b47cb7c3cb'],
    ]

    test_list = [
        ['test_batch', '40351d587109b95175f43aff81a1287e'],
    ]
    meta = {
        'filename': 'batches.meta',
        'key': 'label_names',
        'md5': '5ff9c542aee3614f3951f8cda6e48888',
    }

    def __init__(self, root, train=True,
                 transform=None, target_transform=None,
                 download=False):

        super(CIFAR10, self).__init__(root)
        self.transform = transform
        self.target_transform = target_transform

        self.train = train  # training set or test set

        if download:
            self.download()

        if not self._check_integrity():
            raise RuntimeError('Dataset not found or corrupted.' +
                               ' You can use download=True to download it')

        if self.train:
            downloaded_list = self.train_list
        else:
            downloaded_list = self.test_list

        self.data = []
        self.targets = []

        # now load the pickled numpy arrays
        for file_name, checksum in downloaded_list:
            file_path = os.path.join(self.root, self.base_folder, file_name)
            with open(file_path, 'rb') as f:
                if sys.version_info[0] == 2:
                    entry = pickle.load(f)
                else:
                    entry = pickle.load(f, encoding='latin1')
                self.data.append(entry['data'])
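                # CIFAR-10 batch files store class indices under 'labels', while
                # the CIFAR-100 archive (used by the CIFAR100 subclass below)
                # stores them under 'fine_labels'.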
                if 'labels' in entry:
                    self.targets.extend(entry['labels'])
                else:
                    self.targets.extend(entry['fine_labels'])

        # each batch holds a (N, 3072) uint8 array in channel-first order;
        # stack the batches, reshape to NCHW, then transpose to NHWC
        self.data = np.vstack(self.data).reshape(-1, 3, 32, 32)
        self.data = self.data.transpose((0, 2, 3, 1))  # convert to HWC

        self._load_meta()

    def _load_meta(self):
        path = os.path.join(self.root, self.base_folder, self.meta['filename'])
        if not check_integrity(path, self.meta['md5']):
            raise RuntimeError('Dataset metadata file not found or corrupted.' +
                               ' You can use download=True to download it')
        with open(path, 'rb') as infile:
            if sys.version_info[0] == 2:
                data = pickle.load(infile)
            else:
                data = pickle.load(infile, encoding='latin1')
            self.classes = data[self.meta['key']]
        self.class_to_idx = {_class: i for i, _class in enumerate(self.classes)}

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        img, target = self.data[index], self.targets[index]
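        # at this point img is a 32x32x3 uint8 numpy array and target is an
        # int class index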

        # convert to a PIL Image so that the return type is consistent with
        # all other datasets
        img = Image.fromarray(img)

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self):
        return len(self.data)

    def _check_integrity(self):
        root = self.root
        for fentry in (self.train_list + self.test_list):
            filename, md5 = fentry[0], fentry[1]
            fpath = os.path.join(root, self.base_folder, filename)
            if not check_integrity(fpath, md5):
                return False
        return True

    def download(self):
        if self._check_integrity():
            print('Files already downloaded and verified')
            return
        download_and_extract_archive(self.url, self.root, self.filename, self.tgz_md5)

    def extra_repr(self):
        return "Split: {}".format("Train" if self.train is True else "Test")


class CIFAR100(CIFAR10):
    """`CIFAR100 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    This is a subclass of the `CIFAR10` Dataset.
    """
    base_folder = 'cifar-100-python'
    url = "https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz"
    filename = "cifar-100-python.tar.gz"
    tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85'
    train_list = [
        ['train', '16019d7e3df5f24257cddd939b257f8d'],
    ]

    test_list = [
        ['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc'],
    ]
    meta = {
        'filename': 'meta',
        'key': 'fine_label_names',
        'md5': '7973b15100ade9c7d40fb424638fde48',
    }
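
# A minimal usage sketch for CIFAR100 (illustrative only, not executed here);
# the constructor mirrors CIFAR10, and targets are the 100 fine-grained class
# indices loaded from the 'fine_labels' field:
#
#     dataset = CIFAR100(root='./data', train=False, download=True)
#     img, fine_label = dataset[0]  # PIL image and an int in [0, 99]
#     print(dataset.classes[fine_label])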