from PIL import Image
import os
import os.path
import numpy as np
import pickle

from .vision import VisionDataset
from .utils import check_integrity, download_and_extract_archive

class CIFAR10(VisionDataset):
    """`CIFAR10 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    Args:
        root (string): Root directory of dataset where directory
            ``cifar-10-batches-py`` exists or will be saved to if download is set to True.
        train (bool, optional): If True, creates dataset from training set, otherwise
            creates from test set.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.

    """
    base_folder = 'cifar-10-batches-py'
    url = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
    filename = "cifar-10-python.tar.gz"
    tgz_md5 = 'c58f30108f718f92721af3b95e74349a'
    # (file name, md5 checksum) pairs for every batch file, used by
    # _check_integrity() to verify the extracted archive.
    train_list = [
        ['data_batch_1', 'c99cafc152244af753f735de768cd75f'],
        ['data_batch_2', 'd4bba439e000b95fd0a9bffe97cbabec'],
        ['data_batch_3', '54ebc095f3ab1f0389bbae665268c751'],
        ['data_batch_4', '634d18415352ddfa80567beed471001a'],
        ['data_batch_5', '482c414d41f54cd18b22e5b47cb7c3cb'],
    ]

    test_list = [
        ['test_batch', '40351d587109b95175f43aff81a1287e'],
    ]

    # Metadata file holding the human-readable class names; 'key' is the
    # dict key under which the names are stored inside that pickle.
    meta = {
        'filename': 'batches.meta',
        'key': 'label_names',
        'md5': '5ff9c542aee3614f3951f8cda6e48888',
    }

    def __init__(self, root, train=True, transform=None, target_transform=None,
                 download=False):

        # Zero-argument super() -- the file already relies on Python-3-only
        # features (pickle encoding kwarg, dict comprehension).
        super().__init__(root, transform=transform,
                         target_transform=target_transform)

        self.train = train  # training set or test set

        if download:
            self.download()

        if not self._check_integrity():
            raise RuntimeError('Dataset not found or corrupted.' +
                               ' You can use download=True to download it')

        downloaded_list = self.train_list if self.train else self.test_list

        self.data = []
        self.targets = []

        # now load the picked numpy arrays
        for file_name, checksum in downloaded_list:
            file_path = os.path.join(self.root, self.base_folder, file_name)
            with open(file_path, 'rb') as f:
                # NOTE: pickle.load is only safe here because the md5
                # checksums above pin the exact file contents.
                entry = pickle.load(f, encoding='latin1')
                self.data.append(entry['data'])
                # CIFAR10 batches store 'labels'; the CIFAR100 subclass
                # reuses this loader and stores 'fine_labels' instead.
                if 'labels' in entry:
                    self.targets.extend(entry['labels'])
                else:
                    self.targets.extend(entry['fine_labels'])

        self.data = np.vstack(self.data).reshape(-1, 3, 32, 32)
        self.data = self.data.transpose((0, 2, 3, 1))  # convert to HWC

        self._load_meta()

    def _load_meta(self):
        """Load the class-name metadata file and build classes/class_to_idx."""
        path = os.path.join(self.root, self.base_folder, self.meta['filename'])
        if not check_integrity(path, self.meta['md5']):
            raise RuntimeError('Dataset metadata file not found or corrupted.' +
                               ' You can use download=True to download it')
        with open(path, 'rb') as infile:
            data = pickle.load(infile, encoding='latin1')
            self.classes = data[self.meta['key']]
        self.class_to_idx = {_class: i for i, _class in enumerate(self.classes)}

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        img, target = self.data[index], self.targets[index]

        # doing this so that it is consistent with all other datasets
        # to return a PIL Image
        img = Image.fromarray(img)

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self):
        return len(self.data)

    def _check_integrity(self):
        """Return True iff every train AND test batch passes its md5 check."""
        root = self.root
        for fentry in (self.train_list + self.test_list):
            filename, md5 = fentry[0], fentry[1]
            fpath = os.path.join(root, self.base_folder, filename)
            if not check_integrity(fpath, md5):
                return False
        return True

    def download(self):
        """Download and extract the archive unless all files already verify."""
        if self._check_integrity():
            print('Files already downloaded and verified')
            return
        download_and_extract_archive(self.url, self.root, filename=self.filename, md5=self.tgz_md5)

    def extra_repr(self):
        # BUGFIX: was ``"Train" if self.train is True else "Test"`` -- the
        # identity check misreported the split for truthy non-bool values
        # (e.g. train=1 printed "Test").
        return "Split: {}".format("Train" if self.train else "Test")


class CIFAR100(CIFAR10):
    """`CIFAR100 <https://www.cs.toronto.edu/~kriz/cifar.html>`_ Dataset.

    This is a subclass of the `CIFAR10` Dataset.
    """
    # Everything below simply re-points the inherited CIFAR10 loading
    # machinery at the CIFAR-100 archive; no logic is overridden.
    base_folder = 'cifar-100-python'
    url = "https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz"
    filename = "cifar-100-python.tar.gz"
    tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85'

    # CIFAR-100 ships a single file per split instead of five train batches.
    train_list = [
        ['train', '16019d7e3df5f24257cddd939b257f8d'],
    ]
    test_list = [
        ['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc'],
    ]

    # Metadata file holding the 100 fine-grained class names.
    meta = {
        'filename': 'meta',
        'key': 'fine_label_names',
        'md5': '7973b15100ade9c7d40fb424638fde48',
    }