import os
import shutil
import tempfile
import warnings
from contextlib import contextmanager
from typing import Any, Dict, List, Iterator, Optional, Tuple

import torch

from .folder import ImageFolder
from .utils import check_integrity, extract_archive, verify_str_arg

# (archive file name, md5 checksum) for each of the three ILSVRC2012 archives
# that must be downloaded externally and placed in the dataset root directory.
ARCHIVE_META = {
    'train': ('ILSVRC2012_img_train.tar', '1d675b47d978889d74fa0da5fadfb00e'),
    'val': ('ILSVRC2012_img_val.tar', '29b22e2961454d5413ddabcf34fc5622'),
    'devkit': ('ILSVRC2012_devkit_t12.tar.gz', 'fa75699e90414af021442c21a62c3abf')
}

# Name of the binary file (written via torch.save by parse_devkit_archive)
# that caches the parsed devkit meta information in the dataset root.
META_FILE = "meta.bin"


class ImageNet(ImageFolder):
    """`ImageNet <http://image-net.org/>`_ 2012 Classification Dataset.

    Args:
        root (string): Root directory of the ImageNet Dataset.
        split (string, optional): The dataset split, supports ``train``, or ``val``.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        loader (callable, optional): A function to load an image given its path.

    Attributes:
        classes (list): List of the class name tuples.
        class_to_idx (dict): Dict with items (class_name, class_index).
        wnids (list): List of the WordNet IDs.
        wnid_to_idx (dict): Dict with items (wordnet_id, class_index).
        imgs (list): List of (image path, class_index) tuples
        targets (list): The class_index value for each image in the dataset
    """

    def __init__(self, root: str, split: str = 'train', download: Optional[bool] = None, **kwargs: Any) -> None:
        # ``download`` is kept only for backward compatibility: the dataset is
        # no longer publicly downloadable, so True is an error and False only
        # triggers a deprecation warning.
        if download is True:
            msg = ("The dataset is no longer publicly accessible. You need to "
                   "download the archives externally and place them in the root "
                   "directory.")
            raise RuntimeError(msg)
        elif download is False:
            msg = ("The use of the download flag is deprecated, since the dataset "
                   "is no longer publicly accessible.")
            warnings.warn(msg, RuntimeWarning)

        root = self.root = os.path.expanduser(root)
        self.split = verify_str_arg(split, "split", ("train", "val"))

        # Extract the archives if not done already, then read the
        # wnid -> class-name-tuple mapping from the cached meta file.
        self.parse_archives()
        wnid_to_classes = load_meta_file(self.root)[0]

        super(ImageNet, self).__init__(self.split_folder, **kwargs)
        self.root = root

        # ImageFolder populated ``classes``/``class_to_idx`` from the folder
        # names, which here are WordNet IDs. Preserve those as
        # ``wnids``/``wnid_to_idx`` and expose human-readable class names
        # instead; each wnid maps to a tuple of names, so several names can
        # share the same class index.
        self.wnids = self.classes
        self.wnid_to_idx = self.class_to_idx
        self.classes = [wnid_to_classes[wnid] for wnid in self.wnids]
        self.class_to_idx = {cls: idx
                             for idx, clss in enumerate(self.classes)
                             for cls in clss}

    def parse_archives(self) -> None:
        # (Re)create the meta file from the devkit archive if it is missing
        # or fails the integrity check.
        if not check_integrity(os.path.join(self.root, META_FILE)):
            parse_devkit_archive(self.root)

        # Extract the image archive for the requested split if its folder
        # does not exist yet.
        if not os.path.isdir(self.split_folder):
            if self.split == 'train':
                parse_train_archive(self.root)
            elif self.split == 'val':
                parse_val_archive(self.root)

    @property
    def split_folder(self) -> str:
        # Directory holding the images of the currently selected split.
        return os.path.join(self.root, self.split)

    def extra_repr(self) -> str:
        return "Split: {split}".format(**self.__dict__)


def load_meta_file(root: str, file: Optional[str] = None) -> Tuple[Dict[str, str], List[str]]:
    """Load the meta information cached by :func:`parse_devkit_archive`.

    Args:
        root (str): Root directory containing the meta file.
        file (str, optional): Name of the meta file. Defaults to ``META_FILE``.

    Returns:
        Tuple[Dict[str, str], List[str]]: The wnid-to-class-names mapping and
        the list of validation-image WordNet IDs, as saved by
        :func:`parse_devkit_archive`.

    Raises:
        RuntimeError: If the meta file is not present or is corrupted.
    """
    if file is None:
        file = META_FILE
    file = os.path.join(root, file)

    if check_integrity(file):
        return torch.load(file)
    else:
        # Bug fix: the template has a single ``{}`` placeholder, but two
        # arguments were previously passed to ``format`` — the second (root)
        # was silently ignored.
        msg = ("The meta file {} is not present in the root directory or is corrupted. "
               "This file is automatically created by the ImageNet dataset.")
        raise RuntimeError(msg.format(file))


def _verify_archive(root: str, file: str, md5: str) -> None:
    """Raise a ``RuntimeError`` unless ``file`` exists in ``root`` and matches ``md5``."""
    archive = os.path.join(root, file)
    if check_integrity(archive, md5):
        return
    msg = ("The archive {} is not present in the root directory or is corrupted. "
           "You need to download it externally and place it in {}.")
    raise RuntimeError(msg.format(file, root))


def parse_devkit_archive(root: str, file: Optional[str] = None) -> None:
    """Parse the devkit archive of the ImageNet2012 classification dataset and save
    the meta information in a binary file.

    Args:
        root (str): Root directory containing the devkit archive
        file (str, optional): Name of devkit archive. Defaults to
            'ILSVRC2012_devkit_t12.tar.gz'
    """
    import scipy.io as sio

    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, str]]:
        # Build idx -> wnid and wnid -> class-name-tuple mappings from
        # meta.mat, keeping only leaf synsets (num_children == 0).
        metafile = os.path.join(devkit_root, "data", "meta.mat")
        meta = sio.loadmat(metafile, squeeze_me=True)['synsets']
        nums_children = list(zip(*meta))[4]
        meta = [meta[idx] for idx, num_children in enumerate(nums_children)
                if num_children == 0]
        idcs, wnids, classes = list(zip(*meta))[:3]
        classes = [tuple(clss.split(', ')) for clss in classes]
        idx_to_wnid = {idx: wnid for idx, wnid in zip(idcs, wnids)}
        wnid_to_classes = {wnid: clss for wnid, clss in zip(wnids, classes)}
        return idx_to_wnid, wnid_to_classes

    def parse_val_groundtruth_txt(devkit_root: str) -> List[int]:
        # One integer class index per line, in validation-image order.
        file = os.path.join(devkit_root, "data",
                            "ILSVRC2012_validation_ground_truth.txt")
        with open(file, 'r') as txtfh:
            val_idcs = txtfh.readlines()
        return [int(val_idx) for val_idx in val_idcs]

    archive_meta = ARCHIVE_META["devkit"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]

    _verify_archive(root, file, md5)

    # tempfile.TemporaryDirectory replaces the previous hand-rolled
    # mkdtemp/rmtree context manager; it guarantees cleanup even on error.
    with tempfile.TemporaryDirectory() as tmp_dir:
        extract_archive(os.path.join(root, file), tmp_dir)

        devkit_root = os.path.join(tmp_dir, "ILSVRC2012_devkit_t12")
        idx_to_wnid, wnid_to_classes = parse_meta_mat(devkit_root)
        val_idcs = parse_val_groundtruth_txt(devkit_root)
        val_wnids = [idx_to_wnid[idx] for idx in val_idcs]

        torch.save((wnid_to_classes, val_wnids), os.path.join(root, META_FILE))


def parse_train_archive(root: str, file: Optional[str] = None, folder: str = "train") -> None:
    """Parse the train images archive of the ImageNet2012 classification dataset and
    prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the train images archive
        file (str, optional): Name of train images archive. Defaults to
            'ILSVRC2012_img_train.tar'
        folder (str, optional): Optional name for train images folder. Defaults to
            'train'
    """
    default_name, md5 = ARCHIVE_META["train"]
    if file is None:
        file = default_name

    _verify_archive(root, file, md5)

    # The outer tar contains one inner tar per class (wnid); unpack it first.
    train_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), train_root)

    # Unpack every per-class tar into a folder of the same name, deleting the
    # inner archive afterwards.
    for entry in os.listdir(train_root):
        inner_archive = os.path.join(train_root, entry)
        extract_archive(inner_archive, os.path.splitext(inner_archive)[0], remove_finished=True)


def parse_val_archive(
    root: str, file: Optional[str] = None, wnids: Optional[List[str]] = None, folder: str = "val"
) -> None:
    """Parse the validation images archive of the ImageNet2012 classification dataset
    and prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the validation images archive
        file (str, optional): Name of validation images archive. Defaults to
            'ILSVRC2012_img_val.tar'
        wnids (list, optional): List of WordNet IDs of the validation images. If None
            is given, the IDs are loaded from the meta file in the root directory
        folder (str, optional): Optional name for validation images folder. Defaults to
            'val'
    """
    default_name, md5 = ARCHIVE_META["val"]
    if file is None:
        file = default_name
    if wnids is None:
        wnids = load_meta_file(root)[1]

    _verify_archive(root, file, md5)

    val_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), val_root)

    # Lexicographic sort pairs each extracted image with its entry in the
    # ground-truth wnid list (assumes the flat archive's numbered file names
    # sort into ground-truth order).
    images = sorted(os.path.join(val_root, image) for image in os.listdir(val_root))

    # One sub-folder per class ...
    for wnid in set(wnids):
        os.mkdir(os.path.join(val_root, wnid))

    # ... then move each image into the folder of its class.
    for wnid, img_file in zip(wnids, images):
        shutil.move(img_file, os.path.join(val_root, wnid, os.path.basename(img_file)))