import os
import shutil
import tempfile
from contextlib import contextmanager
from typing import Any, Dict, List, Iterator, Optional, Tuple

import torch

from .folder import ImageFolder
from .utils import check_integrity, extract_archive, verify_str_arg

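# Archive file names and their MD5 checksums. The archives are not downloaded by
# torchvision and have to be placed in the dataset root manually.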
ARCHIVE_META = {
    "train": ("ILSVRC2012_img_train.tar", "1d675b47d978889d74fa0da5fadfb00e"),
    "val": ("ILSVRC2012_img_val.tar", "29b22e2961454d5413ddabcf34fc5622"),
    "devkit": ("ILSVRC2012_devkit_t12.tar.gz", "fa75699e90414af021442c21a62c3abf"),
}

META_FILE = "meta.bin"


class ImageNet(ImageFolder):
    """`ImageNet <http://image-net.org/>`_ 2012 Classification Dataset.

    Args:
        root (string): Root directory of the ImageNet Dataset.
        split (string, optional): The dataset split, supports ``train`` or ``val``.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g., ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        loader (callable, optional): A function to load an image given its path.

    Attributes:
        classes (list): List of the class name tuples.
        class_to_idx (dict): Dict with items (class_name, class_index).
        wnids (list): List of the WordNet IDs.
        wnid_to_idx (dict): Dict with items (wordnet_id, class_index).
        imgs (list): List of (image path, class_index) tuples
        targets (list): The class_index value for each image in the dataset
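
    Example:
        A minimal usage sketch; ``path/to/imagenet`` stands in for a root directory
        that already contains the downloaded ImageNet archives::

            dataset = ImageNet("path/to/imagenet", split="val")
            img, target = dataset[0]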
    """

    def __init__(self, root: str, split: str = "train", **kwargs: Any) -> None:
        root = self.root = os.path.expanduser(root)
        self.split = verify_str_arg(split, "split", ("train", "val"))

        self.parse_archives()
        wnid_to_classes = load_meta_file(self.root)[0]

        super().__init__(self.split_folder, **kwargs)
        self.root = root

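        # ImageFolder discovered the WordNet IDs (the folder names) as classes; keep
        # them as wnids and expose the human-readable names from the devkit meta file
        # as classes instead.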
        self.wnids = self.classes
        self.wnid_to_idx = self.class_to_idx
        self.classes = [wnid_to_classes[wnid] for wnid in self.wnids]
        self.class_to_idx = {cls: idx for idx, clss in enumerate(self.classes) for cls in clss}

    def parse_archives(self) -> None:
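        # Only unpack what is missing: the devkit if the meta file has not been
        # created yet, and the image archive if the split folder does not exist.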
        if not check_integrity(os.path.join(self.root, META_FILE)):
            parse_devkit_archive(self.root)

        if not os.path.isdir(self.split_folder):
            if self.split == "train":
                parse_train_archive(self.root)
            elif self.split == "val":
                parse_val_archive(self.root)

    @property
    def split_folder(self) -> str:
        return os.path.join(self.root, self.split)

    def extra_repr(self) -> str:
        return "Split: {split}".format(**self.__dict__)


def load_meta_file(root: str, file: Optional[str] = None) -> Tuple[Dict[str, str], List[str]]:
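    """Load the meta file created by ``parse_devkit_archive``, i.e. the tuple
    ``(wnid_to_classes, val_wnids)`` stored as ``META_FILE`` in the root directory.
    """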
    if file is None:
        file = META_FILE
    file = os.path.join(root, file)

    if check_integrity(file):
        return torch.load(file)
    else:
        msg = (
            "The meta file {} is not present in the root directory or is corrupted. "
            "This file is automatically created by the ImageNet dataset."
        )
        raise RuntimeError(msg.format(file))


def _verify_archive(root: str, file: str, md5: str) -> None:
    if not check_integrity(os.path.join(root, file), md5):
        msg = (
            "The archive {} is not present in the root directory or is corrupted. "
            "You need to download it externally and place it in {}."
        )
        raise RuntimeError(msg.format(file, root))


def parse_devkit_archive(root: str, file: Optional[str] = None) -> None:
    """Parse the devkit archive of the ImageNet2012 classification dataset and save
    the meta information in a binary file.

    Args:
        root (str): Root directory containing the devkit archive
        file (str, optional): Name of devkit archive. Defaults to
            'ILSVRC2012_devkit_t12.tar.gz'
    """
    import scipy.io as sio

    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, Tuple[str, ...]]]:
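        # meta.mat lists every synset of the hierarchy; only the leaves
        # (num_children == 0) are the actual ILSVRC2012 classes.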
        metafile = os.path.join(devkit_root, "data", "meta.mat")
        meta = sio.loadmat(metafile, squeeze_me=True)["synsets"]
        nums_children = list(zip(*meta))[4]
        meta = [meta[idx] for idx, num_children in enumerate(nums_children) if num_children == 0]
        idcs, wnids, classes = list(zip(*meta))[:3]
        classes = [tuple(clss.split(", ")) for clss in classes]
        idx_to_wnid = {idx: wnid for idx, wnid in zip(idcs, wnids)}
        wnid_to_classes = {wnid: clss for wnid, clss in zip(wnids, classes)}
        return idx_to_wnid, wnid_to_classes

    def parse_val_groundtruth_txt(devkit_root: str) -> List[int]:
        file = os.path.join(devkit_root, "data", "ILSVRC2012_validation_ground_truth.txt")
        with open(file) as txtfh:
            val_idcs = txtfh.readlines()
        return [int(val_idx) for val_idx in val_idcs]

    @contextmanager
    def get_tmp_dir() -> Iterator[str]:
        tmp_dir = tempfile.mkdtemp()
        try:
            yield tmp_dir
        finally:
            shutil.rmtree(tmp_dir)

    archive_meta = ARCHIVE_META["devkit"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]

    _verify_archive(root, file, md5)

    with get_tmp_dir() as tmp_dir:
        extract_archive(os.path.join(root, file), tmp_dir)

        devkit_root = os.path.join(tmp_dir, "ILSVRC2012_devkit_t12")
        idx_to_wnid, wnid_to_classes = parse_meta_mat(devkit_root)
        val_idcs = parse_val_groundtruth_txt(devkit_root)
        val_wnids = [idx_to_wnid[idx] for idx in val_idcs]

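        # Cache the parsed meta information so that later runs can skip the devkit parsing.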
        torch.save((wnid_to_classes, val_wnids), os.path.join(root, META_FILE))


def parse_train_archive(root: str, file: Optional[str] = None, folder: str = "train") -> None:
    """Parse the train images archive of the ImageNet2012 classification dataset and
    prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the train images archive
        file (str, optional): Name of train images archive. Defaults to
            'ILSVRC2012_img_train.tar'
        folder (str, optional): Optional name for train images folder. Defaults to
            'train'
    """
    archive_meta = ARCHIVE_META["train"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]

    _verify_archive(root, file, md5)

    train_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), train_root)

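    # The outer archive contains one inner tar per class, named after its WordNet ID.
    # Unpack each of them into a folder of the same name and remove the inner tar.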
    archives = [os.path.join(train_root, archive) for archive in os.listdir(train_root)]
    for archive in archives:
        extract_archive(archive, os.path.splitext(archive)[0], remove_finished=True)


def parse_val_archive(
    root: str, file: Optional[str] = None, wnids: Optional[List[str]] = None, folder: str = "val"
) -> None:
    """Parse the validation images archive of the ImageNet2012 classification dataset
    and prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the validation images archive
        file (str, optional): Name of validation images archive. Defaults to
            'ILSVRC2012_img_val.tar'
        wnids (list, optional): List of WordNet IDs of the validation images. If None
            is given, the IDs are loaded from the meta file in the root directory
        folder (str, optional): Optional name for validation images folder. Defaults to
            'val'
    """
    archive_meta = ARCHIVE_META["val"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]
    if wnids is None:
        wnids = load_meta_file(root)[1]

    _verify_archive(root, file, md5)

    val_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), val_root)

    images = sorted(os.path.join(val_root, image) for image in os.listdir(val_root))

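    # The validation archive is a flat list of images. Sorting the file names puts them
    # in the same order as the devkit ground truth, so zipping them with wnids assigns
    # each image to its class folder.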
    for wnid in set(wnids):
        os.mkdir(os.path.join(val_root, wnid))

    for wnid, img_file in zip(wnids, images):
        shutil.move(img_file, os.path.join(val_root, wnid, os.path.basename(img_file)))