"vscode:/vscode.git/clone" did not exist on "6f046dbf1806962aa7ec0d9e09a02a248a744fde"
imagenet.py 8.46 KB
Newer Older
Philip Meier's avatar
Philip Meier committed
1
2
import os
import shutil
3
import tempfile
4
5
import warnings
from contextlib import contextmanager
6
from typing import Any, Dict, List, Iterator, Optional, Tuple
7

Philip Meier's avatar
Philip Meier committed
8
import torch
9

Philip Meier's avatar
Philip Meier committed
10
from .folder import ImageFolder
11
12
13
from .utils import check_integrity, extract_archive, verify_str_arg

# Archive file name and expected md5 checksum for each ImageNet2012 artifact.
ARCHIVE_META = {
    "train": ("ILSVRC2012_img_train.tar", "1d675b47d978889d74fa0da5fadfb00e"),
    "val": ("ILSVRC2012_img_val.tar", "29b22e2961454d5413ddabcf34fc5622"),
    "devkit": ("ILSVRC2012_devkit_t12.tar.gz", "fa75699e90414af021442c21a62c3abf"),
}

# File (relative to the dataset root) caching the parsed devkit meta information.
META_FILE = "meta.bin"


class ImageNet(ImageFolder):
    """`ImageNet <http://image-net.org/>`_ 2012 Classification Dataset.

    Args:
        root (string): Root directory of the ImageNet Dataset.
        split (string, optional): The dataset split, supports ``train``, or ``val``.
        transform (callable, optional): A function/transform that  takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        loader (callable, optional): A function to load an image given its path.

     Attributes:
        classes (list): List of the class name tuples.
        class_to_idx (dict): Dict with items (class_name, class_index).
        wnids (list): List of the WordNet IDs.
        wnid_to_idx (dict): Dict with items (wordnet_id, class_index).
        imgs (list): List of (image path, class_index) tuples
        targets (list): The class_index value for each image in the dataset
    """

    # NOTE: `download` was previously annotated `Optional[str]`, but it is only
    # ever compared against the booleans True/False below, so `Optional[bool]`
    # is the correct (and backward-compatible) annotation.
    def __init__(self, root: str, split: str = "train", download: Optional[bool] = None, **kwargs: Any) -> None:
        if download is True:
            msg = (
                "The dataset is no longer publicly accessible. You need to "
                "download the archives externally and place them in the root "
                "directory."
            )
            raise RuntimeError(msg)
        elif download is False:
            # Deprecated flag: warn but proceed, for backward compatibility.
            msg = "The use of the download flag is deprecated, since the dataset is no longer publicly accessible."
            warnings.warn(msg, RuntimeWarning)

        root = self.root = os.path.expanduser(root)
        self.split = verify_str_arg(split, "split", ("train", "val"))

        # Make sure the devkit meta file and the split folder exist on disk.
        self.parse_archives()
        wnid_to_classes = load_meta_file(self.root)[0]

        super().__init__(self.split_folder, **kwargs)
        self.root = root

        # ImageFolder discovered the WordNet IDs as "classes"; keep those under
        # wnids/wnid_to_idx and remap classes/class_to_idx to the human-readable
        # class name tuples from the devkit meta file.
        self.wnids = self.classes
        self.wnid_to_idx = self.class_to_idx
        self.classes = [wnid_to_classes[wnid] for wnid in self.wnids]
        self.class_to_idx = {cls: idx for idx, clss in enumerate(self.classes) for cls in clss}

    def parse_archives(self) -> None:
        """Parse the devkit archive and the image archive of the selected split
        if their extracted results are not already present under ``self.root``."""
        if not check_integrity(os.path.join(self.root, META_FILE)):
            parse_devkit_archive(self.root)

        if not os.path.isdir(self.split_folder):
            if self.split == "train":
                parse_train_archive(self.root)
            elif self.split == "val":
                parse_val_archive(self.root)

    @property
    def split_folder(self) -> str:
        """Directory containing the images of the selected split: <root>/<split>."""
        return os.path.join(self.root, self.split)

    def extra_repr(self) -> str:
        return "Split: {split}".format(**self.__dict__)


def load_meta_file(root: str, file: Optional[str] = None) -> Tuple[Dict[str, str], List[str]]:
    """Load the ``(wnid_to_classes, val_wnids)`` tuple cached in the meta file.

    Args:
        root (str): Root directory containing the meta file
        file (str, optional): Name of the meta file. Defaults to ``META_FILE``

    Raises:
        RuntimeError: If the meta file is missing or corrupted.
    """
    path = os.path.join(root, META_FILE if file is None else file)
    if not check_integrity(path):
        msg = (
            "The meta file {} is not present in the root directory or is corrupted. "
            "This file is automatically created by the ImageNet dataset."
        )
        raise RuntimeError(msg.format(path, root))
    return torch.load(path)


def _verify_archive(root: str, file: str, md5: str) -> None:
    """Raise ``RuntimeError`` if ``file`` is missing from ``root`` or fails its md5 check."""
    if check_integrity(os.path.join(root, file), md5):
        return
    msg = (
        "The archive {} is not present in the root directory or is corrupted. "
        "You need to download it externally and place it in {}."
    )
    raise RuntimeError(msg.format(file, root))


def parse_devkit_archive(root: str, file: Optional[str] = None) -> None:
    """Parse the devkit archive of the ImageNet2012 classification dataset and save
    the meta information in a binary file.

    Args:
        root (str): Root directory containing the devkit archive
        file (str, optional): Name of devkit archive. Defaults to
            'ILSVRC2012_devkit_t12.tar.gz'
    """
    # Imported lazily so scipy is only required when the devkit is parsed.
    import scipy.io as sio

    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, Tuple[str, ...]]]:
        # meta.mat maps ILSVRC indices to WordNet IDs and class name strings.
        # Only leaf synsets (num_children == 0) are actual classification targets.
        metafile = os.path.join(devkit_root, "data", "meta.mat")
        meta = sio.loadmat(metafile, squeeze_me=True)["synsets"]
        nums_children = list(zip(*meta))[4]
        meta = [meta[idx] for idx, num_children in enumerate(nums_children) if num_children == 0]
        idcs, wnids, classes = list(zip(*meta))[:3]
        # "great white shark, white shark" -> ("great white shark", "white shark")
        classes = [tuple(clss.split(", ")) for clss in classes]
        idx_to_wnid = {idx: wnid for idx, wnid in zip(idcs, wnids)}
        wnid_to_classes = {wnid: clss for wnid, clss in zip(wnids, classes)}
        return idx_to_wnid, wnid_to_classes

    def parse_val_groundtruth_txt(devkit_root: str) -> List[int]:
        # One class index per line, ordered like the validation images.
        file = os.path.join(devkit_root, "data", "ILSVRC2012_validation_ground_truth.txt")
        with open(file) as txtfh:
            val_idcs = txtfh.readlines()
        return [int(val_idx) for val_idx in val_idcs]

    archive_meta = ARCHIVE_META["devkit"]
    if file is None:
        file = archive_meta[0]
    md5 = archive_meta[1]

    _verify_archive(root, file, md5)

    # Extract into a self-cleaning temporary directory instead of a hand-rolled
    # @contextmanager around mkdtemp()/rmtree().
    with tempfile.TemporaryDirectory() as tmp_dir:
        extract_archive(os.path.join(root, file), tmp_dir)

        devkit_root = os.path.join(tmp_dir, "ILSVRC2012_devkit_t12")
        idx_to_wnid, wnid_to_classes = parse_meta_mat(devkit_root)
        val_idcs = parse_val_groundtruth_txt(devkit_root)
        val_wnids = [idx_to_wnid[idx] for idx in val_idcs]

        torch.save((wnid_to_classes, val_wnids), os.path.join(root, META_FILE))


def parse_train_archive(root: str, file: Optional[str] = None, folder: str = "train") -> None:
    """Parse the train images archive of the ImageNet2012 classification dataset and
    prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the train images archive
        file (str, optional): Name of train images archive. Defaults to
            'ILSVRC2012_img_train.tar'
        folder (str, optional): Optional name for train images folder. Defaults to
            'train'
    """
    meta = ARCHIVE_META["train"]
    if file is None:
        file = meta[0]

    _verify_archive(root, file, meta[1])

    # The outer tar contains one inner tar per WordNet ID.
    train_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), train_root)

    # Unpack every per-class tar into a directory named after it, deleting the
    # inner archive once it has been extracted.
    for name in os.listdir(train_root):
        inner_archive = os.path.join(train_root, name)
        extract_archive(inner_archive, os.path.splitext(inner_archive)[0], remove_finished=True)


def parse_val_archive(
    root: str, file: Optional[str] = None, wnids: Optional[List[str]] = None, folder: str = "val"
) -> None:
    """Parse the validation images archive of the ImageNet2012 classification dataset
    and prepare it for usage with the ImageNet dataset.

    Args:
        root (str): Root directory containing the validation images archive
        file (str, optional): Name of validation images archive. Defaults to
            'ILSVRC2012_img_val.tar'
        wnids (list, optional): List of WordNet IDs of the validation images. If None
            is given, the IDs are loaded from the meta file in the root directory
        folder (str, optional): Optional name for validation images folder. Defaults to
            'val'
    """
    meta = ARCHIVE_META["val"]
    if file is None:
        file = meta[0]
    if wnids is None:
        wnids = load_meta_file(root)[1]

    _verify_archive(root, file, meta[1])

    val_root = os.path.join(root, folder)
    extract_archive(os.path.join(root, file), val_root)

    # Sorting the flat image file names lines them up with the ground-truth
    # wnid list, which is ordered by validation image number.
    images = sorted(os.path.join(val_root, entry) for entry in os.listdir(val_root))

    for wnid in set(wnids):
        os.mkdir(os.path.join(val_root, wnid))

    # Move each image into the subdirectory of its ground-truth class.
    for wnid, img_file in zip(wnids, images):
        shutil.move(img_file, os.path.join(val_root, wnid, os.path.basename(img_file)))