# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
import torch.nn as nn
import numpy as np
import PIL.Image


class CrossEntropyLabelSmooth(nn.Module):
    """Cross-entropy loss with label smoothing."""

    def __init__(self, num_classes, epsilon):
        super(CrossEntropyLabelSmooth, self).__init__()
        self.num_classes = num_classes
        self.epsilon = epsilon
        self.logsoftmax = nn.LogSoftmax(dim=1)

    def forward(self, inputs, targets):
        log_probs = self.logsoftmax(inputs)
        # One-hot encode the targets, then smooth them towards a uniform distribution.
        targets = torch.zeros_like(log_probs).scatter_(1, targets.unsqueeze(1), 1)
        targets = (1 - self.epsilon) * targets + self.epsilon / self.num_classes
        loss = (-targets * log_probs).mean(0).sum()
        return loss


def accuracy(output, target, topk=(1, 5)):
    """Computes the top-k accuracy for the specified values of k."""
    maxk = max(topk)
    batch_size = target.size(0)

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    # One-hot (or soft-label) targets: reduce to class indices.
    if target.ndimension() > 1:
        target = target.max(1)[1]

    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = dict()
    for k in topk:
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res["acc{}".format(k)] = correct_k.mul_(1.0 / batch_size).item()
    return res


class ToBGRTensor(object):
    """Converts an RGB PIL image or HxWx3 ndarray to an unnormalized BGR (3, H, W) float tensor."""

    def __call__(self, img):
        assert isinstance(img, (np.ndarray, PIL.Image.Image))
        if isinstance(img, PIL.Image.Image):
            img = np.asarray(img)
        img = img[:, :, ::-1]  # RGB -> BGR
        img = np.transpose(img, [2, 0, 1])  # HWC -> CHW, i.e. (3, H, W)
        img = np.ascontiguousarray(img)
        img = torch.from_numpy(img).float()
        return img


def get_archchoice_by_model(model):
    # Map each searchable layer name to its chosen op, encoded as the
    # suffix after the last underscore in the value string.
    result = {}
    for k, v in model.items():
        assert k in v
        result[k] = model[k].split("_")[-1]
    return result
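

# --- Minimal usage sketch (not part of the original module) ---
# The block below is an illustrative smoke test, assuming a 10-class
# classification setup with random logits and labels; it only exercises
# the helpers defined above and can be removed without affecting them.
if __name__ == "__main__":
    logits = torch.randn(8, 10)          # batch of 8, 10 classes
    labels = torch.randint(0, 10, (8,))  # integer class targets

    criterion = CrossEntropyLabelSmooth(num_classes=10, epsilon=0.1)
    print("smoothed CE loss:", criterion(logits, labels).item())

    print("top-k accuracy:", accuracy(logits, labels, topk=(1, 5)))

    # ToBGRTensor accepts an HxWx3 RGB ndarray or PIL image.
    rgb = np.random.randint(0, 256, (32, 32, 3), dtype=np.uint8)
    print("BGR tensor shape:", ToBGRTensor()(rgb).shape)  # torch.Size([3, 32, 32])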