"vscode:/vscode.git/clone" did not exist on "519bd41ff303d14972d8e8b253f83aa194c3dd01"
Unverified commit 5c57f5ec, authored by Francisco Massa, committed by GitHub

Expand usage logging to models (#4735)

Summary: We would like to track the internal usage of TorchVision components. We are already tracking [datasets usage](https://fburl.com/daiquery/dqpmemn3). This diff expands the tracking to all models.

Reviewed By: fmassa

Differential Revision: D31441632

fbshipit-source-id: e26072e582ac9f832c2056307ebf0eccf2ed6c9c
Co-authored-by: Kai Zhang <kaizh@fb.com>
parent 02b5a817
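
For readers skimming the diff below: every model constructor gains a single `_log_api_usage_once(self)` call, backed by a small helper added to torchvision/utils.py (last hunk). A minimal sketch of the mechanism, using a hypothetical `FooModel` as a stand-in for the real classes:

```python
import torch


def _log_api_usage_once(obj: object) -> None:
    # Same helper this diff adds to torchvision/utils.py: the logging key is
    # derived from the instance, e.g. an AlexNet instance yields
    # "torchvision.models.alexnet.AlexNet". The underlying hook is intended
    # to record each key at most once per process.
    torch._C._log_api_usage_once(f"{obj.__module__}.{obj.__class__.__name__}")


class FooModel(torch.nn.Module):
    # Hypothetical model, standing in for AlexNet, ResNet, etc.
    def __init__(self) -> None:
        super().__init__()
        _log_api_usage_once(self)  # the one line each constructor gains
```

Because the key embeds `obj.__module__`, subclasses report their own module and class name, which is why a single one-line call per constructor suffices.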
torchvision/datasets/vision.py
@@ -4,6 +4,8 @@ from typing import Any, Callable, List, Optional, Tuple
 import torch
 import torch.utils.data as data
 
+from ..utils import _log_api_usage_once
+
 
 class VisionDataset(data.Dataset):
     """
@@ -33,7 +35,7 @@ class VisionDataset(data.Dataset):
         transform: Optional[Callable] = None,
         target_transform: Optional[Callable] = None,
     ) -> None:
-        torch._C._log_api_usage_once(f"torchvision.datasets.{self.__class__.__name__}")
+        _log_api_usage_once(self)
         if isinstance(root, torch._six.string_classes):
             root = os.path.expanduser(root)
         self.root = root
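
One behavioral nuance in the hunk above: the datasets key now includes the defining module, rather than the bare `torchvision.datasets.<ClassName>` the old inline call produced. A quick illustration (`FakeData` is just a convenient built-in dataset that needs no files on disk):

```python
from torchvision.datasets import FakeData

# VisionDataset.__init__ now logs "torchvision.datasets.fakedata.FakeData";
# before this diff it would have logged "torchvision.datasets.FakeData".
ds = FakeData(size=10)
```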
torchvision/models/alexnet.py
@@ -4,6 +4,7 @@ import torch
 import torch.nn as nn
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["AlexNet", "alexnet"]
@@ -17,6 +18,7 @@ model_urls = {
 class AlexNet(nn.Module):
     def __init__(self, num_classes: int = 1000, dropout: float = 0.5) -> None:
         super(AlexNet, self).__init__()
+        _log_api_usage_once(self)
         self.features = nn.Sequential(
             nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
             nn.ReLU(inplace=True),
torchvision/models/densenet.py
@@ -9,6 +9,7 @@ import torch.utils.checkpoint as cp
 from torch import Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["DenseNet", "densenet121", "densenet169", "densenet201", "densenet161"]
@@ -162,6 +163,7 @@ class DenseNet(nn.Module):
     ) -> None:
         super(DenseNet, self).__init__()
+        _log_api_usage_once(self)
 
         # First convolution
         self.features = nn.Sequential(
torchvision/models/efficientnet.py
@@ -9,6 +9,7 @@ from torchvision.ops import StochasticDepth
 
 from .._internally_replaced_utils import load_state_dict_from_url
 from ..ops.misc import ConvNormActivation, SqueezeExcitation
+from ..utils import _log_api_usage_once
 from ._utils import _make_divisible
@@ -169,6 +170,7 @@ class EfficientNet(nn.Module):
             norm_layer (Optional[Callable[..., nn.Module]]): Module specifying the normalization layer to use
         """
         super().__init__()
+        _log_api_usage_once(self)
 
         if not inverted_residual_setting:
             raise ValueError("The inverted_residual_setting should not be empty")
torchvision/models/googlenet.py
@@ -8,6 +8,7 @@ import torch.nn.functional as F
 from torch import Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["GoogLeNet", "googlenet", "GoogLeNetOutputs", "_GoogLeNetOutputs"]
@@ -75,6 +76,7 @@ class GoogLeNet(nn.Module):
         dropout_aux: float = 0.7,
     ) -> None:
         super(GoogLeNet, self).__init__()
+        _log_api_usage_once(self)
         if blocks is None:
             blocks = [BasicConv2d, Inception, InceptionAux]
         if init_weights is None:
torchvision/models/inception.py
@@ -7,6 +7,7 @@ import torch.nn.functional as F
 from torch import nn, Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["Inception3", "inception_v3", "InceptionOutputs", "_InceptionOutputs"]
@@ -73,6 +74,7 @@ class Inception3(nn.Module):
         dropout: float = 0.5,
     ) -> None:
         super(Inception3, self).__init__()
+        _log_api_usage_once(self)
         if inception_blocks is None:
             inception_blocks = [BasicConv2d, InceptionA, InceptionB, InceptionC, InceptionD, InceptionE, InceptionAux]
         if init_weights is None:
torchvision/models/mnasnet.py
@@ -6,6 +6,7 @@ import torch.nn as nn
 from torch import Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["MNASNet", "mnasnet0_5", "mnasnet0_75", "mnasnet1_0", "mnasnet1_3"]
@@ -97,6 +98,7 @@ class MNASNet(torch.nn.Module):
     def __init__(self, alpha: float, num_classes: int = 1000, dropout: float = 0.2) -> None:
         super(MNASNet, self).__init__()
+        _log_api_usage_once(self)
         assert alpha > 0.0
         self.alpha = alpha
         self.num_classes = num_classes
torchvision/models/mobilenetv2.py
@@ -7,6 +7,7 @@ from torch import nn
 
 from .._internally_replaced_utils import load_state_dict_from_url
 from ..ops.misc import ConvNormActivation
+from ..utils import _log_api_usage_once
 from ._utils import _make_divisible
@@ -110,6 +111,7 @@ class MobileNetV2(nn.Module):
         """
         super(MobileNetV2, self).__init__()
+        _log_api_usage_once(self)
 
         if block is None:
             block = InvertedResidual
torchvision/models/mobilenetv3.py
@@ -7,6 +7,7 @@ from torch import nn, Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
 from ..ops.misc import ConvNormActivation, SqueezeExcitation as SElayer
+from ..utils import _log_api_usage_once
 from ._utils import _make_divisible
@@ -150,6 +151,7 @@ class MobileNetV3(nn.Module):
             dropout (float): The dropout probability
         """
         super().__init__()
+        _log_api_usage_once(self)
 
         if not inverted_residual_setting:
             raise ValueError("The inverted_residual_setting should not be empty")
torchvision/models/regnet.py
@@ -13,6 +13,7 @@ from torch import nn, Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
 from ..ops.misc import ConvNormActivation, SqueezeExcitation
+from ..utils import _log_api_usage_once
 from ._utils import _make_divisible
@@ -309,6 +310,7 @@ class RegNet(nn.Module):
         activation: Optional[Callable[..., nn.Module]] = None,
     ) -> None:
         super().__init__()
+        _log_api_usage_once(self)
 
         if stem_type is None:
             stem_type = SimpleStemIN
torchvision/models/resnet.py
@@ -5,6 +5,7 @@ import torch.nn as nn
 from torch import Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = [
@@ -173,6 +174,7 @@ class ResNet(nn.Module):
         norm_layer: Optional[Callable[..., nn.Module]] = None,
     ) -> None:
         super(ResNet, self).__init__()
+        _log_api_usage_once(self)
         if norm_layer is None:
             norm_layer = nn.BatchNorm2d
         self._norm_layer = norm_layer
torchvision/models/shufflenetv2.py
@@ -5,6 +5,7 @@ import torch.nn as nn
 from torch import Tensor
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["ShuffleNetV2", "shufflenet_v2_x0_5", "shufflenet_v2_x1_0", "shufflenet_v2_x1_5", "shufflenet_v2_x2_0"]
@@ -99,6 +100,7 @@ class ShuffleNetV2(nn.Module):
         inverted_residual: Callable[..., nn.Module] = InvertedResidual,
     ) -> None:
         super(ShuffleNetV2, self).__init__()
+        _log_api_usage_once(self)
         if len(stages_repeats) != 3:
             raise ValueError("expected stages_repeats as list of 3 positive ints")
torchvision/models/squeezenet.py
@@ -5,6 +5,7 @@ import torch.nn as nn
 import torch.nn.init as init
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = ["SqueezeNet", "squeezenet1_0", "squeezenet1_1"]
@@ -35,6 +36,7 @@ class Fire(nn.Module):
 class SqueezeNet(nn.Module):
     def __init__(self, version: str = "1_0", num_classes: int = 1000, dropout: float = 0.5) -> None:
         super(SqueezeNet, self).__init__()
+        _log_api_usage_once(self)
         self.num_classes = num_classes
         if version == "1_0":
             self.features = nn.Sequential(
torchvision/models/vgg.py
@@ -4,6 +4,7 @@ import torch
 import torch.nn as nn
 
 from .._internally_replaced_utils import load_state_dict_from_url
+from ..utils import _log_api_usage_once
 
 
 __all__ = [
@@ -36,6 +37,7 @@ class VGG(nn.Module):
     def __init__(
         self, features: nn.Module, num_classes: int = 1000, init_weights: bool = True, dropout: float = 0.5
     ) -> None:
         super(VGG, self).__init__()
+        _log_api_usage_once(self)
         self.features = features
         self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
         self.classifier = nn.Sequential(
torchvision/utils.py
@@ -303,3 +303,7 @@ def draw_segmentation_masks(
 def _generate_color_palette(num_masks: int):
     palette = torch.tensor([2 ** 25 - 1, 2 ** 15 - 1, 2 ** 21 - 1])
     return [tuple((i * palette) % 255) for i in range(num_masks)]
+
+
+def _log_api_usage_once(obj: object) -> None:
+    torch._C._log_api_usage_once(f"{obj.__module__}.{obj.__class__.__name__}")