Unverified commit d716c426, authored by Kai Zhang, committed by GitHub

revamp log api usage method (#5072)

* revamp log api usage method
parent e0c5cc41
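
The commit message is terse: every touched constructor swaps between two calling conventions for _log_api_usage_once, an explicit ("models", self.__class__.__name__) pair versus the instance itself. The helper's implementation is outside this excerpt, so the two sketches below are assumptions for illustration only (the names _log_api_usage_once_from_instance and _log_api_usage_once_from_strings are made up here); only the call sites in the hunks come from the commit.

from types import FunctionType
from typing import Any

import torch


def _log_api_usage_once_from_instance(obj: Any) -> None:
    # Assumed behaviour of the instance-based convention: derive the module path
    # and class (or function) name from the object, so call sites reduce to
    # _log_api_usage_once(self).
    name = obj.__name__ if isinstance(obj, FunctionType) else obj.__class__.__name__
    torch._C._log_api_usage_once(f"{obj.__module__}.{name}")


def _log_api_usage_once_from_strings(module: str, name: str) -> None:
    # Assumed behaviour of the string-based convention: callers spell out the
    # namespace ("models", "datasets", ...) and the class name explicitly, as in
    # _log_api_usage_once("models", self.__class__.__name__).
    torch._C._log_api_usage_once(f"torchvision.{module}.{name}")

In either convention the call is effectively a no-op for end users: torch._C._log_api_usage_once only forwards the key if an API-usage logger has been registered on the PyTorch side, and by default none is.
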
@@ -35,7 +35,7 @@ class VisionDataset(data.Dataset):
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> None:
_log_api_usage_once(self)
_log_api_usage_once("datasets", self.__class__.__name__)
if isinstance(root, torch._six.string_classes):
root = os.path.expanduser(root)
self.root = root
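
As a concrete illustration of what each convention would record for a VisionDataset subclass, the snippet below prints both identifiers for MNIST (chosen only as an example; the format strings follow the assumed sketches above, not the commit itself):

from torchvision.datasets import MNIST

# Key under the string-based call sites, assuming f"torchvision.{module}.{name}":
print(f"torchvision.datasets.{MNIST.__name__}")   # torchvision.datasets.MNIST
# Key under the instance-based call sites, assuming f"{obj.__module__}.{name}":
print(f"{MNIST.__module__}.{MNIST.__name__}")     # torchvision.datasets.mnist.MNIST
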
@@ -18,7 +18,7 @@ model_urls = {
class AlexNet(nn.Module):
def __init__(self, num_classes: int = 1000, dropout: float = 0.5) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.features = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
nn.ReLU(inplace=True),
@@ -163,7 +163,7 @@ class DenseNet(nn.Module):
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
# First convolution
self.features = nn.Sequential(
@@ -27,7 +27,7 @@ class GeneralizedRCNN(nn.Module):
def __init__(self, backbone: nn.Module, rpn: nn.Module, roi_heads: nn.Module, transform: nn.Module) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.transform = transform
self.backbone = backbone
self.rpn = rpn
@@ -337,7 +337,7 @@ class RetinaNet(nn.Module):
topk_candidates=1000,
):
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if not hasattr(backbone, "out_channels"):
raise ValueError(
@@ -182,7 +182,7 @@ class SSD(nn.Module):
positive_fraction: float = 0.25,
):
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.backbone = backbone
@@ -120,7 +120,7 @@ class SSDLiteFeatureExtractorMobileNet(nn.Module):
min_depth: int = 16,
):
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
assert not backbone[c4_pos].use_res_connect
self.features = nn.Sequential(
@@ -170,7 +170,7 @@ class EfficientNet(nn.Module):
norm_layer (Optional[Callable[..., nn.Module]]): Module specifying the normalization layer to use
"""
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if not inverted_residual_setting:
raise ValueError("The inverted_residual_setting should not be empty")
@@ -39,7 +39,7 @@ class GoogLeNet(nn.Module):
dropout_aux: float = 0.7,
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if blocks is None:
blocks = [BasicConv2d, Inception, InceptionAux]
if init_weights is None:
@@ -37,7 +37,7 @@ class Inception3(nn.Module):
dropout: float = 0.5,
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if inception_blocks is None:
inception_blocks = [BasicConv2d, InceptionA, InceptionB, InceptionC, InceptionD, InceptionE, InceptionAux]
if init_weights is None:
@@ -98,7 +98,7 @@ class MNASNet(torch.nn.Module):
def __init__(self, alpha: float, num_classes: int = 1000, dropout: float = 0.2) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
assert alpha > 0.0
self.alpha = alpha
self.num_classes = num_classes
@@ -111,7 +111,7 @@ class MobileNetV2(nn.Module):
"""
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if block is None:
block = InvertedResidual
@@ -151,7 +151,7 @@ class MobileNetV3(nn.Module):
dropout (float): The droupout probability
"""
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if not inverted_residual_setting:
raise ValueError("The inverted_residual_setting should not be empty")
@@ -440,7 +440,7 @@ class RAFT(nn.Module):
If ``None`` (default), the flow is upsampled using interpolation.
"""
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.feature_encoder = feature_encoder
self.context_encoder = context_encoder
@@ -310,7 +310,7 @@ class RegNet(nn.Module):
activation: Optional[Callable[..., nn.Module]] = None,
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if stem_type is None:
stem_type = SimpleStemIN
@@ -174,7 +174,7 @@ class ResNet(nn.Module):
norm_layer: Optional[Callable[..., nn.Module]] = None,
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
@@ -13,7 +13,7 @@ class _SimpleSegmentationModel(nn.Module):
def __init__(self, backbone: nn.Module, classifier: nn.Module, aux_classifier: Optional[nn.Module] = None) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.backbone = backbone
self.classifier = classifier
self.aux_classifier = aux_classifier
@@ -38,7 +38,7 @@ class LRASPP(nn.Module):
self, backbone: nn.Module, low_channels: int, high_channels: int, num_classes: int, inter_channels: int = 128
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.backbone = backbone
self.classifier = LRASPPHead(low_channels, high_channels, num_classes, inter_channels)
@@ -100,7 +100,7 @@ class ShuffleNetV2(nn.Module):
inverted_residual: Callable[..., nn.Module] = InvertedResidual,
) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
if len(stages_repeats) != 3:
raise ValueError("expected stages_repeats as list of 3 positive ints")
@@ -36,7 +36,7 @@ class Fire(nn.Module):
class SqueezeNet(nn.Module):
def __init__(self, version: str = "1_0", num_classes: int = 1000, dropout: float = 0.5) -> None:
super().__init__()
_log_api_usage_once(self)
_log_api_usage_once("models", self.__class__.__name__)
self.num_classes = num_classes
if version == "1_0":
self.features = nn.Sequential(
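
Since every constructor in the diff carries exactly one logging call, a small smoke test can pin that down. This is a hypothetical test, not part of the commit; the patch target assumes torchvision.models.alexnet imports _log_api_usage_once into its own namespace (that import is outside this excerpt):

from unittest import mock

import torchvision


def test_alexnet_logs_api_usage_once() -> None:
    # Building a model should hit the logging helper exactly once, from __init__.
    with mock.patch("torchvision.models.alexnet._log_api_usage_once") as spy:
        torchvision.models.AlexNet(num_classes=10)
    assert spy.call_count == 1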