Unverified Commit 8886a3cf authored by Yoshitomo Matsubara, committed by GitHub
Browse files

Rename prototype weight names to comply with PEP8 (#5257)



* renamed ImageNet weights

* renamed COCO weights

* renamed COCO with VOC labels weights

* renamed Kinetics 400 weights

* rename `default` to `DEFAULT`

* update test

* fix typos

* update test

* update test

* update test

* indent as w was weight_enum

* revert

* Adding back the capitalization test
Co-authored-by: Vasilis Vryniotis <datumbox@users.noreply.github.com>
parent c6722307
...@@ -24,7 +24,7 @@ __all__ = [ ...@@ -24,7 +24,7 @@ __all__ = [
class GoogLeNet_QuantizedWeights(WeightsEnum): class GoogLeNet_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/googlenet_fbgemm-c00238cf.pth", url="https://download.pytorch.org/models/quantized/googlenet_fbgemm-c00238cf.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -39,20 +39,20 @@ class GoogLeNet_QuantizedWeights(WeightsEnum): ...@@ -39,20 +39,20 @@ class GoogLeNet_QuantizedWeights(WeightsEnum):
"backend": "fbgemm", "backend": "fbgemm",
"quantization": "ptq", "quantization": "ptq",
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models", "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models",
"unquantized": GoogLeNet_Weights.ImageNet1K_V1, "unquantized": GoogLeNet_Weights.IMAGENET1K_V1,
"acc@1": 69.826, "acc@1": 69.826,
"acc@5": 89.404, "acc@5": 89.404,
}, },
) )
default = ImageNet1K_FBGEMM_V1 DEFAULT = IMAGENET1K_FBGEMM_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: GoogLeNet_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: GoogLeNet_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else GoogLeNet_Weights.ImageNet1K_V1, else GoogLeNet_Weights.IMAGENET1K_V1,
) )
) )
def googlenet( def googlenet(
......
...@@ -23,7 +23,7 @@ __all__ = [ ...@@ -23,7 +23,7 @@ __all__ = [
class Inception_V3_QuantizedWeights(WeightsEnum): class Inception_V3_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/inception_v3_google_fbgemm-71447a44.pth", url="https://download.pytorch.org/models/quantized/inception_v3_google_fbgemm-71447a44.pth",
transforms=partial(ImageNetEval, crop_size=299, resize_size=342), transforms=partial(ImageNetEval, crop_size=299, resize_size=342),
meta={ meta={
...@@ -38,20 +38,20 @@ class Inception_V3_QuantizedWeights(WeightsEnum): ...@@ -38,20 +38,20 @@ class Inception_V3_QuantizedWeights(WeightsEnum):
"backend": "fbgemm", "backend": "fbgemm",
"quantization": "ptq", "quantization": "ptq",
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models", "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models",
"unquantized": Inception_V3_Weights.ImageNet1K_V1, "unquantized": Inception_V3_Weights.IMAGENET1K_V1,
"acc@1": 77.176, "acc@1": 77.176,
"acc@5": 93.354, "acc@5": 93.354,
}, },
) )
default = ImageNet1K_FBGEMM_V1 DEFAULT = IMAGENET1K_FBGEMM_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: Inception_V3_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: Inception_V3_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else Inception_V3_Weights.ImageNet1K_V1, else Inception_V3_Weights.IMAGENET1K_V1,
) )
) )
def inception_v3( def inception_v3(
......
...@@ -24,7 +24,7 @@ __all__ = [ ...@@ -24,7 +24,7 @@ __all__ = [
class MobileNet_V2_QuantizedWeights(WeightsEnum): class MobileNet_V2_QuantizedWeights(WeightsEnum):
ImageNet1K_QNNPACK_V1 = Weights( IMAGENET1K_QNNPACK_V1 = Weights(
url="https://download.pytorch.org/models/quantized/mobilenet_v2_qnnpack_37f702c5.pth", url="https://download.pytorch.org/models/quantized/mobilenet_v2_qnnpack_37f702c5.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -39,20 +39,20 @@ class MobileNet_V2_QuantizedWeights(WeightsEnum): ...@@ -39,20 +39,20 @@ class MobileNet_V2_QuantizedWeights(WeightsEnum):
"backend": "qnnpack", "backend": "qnnpack",
"quantization": "qat", "quantization": "qat",
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv2", "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv2",
"unquantized": MobileNet_V2_Weights.ImageNet1K_V1, "unquantized": MobileNet_V2_Weights.IMAGENET1K_V1,
"acc@1": 71.658, "acc@1": 71.658,
"acc@5": 90.150, "acc@5": 90.150,
}, },
) )
default = ImageNet1K_QNNPACK_V1 DEFAULT = IMAGENET1K_QNNPACK_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: MobileNet_V2_QuantizedWeights.ImageNet1K_QNNPACK_V1 lambda kwargs: MobileNet_V2_QuantizedWeights.IMAGENET1K_QNNPACK_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else MobileNet_V2_Weights.ImageNet1K_V1, else MobileNet_V2_Weights.IMAGENET1K_V1,
) )
) )
def mobilenet_v2( def mobilenet_v2(
......
...@@ -57,7 +57,7 @@ def _mobilenet_v3_model( ...@@ -57,7 +57,7 @@ def _mobilenet_v3_model(
class MobileNet_V3_Large_QuantizedWeights(WeightsEnum): class MobileNet_V3_Large_QuantizedWeights(WeightsEnum):
ImageNet1K_QNNPACK_V1 = Weights( IMAGENET1K_QNNPACK_V1 = Weights(
url="https://download.pytorch.org/models/quantized/mobilenet_v3_large_qnnpack-5bcacf28.pth", url="https://download.pytorch.org/models/quantized/mobilenet_v3_large_qnnpack-5bcacf28.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -72,20 +72,20 @@ class MobileNet_V3_Large_QuantizedWeights(WeightsEnum): ...@@ -72,20 +72,20 @@ class MobileNet_V3_Large_QuantizedWeights(WeightsEnum):
"backend": "qnnpack", "backend": "qnnpack",
"quantization": "qat", "quantization": "qat",
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv3", "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv3",
"unquantized": MobileNet_V3_Large_Weights.ImageNet1K_V1, "unquantized": MobileNet_V3_Large_Weights.IMAGENET1K_V1,
"acc@1": 73.004, "acc@1": 73.004,
"acc@5": 90.858, "acc@5": 90.858,
}, },
) )
default = ImageNet1K_QNNPACK_V1 DEFAULT = IMAGENET1K_QNNPACK_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: MobileNet_V3_Large_QuantizedWeights.ImageNet1K_QNNPACK_V1 lambda kwargs: MobileNet_V3_Large_QuantizedWeights.IMAGENET1K_QNNPACK_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else MobileNet_V3_Large_Weights.ImageNet1K_V1, else MobileNet_V3_Large_Weights.IMAGENET1K_V1,
) )
) )
def mobilenet_v3_large( def mobilenet_v3_large(
......
...@@ -66,7 +66,7 @@ _COMMON_META = { ...@@ -66,7 +66,7 @@ _COMMON_META = {
class ResNet18_QuantizedWeights(WeightsEnum): class ResNet18_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/resnet18_fbgemm_16fa66dd.pth", url="https://download.pytorch.org/models/quantized/resnet18_fbgemm_16fa66dd.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -74,16 +74,16 @@ class ResNet18_QuantizedWeights(WeightsEnum): ...@@ -74,16 +74,16 @@ class ResNet18_QuantizedWeights(WeightsEnum):
"architecture": "ResNet", "architecture": "ResNet",
"publication_year": 2015, "publication_year": 2015,
"num_params": 11689512, "num_params": 11689512,
"unquantized": ResNet18_Weights.ImageNet1K_V1, "unquantized": ResNet18_Weights.IMAGENET1K_V1,
"acc@1": 69.494, "acc@1": 69.494,
"acc@5": 88.882, "acc@5": 88.882,
}, },
) )
default = ImageNet1K_FBGEMM_V1 DEFAULT = IMAGENET1K_FBGEMM_V1
class ResNet50_QuantizedWeights(WeightsEnum): class ResNet50_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/resnet50_fbgemm_bf931d71.pth", url="https://download.pytorch.org/models/quantized/resnet50_fbgemm_bf931d71.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -91,12 +91,12 @@ class ResNet50_QuantizedWeights(WeightsEnum): ...@@ -91,12 +91,12 @@ class ResNet50_QuantizedWeights(WeightsEnum):
"architecture": "ResNet", "architecture": "ResNet",
"publication_year": 2015, "publication_year": 2015,
"num_params": 25557032, "num_params": 25557032,
"unquantized": ResNet50_Weights.ImageNet1K_V1, "unquantized": ResNet50_Weights.IMAGENET1K_V1,
"acc@1": 75.920, "acc@1": 75.920,
"acc@5": 92.814, "acc@5": 92.814,
}, },
) )
ImageNet1K_FBGEMM_V2 = Weights( IMAGENET1K_FBGEMM_V2 = Weights(
url="https://download.pytorch.org/models/quantized/resnet50_fbgemm-23753f79.pth", url="https://download.pytorch.org/models/quantized/resnet50_fbgemm-23753f79.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -104,16 +104,16 @@ class ResNet50_QuantizedWeights(WeightsEnum): ...@@ -104,16 +104,16 @@ class ResNet50_QuantizedWeights(WeightsEnum):
"architecture": "ResNet", "architecture": "ResNet",
"publication_year": 2015, "publication_year": 2015,
"num_params": 25557032, "num_params": 25557032,
"unquantized": ResNet50_Weights.ImageNet1K_V2, "unquantized": ResNet50_Weights.IMAGENET1K_V2,
"acc@1": 80.282, "acc@1": 80.282,
"acc@5": 94.976, "acc@5": 94.976,
}, },
) )
default = ImageNet1K_FBGEMM_V2 DEFAULT = IMAGENET1K_FBGEMM_V2
class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum): class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/resnext101_32x8_fbgemm_09835ccf.pth", url="https://download.pytorch.org/models/quantized/resnext101_32x8_fbgemm_09835ccf.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -121,12 +121,12 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum): ...@@ -121,12 +121,12 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum):
"architecture": "ResNeXt", "architecture": "ResNeXt",
"publication_year": 2016, "publication_year": 2016,
"num_params": 88791336, "num_params": 88791336,
"unquantized": ResNeXt101_32X8D_Weights.ImageNet1K_V1, "unquantized": ResNeXt101_32X8D_Weights.IMAGENET1K_V1,
"acc@1": 78.986, "acc@1": 78.986,
"acc@5": 94.480, "acc@5": 94.480,
}, },
) )
ImageNet1K_FBGEMM_V2 = Weights( IMAGENET1K_FBGEMM_V2 = Weights(
url="https://download.pytorch.org/models/quantized/resnext101_32x8_fbgemm-ee16d00c.pth", url="https://download.pytorch.org/models/quantized/resnext101_32x8_fbgemm-ee16d00c.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -134,20 +134,20 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum): ...@@ -134,20 +134,20 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum):
"architecture": "ResNeXt", "architecture": "ResNeXt",
"publication_year": 2016, "publication_year": 2016,
"num_params": 88791336, "num_params": 88791336,
"unquantized": ResNeXt101_32X8D_Weights.ImageNet1K_V2, "unquantized": ResNeXt101_32X8D_Weights.IMAGENET1K_V2,
"acc@1": 82.574, "acc@1": 82.574,
"acc@5": 96.132, "acc@5": 96.132,
}, },
) )
default = ImageNet1K_FBGEMM_V2 DEFAULT = IMAGENET1K_FBGEMM_V2
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: ResNet18_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: ResNet18_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else ResNet18_Weights.ImageNet1K_V1, else ResNet18_Weights.IMAGENET1K_V1,
) )
) )
def resnet18( def resnet18(
...@@ -165,9 +165,9 @@ def resnet18( ...@@ -165,9 +165,9 @@ def resnet18(
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: ResNet50_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: ResNet50_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else ResNet50_Weights.ImageNet1K_V1, else ResNet50_Weights.IMAGENET1K_V1,
) )
) )
def resnet50( def resnet50(
...@@ -185,9 +185,9 @@ def resnet50( ...@@ -185,9 +185,9 @@ def resnet50(
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: ResNeXt101_32X8D_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: ResNeXt101_32X8D_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else ResNeXt101_32X8D_Weights.ImageNet1K_V1, else ResNeXt101_32X8D_Weights.IMAGENET1K_V1,
) )
) )
def resnext101_32x8d( def resnext101_32x8d(
......
...@@ -65,41 +65,41 @@ _COMMON_META = { ...@@ -65,41 +65,41 @@ _COMMON_META = {
class ShuffleNet_V2_X0_5_QuantizedWeights(WeightsEnum): class ShuffleNet_V2_X0_5_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/shufflenetv2_x0.5_fbgemm-00845098.pth", url="https://download.pytorch.org/models/quantized/shufflenetv2_x0.5_fbgemm-00845098.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
**_COMMON_META, **_COMMON_META,
"num_params": 1366792, "num_params": 1366792,
"unquantized": ShuffleNet_V2_X0_5_Weights.ImageNet1K_V1, "unquantized": ShuffleNet_V2_X0_5_Weights.IMAGENET1K_V1,
"acc@1": 57.972, "acc@1": 57.972,
"acc@5": 79.780, "acc@5": 79.780,
}, },
) )
default = ImageNet1K_FBGEMM_V1 DEFAULT = IMAGENET1K_FBGEMM_V1
class ShuffleNet_V2_X1_0_QuantizedWeights(WeightsEnum): class ShuffleNet_V2_X1_0_QuantizedWeights(WeightsEnum):
ImageNet1K_FBGEMM_V1 = Weights( IMAGENET1K_FBGEMM_V1 = Weights(
url="https://download.pytorch.org/models/quantized/shufflenetv2_x1_fbgemm-db332c57.pth", url="https://download.pytorch.org/models/quantized/shufflenetv2_x1_fbgemm-db332c57.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
**_COMMON_META, **_COMMON_META,
"num_params": 2278604, "num_params": 2278604,
"unquantized": ShuffleNet_V2_X1_0_Weights.ImageNet1K_V1, "unquantized": ShuffleNet_V2_X1_0_Weights.IMAGENET1K_V1,
"acc@1": 68.360, "acc@1": 68.360,
"acc@5": 87.582, "acc@5": 87.582,
}, },
) )
default = ImageNet1K_FBGEMM_V1 DEFAULT = IMAGENET1K_FBGEMM_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: ShuffleNet_V2_X0_5_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: ShuffleNet_V2_X0_5_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else ShuffleNet_V2_X0_5_Weights.ImageNet1K_V1, else ShuffleNet_V2_X0_5_Weights.IMAGENET1K_V1,
) )
) )
def shufflenet_v2_x0_5( def shufflenet_v2_x0_5(
...@@ -118,9 +118,9 @@ def shufflenet_v2_x0_5( ...@@ -118,9 +118,9 @@ def shufflenet_v2_x0_5(
@handle_legacy_interface( @handle_legacy_interface(
weights=( weights=(
"pretrained", "pretrained",
lambda kwargs: ShuffleNet_V2_X1_0_QuantizedWeights.ImageNet1K_FBGEMM_V1 lambda kwargs: ShuffleNet_V2_X1_0_QuantizedWeights.IMAGENET1K_FBGEMM_V1
if kwargs.get("quantize", False) if kwargs.get("quantize", False)
else ShuffleNet_V2_X1_0_Weights.ImageNet1K_V1, else ShuffleNet_V2_X1_0_Weights.IMAGENET1K_V1,
) )
) )
def shufflenet_v2_x1_0( def shufflenet_v2_x1_0(
......
...@@ -75,7 +75,7 @@ def _regnet( ...@@ -75,7 +75,7 @@ def _regnet(
class RegNet_Y_400MF_Weights(WeightsEnum): class RegNet_Y_400MF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_400mf-c65dace8.pth", url="https://download.pytorch.org/models/regnet_y_400mf-c65dace8.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -86,7 +86,7 @@ class RegNet_Y_400MF_Weights(WeightsEnum): ...@@ -86,7 +86,7 @@ class RegNet_Y_400MF_Weights(WeightsEnum):
"acc@5": 91.716, "acc@5": 91.716,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_400mf-e6988f5f.pth", url="https://download.pytorch.org/models/regnet_y_400mf-e6988f5f.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -97,11 +97,11 @@ class RegNet_Y_400MF_Weights(WeightsEnum): ...@@ -97,11 +97,11 @@ class RegNet_Y_400MF_Weights(WeightsEnum):
"acc@5": 92.742, "acc@5": 92.742,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_800MF_Weights(WeightsEnum): class RegNet_Y_800MF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_800mf-1b27b58c.pth", url="https://download.pytorch.org/models/regnet_y_800mf-1b27b58c.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -112,7 +112,7 @@ class RegNet_Y_800MF_Weights(WeightsEnum): ...@@ -112,7 +112,7 @@ class RegNet_Y_800MF_Weights(WeightsEnum):
"acc@5": 93.136, "acc@5": 93.136,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_800mf-58fc7688.pth", url="https://download.pytorch.org/models/regnet_y_800mf-58fc7688.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -123,11 +123,11 @@ class RegNet_Y_800MF_Weights(WeightsEnum): ...@@ -123,11 +123,11 @@ class RegNet_Y_800MF_Weights(WeightsEnum):
"acc@5": 94.502, "acc@5": 94.502,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_1_6GF_Weights(WeightsEnum): class RegNet_Y_1_6GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_1_6gf-b11a554e.pth", url="https://download.pytorch.org/models/regnet_y_1_6gf-b11a554e.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -138,7 +138,7 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum): ...@@ -138,7 +138,7 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum):
"acc@5": 93.966, "acc@5": 93.966,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_1_6gf-0d7bc02a.pth", url="https://download.pytorch.org/models/regnet_y_1_6gf-0d7bc02a.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -149,11 +149,11 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum): ...@@ -149,11 +149,11 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum):
"acc@5": 95.444, "acc@5": 95.444,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_3_2GF_Weights(WeightsEnum): class RegNet_Y_3_2GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_3_2gf-b5a9779c.pth", url="https://download.pytorch.org/models/regnet_y_3_2gf-b5a9779c.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -164,7 +164,7 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum): ...@@ -164,7 +164,7 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum):
"acc@5": 94.576, "acc@5": 94.576,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_3_2gf-9180c971.pth", url="https://download.pytorch.org/models/regnet_y_3_2gf-9180c971.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -175,11 +175,11 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum): ...@@ -175,11 +175,11 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum):
"acc@5": 95.972, "acc@5": 95.972,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_8GF_Weights(WeightsEnum): class RegNet_Y_8GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_8gf-d0d0e4a8.pth", url="https://download.pytorch.org/models/regnet_y_8gf-d0d0e4a8.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -190,7 +190,7 @@ class RegNet_Y_8GF_Weights(WeightsEnum): ...@@ -190,7 +190,7 @@ class RegNet_Y_8GF_Weights(WeightsEnum):
"acc@5": 95.048, "acc@5": 95.048,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_8gf-dc2b1b54.pth", url="https://download.pytorch.org/models/regnet_y_8gf-dc2b1b54.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -201,11 +201,11 @@ class RegNet_Y_8GF_Weights(WeightsEnum): ...@@ -201,11 +201,11 @@ class RegNet_Y_8GF_Weights(WeightsEnum):
"acc@5": 96.330, "acc@5": 96.330,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_16GF_Weights(WeightsEnum): class RegNet_Y_16GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_16gf-9e6ed7dd.pth", url="https://download.pytorch.org/models/regnet_y_16gf-9e6ed7dd.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -216,7 +216,7 @@ class RegNet_Y_16GF_Weights(WeightsEnum): ...@@ -216,7 +216,7 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
"acc@5": 95.240, "acc@5": 95.240,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_16gf-3e4a00f9.pth", url="https://download.pytorch.org/models/regnet_y_16gf-3e4a00f9.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -227,11 +227,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum): ...@@ -227,11 +227,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
"acc@5": 96.328, "acc@5": 96.328,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_32GF_Weights(WeightsEnum): class RegNet_Y_32GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_y_32gf-4dee3f7a.pth", url="https://download.pytorch.org/models/regnet_y_32gf-4dee3f7a.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -242,7 +242,7 @@ class RegNet_Y_32GF_Weights(WeightsEnum): ...@@ -242,7 +242,7 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
"acc@5": 95.340, "acc@5": 95.340,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_y_32gf-8db6d4b5.pth", url="https://download.pytorch.org/models/regnet_y_32gf-8db6d4b5.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -253,7 +253,7 @@ class RegNet_Y_32GF_Weights(WeightsEnum): ...@@ -253,7 +253,7 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
"acc@5": 96.498, "acc@5": 96.498,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_Y_128GF_Weights(WeightsEnum): class RegNet_Y_128GF_Weights(WeightsEnum):
...@@ -262,7 +262,7 @@ class RegNet_Y_128GF_Weights(WeightsEnum): ...@@ -262,7 +262,7 @@ class RegNet_Y_128GF_Weights(WeightsEnum):
class RegNet_X_400MF_Weights(WeightsEnum): class RegNet_X_400MF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_400mf-adf1edd5.pth", url="https://download.pytorch.org/models/regnet_x_400mf-adf1edd5.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -273,7 +273,7 @@ class RegNet_X_400MF_Weights(WeightsEnum): ...@@ -273,7 +273,7 @@ class RegNet_X_400MF_Weights(WeightsEnum):
"acc@5": 90.950, "acc@5": 90.950,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_400mf-62229a5f.pth", url="https://download.pytorch.org/models/regnet_x_400mf-62229a5f.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -284,11 +284,11 @@ class RegNet_X_400MF_Weights(WeightsEnum): ...@@ -284,11 +284,11 @@ class RegNet_X_400MF_Weights(WeightsEnum):
"acc@5": 92.322, "acc@5": 92.322,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_800MF_Weights(WeightsEnum): class RegNet_X_800MF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_800mf-ad17e45c.pth", url="https://download.pytorch.org/models/regnet_x_800mf-ad17e45c.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -299,7 +299,7 @@ class RegNet_X_800MF_Weights(WeightsEnum): ...@@ -299,7 +299,7 @@ class RegNet_X_800MF_Weights(WeightsEnum):
"acc@5": 92.348, "acc@5": 92.348,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_800mf-94a99ebd.pth", url="https://download.pytorch.org/models/regnet_x_800mf-94a99ebd.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -310,11 +310,11 @@ class RegNet_X_800MF_Weights(WeightsEnum): ...@@ -310,11 +310,11 @@ class RegNet_X_800MF_Weights(WeightsEnum):
"acc@5": 93.826, "acc@5": 93.826,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_1_6GF_Weights(WeightsEnum): class RegNet_X_1_6GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_1_6gf-e3633e7f.pth", url="https://download.pytorch.org/models/regnet_x_1_6gf-e3633e7f.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -325,7 +325,7 @@ class RegNet_X_1_6GF_Weights(WeightsEnum): ...@@ -325,7 +325,7 @@ class RegNet_X_1_6GF_Weights(WeightsEnum):
"acc@5": 93.440, "acc@5": 93.440,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_1_6gf-a12f2b72.pth", url="https://download.pytorch.org/models/regnet_x_1_6gf-a12f2b72.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -336,11 +336,11 @@ class RegNet_X_1_6GF_Weights(WeightsEnum): ...@@ -336,11 +336,11 @@ class RegNet_X_1_6GF_Weights(WeightsEnum):
"acc@5": 94.922, "acc@5": 94.922,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_3_2GF_Weights(WeightsEnum): class RegNet_X_3_2GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_3_2gf-f342aeae.pth", url="https://download.pytorch.org/models/regnet_x_3_2gf-f342aeae.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -351,7 +351,7 @@ class RegNet_X_3_2GF_Weights(WeightsEnum): ...@@ -351,7 +351,7 @@ class RegNet_X_3_2GF_Weights(WeightsEnum):
"acc@5": 93.992, "acc@5": 93.992,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_3_2gf-7071aa85.pth", url="https://download.pytorch.org/models/regnet_x_3_2gf-7071aa85.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -362,11 +362,11 @@ class RegNet_X_3_2GF_Weights(WeightsEnum): ...@@ -362,11 +362,11 @@ class RegNet_X_3_2GF_Weights(WeightsEnum):
"acc@5": 95.430, "acc@5": 95.430,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_8GF_Weights(WeightsEnum): class RegNet_X_8GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_8gf-03ceed89.pth", url="https://download.pytorch.org/models/regnet_x_8gf-03ceed89.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -377,7 +377,7 @@ class RegNet_X_8GF_Weights(WeightsEnum): ...@@ -377,7 +377,7 @@ class RegNet_X_8GF_Weights(WeightsEnum):
"acc@5": 94.686, "acc@5": 94.686,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_8gf-2b70d774.pth", url="https://download.pytorch.org/models/regnet_x_8gf-2b70d774.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -388,11 +388,11 @@ class RegNet_X_8GF_Weights(WeightsEnum): ...@@ -388,11 +388,11 @@ class RegNet_X_8GF_Weights(WeightsEnum):
"acc@5": 95.678, "acc@5": 95.678,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_16GF_Weights(WeightsEnum): class RegNet_X_16GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_16gf-2007eb11.pth", url="https://download.pytorch.org/models/regnet_x_16gf-2007eb11.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -403,7 +403,7 @@ class RegNet_X_16GF_Weights(WeightsEnum): ...@@ -403,7 +403,7 @@ class RegNet_X_16GF_Weights(WeightsEnum):
"acc@5": 94.944, "acc@5": 94.944,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_16gf-ba3796d7.pth", url="https://download.pytorch.org/models/regnet_x_16gf-ba3796d7.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -414,11 +414,11 @@ class RegNet_X_16GF_Weights(WeightsEnum): ...@@ -414,11 +414,11 @@ class RegNet_X_16GF_Weights(WeightsEnum):
"acc@5": 96.196, "acc@5": 96.196,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class RegNet_X_32GF_Weights(WeightsEnum): class RegNet_X_32GF_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/regnet_x_32gf-9d47f8d0.pth", url="https://download.pytorch.org/models/regnet_x_32gf-9d47f8d0.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -429,7 +429,7 @@ class RegNet_X_32GF_Weights(WeightsEnum): ...@@ -429,7 +429,7 @@ class RegNet_X_32GF_Weights(WeightsEnum):
"acc@5": 95.248, "acc@5": 95.248,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/regnet_x_32gf-6eb8fdc6.pth", url="https://download.pytorch.org/models/regnet_x_32gf-6eb8fdc6.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -440,10 +440,10 @@ class RegNet_X_32GF_Weights(WeightsEnum): ...@@ -440,10 +440,10 @@ class RegNet_X_32GF_Weights(WeightsEnum):
"acc@5": 96.288, "acc@5": 96.288,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
@handle_legacy_interface(weights=("pretrained", RegNet_Y_400MF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_400MF_Weights.IMAGENET1K_V1))
def regnet_y_400mf(*, weights: Optional[RegNet_Y_400MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_400mf(*, weights: Optional[RegNet_Y_400MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_400MF_Weights.verify(weights) weights = RegNet_Y_400MF_Weights.verify(weights)
...@@ -451,7 +451,7 @@ def regnet_y_400mf(*, weights: Optional[RegNet_Y_400MF_Weights] = None, progress ...@@ -451,7 +451,7 @@ def regnet_y_400mf(*, weights: Optional[RegNet_Y_400MF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_800MF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_800MF_Weights.IMAGENET1K_V1))
def regnet_y_800mf(*, weights: Optional[RegNet_Y_800MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_800mf(*, weights: Optional[RegNet_Y_800MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_800MF_Weights.verify(weights) weights = RegNet_Y_800MF_Weights.verify(weights)
...@@ -459,7 +459,7 @@ def regnet_y_800mf(*, weights: Optional[RegNet_Y_800MF_Weights] = None, progress ...@@ -459,7 +459,7 @@ def regnet_y_800mf(*, weights: Optional[RegNet_Y_800MF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_1_6GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_1_6GF_Weights.IMAGENET1K_V1))
def regnet_y_1_6gf(*, weights: Optional[RegNet_Y_1_6GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_1_6gf(*, weights: Optional[RegNet_Y_1_6GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_1_6GF_Weights.verify(weights) weights = RegNet_Y_1_6GF_Weights.verify(weights)
...@@ -469,7 +469,7 @@ def regnet_y_1_6gf(*, weights: Optional[RegNet_Y_1_6GF_Weights] = None, progress ...@@ -469,7 +469,7 @@ def regnet_y_1_6gf(*, weights: Optional[RegNet_Y_1_6GF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_3_2GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_3_2GF_Weights.IMAGENET1K_V1))
def regnet_y_3_2gf(*, weights: Optional[RegNet_Y_3_2GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_3_2gf(*, weights: Optional[RegNet_Y_3_2GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_3_2GF_Weights.verify(weights) weights = RegNet_Y_3_2GF_Weights.verify(weights)
...@@ -479,7 +479,7 @@ def regnet_y_3_2gf(*, weights: Optional[RegNet_Y_3_2GF_Weights] = None, progress ...@@ -479,7 +479,7 @@ def regnet_y_3_2gf(*, weights: Optional[RegNet_Y_3_2GF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_8GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_8GF_Weights.IMAGENET1K_V1))
def regnet_y_8gf(*, weights: Optional[RegNet_Y_8GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_8gf(*, weights: Optional[RegNet_Y_8GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_8GF_Weights.verify(weights) weights = RegNet_Y_8GF_Weights.verify(weights)
...@@ -489,7 +489,7 @@ def regnet_y_8gf(*, weights: Optional[RegNet_Y_8GF_Weights] = None, progress: bo ...@@ -489,7 +489,7 @@ def regnet_y_8gf(*, weights: Optional[RegNet_Y_8GF_Weights] = None, progress: bo
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_16GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_16GF_Weights.IMAGENET1K_V1))
def regnet_y_16gf(*, weights: Optional[RegNet_Y_16GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_16gf(*, weights: Optional[RegNet_Y_16GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_16GF_Weights.verify(weights) weights = RegNet_Y_16GF_Weights.verify(weights)
...@@ -499,7 +499,7 @@ def regnet_y_16gf(*, weights: Optional[RegNet_Y_16GF_Weights] = None, progress: ...@@ -499,7 +499,7 @@ def regnet_y_16gf(*, weights: Optional[RegNet_Y_16GF_Weights] = None, progress:
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_Y_32GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_Y_32GF_Weights.IMAGENET1K_V1))
def regnet_y_32gf(*, weights: Optional[RegNet_Y_32GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_y_32gf(*, weights: Optional[RegNet_Y_32GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_Y_32GF_Weights.verify(weights) weights = RegNet_Y_32GF_Weights.verify(weights)
...@@ -519,7 +519,7 @@ def regnet_y_128gf(*, weights: Optional[RegNet_Y_128GF_Weights] = None, progress ...@@ -519,7 +519,7 @@ def regnet_y_128gf(*, weights: Optional[RegNet_Y_128GF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_400MF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_400MF_Weights.IMAGENET1K_V1))
def regnet_x_400mf(*, weights: Optional[RegNet_X_400MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_400mf(*, weights: Optional[RegNet_X_400MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_400MF_Weights.verify(weights) weights = RegNet_X_400MF_Weights.verify(weights)
...@@ -527,7 +527,7 @@ def regnet_x_400mf(*, weights: Optional[RegNet_X_400MF_Weights] = None, progress ...@@ -527,7 +527,7 @@ def regnet_x_400mf(*, weights: Optional[RegNet_X_400MF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_800MF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_800MF_Weights.IMAGENET1K_V1))
def regnet_x_800mf(*, weights: Optional[RegNet_X_800MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_800mf(*, weights: Optional[RegNet_X_800MF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_800MF_Weights.verify(weights) weights = RegNet_X_800MF_Weights.verify(weights)
...@@ -535,7 +535,7 @@ def regnet_x_800mf(*, weights: Optional[RegNet_X_800MF_Weights] = None, progress ...@@ -535,7 +535,7 @@ def regnet_x_800mf(*, weights: Optional[RegNet_X_800MF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_1_6GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_1_6GF_Weights.IMAGENET1K_V1))
def regnet_x_1_6gf(*, weights: Optional[RegNet_X_1_6GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_1_6gf(*, weights: Optional[RegNet_X_1_6GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_1_6GF_Weights.verify(weights) weights = RegNet_X_1_6GF_Weights.verify(weights)
...@@ -543,7 +543,7 @@ def regnet_x_1_6gf(*, weights: Optional[RegNet_X_1_6GF_Weights] = None, progress ...@@ -543,7 +543,7 @@ def regnet_x_1_6gf(*, weights: Optional[RegNet_X_1_6GF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_3_2GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_3_2GF_Weights.IMAGENET1K_V1))
def regnet_x_3_2gf(*, weights: Optional[RegNet_X_3_2GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_3_2gf(*, weights: Optional[RegNet_X_3_2GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_3_2GF_Weights.verify(weights) weights = RegNet_X_3_2GF_Weights.verify(weights)
...@@ -551,7 +551,7 @@ def regnet_x_3_2gf(*, weights: Optional[RegNet_X_3_2GF_Weights] = None, progress ...@@ -551,7 +551,7 @@ def regnet_x_3_2gf(*, weights: Optional[RegNet_X_3_2GF_Weights] = None, progress
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_8GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_8GF_Weights.IMAGENET1K_V1))
def regnet_x_8gf(*, weights: Optional[RegNet_X_8GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_8gf(*, weights: Optional[RegNet_X_8GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_8GF_Weights.verify(weights) weights = RegNet_X_8GF_Weights.verify(weights)
...@@ -559,7 +559,7 @@ def regnet_x_8gf(*, weights: Optional[RegNet_X_8GF_Weights] = None, progress: bo ...@@ -559,7 +559,7 @@ def regnet_x_8gf(*, weights: Optional[RegNet_X_8GF_Weights] = None, progress: bo
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_16GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_16GF_Weights.IMAGENET1K_V1))
def regnet_x_16gf(*, weights: Optional[RegNet_X_16GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_16gf(*, weights: Optional[RegNet_X_16GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_16GF_Weights.verify(weights) weights = RegNet_X_16GF_Weights.verify(weights)
...@@ -567,7 +567,7 @@ def regnet_x_16gf(*, weights: Optional[RegNet_X_16GF_Weights] = None, progress: ...@@ -567,7 +567,7 @@ def regnet_x_16gf(*, weights: Optional[RegNet_X_16GF_Weights] = None, progress:
return _regnet(params, weights, progress, **kwargs) return _regnet(params, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", RegNet_X_32GF_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", RegNet_X_32GF_Weights.IMAGENET1K_V1))
def regnet_x_32gf(*, weights: Optional[RegNet_X_32GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet: def regnet_x_32gf(*, weights: Optional[RegNet_X_32GF_Weights] = None, progress: bool = True, **kwargs: Any) -> RegNet:
weights = RegNet_X_32GF_Weights.verify(weights) weights = RegNet_X_32GF_Weights.verify(weights)
......
...@@ -61,7 +61,7 @@ _COMMON_META = { ...@@ -61,7 +61,7 @@ _COMMON_META = {
class ResNet18_Weights(WeightsEnum): class ResNet18_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnet18-f37072fd.pth", url="https://download.pytorch.org/models/resnet18-f37072fd.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -74,11 +74,11 @@ class ResNet18_Weights(WeightsEnum): ...@@ -74,11 +74,11 @@ class ResNet18_Weights(WeightsEnum):
"acc@5": 89.078, "acc@5": 89.078,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ResNet34_Weights(WeightsEnum): class ResNet34_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnet34-b627a593.pth", url="https://download.pytorch.org/models/resnet34-b627a593.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -91,11 +91,11 @@ class ResNet34_Weights(WeightsEnum): ...@@ -91,11 +91,11 @@ class ResNet34_Weights(WeightsEnum):
"acc@5": 91.420, "acc@5": 91.420,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ResNet50_Weights(WeightsEnum): class ResNet50_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnet50-0676ba61.pth", url="https://download.pytorch.org/models/resnet50-0676ba61.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -108,7 +108,7 @@ class ResNet50_Weights(WeightsEnum): ...@@ -108,7 +108,7 @@ class ResNet50_Weights(WeightsEnum):
"acc@5": 92.862, "acc@5": 92.862,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/resnet50-11ad3fa6.pth", url="https://download.pytorch.org/models/resnet50-11ad3fa6.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -121,11 +121,11 @@ class ResNet50_Weights(WeightsEnum): ...@@ -121,11 +121,11 @@ class ResNet50_Weights(WeightsEnum):
"acc@5": 95.434, "acc@5": 95.434,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class ResNet101_Weights(WeightsEnum): class ResNet101_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnet101-63fe2227.pth", url="https://download.pytorch.org/models/resnet101-63fe2227.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -138,7 +138,7 @@ class ResNet101_Weights(WeightsEnum): ...@@ -138,7 +138,7 @@ class ResNet101_Weights(WeightsEnum):
"acc@5": 93.546, "acc@5": 93.546,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/resnet101-cd907fc2.pth", url="https://download.pytorch.org/models/resnet101-cd907fc2.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -151,11 +151,11 @@ class ResNet101_Weights(WeightsEnum): ...@@ -151,11 +151,11 @@ class ResNet101_Weights(WeightsEnum):
"acc@5": 95.780, "acc@5": 95.780,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class ResNet152_Weights(WeightsEnum): class ResNet152_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnet152-394f9c45.pth", url="https://download.pytorch.org/models/resnet152-394f9c45.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -168,7 +168,7 @@ class ResNet152_Weights(WeightsEnum): ...@@ -168,7 +168,7 @@ class ResNet152_Weights(WeightsEnum):
"acc@5": 94.046, "acc@5": 94.046,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/resnet152-f82ba261.pth", url="https://download.pytorch.org/models/resnet152-f82ba261.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -181,11 +181,11 @@ class ResNet152_Weights(WeightsEnum): ...@@ -181,11 +181,11 @@ class ResNet152_Weights(WeightsEnum):
"acc@5": 96.002, "acc@5": 96.002,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class ResNeXt50_32X4D_Weights(WeightsEnum): class ResNeXt50_32X4D_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth", url="https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -198,7 +198,7 @@ class ResNeXt50_32X4D_Weights(WeightsEnum): ...@@ -198,7 +198,7 @@ class ResNeXt50_32X4D_Weights(WeightsEnum):
"acc@5": 93.698, "acc@5": 93.698,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/resnext50_32x4d-1a0047aa.pth", url="https://download.pytorch.org/models/resnext50_32x4d-1a0047aa.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -211,11 +211,11 @@ class ResNeXt50_32X4D_Weights(WeightsEnum): ...@@ -211,11 +211,11 @@ class ResNeXt50_32X4D_Weights(WeightsEnum):
"acc@5": 95.340, "acc@5": 95.340,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class ResNeXt101_32X8D_Weights(WeightsEnum): class ResNeXt101_32X8D_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth", url="https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -228,7 +228,7 @@ class ResNeXt101_32X8D_Weights(WeightsEnum): ...@@ -228,7 +228,7 @@ class ResNeXt101_32X8D_Weights(WeightsEnum):
"acc@5": 94.526, "acc@5": 94.526,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/resnext101_32x8d-110c445d.pth", url="https://download.pytorch.org/models/resnext101_32x8d-110c445d.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -241,11 +241,11 @@ class ResNeXt101_32X8D_Weights(WeightsEnum): ...@@ -241,11 +241,11 @@ class ResNeXt101_32X8D_Weights(WeightsEnum):
"acc@5": 96.228, "acc@5": 96.228,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class Wide_ResNet50_2_Weights(WeightsEnum): class Wide_ResNet50_2_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth", url="https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -258,7 +258,7 @@ class Wide_ResNet50_2_Weights(WeightsEnum): ...@@ -258,7 +258,7 @@ class Wide_ResNet50_2_Weights(WeightsEnum):
"acc@5": 94.086, "acc@5": 94.086,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/wide_resnet50_2-9ba9bcbe.pth", url="https://download.pytorch.org/models/wide_resnet50_2-9ba9bcbe.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -271,11 +271,11 @@ class Wide_ResNet50_2_Weights(WeightsEnum): ...@@ -271,11 +271,11 @@ class Wide_ResNet50_2_Weights(WeightsEnum):
"acc@5": 95.758, "acc@5": 95.758,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
class Wide_ResNet101_2_Weights(WeightsEnum): class Wide_ResNet101_2_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth", url="https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -288,7 +288,7 @@ class Wide_ResNet101_2_Weights(WeightsEnum): ...@@ -288,7 +288,7 @@ class Wide_ResNet101_2_Weights(WeightsEnum):
"acc@5": 94.284, "acc@5": 94.284,
}, },
) )
ImageNet1K_V2 = Weights( IMAGENET1K_V2 = Weights(
url="https://download.pytorch.org/models/wide_resnet101_2-d733dc28.pth", url="https://download.pytorch.org/models/wide_resnet101_2-d733dc28.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232), transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={ meta={
...@@ -301,45 +301,45 @@ class Wide_ResNet101_2_Weights(WeightsEnum): ...@@ -301,45 +301,45 @@ class Wide_ResNet101_2_Weights(WeightsEnum):
"acc@5": 96.020, "acc@5": 96.020,
}, },
) )
default = ImageNet1K_V2 DEFAULT = IMAGENET1K_V2
@handle_legacy_interface(weights=("pretrained", ResNet18_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNet18_Weights.IMAGENET1K_V1))
def resnet18(*, weights: Optional[ResNet18_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: def resnet18(*, weights: Optional[ResNet18_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet:
weights = ResNet18_Weights.verify(weights) weights = ResNet18_Weights.verify(weights)
return _resnet(BasicBlock, [2, 2, 2, 2], weights, progress, **kwargs) return _resnet(BasicBlock, [2, 2, 2, 2], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNet34_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNet34_Weights.IMAGENET1K_V1))
def resnet34(*, weights: Optional[ResNet34_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: def resnet34(*, weights: Optional[ResNet34_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet:
weights = ResNet34_Weights.verify(weights) weights = ResNet34_Weights.verify(weights)
return _resnet(BasicBlock, [3, 4, 6, 3], weights, progress, **kwargs) return _resnet(BasicBlock, [3, 4, 6, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNet50_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNet50_Weights.IMAGENET1K_V1))
def resnet50(*, weights: Optional[ResNet50_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: def resnet50(*, weights: Optional[ResNet50_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet:
weights = ResNet50_Weights.verify(weights) weights = ResNet50_Weights.verify(weights)
return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNet101_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNet101_Weights.IMAGENET1K_V1))
def resnet101(*, weights: Optional[ResNet101_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: def resnet101(*, weights: Optional[ResNet101_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet:
weights = ResNet101_Weights.verify(weights) weights = ResNet101_Weights.verify(weights)
return _resnet(Bottleneck, [3, 4, 23, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 4, 23, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNet152_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNet152_Weights.IMAGENET1K_V1))
def resnet152(*, weights: Optional[ResNet152_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: def resnet152(*, weights: Optional[ResNet152_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet:
weights = ResNet152_Weights.verify(weights) weights = ResNet152_Weights.verify(weights)
return _resnet(Bottleneck, [3, 8, 36, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 8, 36, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNeXt50_32X4D_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNeXt50_32X4D_Weights.IMAGENET1K_V1))
def resnext50_32x4d( def resnext50_32x4d(
*, weights: Optional[ResNeXt50_32X4D_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[ResNeXt50_32X4D_Weights] = None, progress: bool = True, **kwargs: Any
) -> ResNet: ) -> ResNet:
...@@ -350,7 +350,7 @@ def resnext50_32x4d( ...@@ -350,7 +350,7 @@ def resnext50_32x4d(
return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", ResNeXt101_32X8D_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ResNeXt101_32X8D_Weights.IMAGENET1K_V1))
def resnext101_32x8d( def resnext101_32x8d(
*, weights: Optional[ResNeXt101_32X8D_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[ResNeXt101_32X8D_Weights] = None, progress: bool = True, **kwargs: Any
) -> ResNet: ) -> ResNet:
...@@ -361,7 +361,7 @@ def resnext101_32x8d( ...@@ -361,7 +361,7 @@ def resnext101_32x8d(
return _resnet(Bottleneck, [3, 4, 23, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 4, 23, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", Wide_ResNet50_2_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", Wide_ResNet50_2_Weights.IMAGENET1K_V1))
def wide_resnet50_2( def wide_resnet50_2(
*, weights: Optional[Wide_ResNet50_2_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[Wide_ResNet50_2_Weights] = None, progress: bool = True, **kwargs: Any
) -> ResNet: ) -> ResNet:
...@@ -371,7 +371,7 @@ def wide_resnet50_2( ...@@ -371,7 +371,7 @@ def wide_resnet50_2(
return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs) return _resnet(Bottleneck, [3, 4, 6, 3], weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", Wide_ResNet101_2_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", Wide_ResNet101_2_Weights.IMAGENET1K_V1))
def wide_resnet101_2( def wide_resnet101_2(
*, weights: Optional[Wide_ResNet101_2_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[Wide_ResNet101_2_Weights] = None, progress: bool = True, **kwargs: Any
) -> ResNet: ) -> ResNet:
......
...@@ -34,7 +34,7 @@ _COMMON_META = { ...@@ -34,7 +34,7 @@ _COMMON_META = {
class DeepLabV3_ResNet50_Weights(WeightsEnum): class DeepLabV3_ResNet50_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/deeplabv3_resnet50_coco-cd0a2569.pth", url="https://download.pytorch.org/models/deeplabv3_resnet50_coco-cd0a2569.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -45,11 +45,11 @@ class DeepLabV3_ResNet50_Weights(WeightsEnum): ...@@ -45,11 +45,11 @@ class DeepLabV3_ResNet50_Weights(WeightsEnum):
"acc": 92.4, "acc": 92.4,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
class DeepLabV3_ResNet101_Weights(WeightsEnum): class DeepLabV3_ResNet101_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/deeplabv3_resnet101_coco-586e9e4e.pth", url="https://download.pytorch.org/models/deeplabv3_resnet101_coco-586e9e4e.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -60,11 +60,11 @@ class DeepLabV3_ResNet101_Weights(WeightsEnum): ...@@ -60,11 +60,11 @@ class DeepLabV3_ResNet101_Weights(WeightsEnum):
"acc": 92.4, "acc": 92.4,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
class DeepLabV3_MobileNet_V3_Large_Weights(WeightsEnum): class DeepLabV3_MobileNet_V3_Large_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/deeplabv3_mobilenet_v3_large-fc3c493d.pth", url="https://download.pytorch.org/models/deeplabv3_mobilenet_v3_large-fc3c493d.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -75,12 +75,12 @@ class DeepLabV3_MobileNet_V3_Large_Weights(WeightsEnum): ...@@ -75,12 +75,12 @@ class DeepLabV3_MobileNet_V3_Large_Weights(WeightsEnum):
"acc": 91.2, "acc": 91.2,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", DeepLabV3_ResNet50_Weights.CocoWithVocLabels_V1), weights=("pretrained", DeepLabV3_ResNet50_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", ResNet50_Weights.IMAGENET1K_V1),
) )
def deeplabv3_resnet50( def deeplabv3_resnet50(
*, *,
...@@ -111,8 +111,8 @@ def deeplabv3_resnet50( ...@@ -111,8 +111,8 @@ def deeplabv3_resnet50(
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", DeepLabV3_ResNet101_Weights.CocoWithVocLabels_V1), weights=("pretrained", DeepLabV3_ResNet101_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", ResNet101_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", ResNet101_Weights.IMAGENET1K_V1),
) )
def deeplabv3_resnet101( def deeplabv3_resnet101(
*, *,
...@@ -143,8 +143,8 @@ def deeplabv3_resnet101( ...@@ -143,8 +143,8 @@ def deeplabv3_resnet101(
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", DeepLabV3_MobileNet_V3_Large_Weights.CocoWithVocLabels_V1), weights=("pretrained", DeepLabV3_MobileNet_V3_Large_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.IMAGENET1K_V1),
) )
def deeplabv3_mobilenet_v3_large( def deeplabv3_mobilenet_v3_large(
*, *,
......
...@@ -24,7 +24,7 @@ _COMMON_META = { ...@@ -24,7 +24,7 @@ _COMMON_META = {
class FCN_ResNet50_Weights(WeightsEnum): class FCN_ResNet50_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/fcn_resnet50_coco-1167a1af.pth", url="https://download.pytorch.org/models/fcn_resnet50_coco-1167a1af.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -35,11 +35,11 @@ class FCN_ResNet50_Weights(WeightsEnum): ...@@ -35,11 +35,11 @@ class FCN_ResNet50_Weights(WeightsEnum):
"acc": 91.4, "acc": 91.4,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
class FCN_ResNet101_Weights(WeightsEnum): class FCN_ResNet101_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/fcn_resnet101_coco-7ecb50ca.pth", url="https://download.pytorch.org/models/fcn_resnet101_coco-7ecb50ca.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -50,12 +50,12 @@ class FCN_ResNet101_Weights(WeightsEnum): ...@@ -50,12 +50,12 @@ class FCN_ResNet101_Weights(WeightsEnum):
"acc": 91.9, "acc": 91.9,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", FCN_ResNet50_Weights.CocoWithVocLabels_V1), weights=("pretrained", FCN_ResNet50_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", ResNet50_Weights.IMAGENET1K_V1),
) )
def fcn_resnet50( def fcn_resnet50(
*, *,
...@@ -86,8 +86,8 @@ def fcn_resnet50( ...@@ -86,8 +86,8 @@ def fcn_resnet50(
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", FCN_ResNet101_Weights.CocoWithVocLabels_V1), weights=("pretrained", FCN_ResNet101_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", ResNet101_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", ResNet101_Weights.IMAGENET1K_V1),
) )
def fcn_resnet101( def fcn_resnet101(
*, *,
......
...@@ -15,7 +15,7 @@ __all__ = ["LRASPP", "LRASPP_MobileNet_V3_Large_Weights", "lraspp_mobilenet_v3_l ...@@ -15,7 +15,7 @@ __all__ = ["LRASPP", "LRASPP_MobileNet_V3_Large_Weights", "lraspp_mobilenet_v3_l
class LRASPP_MobileNet_V3_Large_Weights(WeightsEnum): class LRASPP_MobileNet_V3_Large_Weights(WeightsEnum):
CocoWithVocLabels_V1 = Weights( COCO_WITH_VOC_LABELS_V1 = Weights(
url="https://download.pytorch.org/models/lraspp_mobilenet_v3_large-d234d4ea.pth", url="https://download.pytorch.org/models/lraspp_mobilenet_v3_large-d234d4ea.pth",
transforms=partial(VocEval, resize_size=520), transforms=partial(VocEval, resize_size=520),
meta={ meta={
...@@ -30,12 +30,12 @@ class LRASPP_MobileNet_V3_Large_Weights(WeightsEnum): ...@@ -30,12 +30,12 @@ class LRASPP_MobileNet_V3_Large_Weights(WeightsEnum):
"acc": 91.2, "acc": 91.2,
}, },
) )
default = CocoWithVocLabels_V1 DEFAULT = COCO_WITH_VOC_LABELS_V1
@handle_legacy_interface( @handle_legacy_interface(
weights=("pretrained", LRASPP_MobileNet_V3_Large_Weights.CocoWithVocLabels_V1), weights=("pretrained", LRASPP_MobileNet_V3_Large_Weights.COCO_WITH_VOC_LABELS_V1),
weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.ImageNet1K_V1), weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.IMAGENET1K_V1),
) )
def lraspp_mobilenet_v3_large( def lraspp_mobilenet_v3_large(
*, *,
......
...@@ -53,7 +53,7 @@ _COMMON_META = { ...@@ -53,7 +53,7 @@ _COMMON_META = {
class ShuffleNet_V2_X0_5_Weights(WeightsEnum): class ShuffleNet_V2_X0_5_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/shufflenetv2_x0.5-f707e7126e.pth", url="https://download.pytorch.org/models/shufflenetv2_x0.5-f707e7126e.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -63,11 +63,11 @@ class ShuffleNet_V2_X0_5_Weights(WeightsEnum): ...@@ -63,11 +63,11 @@ class ShuffleNet_V2_X0_5_Weights(WeightsEnum):
"acc@5": 88.316, "acc@5": 88.316,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ShuffleNet_V2_X1_0_Weights(WeightsEnum): class ShuffleNet_V2_X1_0_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/shufflenetv2_x1-5666bf0f80.pth", url="https://download.pytorch.org/models/shufflenetv2_x1-5666bf0f80.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -77,7 +77,7 @@ class ShuffleNet_V2_X1_0_Weights(WeightsEnum): ...@@ -77,7 +77,7 @@ class ShuffleNet_V2_X1_0_Weights(WeightsEnum):
"acc@5": 81.746, "acc@5": 81.746,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ShuffleNet_V2_X1_5_Weights(WeightsEnum): class ShuffleNet_V2_X1_5_Weights(WeightsEnum):
...@@ -88,7 +88,7 @@ class ShuffleNet_V2_X2_0_Weights(WeightsEnum): ...@@ -88,7 +88,7 @@ class ShuffleNet_V2_X2_0_Weights(WeightsEnum):
pass pass
@handle_legacy_interface(weights=("pretrained", ShuffleNet_V2_X0_5_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ShuffleNet_V2_X0_5_Weights.IMAGENET1K_V1))
def shufflenet_v2_x0_5( def shufflenet_v2_x0_5(
*, weights: Optional[ShuffleNet_V2_X0_5_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[ShuffleNet_V2_X0_5_Weights] = None, progress: bool = True, **kwargs: Any
) -> ShuffleNetV2: ) -> ShuffleNetV2:
...@@ -97,7 +97,7 @@ def shufflenet_v2_x0_5( ...@@ -97,7 +97,7 @@ def shufflenet_v2_x0_5(
return _shufflenetv2(weights, progress, [4, 8, 4], [24, 48, 96, 192, 1024], **kwargs) return _shufflenetv2(weights, progress, [4, 8, 4], [24, 48, 96, 192, 1024], **kwargs)
@handle_legacy_interface(weights=("pretrained", ShuffleNet_V2_X1_0_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ShuffleNet_V2_X1_0_Weights.IMAGENET1K_V1))
def shufflenet_v2_x1_0( def shufflenet_v2_x1_0(
*, weights: Optional[ShuffleNet_V2_X1_0_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[ShuffleNet_V2_X1_0_Weights] = None, progress: bool = True, **kwargs: Any
) -> ShuffleNetV2: ) -> ShuffleNetV2:
......
...@@ -25,7 +25,7 @@ _COMMON_META = { ...@@ -25,7 +25,7 @@ _COMMON_META = {
class SqueezeNet1_0_Weights(WeightsEnum): class SqueezeNet1_0_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/squeezenet1_0-b66bff10.pth", url="https://download.pytorch.org/models/squeezenet1_0-b66bff10.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -36,11 +36,11 @@ class SqueezeNet1_0_Weights(WeightsEnum): ...@@ -36,11 +36,11 @@ class SqueezeNet1_0_Weights(WeightsEnum):
"acc@5": 80.420, "acc@5": 80.420,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class SqueezeNet1_1_Weights(WeightsEnum): class SqueezeNet1_1_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/squeezenet1_1-b8a52dc0.pth", url="https://download.pytorch.org/models/squeezenet1_1-b8a52dc0.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -51,10 +51,10 @@ class SqueezeNet1_1_Weights(WeightsEnum): ...@@ -51,10 +51,10 @@ class SqueezeNet1_1_Weights(WeightsEnum):
"acc@5": 80.624, "acc@5": 80.624,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
@handle_legacy_interface(weights=("pretrained", SqueezeNet1_0_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", SqueezeNet1_0_Weights.IMAGENET1K_V1))
def squeezenet1_0( def squeezenet1_0(
*, weights: Optional[SqueezeNet1_0_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[SqueezeNet1_0_Weights] = None, progress: bool = True, **kwargs: Any
) -> SqueezeNet: ) -> SqueezeNet:
...@@ -71,7 +71,7 @@ def squeezenet1_0( ...@@ -71,7 +71,7 @@ def squeezenet1_0(
return model return model
@handle_legacy_interface(weights=("pretrained", SqueezeNet1_1_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", SqueezeNet1_1_Weights.IMAGENET1K_V1))
def squeezenet1_1( def squeezenet1_1(
*, weights: Optional[SqueezeNet1_1_Weights] = None, progress: bool = True, **kwargs: Any *, weights: Optional[SqueezeNet1_1_Weights] = None, progress: bool = True, **kwargs: Any
) -> SqueezeNet: ) -> SqueezeNet:
......
...@@ -53,7 +53,7 @@ _COMMON_META = { ...@@ -53,7 +53,7 @@ _COMMON_META = {
class VGG11_Weights(WeightsEnum): class VGG11_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg11-8a719046.pth", url="https://download.pytorch.org/models/vgg11-8a719046.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -63,11 +63,11 @@ class VGG11_Weights(WeightsEnum): ...@@ -63,11 +63,11 @@ class VGG11_Weights(WeightsEnum):
"acc@5": 88.628, "acc@5": 88.628,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG11_BN_Weights(WeightsEnum): class VGG11_BN_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg11_bn-6002323d.pth", url="https://download.pytorch.org/models/vgg11_bn-6002323d.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -77,11 +77,11 @@ class VGG11_BN_Weights(WeightsEnum): ...@@ -77,11 +77,11 @@ class VGG11_BN_Weights(WeightsEnum):
"acc@5": 89.810, "acc@5": 89.810,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG13_Weights(WeightsEnum): class VGG13_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg13-19584684.pth", url="https://download.pytorch.org/models/vgg13-19584684.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -91,11 +91,11 @@ class VGG13_Weights(WeightsEnum): ...@@ -91,11 +91,11 @@ class VGG13_Weights(WeightsEnum):
"acc@5": 89.246, "acc@5": 89.246,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG13_BN_Weights(WeightsEnum): class VGG13_BN_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg13_bn-abd245e5.pth", url="https://download.pytorch.org/models/vgg13_bn-abd245e5.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -105,11 +105,11 @@ class VGG13_BN_Weights(WeightsEnum): ...@@ -105,11 +105,11 @@ class VGG13_BN_Weights(WeightsEnum):
"acc@5": 90.374, "acc@5": 90.374,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG16_Weights(WeightsEnum): class VGG16_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg16-397923af.pth", url="https://download.pytorch.org/models/vgg16-397923af.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -122,7 +122,7 @@ class VGG16_Weights(WeightsEnum): ...@@ -122,7 +122,7 @@ class VGG16_Weights(WeightsEnum):
# We port the features of a VGG16 backbone trained by amdegroot because unlike the one on TorchVision, it uses the # We port the features of a VGG16 backbone trained by amdegroot because unlike the one on TorchVision, it uses the
# same input standardization method as the paper. Only the `features` weights have proper values, those on the # same input standardization method as the paper. Only the `features` weights have proper values, those on the
# `classifier` module are filled with nans. # `classifier` module are filled with nans.
ImageNet1K_Features = Weights( IMAGENET1K_FEATURES = Weights(
url="https://download.pytorch.org/models/vgg16_features-amdegroot-88682ab5.pth", url="https://download.pytorch.org/models/vgg16_features-amdegroot-88682ab5.pth",
transforms=partial( transforms=partial(
ImageNetEval, crop_size=224, mean=(0.48235, 0.45882, 0.40784), std=(1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0) ImageNetEval, crop_size=224, mean=(0.48235, 0.45882, 0.40784), std=(1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0)
...@@ -136,11 +136,11 @@ class VGG16_Weights(WeightsEnum): ...@@ -136,11 +136,11 @@ class VGG16_Weights(WeightsEnum):
"acc@5": float("nan"), "acc@5": float("nan"),
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG16_BN_Weights(WeightsEnum): class VGG16_BN_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg16_bn-6c64b313.pth", url="https://download.pytorch.org/models/vgg16_bn-6c64b313.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -150,11 +150,11 @@ class VGG16_BN_Weights(WeightsEnum): ...@@ -150,11 +150,11 @@ class VGG16_BN_Weights(WeightsEnum):
"acc@5": 91.516, "acc@5": 91.516,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG19_Weights(WeightsEnum): class VGG19_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg19-dcbb9e9d.pth", url="https://download.pytorch.org/models/vgg19-dcbb9e9d.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -164,11 +164,11 @@ class VGG19_Weights(WeightsEnum): ...@@ -164,11 +164,11 @@ class VGG19_Weights(WeightsEnum):
"acc@5": 90.876, "acc@5": 90.876,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class VGG19_BN_Weights(WeightsEnum): class VGG19_BN_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vgg19_bn-c79401a0.pth", url="https://download.pytorch.org/models/vgg19_bn-c79401a0.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -178,59 +178,59 @@ class VGG19_BN_Weights(WeightsEnum): ...@@ -178,59 +178,59 @@ class VGG19_BN_Weights(WeightsEnum):
"acc@5": 91.842, "acc@5": 91.842,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
@handle_legacy_interface(weights=("pretrained", VGG11_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG11_Weights.IMAGENET1K_V1))
def vgg11(*, weights: Optional[VGG11_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg11(*, weights: Optional[VGG11_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG11_Weights.verify(weights) weights = VGG11_Weights.verify(weights)
return _vgg("A", False, weights, progress, **kwargs) return _vgg("A", False, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG11_BN_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG11_BN_Weights.IMAGENET1K_V1))
def vgg11_bn(*, weights: Optional[VGG11_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg11_bn(*, weights: Optional[VGG11_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG11_BN_Weights.verify(weights) weights = VGG11_BN_Weights.verify(weights)
return _vgg("A", True, weights, progress, **kwargs) return _vgg("A", True, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG13_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG13_Weights.IMAGENET1K_V1))
def vgg13(*, weights: Optional[VGG13_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg13(*, weights: Optional[VGG13_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG13_Weights.verify(weights) weights = VGG13_Weights.verify(weights)
return _vgg("B", False, weights, progress, **kwargs) return _vgg("B", False, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG13_BN_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG13_BN_Weights.IMAGENET1K_V1))
def vgg13_bn(*, weights: Optional[VGG13_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg13_bn(*, weights: Optional[VGG13_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG13_BN_Weights.verify(weights) weights = VGG13_BN_Weights.verify(weights)
return _vgg("B", True, weights, progress, **kwargs) return _vgg("B", True, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG16_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG16_Weights.IMAGENET1K_V1))
def vgg16(*, weights: Optional[VGG16_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg16(*, weights: Optional[VGG16_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG16_Weights.verify(weights) weights = VGG16_Weights.verify(weights)
return _vgg("D", False, weights, progress, **kwargs) return _vgg("D", False, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG16_BN_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG16_BN_Weights.IMAGENET1K_V1))
def vgg16_bn(*, weights: Optional[VGG16_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg16_bn(*, weights: Optional[VGG16_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG16_BN_Weights.verify(weights) weights = VGG16_BN_Weights.verify(weights)
return _vgg("D", True, weights, progress, **kwargs) return _vgg("D", True, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG19_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG19_Weights.IMAGENET1K_V1))
def vgg19(*, weights: Optional[VGG19_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg19(*, weights: Optional[VGG19_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG19_Weights.verify(weights) weights = VGG19_Weights.verify(weights)
return _vgg("E", False, weights, progress, **kwargs) return _vgg("E", False, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", VGG19_BN_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", VGG19_BN_Weights.IMAGENET1K_V1))
def vgg19_bn(*, weights: Optional[VGG19_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG: def vgg19_bn(*, weights: Optional[VGG19_BN_Weights] = None, progress: bool = True, **kwargs: Any) -> VGG:
weights = VGG19_BN_Weights.verify(weights) weights = VGG19_BN_Weights.verify(weights)
......
...@@ -63,7 +63,7 @@ _COMMON_META = { ...@@ -63,7 +63,7 @@ _COMMON_META = {
class R3D_18_Weights(WeightsEnum): class R3D_18_Weights(WeightsEnum):
Kinetics400_V1 = Weights( KINETICS400_V1 = Weights(
url="https://download.pytorch.org/models/r3d_18-b3b3357e.pth", url="https://download.pytorch.org/models/r3d_18-b3b3357e.pth",
transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)), transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)),
meta={ meta={
...@@ -74,11 +74,11 @@ class R3D_18_Weights(WeightsEnum): ...@@ -74,11 +74,11 @@ class R3D_18_Weights(WeightsEnum):
"acc@5": 75.45, "acc@5": 75.45,
}, },
) )
default = Kinetics400_V1 DEFAULT = KINETICS400_V1
class MC3_18_Weights(WeightsEnum): class MC3_18_Weights(WeightsEnum):
Kinetics400_V1 = Weights( KINETICS400_V1 = Weights(
url="https://download.pytorch.org/models/mc3_18-a90a0ba3.pth", url="https://download.pytorch.org/models/mc3_18-a90a0ba3.pth",
transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)), transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)),
meta={ meta={
...@@ -89,11 +89,11 @@ class MC3_18_Weights(WeightsEnum): ...@@ -89,11 +89,11 @@ class MC3_18_Weights(WeightsEnum):
"acc@5": 76.29, "acc@5": 76.29,
}, },
) )
default = Kinetics400_V1 DEFAULT = KINETICS400_V1
class R2Plus1D_18_Weights(WeightsEnum): class R2Plus1D_18_Weights(WeightsEnum):
Kinetics400_V1 = Weights( KINETICS400_V1 = Weights(
url="https://download.pytorch.org/models/r2plus1d_18-91a641e6.pth", url="https://download.pytorch.org/models/r2plus1d_18-91a641e6.pth",
transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)), transforms=partial(Kinect400Eval, crop_size=(112, 112), resize_size=(128, 171)),
meta={ meta={
...@@ -104,10 +104,10 @@ class R2Plus1D_18_Weights(WeightsEnum): ...@@ -104,10 +104,10 @@ class R2Plus1D_18_Weights(WeightsEnum):
"acc@5": 78.81, "acc@5": 78.81,
}, },
) )
default = Kinetics400_V1 DEFAULT = KINETICS400_V1
@handle_legacy_interface(weights=("pretrained", R3D_18_Weights.Kinetics400_V1)) @handle_legacy_interface(weights=("pretrained", R3D_18_Weights.KINETICS400_V1))
def r3d_18(*, weights: Optional[R3D_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet: def r3d_18(*, weights: Optional[R3D_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet:
weights = R3D_18_Weights.verify(weights) weights = R3D_18_Weights.verify(weights)
...@@ -122,7 +122,7 @@ def r3d_18(*, weights: Optional[R3D_18_Weights] = None, progress: bool = True, * ...@@ -122,7 +122,7 @@ def r3d_18(*, weights: Optional[R3D_18_Weights] = None, progress: bool = True, *
) )
@handle_legacy_interface(weights=("pretrained", MC3_18_Weights.Kinetics400_V1)) @handle_legacy_interface(weights=("pretrained", MC3_18_Weights.KINETICS400_V1))
def mc3_18(*, weights: Optional[MC3_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet: def mc3_18(*, weights: Optional[MC3_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet:
weights = MC3_18_Weights.verify(weights) weights = MC3_18_Weights.verify(weights)
...@@ -137,7 +137,7 @@ def mc3_18(*, weights: Optional[MC3_18_Weights] = None, progress: bool = True, * ...@@ -137,7 +137,7 @@ def mc3_18(*, weights: Optional[MC3_18_Weights] = None, progress: bool = True, *
) )
@handle_legacy_interface(weights=("pretrained", R2Plus1D_18_Weights.Kinetics400_V1)) @handle_legacy_interface(weights=("pretrained", R2Plus1D_18_Weights.KINETICS400_V1))
def r2plus1d_18(*, weights: Optional[R2Plus1D_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet: def r2plus1d_18(*, weights: Optional[R2Plus1D_18_Weights] = None, progress: bool = True, **kwargs: Any) -> VideoResNet:
weights = R2Plus1D_18_Weights.verify(weights) weights = R2Plus1D_18_Weights.verify(weights)
......
...@@ -36,7 +36,7 @@ _COMMON_META = { ...@@ -36,7 +36,7 @@ _COMMON_META = {
class ViT_B_16_Weights(WeightsEnum): class ViT_B_16_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vit_b_16-c867db91.pth", url="https://download.pytorch.org/models/vit_b_16-c867db91.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -49,11 +49,11 @@ class ViT_B_16_Weights(WeightsEnum): ...@@ -49,11 +49,11 @@ class ViT_B_16_Weights(WeightsEnum):
"acc@5": 95.318, "acc@5": 95.318,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ViT_B_32_Weights(WeightsEnum): class ViT_B_32_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vit_b_32-d86f8d99.pth", url="https://download.pytorch.org/models/vit_b_32-d86f8d99.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -66,11 +66,11 @@ class ViT_B_32_Weights(WeightsEnum): ...@@ -66,11 +66,11 @@ class ViT_B_32_Weights(WeightsEnum):
"acc@5": 92.466, "acc@5": 92.466,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ViT_L_16_Weights(WeightsEnum): class ViT_L_16_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vit_l_16-852ce7e3.pth", url="https://download.pytorch.org/models/vit_l_16-852ce7e3.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=242), transforms=partial(ImageNetEval, crop_size=224, resize_size=242),
meta={ meta={
...@@ -83,11 +83,11 @@ class ViT_L_16_Weights(WeightsEnum): ...@@ -83,11 +83,11 @@ class ViT_L_16_Weights(WeightsEnum):
"acc@5": 94.638, "acc@5": 94.638,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
class ViT_L_32_Weights(WeightsEnum): class ViT_L_32_Weights(WeightsEnum):
ImageNet1K_V1 = Weights( IMAGENET1K_V1 = Weights(
url="https://download.pytorch.org/models/vit_l_32-c7638314.pth", url="https://download.pytorch.org/models/vit_l_32-c7638314.pth",
transforms=partial(ImageNetEval, crop_size=224), transforms=partial(ImageNetEval, crop_size=224),
meta={ meta={
...@@ -100,7 +100,7 @@ class ViT_L_32_Weights(WeightsEnum): ...@@ -100,7 +100,7 @@ class ViT_L_32_Weights(WeightsEnum):
"acc@5": 93.07, "acc@5": 93.07,
}, },
) )
default = ImageNet1K_V1 DEFAULT = IMAGENET1K_V1
def _vision_transformer( def _vision_transformer(
...@@ -134,7 +134,7 @@ def _vision_transformer( ...@@ -134,7 +134,7 @@ def _vision_transformer(
return model return model
@handle_legacy_interface(weights=("pretrained", ViT_B_16_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ViT_B_16_Weights.IMAGENET1K_V1))
def vit_b_16(*, weights: Optional[ViT_B_16_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer: def vit_b_16(*, weights: Optional[ViT_B_16_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
weights = ViT_B_16_Weights.verify(weights) weights = ViT_B_16_Weights.verify(weights)
...@@ -150,7 +150,7 @@ def vit_b_16(*, weights: Optional[ViT_B_16_Weights] = None, progress: bool = Tru ...@@ -150,7 +150,7 @@ def vit_b_16(*, weights: Optional[ViT_B_16_Weights] = None, progress: bool = Tru
) )
@handle_legacy_interface(weights=("pretrained", ViT_B_32_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ViT_B_32_Weights.IMAGENET1K_V1))
def vit_b_32(*, weights: Optional[ViT_B_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer: def vit_b_32(*, weights: Optional[ViT_B_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
weights = ViT_B_32_Weights.verify(weights) weights = ViT_B_32_Weights.verify(weights)
...@@ -166,7 +166,7 @@ def vit_b_32(*, weights: Optional[ViT_B_32_Weights] = None, progress: bool = Tru ...@@ -166,7 +166,7 @@ def vit_b_32(*, weights: Optional[ViT_B_32_Weights] = None, progress: bool = Tru
) )
@handle_legacy_interface(weights=("pretrained", ViT_L_16_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ViT_L_16_Weights.IMAGENET1K_V1))
def vit_l_16(*, weights: Optional[ViT_L_16_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer: def vit_l_16(*, weights: Optional[ViT_L_16_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
weights = ViT_L_16_Weights.verify(weights) weights = ViT_L_16_Weights.verify(weights)
...@@ -182,7 +182,7 @@ def vit_l_16(*, weights: Optional[ViT_L_16_Weights] = None, progress: bool = Tru ...@@ -182,7 +182,7 @@ def vit_l_16(*, weights: Optional[ViT_L_16_Weights] = None, progress: bool = Tru
) )
@handle_legacy_interface(weights=("pretrained", ViT_L_32_Weights.ImageNet1K_V1)) @handle_legacy_interface(weights=("pretrained", ViT_L_32_Weights.IMAGENET1K_V1))
def vit_l_32(*, weights: Optional[ViT_L_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer: def vit_l_32(*, weights: Optional[ViT_L_32_Weights] = None, progress: bool = True, **kwargs: Any) -> VisionTransformer:
weights = ViT_L_32_Weights.verify(weights) weights = ViT_L_32_Weights.verify(weights)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment