Unverified Commit 6f016dd9 authored by Vasilis Vryniotis, committed by GitHub

Restructuring metrics meta-data (#5859)

* Restructuring metrics meta-data for detection, segmentation and optical flow.

* Renaming acc to pixel_acc for segmentation.

* Restructure video meta-data.

* Restructure classification and quantization meta-data.

* Fix tests.

* Fix documentation.
parent c82b86d1
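
The diffs below apply one mechanical change across the model builders: the flat evaluation keys in each `Weights` entry's `meta` dict (`acc@1`/`acc@5` for classification, quantization and video; `mIoU`/`acc` for segmentation) move under a nested `"metrics"` dict. A minimal sketch of the resulting access pattern, assuming the multi-weight API as it stands on `main` at the time of this PR:

```python
from torchvision.models import ResNet50_Weights

weights = ResNet50_Weights.IMAGENET1K_V2
# Before this PR: weights.meta["acc@1"]; after it, the accuracies sit
# one level down under the nested "metrics" key.
metrics = weights.meta["metrics"]
print(metrics["acc@1"], metrics["acc@5"])  # 80.858 95.434
```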
@@ -183,9 +183,11 @@ class Inception_V3_QuantizedWeights(WeightsEnum):
             "backend": "fbgemm",
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models",
             "unquantized": Inception_V3_Weights.IMAGENET1K_V1,
-            "acc@1": 77.176,
-            "acc@5": 93.354,
+            "metrics": {
+                "acc@1": 77.176,
+                "acc@5": 93.354,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V1
...
@@ -75,9 +75,11 @@ class MobileNet_V2_QuantizedWeights(WeightsEnum):
             "backend": "qnnpack",
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv2",
             "unquantized": MobileNet_V2_Weights.IMAGENET1K_V1,
-            "acc@1": 71.658,
-            "acc@5": 90.150,
+            "metrics": {
+                "acc@1": 71.658,
+                "acc@5": 90.150,
+            },
         },
     )
     DEFAULT = IMAGENET1K_QNNPACK_V1
...
@@ -165,9 +165,11 @@ class MobileNet_V3_Large_QuantizedWeights(WeightsEnum):
             "backend": "qnnpack",
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#qat-mobilenetv3",
             "unquantized": MobileNet_V3_Large_Weights.IMAGENET1K_V1,
-            "acc@1": 73.004,
-            "acc@5": 90.858,
+            "metrics": {
+                "acc@1": 73.004,
+                "acc@5": 90.858,
+            },
         },
     )
     DEFAULT = IMAGENET1K_QNNPACK_V1
...
@@ -162,9 +162,11 @@ class ResNet18_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 11689512,
             "unquantized": ResNet18_Weights.IMAGENET1K_V1,
-            "acc@1": 69.494,
-            "acc@5": 88.882,
+            "metrics": {
+                "acc@1": 69.494,
+                "acc@5": 88.882,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V1
@@ -177,9 +179,11 @@ class ResNet50_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25557032,
             "unquantized": ResNet50_Weights.IMAGENET1K_V1,
-            "acc@1": 75.920,
-            "acc@5": 92.814,
+            "metrics": {
+                "acc@1": 75.920,
+                "acc@5": 92.814,
+            },
         },
     )
     IMAGENET1K_FBGEMM_V2 = Weights(
         url="https://download.pytorch.org/models/quantized/resnet50_fbgemm-23753f79.pth",
@@ -188,9 +192,11 @@ class ResNet50_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25557032,
             "unquantized": ResNet50_Weights.IMAGENET1K_V2,
-            "acc@1": 80.282,
-            "acc@5": 94.976,
+            "metrics": {
+                "acc@1": 80.282,
+                "acc@5": 94.976,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V2
@@ -203,9 +209,11 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 88791336,
             "unquantized": ResNeXt101_32X8D_Weights.IMAGENET1K_V1,
-            "acc@1": 78.986,
-            "acc@5": 94.480,
+            "metrics": {
+                "acc@1": 78.986,
+                "acc@5": 94.480,
+            },
         },
     )
     IMAGENET1K_FBGEMM_V2 = Weights(
         url="https://download.pytorch.org/models/quantized/resnext101_32x8_fbgemm-ee16d00c.pth",
@@ -214,9 +222,11 @@ class ResNeXt101_32X8D_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 88791336,
             "unquantized": ResNeXt101_32X8D_Weights.IMAGENET1K_V2,
-            "acc@1": 82.574,
-            "acc@5": 96.132,
+            "metrics": {
+                "acc@1": 82.574,
+                "acc@5": 96.132,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V2
...
@@ -117,9 +117,11 @@ class ShuffleNet_V2_X0_5_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 1366792,
             "unquantized": ShuffleNet_V2_X0_5_Weights.IMAGENET1K_V1,
-            "acc@1": 57.972,
-            "acc@5": 79.780,
+            "metrics": {
+                "acc@1": 57.972,
+                "acc@5": 79.780,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V1
@@ -132,9 +134,11 @@ class ShuffleNet_V2_X1_0_QuantizedWeights(WeightsEnum):
             **_COMMON_META,
             "num_params": 2278604,
             "unquantized": ShuffleNet_V2_X1_0_Weights.IMAGENET1K_V1,
-            "acc@1": 68.360,
-            "acc@5": 87.582,
+            "metrics": {
+                "acc@1": 68.360,
+                "acc@5": 87.582,
+            },
         },
     )
     DEFAULT = IMAGENET1K_FBGEMM_V1
...
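
The quantized entries keep their `"unquantized"` cross-reference to the floating-point weights, and after this restructuring both sides expose the same nested layout, so the two can be compared uniformly. A small sketch of that comparison, using values from the hunks above (a usage illustration, not code from this PR):

```python
from torchvision.models.quantization import ResNet50_QuantizedWeights

qweights = ResNet50_QuantizedWeights.IMAGENET1K_FBGEMM_V2
fweights = qweights.meta["unquantized"]  # ResNet50_Weights.IMAGENET1K_V2
# Both metas now nest their accuracies under "metrics".
drop = fweights.meta["metrics"]["acc@1"] - qweights.meta["metrics"]["acc@1"]
print(f"acc@1 cost of quantization: {drop:.3f}")  # 80.858 - 80.282 = 0.576
```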
@@ -422,9 +422,11 @@ class RegNet_Y_400MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 4344144,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 74.046,
-            "acc@5": 91.716,
+            "metrics": {
+                "acc@1": 74.046,
+                "acc@5": 91.716,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_400mf-e6988f5f.pth",
@@ -433,9 +435,11 @@ class RegNet_Y_400MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 4344144,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 75.804,
-            "acc@5": 92.742,
+            "metrics": {
+                "acc@1": 75.804,
+                "acc@5": 92.742,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -448,9 +452,11 @@ class RegNet_Y_800MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 6432512,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 76.420,
-            "acc@5": 93.136,
+            "metrics": {
+                "acc@1": 76.420,
+                "acc@5": 93.136,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_800mf-58fc7688.pth",
@@ -459,9 +465,11 @@ class RegNet_Y_800MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 6432512,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 78.828,
-            "acc@5": 94.502,
+            "metrics": {
+                "acc@1": 78.828,
+                "acc@5": 94.502,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -474,9 +482,11 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 11202430,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 77.950,
-            "acc@5": 93.966,
+            "metrics": {
+                "acc@1": 77.950,
+                "acc@5": 93.966,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_1_6gf-0d7bc02a.pth",
@@ -485,9 +495,11 @@ class RegNet_Y_1_6GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 11202430,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 80.876,
-            "acc@5": 95.444,
+            "metrics": {
+                "acc@1": 80.876,
+                "acc@5": 95.444,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -500,9 +512,11 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 19436338,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#medium-models",
-            "acc@1": 78.948,
-            "acc@5": 94.576,
+            "metrics": {
+                "acc@1": 78.948,
+                "acc@5": 94.576,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_3_2gf-9180c971.pth",
@@ -511,9 +525,11 @@ class RegNet_Y_3_2GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 19436338,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 81.982,
-            "acc@5": 95.972,
+            "metrics": {
+                "acc@1": 81.982,
+                "acc@5": 95.972,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -526,9 +542,11 @@ class RegNet_Y_8GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 39381472,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#medium-models",
-            "acc@1": 80.032,
-            "acc@5": 95.048,
+            "metrics": {
+                "acc@1": 80.032,
+                "acc@5": 95.048,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_8gf-dc2b1b54.pth",
@@ -537,9 +555,11 @@ class RegNet_Y_8GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 39381472,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 82.828,
-            "acc@5": 96.330,
+            "metrics": {
+                "acc@1": 82.828,
+                "acc@5": 96.330,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -552,9 +572,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 83590140,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#large-models",
-            "acc@1": 80.424,
-            "acc@5": 95.240,
+            "metrics": {
+                "acc@1": 80.424,
+                "acc@5": 95.240,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_16gf-3e4a00f9.pth",
@@ -563,9 +585,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 83590140,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 82.886,
-            "acc@5": 96.328,
+            "metrics": {
+                "acc@1": 82.886,
+                "acc@5": 96.328,
+            },
         },
     )
     IMAGENET1K_SWAG_E2E_V1 = Weights(
         url="https://download.pytorch.org/models/regnet_y_16gf_swag-43afe44d.pth",
@@ -575,9 +599,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
         meta={
             **_COMMON_SWAG_META,
             "num_params": 83590140,
-            "acc@1": 86.012,
-            "acc@5": 98.054,
+            "metrics": {
+                "acc@1": 86.012,
+                "acc@5": 98.054,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/regnet_y_16gf_lc_swag-f3ec0043.pth",
@@ -588,9 +614,11 @@ class RegNet_Y_16GF_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 83590140,
-            "acc@1": 83.976,
-            "acc@5": 97.244,
+            "metrics": {
+                "acc@1": 83.976,
+                "acc@5": 97.244,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -603,9 +631,11 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 145046770,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#large-models",
-            "acc@1": 80.878,
-            "acc@5": 95.340,
+            "metrics": {
+                "acc@1": 80.878,
+                "acc@5": 95.340,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_y_32gf-8db6d4b5.pth",
@@ -614,9 +644,11 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 145046770,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 83.368,
-            "acc@5": 96.498,
+            "metrics": {
+                "acc@1": 83.368,
+                "acc@5": 96.498,
+            },
         },
     )
     IMAGENET1K_SWAG_E2E_V1 = Weights(
         url="https://download.pytorch.org/models/regnet_y_32gf_swag-04fdfa75.pth",
@@ -626,9 +658,11 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
         meta={
             **_COMMON_SWAG_META,
             "num_params": 145046770,
-            "acc@1": 86.838,
-            "acc@5": 98.362,
+            "metrics": {
+                "acc@1": 86.838,
+                "acc@5": 98.362,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/regnet_y_32gf_lc_swag-e1583746.pth",
@@ -639,9 +673,11 @@ class RegNet_Y_32GF_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 145046770,
-            "acc@1": 84.622,
-            "acc@5": 97.480,
+            "metrics": {
+                "acc@1": 84.622,
+                "acc@5": 97.480,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -655,9 +691,11 @@ class RegNet_Y_128GF_Weights(WeightsEnum):
         meta={
             **_COMMON_SWAG_META,
             "num_params": 644812894,
-            "acc@1": 88.228,
-            "acc@5": 98.682,
+            "metrics": {
+                "acc@1": 88.228,
+                "acc@5": 98.682,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/regnet_y_128gf_lc_swag-cbe8ce12.pth",
@@ -668,9 +706,11 @@ class RegNet_Y_128GF_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 644812894,
-            "acc@1": 86.068,
-            "acc@5": 97.844,
+            "metrics": {
+                "acc@1": 86.068,
+                "acc@5": 97.844,
+            },
         },
     )
     DEFAULT = IMAGENET1K_SWAG_E2E_V1
@@ -683,9 +723,11 @@ class RegNet_X_400MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 5495976,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 72.834,
-            "acc@5": 90.950,
+            "metrics": {
+                "acc@1": 72.834,
+                "acc@5": 90.950,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_400mf-62229a5f.pth",
@@ -694,9 +736,11 @@ class RegNet_X_400MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 5495976,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres",
-            "acc@1": 74.864,
-            "acc@5": 92.322,
+            "metrics": {
+                "acc@1": 74.864,
+                "acc@5": 92.322,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -709,9 +753,11 @@ class RegNet_X_800MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 7259656,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 75.212,
-            "acc@5": 92.348,
+            "metrics": {
+                "acc@1": 75.212,
+                "acc@5": 92.348,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_800mf-94a99ebd.pth",
@@ -720,9 +766,11 @@ class RegNet_X_800MF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 7259656,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres",
-            "acc@1": 77.522,
-            "acc@5": 93.826,
+            "metrics": {
+                "acc@1": 77.522,
+                "acc@5": 93.826,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -735,9 +783,11 @@ class RegNet_X_1_6GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 9190136,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#small-models",
-            "acc@1": 77.040,
-            "acc@5": 93.440,
+            "metrics": {
+                "acc@1": 77.040,
+                "acc@5": 93.440,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_1_6gf-a12f2b72.pth",
@@ -746,9 +796,11 @@ class RegNet_X_1_6GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 9190136,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres",
-            "acc@1": 79.668,
-            "acc@5": 94.922,
+            "metrics": {
+                "acc@1": 79.668,
+                "acc@5": 94.922,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -761,9 +813,11 @@ class RegNet_X_3_2GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 15296552,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#medium-models",
-            "acc@1": 78.364,
-            "acc@5": 93.992,
+            "metrics": {
+                "acc@1": 78.364,
+                "acc@5": 93.992,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_3_2gf-7071aa85.pth",
@@ -772,9 +826,11 @@ class RegNet_X_3_2GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 15296552,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 81.196,
-            "acc@5": 95.430,
+            "metrics": {
+                "acc@1": 81.196,
+                "acc@5": 95.430,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -787,9 +843,11 @@ class RegNet_X_8GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 39572648,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#medium-models",
-            "acc@1": 79.344,
-            "acc@5": 94.686,
+            "metrics": {
+                "acc@1": 79.344,
+                "acc@5": 94.686,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_8gf-2b70d774.pth",
@@ -798,9 +856,11 @@ class RegNet_X_8GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 39572648,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 81.682,
-            "acc@5": 95.678,
+            "metrics": {
+                "acc@1": 81.682,
+                "acc@5": 95.678,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -813,9 +873,11 @@ class RegNet_X_16GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 54278536,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#medium-models",
-            "acc@1": 80.058,
-            "acc@5": 94.944,
+            "metrics": {
+                "acc@1": 80.058,
+                "acc@5": 94.944,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_16gf-ba3796d7.pth",
@@ -824,9 +886,11 @@ class RegNet_X_16GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 54278536,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 82.716,
-            "acc@5": 96.196,
+            "metrics": {
+                "acc@1": 82.716,
+                "acc@5": 96.196,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -839,9 +903,11 @@ class RegNet_X_32GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 107811560,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#large-models",
-            "acc@1": 80.622,
-            "acc@5": 95.248,
+            "metrics": {
+                "acc@1": 80.622,
+                "acc@5": 95.248,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/regnet_x_32gf-6eb8fdc6.pth",
@@ -850,9 +916,11 @@ class RegNet_X_32GF_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 107811560,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 83.014,
-            "acc@5": 96.288,
+            "metrics": {
+                "acc@1": 83.014,
+                "acc@5": 96.288,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
...
@@ -315,9 +315,11 @@ class ResNet18_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 11689512,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet",
-            "acc@1": 69.758,
-            "acc@5": 89.078,
+            "metrics": {
+                "acc@1": 69.758,
+                "acc@5": 89.078,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -330,9 +332,11 @@ class ResNet34_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 21797672,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet",
-            "acc@1": 73.314,
-            "acc@5": 91.420,
+            "metrics": {
+                "acc@1": 73.314,
+                "acc@5": 91.420,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -345,9 +349,11 @@ class ResNet50_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25557032,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet",
-            "acc@1": 76.130,
-            "acc@5": 92.862,
+            "metrics": {
+                "acc@1": 76.130,
+                "acc@5": 92.862,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/resnet50-11ad3fa6.pth",
@@ -356,9 +362,11 @@ class ResNet50_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25557032,
             "recipe": "https://github.com/pytorch/vision/issues/3995#issuecomment-1013906621",
-            "acc@1": 80.858,
-            "acc@5": 95.434,
+            "metrics": {
+                "acc@1": 80.858,
+                "acc@5": 95.434,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -371,9 +379,11 @@ class ResNet101_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 44549160,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet",
-            "acc@1": 77.374,
-            "acc@5": 93.546,
+            "metrics": {
+                "acc@1": 77.374,
+                "acc@5": 93.546,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/resnet101-cd907fc2.pth",
@@ -382,9 +392,11 @@ class ResNet101_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 44549160,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 81.886,
-            "acc@5": 95.780,
+            "metrics": {
+                "acc@1": 81.886,
+                "acc@5": 95.780,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -397,9 +409,11 @@ class ResNet152_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 60192808,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet",
-            "acc@1": 78.312,
-            "acc@5": 94.046,
+            "metrics": {
+                "acc@1": 78.312,
+                "acc@5": 94.046,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/resnet152-f82ba261.pth",
@@ -408,9 +422,11 @@ class ResNet152_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 60192808,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 82.284,
-            "acc@5": 96.002,
+            "metrics": {
+                "acc@1": 82.284,
+                "acc@5": 96.002,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -423,9 +439,11 @@ class ResNeXt50_32X4D_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25028904,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnext",
-            "acc@1": 77.618,
-            "acc@5": 93.698,
+            "metrics": {
+                "acc@1": 77.618,
+                "acc@5": 93.698,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/resnext50_32x4d-1a0047aa.pth",
@@ -434,9 +452,11 @@ class ResNeXt50_32X4D_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 25028904,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 81.198,
-            "acc@5": 95.340,
+            "metrics": {
+                "acc@1": 81.198,
+                "acc@5": 95.340,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -449,9 +469,11 @@ class ResNeXt101_32X8D_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 88791336,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnext",
-            "acc@1": 79.312,
-            "acc@5": 94.526,
+            "metrics": {
+                "acc@1": 79.312,
+                "acc@5": 94.526,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/resnext101_32x8d-110c445d.pth",
@@ -460,9 +482,11 @@ class ResNeXt101_32X8D_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 88791336,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres",
-            "acc@1": 82.834,
-            "acc@5": 96.228,
+            "metrics": {
+                "acc@1": 82.834,
+                "acc@5": 96.228,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -475,9 +499,11 @@ class Wide_ResNet50_2_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 68883240,
             "recipe": "https://github.com/pytorch/vision/pull/912#issue-445437439",
-            "acc@1": 78.468,
-            "acc@5": 94.086,
+            "metrics": {
+                "acc@1": 78.468,
+                "acc@5": 94.086,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/wide_resnet50_2-9ba9bcbe.pth",
@@ -486,9 +512,11 @@ class Wide_ResNet50_2_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 68883240,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres",
-            "acc@1": 81.602,
-            "acc@5": 95.758,
+            "metrics": {
+                "acc@1": 81.602,
+                "acc@5": 95.758,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
@@ -501,9 +529,11 @@ class Wide_ResNet101_2_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 126886696,
             "recipe": "https://github.com/pytorch/vision/pull/912#issue-445437439",
-            "acc@1": 78.848,
-            "acc@5": 94.284,
+            "metrics": {
+                "acc@1": 78.848,
+                "acc@5": 94.284,
+            },
         },
     )
     IMAGENET1K_V2 = Weights(
         url="https://download.pytorch.org/models/wide_resnet101_2-d733dc28.pth",
@@ -512,9 +542,11 @@ class Wide_ResNet101_2_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 126886696,
             "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe",
-            "acc@1": 82.510,
-            "acc@5": 96.020,
+            "metrics": {
+                "acc@1": 82.510,
+                "acc@5": 96.020,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V2
...
@@ -142,8 +142,10 @@ class DeepLabV3_ResNet50_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 42004074,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#deeplabv3_resnet50",
-            "mIoU": 66.4,
-            "acc": 92.4,
+            "metrics": {
+                "miou": 66.4,
+                "pixel_acc": 92.4,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
@@ -157,8 +159,10 @@ class DeepLabV3_ResNet101_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 60996202,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#fcn_resnet101",
-            "mIoU": 67.4,
-            "acc": 92.4,
+            "metrics": {
+                "miou": 67.4,
+                "pixel_acc": 92.4,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
@@ -172,8 +176,10 @@ class DeepLabV3_MobileNet_V3_Large_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 11029328,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#deeplabv3_mobilenet_v3_large",
-            "mIoU": 60.3,
-            "acc": 91.2,
+            "metrics": {
+                "miou": 60.3,
+                "pixel_acc": 91.2,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
...
@@ -61,8 +61,10 @@ class FCN_ResNet50_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 35322218,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#fcn_resnet50",
-            "mIoU": 60.5,
-            "acc": 91.4,
+            "metrics": {
+                "miou": 60.5,
+                "pixel_acc": 91.4,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
@@ -76,8 +78,10 @@ class FCN_ResNet101_Weights(WeightsEnum):
             **_COMMON_META,
             "num_params": 54314346,
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#deeplabv3_resnet101",
-            "mIoU": 63.7,
-            "acc": 91.9,
+            "metrics": {
+                "miou": 63.7,
+                "pixel_acc": 91.9,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
...
@@ -102,8 +102,10 @@ class LRASPP_MobileNet_V3_Large_Weights(WeightsEnum):
             "categories": _VOC_CATEGORIES,
             "min_size": (1, 1),
             "recipe": "https://github.com/pytorch/vision/tree/main/references/segmentation#lraspp_mobilenet_v3_large",
-            "mIoU": 57.9,
-            "acc": 91.2,
+            "metrics": {
+                "miou": 57.9,
+                "pixel_acc": 91.2,
+            },
         },
     )
     DEFAULT = COCO_WITH_VOC_LABELS_V1
...
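
For segmentation the restructuring also renames the keys, per the commit message: `"mIoU"` becomes `"miou"` and the ambiguous `"acc"` becomes `"pixel_acc"`. A short sketch of the new access pattern:

```python
from torchvision.models.segmentation import DeepLabV3_ResNet50_Weights

weights = DeepLabV3_ResNet50_Weights.COCO_WITH_VOC_LABELS_V1
metrics = weights.meta["metrics"]
# Renamed in this PR: "mIoU" -> "miou", "acc" -> "pixel_acc".
print(metrics["miou"], metrics["pixel_acc"])  # 66.4 92.4
```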
@@ -197,9 +197,11 @@ class ShuffleNet_V2_X0_5_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 1366792,
-            "acc@1": 69.362,
-            "acc@5": 88.316,
+            "metrics": {
+                "acc@1": 69.362,
+                "acc@5": 88.316,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -211,9 +213,11 @@ class ShuffleNet_V2_X1_0_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 2278604,
-            "acc@1": 60.552,
-            "acc@5": 81.746,
+            "metrics": {
+                "acc@1": 60.552,
+                "acc@5": 81.746,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
...
@@ -128,9 +128,11 @@ class SqueezeNet1_0_Weights(WeightsEnum):
             **_COMMON_META,
             "min_size": (21, 21),
             "num_params": 1248424,
-            "acc@1": 58.092,
-            "acc@5": 80.420,
+            "metrics": {
+                "acc@1": 58.092,
+                "acc@5": 80.420,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -143,9 +145,11 @@ class SqueezeNet1_1_Weights(WeightsEnum):
             **_COMMON_META,
             "min_size": (17, 17),
             "num_params": 1235496,
-            "acc@1": 58.178,
-            "acc@5": 80.624,
+            "metrics": {
+                "acc@1": 58.178,
+                "acc@5": 80.624,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
...
@@ -120,9 +120,11 @@ class VGG11_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 132863336,
-            "acc@1": 69.020,
-            "acc@5": 88.628,
+            "metrics": {
+                "acc@1": 69.020,
+                "acc@5": 88.628,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -134,9 +136,11 @@ class VGG11_BN_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 132868840,
-            "acc@1": 70.370,
-            "acc@5": 89.810,
+            "metrics": {
+                "acc@1": 70.370,
+                "acc@5": 89.810,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -148,9 +152,11 @@ class VGG13_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 133047848,
-            "acc@1": 69.928,
-            "acc@5": 89.246,
+            "metrics": {
+                "acc@1": 69.928,
+                "acc@5": 89.246,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -162,9 +168,11 @@ class VGG13_BN_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 133053736,
-            "acc@1": 71.586,
-            "acc@5": 90.374,
+            "metrics": {
+                "acc@1": 71.586,
+                "acc@5": 90.374,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -176,9 +184,11 @@ class VGG16_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 138357544,
-            "acc@1": 71.592,
-            "acc@5": 90.382,
+            "metrics": {
+                "acc@1": 71.592,
+                "acc@5": 90.382,
+            },
         },
     )
     # We port the features of a VGG16 backbone trained by amdegroot because unlike the one on TorchVision, it uses the
     # same input standardization method as the paper. Only the `features` weights have proper values, those on the
@@ -196,9 +206,11 @@ class VGG16_Weights(WeightsEnum):
             "num_params": 138357544,
             "categories": None,
             "recipe": "https://github.com/amdegroot/ssd.pytorch#training-ssd",
-            "acc@1": float("nan"),
-            "acc@5": float("nan"),
+            "metrics": {
+                "acc@1": float("nan"),
+                "acc@5": float("nan"),
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -210,9 +222,11 @@ class VGG16_BN_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 138365992,
-            "acc@1": 73.360,
-            "acc@5": 91.516,
+            "metrics": {
+                "acc@1": 73.360,
+                "acc@5": 91.516,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -224,9 +238,11 @@ class VGG19_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 143667240,
-            "acc@1": 72.376,
-            "acc@5": 90.876,
+            "metrics": {
+                "acc@1": 72.376,
+                "acc@5": 90.876,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -238,9 +254,11 @@ class VGG19_BN_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 143678248,
-            "acc@1": 74.218,
-            "acc@5": 91.842,
+            "metrics": {
+                "acc@1": 74.218,
+                "acc@5": 91.842,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
...
@@ -322,9 +322,11 @@ class R3D_18_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 33371472,
-            "acc@1": 52.75,
-            "acc@5": 75.45,
+            "metrics": {
+                "acc@1": 52.75,
+                "acc@5": 75.45,
+            },
         },
     )
     DEFAULT = KINETICS400_V1
@@ -336,9 +338,11 @@ class MC3_18_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 11695440,
-            "acc@1": 53.90,
-            "acc@5": 76.29,
+            "metrics": {
+                "acc@1": 53.90,
+                "acc@5": 76.29,
+            },
         },
     )
     DEFAULT = KINETICS400_V1
@@ -350,9 +354,11 @@ class R2Plus1D_18_Weights(WeightsEnum):
         meta={
             **_COMMON_META,
             "num_params": 31505325,
-            "acc@1": 57.50,
-            "acc@5": 78.81,
+            "metrics": {
+                "acc@1": 57.50,
+                "acc@5": 78.81,
+            },
         },
     )
     DEFAULT = KINETICS400_V1
...
@@ -328,9 +328,11 @@ class ViT_B_16_Weights(WeightsEnum):
             "num_params": 86567656,
             "min_size": (224, 224),
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#vit_b_16",
-            "acc@1": 81.072,
-            "acc@5": 95.318,
+            "metrics": {
+                "acc@1": 81.072,
+                "acc@5": 95.318,
+            },
         },
     )
     IMAGENET1K_SWAG_E2E_V1 = Weights(
         url="https://download.pytorch.org/models/vit_b_16_swag-9ac1b537.pth",
@@ -344,9 +346,11 @@ class ViT_B_16_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "num_params": 86859496,
             "min_size": (384, 384),
-            "acc@1": 85.304,
-            "acc@5": 97.650,
+            "metrics": {
+                "acc@1": 85.304,
+                "acc@5": 97.650,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/vit_b_16_lc_swag-4e70ced5.pth",
@@ -361,9 +365,11 @@ class ViT_B_16_Weights(WeightsEnum):
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 86567656,
             "min_size": (224, 224),
-            "acc@1": 81.886,
-            "acc@5": 96.180,
+            "metrics": {
+                "acc@1": 81.886,
+                "acc@5": 96.180,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -377,9 +383,11 @@ class ViT_B_32_Weights(WeightsEnum):
             "num_params": 88224232,
             "min_size": (224, 224),
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#vit_b_32",
-            "acc@1": 75.912,
-            "acc@5": 92.466,
+            "metrics": {
+                "acc@1": 75.912,
+                "acc@5": 92.466,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -393,9 +401,11 @@ class ViT_L_16_Weights(WeightsEnum):
             "num_params": 304326632,
             "min_size": (224, 224),
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#vit_l_16",
-            "acc@1": 79.662,
-            "acc@5": 94.638,
+            "metrics": {
+                "acc@1": 79.662,
+                "acc@5": 94.638,
+            },
         },
     )
     IMAGENET1K_SWAG_E2E_V1 = Weights(
         url="https://download.pytorch.org/models/vit_l_16_swag-4f3808c9.pth",
@@ -409,9 +419,11 @@ class ViT_L_16_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "num_params": 305174504,
             "min_size": (512, 512),
-            "acc@1": 88.064,
-            "acc@5": 98.512,
+            "metrics": {
+                "acc@1": 88.064,
+                "acc@5": 98.512,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/vit_l_16_lc_swag-4d563306.pth",
@@ -426,9 +438,11 @@ class ViT_L_16_Weights(WeightsEnum):
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 304326632,
             "min_size": (224, 224),
-            "acc@1": 85.146,
-            "acc@5": 97.422,
+            "metrics": {
+                "acc@1": 85.146,
+                "acc@5": 97.422,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -442,9 +456,11 @@ class ViT_L_32_Weights(WeightsEnum):
             "num_params": 306535400,
             "min_size": (224, 224),
             "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#vit_l_32",
-            "acc@1": 76.972,
-            "acc@5": 93.07,
+            "metrics": {
+                "acc@1": 76.972,
+                "acc@5": 93.07,
+            },
         },
     )
     DEFAULT = IMAGENET1K_V1
@@ -462,9 +478,11 @@ class ViT_H_14_Weights(WeightsEnum):
             **_COMMON_SWAG_META,
             "num_params": 633470440,
             "min_size": (518, 518),
-            "acc@1": 88.552,
-            "acc@5": 98.694,
+            "metrics": {
+                "acc@1": 88.552,
+                "acc@5": 98.694,
+            },
         },
     )
     IMAGENET1K_SWAG_LINEAR_V1 = Weights(
         url="https://download.pytorch.org/models/vit_h_14_lc_swag-c1eb923e.pth",
@@ -479,9 +497,11 @@ class ViT_H_14_Weights(WeightsEnum):
             "recipe": "https://github.com/pytorch/vision/pull/5793",
             "num_params": 632045800,
             "min_size": (224, 224),
-            "acc@1": 85.708,
-            "acc@5": 97.730,
+            "metrics": {
+                "acc@1": 85.708,
+                "acc@5": 97.730,
+            },
         },
     )
     DEFAULT = IMAGENET1K_SWAG_E2E_V1
...
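
The "Fix tests" commit implies the meta-data checks in the test suite follow the new layout. A hypothetical sketch of the kind of invariant they need to hold (not the repository's actual test code), which also accommodates the `float("nan")` placeholders on the VGG16 SSD backbone entry:

```python
import math

from torchvision.models import VGG16_Weights

for weights in VGG16_Weights:
    metrics = weights.meta["metrics"]  # nested dict after this PR
    assert isinstance(metrics, dict) and metrics
    for value in metrics.values():
        assert isinstance(value, float)
        if not math.isnan(value):  # NaN marks the amdegroot SSD backbone
            assert 0.0 <= value <= 100.0
```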