diff --git a/torchvision/prototype/models/detection/faster_rcnn.py b/torchvision/prototype/models/detection/faster_rcnn.py
index 4a70b12bff2..1f37f267c7b 100644
--- a/torchvision/prototype/models/detection/faster_rcnn.py
+++ b/torchvision/prototype/models/detection/faster_rcnn.py
@@ -40,7 +40,7 @@ class FasterRCNN_ResNet50_FPN_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/fasterrcnn_resnet50_fpn_coco-258fb6c6.pth",
         transforms=CocoEval,
         meta={
@@ -50,11 +50,11 @@ class FasterRCNN_ResNet50_FPN_Weights(WeightsEnum):
             "map": 37.0,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 class FasterRCNN_MobileNet_V3_Large_FPN_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/fasterrcnn_mobilenet_v3_large_fpn-fb6a3cc7.pth",
         transforms=CocoEval,
         meta={
@@ -64,11 +64,11 @@ class FasterRCNN_MobileNet_V3_Large_FPN_Weights(WeightsEnum):
             "map": 32.8,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 class FasterRCNN_MobileNet_V3_Large_320_FPN_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/fasterrcnn_mobilenet_v3_large_320_fpn-907ea3f9.pth",
         transforms=CocoEval,
         meta={
@@ -78,11 +78,11 @@ class FasterRCNN_MobileNet_V3_Large_320_FPN_Weights(WeightsEnum):
             "map": 22.8,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
-    weights=("pretrained", FasterRCNN_ResNet50_FPN_Weights.Coco_V1),
+    weights=("pretrained", FasterRCNN_ResNet50_FPN_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1),
 )
 def fasterrcnn_resnet50_fpn(
@@ -113,7 +113,7 @@ def fasterrcnn_resnet50_fpn(
     if weights is not None:
         model.load_state_dict(weights.get_state_dict(progress=progress))
-        if weights == FasterRCNN_ResNet50_FPN_Weights.Coco_V1:
+        if weights == FasterRCNN_ResNet50_FPN_Weights.COCO_V1:
             overwrite_eps(model, 0.0)

     return model
@@ -161,7 +161,7 @@ def _fasterrcnn_mobilenet_v3_large_fpn(
 @handle_legacy_interface(
-    weights=("pretrained", FasterRCNN_MobileNet_V3_Large_FPN_Weights.Coco_V1),
+    weights=("pretrained", FasterRCNN_MobileNet_V3_Large_FPN_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.ImageNet1K_V1),
 )
 def fasterrcnn_mobilenet_v3_large_fpn(
@@ -192,7 +192,7 @@ def fasterrcnn_mobilenet_v3_large_fpn(
 @handle_legacy_interface(
-    weights=("pretrained", FasterRCNN_MobileNet_V3_Large_320_FPN_Weights.Coco_V1),
+    weights=("pretrained", FasterRCNN_MobileNet_V3_Large_320_FPN_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.ImageNet1K_V1),
 )
 def fasterrcnn_mobilenet_v3_large_320_fpn(
diff --git a/torchvision/prototype/models/detection/keypoint_rcnn.py b/torchvision/prototype/models/detection/keypoint_rcnn.py
index f4e71dd3e41..b7da833e533 100644
--- a/torchvision/prototype/models/detection/keypoint_rcnn.py
+++ b/torchvision/prototype/models/detection/keypoint_rcnn.py
@@ -45,7 +45,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
             "map_kp": 61.1,
         },
     )
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/keypointrcnn_resnet50_fpn_coco-fc266e95.pth",
         transforms=CocoEval,
         meta={
@@ -56,7 +56,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
             "map_kp": 65.0,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
@@ -64,7 +64,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
         "pretrained",
         lambda kwargs: KeypointRCNN_ResNet50_FPN_Weights.Coco_Legacy
         if kwargs["pretrained"] == "legacy"
-        else KeypointRCNN_ResNet50_FPN_Weights.Coco_V1,
+        else KeypointRCNN_ResNet50_FPN_Weights.COCO_V1,
     ),
     weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1),
 )
@@ -101,7 +101,7 @@ def keypointrcnn_resnet50_fpn(
     if weights is not None:
         model.load_state_dict(weights.get_state_dict(progress=progress))
-        if weights == KeypointRCNN_ResNet50_FPN_Weights.Coco_V1:
+        if weights == KeypointRCNN_ResNet50_FPN_Weights.COCO_V1:
             overwrite_eps(model, 0.0)

     return model
diff --git a/torchvision/prototype/models/detection/mask_rcnn.py b/torchvision/prototype/models/detection/mask_rcnn.py
index 0c137718382..98f12e73bf3 100644
--- a/torchvision/prototype/models/detection/mask_rcnn.py
+++ b/torchvision/prototype/models/detection/mask_rcnn.py
@@ -24,7 +24,7 @@ class MaskRCNN_ResNet50_FPN_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/maskrcnn_resnet50_fpn_coco-bf2d0c1e.pth",
         transforms=CocoEval,
         meta={
@@ -39,11 +39,11 @@ class MaskRCNN_ResNet50_FPN_Weights(WeightsEnum):
             "map_mask": 34.6,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
-    weights=("pretrained", MaskRCNN_ResNet50_FPN_Weights.Coco_V1),
+    weights=("pretrained", MaskRCNN_ResNet50_FPN_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1),
 )
 def maskrcnn_resnet50_fpn(
@@ -74,7 +74,7 @@ def maskrcnn_resnet50_fpn(
     if weights is not None:
         model.load_state_dict(weights.get_state_dict(progress=progress))
-        if weights == MaskRCNN_ResNet50_FPN_Weights.Coco_V1:
+        if weights == MaskRCNN_ResNet50_FPN_Weights.COCO_V1:
             overwrite_eps(model, 0.0)

     return model
diff --git a/torchvision/prototype/models/detection/retinanet.py b/torchvision/prototype/models/detection/retinanet.py
index 314c036ccc6..7119182ab33 100644
--- a/torchvision/prototype/models/detection/retinanet.py
+++ b/torchvision/prototype/models/detection/retinanet.py
@@ -25,7 +25,7 @@ class RetinaNet_ResNet50_FPN_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/retinanet_resnet50_fpn_coco-eeacb38b.pth",
         transforms=CocoEval,
         meta={
@@ -39,11 +39,11 @@ class RetinaNet_ResNet50_FPN_Weights(WeightsEnum):
             "map": 36.4,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
-    weights=("pretrained", RetinaNet_ResNet50_FPN_Weights.Coco_V1),
+    weights=("pretrained", RetinaNet_ResNet50_FPN_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", ResNet50_Weights.ImageNet1K_V1),
 )
 def retinanet_resnet50_fpn(
@@ -77,7 +77,7 @@ def retinanet_resnet50_fpn(
     if weights is not None:
         model.load_state_dict(weights.get_state_dict(progress=progress))
-        if weights == RetinaNet_ResNet50_FPN_Weights.Coco_V1:
+        if weights == RetinaNet_ResNet50_FPN_Weights.COCO_V1:
             overwrite_eps(model, 0.0)

     return model
diff --git a/torchvision/prototype/models/detection/ssd.py b/torchvision/prototype/models/detection/ssd.py
index 94eb2839bce..0e88fd883f1 100644
--- a/torchvision/prototype/models/detection/ssd.py
+++ b/torchvision/prototype/models/detection/ssd.py
@@ -23,7 +23,7 @@ class SSD300_VGG16_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/ssd300_vgg16_coco-b556d3b4.pth",
         transforms=CocoEval,
         meta={
@@ -38,11 +38,11 @@ class SSD300_VGG16_Weights(WeightsEnum):
             "map": 25.1,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
-    weights=("pretrained", SSD300_VGG16_Weights.Coco_V1),
+    weights=("pretrained", SSD300_VGG16_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", VGG16_Weights.ImageNet1K_Features),
 )
 def ssd300_vgg16(
diff --git a/torchvision/prototype/models/detection/ssdlite.py b/torchvision/prototype/models/detection/ssdlite.py
index e06c7ee6e46..c10eb9b3520 100644
--- a/torchvision/prototype/models/detection/ssdlite.py
+++ b/torchvision/prototype/models/detection/ssdlite.py
@@ -28,7 +28,7 @@ class SSDLite320_MobileNet_V3_Large_Weights(WeightsEnum):
-    Coco_V1 = Weights(
+    COCO_V1 = Weights(
         url="https://download.pytorch.org/models/ssdlite320_mobilenet_v3_large_coco-a79551df.pth",
         transforms=CocoEval,
         meta={
@@ -43,11 +43,11 @@ class SSDLite320_MobileNet_V3_Large_Weights(WeightsEnum):
             "map": 21.3,
         },
     )
-    default = Coco_V1
+    default = COCO_V1


 @handle_legacy_interface(
-    weights=("pretrained", SSDLite320_MobileNet_V3_Large_Weights.Coco_V1),
+    weights=("pretrained", SSDLite320_MobileNet_V3_Large_Weights.COCO_V1),
     weights_backbone=("pretrained_backbone", MobileNet_V3_Large_Weights.ImageNet1K_V1),
 )
 def ssdlite320_mobilenet_v3_large(