From efe7ca0868c561dfd07f1d8eaa2463c2454a2578 Mon Sep 17 00:00:00 2001
From: Felix Dittrich
Date: Thu, 27 Jul 2023 15:56:19 +0200
Subject: [PATCH] fix obj det train and suppress endless warning prints (#1267)

---
 doctr/models/obj_detection/faster_rcnn/pytorch.py | 8 +++++---
 doctr/transforms/modules/pytorch.py               | 4 ++--
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/doctr/models/obj_detection/faster_rcnn/pytorch.py b/doctr/models/obj_detection/faster_rcnn/pytorch.py
index e8e2f7fb8e..f2503743c3 100644
--- a/doctr/models/obj_detection/faster_rcnn/pytorch.py
+++ b/doctr/models/obj_detection/faster_rcnn/pytorch.py
@@ -5,7 +5,7 @@

 from typing import Any, Dict

-from torchvision.models.detection import FasterRCNN, faster_rcnn
+from torchvision.models.detection import FasterRCNN, FasterRCNN_MobileNet_V3_Large_FPN_Weights, faster_rcnn

 from ...utils import load_pretrained_params

@@ -37,7 +37,7 @@ def _fasterrcnn(arch: str, pretrained: bool, **kwargs: Any) -> FasterRCNN:

     # Build the model
     _kwargs.update(kwargs)
-    model = faster_rcnn.__dict__[arch](pretrained=False, pretrained_backbone=False, **_kwargs)
+    model = faster_rcnn.__dict__[arch](weights=None, weights_backbone=None, **_kwargs)
     model.cfg = default_cfgs[arch]

     if pretrained:
@@ -47,7 +47,9 @@ def _fasterrcnn(arch: str, pretrained: bool, **kwargs: Any) -> FasterRCNN:
         # Filter keys
         state_dict = {
             k: v
-            for k, v in faster_rcnn.__dict__[arch](pretrained=True).state_dict().items()
+            for k, v in faster_rcnn.__dict__[arch](weights=FasterRCNN_MobileNet_V3_Large_FPN_Weights.DEFAULT)
+            .state_dict()
+            .items()
             if not k.startswith("roi_heads.")
         }

diff --git a/doctr/transforms/modules/pytorch.py b/doctr/transforms/modules/pytorch.py
index aea1af9314..a20e56086f 100644
--- a/doctr/transforms/modules/pytorch.py
+++ b/doctr/transforms/modules/pytorch.py
@@ -26,7 +26,7 @@ def __init__(
         preserve_aspect_ratio: bool = False,
         symmetric_pad: bool = False,
     ) -> None:
-        super().__init__(size, interpolation)
+        super().__init__(size, interpolation, antialias=True)
         self.preserve_aspect_ratio = preserve_aspect_ratio
         self.symmetric_pad = symmetric_pad

@@ -64,7 +64,7 @@ def forward(
             tmp_size = (self.size, max(int(self.size / actual_ratio), 1))

         # Scale image
-        img = F.resize(img, tmp_size, self.interpolation)
+        img = F.resize(img, tmp_size, self.interpolation, antialias=True)
         raw_shape = img.shape[-2:]
         if isinstance(self.size, (tuple, list)):
             # Pad (inverted in pytorch)