From 3630ceaf23687cbbc784a9ea4b3fc77b33e05ddc Mon Sep 17 00:00:00 2001
From: jshilong <2392587229zsl@gmail.com>
Date: Fri, 11 Jun 2021 15:24:30 +0800
Subject: [PATCH 1/2] fix the import path of msda

---
 mmdet/models/utils/transformer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mmdet/models/utils/transformer.py b/mmdet/models/utils/transformer.py
index 2e9f06d2354..fb8de0ce775 100644
--- a/mmdet/models/utils/transformer.py
+++ b/mmdet/models/utils/transformer.py
@@ -6,9 +6,9 @@
 from mmcv.cnn.bricks.registry import (TRANSFORMER_LAYER,
                                       TRANSFORMER_LAYER_SEQUENCE)
 from mmcv.cnn.bricks.transformer import (BaseTransformerLayer,
-                                         MultiScaleDeformableAttention,
                                          TransformerLayerSequence,
                                          build_transformer_layer_sequence)
+from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
 from mmcv.runner.base_module import BaseModule
 from torch.nn.init import normal_
 
@@ -384,7 +384,7 @@ def init_weights(self):
                 nn.init.xavier_uniform_(p)
         for m in self.modules():
             if isinstance(m, MultiScaleDeformableAttention):
-                m.init_weight()
+                m.init_weights()
         if not self.as_two_stage:
             xavier_init(self.reference_points, distribution='uniform', bias=0.)
         normal_(self.level_embeds)

From 8aac58b9c3df23bcdea71412617bae3bc4a1d32d Mon Sep 17 00:00:00 2001
From: jshilong <2392587229zsl@gmail.com>
Date: Fri, 11 Jun 2021 17:01:33 +0800
Subject: [PATCH 2/2] fix the import path of msda

---
 mmdet/models/utils/transformer.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/mmdet/models/utils/transformer.py b/mmdet/models/utils/transformer.py
index fb8de0ce775..78a4fe5e1fa 100644
--- a/mmdet/models/utils/transformer.py
+++ b/mmdet/models/utils/transformer.py
@@ -1,4 +1,5 @@
 import math
+import warnings
 
 import torch
 import torch.nn as nn
@@ -8,12 +9,20 @@
 from mmcv.cnn.bricks.transformer import (BaseTransformerLayer,
                                          TransformerLayerSequence,
                                          build_transformer_layer_sequence)
-from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
 from mmcv.runner.base_module import BaseModule
 from torch.nn.init import normal_
 
 from mmdet.models.utils.builder import TRANSFORMER
 
+try:
+    from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
+
+except ImportError:
+    warnings.warn(
+        '`MultiScaleDeformableAttention` in MMCV has been moved to '
+        '`mmcv.ops.multi_scale_deform_attn`, please update your MMCV')
+    from mmcv.cnn.bricks.transformer import MultiScaleDeformableAttention
+
 
 def inverse_sigmoid(x, eps=1e-5):
     """Inverse function of sigmoid.
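
Note: the second patch guards the import so that older MMCV releases, where
`MultiScaleDeformableAttention` still lived in `mmcv.cnn.bricks.transformer`
rather than `mmcv.ops.multi_scale_deform_attn`, keep working. Below is a
minimal standalone sketch of the same fallback pattern; it mirrors the module
paths from the diff but is an illustration only and not part of the patch, and
the constructor arguments shown are just the common defaults.

    import warnings

    try:
        # Newer MMCV exposes the deformable-attention op under mmcv.ops.
        from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
    except ImportError:
        # Older MMCV: fall back to the previous location and ask the user to upgrade.
        warnings.warn(
            '`MultiScaleDeformableAttention` in MMCV has been moved to '
            '`mmcv.ops.multi_scale_deform_attn`, please update your MMCV')
        from mmcv.cnn.bricks.transformer import MultiScaleDeformableAttention

    # Smoke test: build one attention module and, with a recent MMCV, re-run its
    # weight initialization via the plural method name fixed by the first patch.
    msda = MultiScaleDeformableAttention(embed_dims=256, num_heads=8)
    msda.init_weights()

Catching ImportError rather than comparing MMCV version strings keeps the code
working across both layouts without pinning a specific release.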