
fix the import path of msda #5338

Merged 2 commits on Jun 12, 2021
13 changes: 11 additions & 2 deletions mmdet/models/utils/transformer.py
@@ -1,19 +1,28 @@
 import math
+import warnings
 
 import torch
 import torch.nn as nn
 from mmcv.cnn import build_activation_layer, build_norm_layer, xavier_init
 from mmcv.cnn.bricks.registry import (TRANSFORMER_LAYER,
                                       TRANSFORMER_LAYER_SEQUENCE)
 from mmcv.cnn.bricks.transformer import (BaseTransformerLayer,
-                                         MultiScaleDeformableAttention,
                                          TransformerLayerSequence,
                                          build_transformer_layer_sequence)
 from mmcv.runner.base_module import BaseModule
 from torch.nn.init import normal_
 
 from mmdet.models.utils.builder import TRANSFORMER
 
+try:
+    from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
+
+except ImportError:
+    warnings.warn(
+        '`MultiScaleDeformableAttention` in MMCV has been moved to '
+        '`mmcv.ops.multi_scale_deform_attn`, please update your MMCV')
+    from mmcv.cnn.bricks.transformer import MultiScaleDeformableAttention
+
 
 def inverse_sigmoid(x, eps=1e-5):
     """Inverse function of sigmoid.
@@ -384,7 +393,7 @@ def init_weights(self):
                 nn.init.xavier_uniform_(p)
         for m in self.modules():
             if isinstance(m, MultiScaleDeformableAttention):
-                m.init_weight()
+                m.init_weights()
         if not self.as_two_stage:
             xavier_init(self.reference_points, distribution='uniform', bias=0.)
         normal_(self.level_embeds)
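
The second hunk fixes the method name: in MMCV releases that ship mmcv.ops.multi_scale_deform_attn, the module's initializer is init_weights() (plural), so the old init_weight() call would raise AttributeError. A minimal sketch of the corrected call, assuming such an MMCV release; the constructor arguments shown are the documented defaults:

from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention

attn = MultiScaleDeformableAttention(
    embed_dims=256,  # channel dimension of the queries
    num_heads=8,     # parallel attention heads
    num_levels=4,    # feature-map levels to sample from
    num_points=4)    # sampling points per head per level
attn.init_weights()  # plural form, matching the fix above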