From 2dd88e583afc26cbd7727cfedb97b1d2812eedaf Mon Sep 17 00:00:00 2001
From: Hang Zhang <8041160+zhanghang1989@users.noreply.github.com>
Date: Fri, 15 Jun 2018 12:19:47 -0700
Subject: [PATCH] fix path (#72)

---
 encoding/models/encnet.py | 6 +++---
 encoding/nn/customize.py  | 7 -------
 2 files changed, 3 insertions(+), 10 deletions(-)

diff --git a/encoding/models/encnet.py b/encoding/models/encnet.py
index a4abe644..15af2921 100644
--- a/encoding/models/encnet.py
+++ b/encoding/models/encnet.py
@@ -166,7 +166,7 @@ def get_encnet_resnet50_pcontext(pretrained=False, root='~/.encoding/models', **
     >>> model = get_encnet_resnet50_pcontext(pretrained=True)
     >>> print(model)
     """
-    return get_encnet('pcontext', 'resnet50', pretrained, aux=False, **kwargs)
+    return get_encnet('pcontext', 'resnet50', pretrained, root=root, aux=False, **kwargs)
 
 def get_encnet_resnet101_pcontext(pretrained=False, root='~/.encoding/models', **kwargs):
     r"""EncNet-PSP model from the paper `"Context Encoding for Semantic Segmentation"
@@ -185,7 +185,7 @@ def get_encnet_resnet101_pcontext(pretrained=False, root='~/.encoding/models', *
     >>> model = get_encnet_resnet101_pcontext(pretrained=True)
     >>> print(model)
     """
-    return get_encnet('pcontext', 'resnet101', pretrained, aux=False, **kwargs)
+    return get_encnet('pcontext', 'resnet101', pretrained, root=root, aux=False, **kwargs)
 
 def get_encnet_resnet50_ade(pretrained=False, root='~/.encoding/models', **kwargs):
     r"""EncNet-PSP model from the paper `"Context Encoding for Semantic Segmentation"
@@ -204,4 +204,4 @@ def get_encnet_resnet50_ade(pretrained=False, root='~/.encoding/models', **kwarg
     >>> model = get_encnet_resnet50_ade(pretrained=True)
     >>> print(model)
     """
-    return get_encnet('ade20k', 'resnet50', pretrained, aux=True, **kwargs)
+    return get_encnet('ade20k', 'resnet50', pretrained, root=root, aux=True, **kwargs)
diff --git a/encoding/nn/customize.py b/encoding/nn/customize.py
index 95318343..96410f91 100644
--- a/encoding/nn/customize.py
+++ b/encoding/nn/customize.py
@@ -177,13 +177,6 @@ def __init__(self, in_channels, norm_layer, up_kwargs):
         # bilinear upsample options
         self._up_kwargs = up_kwargs
 
-    def _cat_each(self, x, feat1, feat2, feat3, feat4):
-        assert(len(x) == len(feat1))
-        z = []
-        for i in range(len(x)):
-            z.append(torch.cat((x[i], feat1[i], feat2[i], feat3[i], feat4[i]), 1))
-        return z
-
     def forward(self, x):
         _, _, h, w = x.size()
         feat1 = F.upsample(self.conv1(self.pool1(x)), (h, w), **self._up_kwargs)
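
The substance of the patch is that the `root` argument is now forwarded to `get_encnet`, so a user-specified model directory is no longer silently ignored when loading pretrained weights. A minimal usage sketch, assuming the `encoding` package from this repository is installed; the custom directory name is purely illustrative:

```python
from encoding.models.encnet import get_encnet_resnet50_pcontext

# Before this patch, root was accepted but never passed on, so pretrained
# weights always resolved against the default '~/.encoding/models'.
# With the fix, the argument reaches get_encnet and the checkpoint is
# looked up (or downloaded) under the directory given here.
model = get_encnet_resnet50_pcontext(pretrained=True, root='~/my_models')
print(model)
```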