fix syncbn bug for pytorch 1.6 (zhanghang1989#355)
zhangbin0917 committed Mar 6, 2021
1 parent ced288d commit 58b93a7
Showing 2 changed files with 14 additions and 8 deletions.
12 changes: 8 additions & 4 deletions encoding/functions/syncbn.py
@@ -103,11 +103,13 @@ def forward(cls, ctx, x, gamma, beta, running_mean, running_var,
 
         # Output
         ctx.save_for_backward(x, _ex, _exs, gamma, beta)
-        return y
+
+        ctx.mark_non_differentiable(running_mean, running_var)
+        return y, running_mean, running_var
 
     @staticmethod
     @once_differentiable
-    def backward(ctx, dz):
+    def backward(ctx, dz, _drunning_mean, _drunning_var):
         x, _ex, _exs, gamma, beta = ctx.saved_tensors
         dz = dz.contiguous()
 
@@ -243,11 +245,13 @@ def forward(cls, ctx, x, gamma, beta, running_mean, running_var,
 
         # Output
         ctx.save_for_backward(x, _ex, _exs, gamma, beta)
-        return x
+
+        ctx.mark_non_differentiable(running_mean, running_var)
+        return x, running_mean, running_var
 
     @staticmethod
     @once_differentiable
-    def backward(ctx, dz):
+    def backward(ctx, dz, _drunning_mean, _drunning_var):
         z, _ex, _exs, gamma, beta = ctx.saved_tensors
         dz = dz.contiguous()
 
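The hunks above change the autograd Function so that forward returns the running statistics as extra outputs and marks them non-differentiable, and backward accepts (and ignores) the corresponding gradient slots, instead of updating the statistics purely as a side effect. Below is a minimal, self-contained sketch of that pattern on a toy single-device batch norm (no gamma/beta, no cross-GPU sync); ToyBatchNormFn and all other names are hypothetical illustrations, not the project's code.

import torch
from torch.autograd.function import once_differentiable


class ToyBatchNormFn(torch.autograd.Function):
    """Toy batch norm illustrating the tuple-return / mark_non_differentiable pattern."""

    @staticmethod
    def forward(ctx, x, running_mean, running_var, momentum, eps):
        # Batch statistics over the batch dimension; x has shape (N, C).
        mean = x.mean(dim=0)
        var = x.var(dim=0, unbiased=False)

        # Updated running statistics are returned instead of mutated in place.
        new_mean = (1 - momentum) * running_mean + momentum * mean
        new_var = (1 - momentum) * running_var + momentum * var

        inv_std = torch.rsqrt(var + eps)
        y = (x - mean) * inv_std

        ctx.save_for_backward(x, mean, inv_std)
        # Running statistics carry no gradient: autograd passes placeholder
        # grads for them into backward(), which simply ignores them.
        ctx.mark_non_differentiable(new_mean, new_var)
        return y, new_mean, new_var

    @staticmethod
    @once_differentiable
    def backward(ctx, dy, _dmean, _dvar):
        x, mean, inv_std = ctx.saved_tensors
        n = x.shape[0]
        x_hat = (x - mean) * inv_std
        # Standard batch-norm input gradient (affine terms omitted for brevity).
        dx = inv_std / n * (n * dy - dy.sum(dim=0) - x_hat * (dy * x_hat).sum(dim=0))
        # One gradient slot per forward() input: x, running_mean, running_var, momentum, eps.
        return dx, None, None, None, None


# Callers now unpack the tuple, as encoding/nn/syncbn.py does after this commit.
x = torch.randn(8, 4, requires_grad=True)
rm, rv = torch.zeros(4), torch.ones(4)
y, rm, rv = ToyBatchNormFn.apply(x, rm, rv, 0.1, 1e-5)
y.sum().backward()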
10 changes: 6 additions & 4 deletions encoding/nn/syncbn.py
@@ -193,13 +193,15 @@ def forward(self, x):
             "worker_queue": self.worker_queues[self.worker_ids.index(x.get_device())]
         }
         if self.inplace:
-            return inp_syncbatchnorm(x, self.weight, self.bias, self.running_mean, self.running_var,
+            y, _, _ = inp_syncbatchnorm(x, self.weight, self.bias, self.running_mean, self.running_var,
                                      extra, self.sync, self.training, self.momentum, self.eps,
-                                     self.activation, self.slope).view(input_shape)
+                                     self.activation, self.slope)
+            return y.view(input_shape)
         else:
-            return syncbatchnorm(x, self.weight, self.bias, self.running_mean, self.running_var,
+            y, _, _ = syncbatchnorm(x, self.weight, self.bias, self.running_mean, self.running_var,
                                  extra, self.sync, self.training, self.momentum, self.eps,
-                                 self.activation, self.slope).view(input_shape)
+                                 self.activation, self.slope)
+            return y.view(input_shape)
 
     def extra_repr(self):
         if self.activation == 'none':
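On the module side, the caller mirrors the Function change by unpacking the returned tuple and reshaping only the activation tensor. A short sketch of that calling convention follows, reusing the hypothetical ToyBatchNormFn from the sketch above; the explicit write-back of the returned statistics into the module's buffers is a simplification for illustration, not the library's exact code.

import torch
import torch.nn as nn


class ToySyncBN(nn.Module):
    """Toy module showing the tuple-unpacking call site used after this commit."""

    def __init__(self, num_features, momentum=0.1, eps=1e-5):
        super().__init__()
        self.momentum, self.eps = momentum, eps
        self.register_buffer("running_mean", torch.zeros(num_features))
        self.register_buffer("running_var", torch.ones(num_features))

    def forward(self, x):  # x: (N, C)
        input_shape = x.size()
        y, rm, rv = ToyBatchNormFn.apply(x, self.running_mean, self.running_var,
                                         self.momentum, self.eps)
        # The function returns updated statistics instead of mutating them in
        # place, so copy them back into the module's buffers here.
        self.running_mean.copy_(rm)
        self.running_var.copy_(rv)
        return y.view(input_shape)


bn = ToySyncBN(4)
out = bn(torch.randn(8, 4, requires_grad=True))
out.mean().backward()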
