
Commit 5b3352c: Clone warns (#5575)
* Silence a warning that happens when you inappropriately clone a tensor

* Changelog
dirkgr authored Feb 23, 2022
1 parent 9da4b0f commit 5b3352c
Showing 2 changed files with 8 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 - Updated dependencies, especially around doc creation.
 - Running the test suite out-of-tree (e.g. after installation) is now possible by pointing the environment variable `ALLENNLP_SRC_DIR` to the sources.
+- Silenced a warning that happens when you inappropriately clone a tensor.

 ## [v2.9.0](https://github.com/allenai/allennlp/releases/tag/v2.9.0) - 2022-01-27

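For context, the warning being silenced is PyTorch's copy-construct `UserWarning`, emitted when an existing tensor is passed to `torch.tensor()`. A minimal reproduction sketch (illustrative only, not part of this commit; it assumes only that `torch` is installed):

```python
import warnings

import torch

# Passing an existing tensor to torch.tensor() emits a UserWarning that
# recommends sourceTensor.clone().detach() instead.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    value = torch.tensor([1.0, 2.0, 3.0])
    copied = torch.tensor(value)  # triggers the copy-construct warning
    print([str(w.message) for w in caught])

# The warning-free way to copy a tensor, which is what the patch below
# switches to for tensor inputs:
copied = value.clone().to(value.device)
```

This is the pattern the diff below removes from `dist_reduce`, which previously routed every input, tensor or not, through `torch.tensor()`.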
11 changes: 7 additions & 4 deletions allennlp/nn/util.py
@@ -2122,7 +2122,7 @@ def distributed_device() -> torch.device:
     return int_to_device(-1 if dist.get_backend() != "nccl" else torch.cuda.current_device())


-def dist_reduce(value: _V, reduce_op, **kwargs) -> _V:
+def dist_reduce(value: _V, reduce_op) -> _V:
     """
     Reduces the given `value` across all distributed worker nodes according the given
     reduction operation.
@@ -2147,15 +2147,18 @@ def dist_reduce(value: _V, reduce_op, **kwargs) -> _V:
     if not is_distributed():
         return value
     device = distributed_device()
-    value_tensor = torch.tensor(value, device=device, **kwargs)
+    if isinstance(value, torch.Tensor):
+        value_tensor = value.clone().to(device)
+    else:
+        value_tensor = torch.tensor(value, device=device)
     dist.all_reduce(value_tensor, op=reduce_op)

     if isinstance(value, torch.Tensor):
         return value_tensor
     return value_tensor.item()  # type: ignore[return-value]


-def dist_reduce_sum(value: _V, **kwargs) -> _V:
+def dist_reduce_sum(value: _V) -> _V:
     """
     Sums the given `value` across distributed worker nodes.
     This is equivalent to calling `dist_reduce(v, dist.ReduceOp.SUM)`.
@@ -2168,7 +2171,7 @@ def dist_reduce_sum(value: _V, **kwargs) -> _V:
     # result in an `AttributeError`.
     if not is_distributed():
         return value
-    return dist_reduce(value, dist.ReduceOp.SUM, **kwargs)
+    return dist_reduce(value, dist.ReduceOp.SUM)


 def _collect_state_dict(
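In short: outside a distributed run both helpers return their input unchanged; inside one, a tensor input is now copied with `clone().to(device)` before `all_reduce`, while plain Python numbers still go through `torch.tensor()` and come back via `.item()`. A hypothetical usage sketch (variable names are illustrative; it assumes `allennlp` is installed and, for the reduction to actually happen, an initialized `torch.distributed` process group):

```python
import torch
import torch.distributed as dist

from allennlp.nn.util import dist_reduce, dist_reduce_sum

# Tensor in, tensor out: the clone keeps the caller's tensor untouched and
# no copy-construct warning is emitted.
local_loss = torch.tensor(0.25)
global_loss = dist_reduce(local_loss, dist.ReduceOp.SUM)

# Plain numbers still work and come back as plain numbers.
global_correct = dist_reduce_sum(17)

print(global_loss, global_correct)
```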
