Skip to content

Commit

Permalink
Update accelerate version + warning check fix (huggingface#22833)
Browse files Browse the repository at this point in the history
  • Loading branch information
muellerzr authored Apr 18, 2023
1 parent 78cda46 commit aec10d1
Show file tree
Hide file tree
Showing 3 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@
# 2. once modified, run: `make deps_table_update` to update src/transformers/dependency_versions_table.py
_deps = [
"Pillow",
"accelerate>=0.10.0",
"accelerate>=0.17.0",
"av==9.2.0", # Latest version of PyAV (10.0.0) has issues with audio stream.
"beautifulsoup4",
"black~=23.1",
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/dependency_versions_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# 2. run `make deps_table_update`
deps = {
"Pillow": "Pillow",
"accelerate": "accelerate>=0.10.0",
"accelerate": "accelerate>=0.17.0",
"av": "av==9.2.0",
"beautifulsoup4": "beautifulsoup4",
"black": "black~=23.1",
Expand Down
4 changes: 2 additions & 2 deletions src/transformers/training_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -1552,10 +1552,10 @@ def _setup_devices(self) -> "torch.device":
if (
torch.distributed.is_available()
and torch.distributed.is_initialized()
and self.distributed_state.distributed_type != DistributedType.NO
and self.distributed_state.distributed_type == DistributedType.NO
):
logger.warning(
"torch.distributed process group is initialized, but parallel_mode == ParallelMode.DISTRIBUTED. "
"torch.distributed process group is initialized, but parallel_mode != ParallelMode.DISTRIBUTED. "
"In order to use Torch DDP, launch your script with `python -m torch.distributed.launch"
)

Expand Down

0 comments on commit aec10d1

Please sign in to comment.