Fix collect_env.py for Windows (pytorch#8326)
* Fix collect_env.py for Windows

* Fix expect file for Win machine
peterjc123 authored and soumith committed Jun 11, 2018
1 parent 52e4d3c commit bed172c
Showing 2 changed files with 30 additions and 8 deletions.
@@ -9,9 +9,10 @@ CMake version: version 3.10.X
 Python version: 3.6
 Is CUDA available: Yes
 CUDA runtime version: 9.0.X
-GPU models and configuration: Could not collect
-Nvidia driver version: Could not collect
-cuDNN version: Could not collect
+GPU models and configuration: GPU 0: Tesla M60
+Nvidia driver version: 390.X
+cuDNN version: Probably one of the following:
+C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0\bin\cudnn64_7.dll
 
 Versions of relevant libraries:
 [pip] numpy (1.14.X)
31 changes: 26 additions & 5 deletions torch/utils/collect_env.py
@@ -64,7 +64,11 @@ def run_and_parse_first_match(run_lambda, command, regex):
 
 
 def get_conda_packages(run_lambda):
-    out = run_and_read_all(run_lambda, 'conda list | grep "torch\|soumith"')
+    if get_platform() == 'win32':
+        grep_cmd = 'findstr /R "torch soumith"'
+    else:
+        grep_cmd = 'grep "torch\|soumith"'
+    out = run_and_read_all(run_lambda, 'conda list | ' + grep_cmd)
     if out is None:
         return out
     # Comment starting at beginning of line
@@ -81,12 +85,14 @@ def get_cmake_version(run_lambda):
 
 
 def get_nvidia_driver_version(run_lambda):
-    return run_and_parse_first_match(run_lambda, 'nvidia-smi', r'Driver Version: (.*?) ')
+    smi = get_nvidia_smi()
+    return run_and_parse_first_match(run_lambda, smi, r'Driver Version: (.*?) ')
 
 
 def get_gpu_info(run_lambda):
+    smi = get_nvidia_smi()
     uuid_regex = re.compile(' \(UUID: .+?\)')
-    rc, out, _ = run_lambda('nvidia-smi -L')
+    rc, out, _ = run_lambda(smi + ' -L')
     if rc is not 0:
         return None
     # Anonymize GPUs by removing their UUID
@@ -99,7 +105,11 @@ def get_running_cuda_version(run_lambda):
 
 def get_cudnn_version(run_lambda):
     """This will return a list of libcudnn.so; it's hard to tell which one is being used"""
-    rc, out, _ = run_lambda('find /usr/local /usr/lib -type f -name "libcudnn*" 2> /dev/null')
+    if get_platform() == 'win32':
+        cudnn_cmd = 'where /R "%CUDA_PATH%\\bin" cudnn*.dll'
+    else:
+        cudnn_cmd = 'find /usr/local /usr/lib -type f -name "libcudnn*" 2> /dev/null'
+    rc, out, _ = run_lambda(cudnn_cmd)
     # find will return 1 if there are permission errors or if not found
     if len(out) == 0:
         return None
@@ -110,6 +120,13 @@ def get_cudnn_version(run_lambda):
     return 'Probably one of the following:\n{}'.format(result)
 
 
+def get_nvidia_smi():
+    smi = 'nvidia-smi'
+    if get_platform() == 'win32':
+        smi = '"C:\\Program Files\\NVIDIA Corporation\\NVSMI\\%s"' % smi
+    return smi
+
+
 def get_platform():
     if sys.platform.startswith('linux'):
         return 'linux'
@@ -172,7 +189,11 @@ def get_os(run_lambda):
 def get_pip_packages(run_lambda):
     # People generally have `pip` as `pip` or `pip3`
     def run_with_pip(pip):
-        return run_and_read_all(run_lambda, pip + ' list --format=legacy | grep "torch\|numpy"')
+        if get_platform() == 'win32':
+            grep_cmd = 'findstr /R "numpy torch"'
+        else:
+            grep_cmd = 'grep "torch\|numpy"'
+        return run_and_read_all(run_lambda, pip + ' list --format=legacy | ' + grep_cmd)
 
     if not PY3:
         return 'pip', run_with_pip('pip')
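For readers skimming the diff, the sketch below distills the pattern this commit applies: select a platform-appropriate filter command (findstr on Windows, grep elsewhere) and quote the default NVSMI install path so nvidia-smi resolves on Windows. The run_command, filter_packages, and nvidia_smi_command helpers are illustrative stand-ins under those assumptions, not the actual API of torch/utils/collect_env.py.

# Minimal standalone sketch of the platform-aware command selection used in
# this commit. Helper names and structure are illustrative only.
import subprocess
import sys


def get_platform():
    # sys.platform is 'win32' on Windows (both 32- and 64-bit Python).
    return 'win32' if sys.platform.startswith('win32') else sys.platform


def run_command(cmd):
    # Run a shell command, returning (returncode, decoded stdout).
    p = subprocess.Popen(cmd, shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = p.communicate()
    return p.returncode, out.decode('utf-8', errors='replace').strip()


def filter_packages(listing_cmd, keywords):
    # findstr /R takes space-separated patterns as alternatives;
    # grep needs explicit \| alternation.
    if get_platform() == 'win32':
        filter_cmd = 'findstr /R "{}"'.format(' '.join(keywords))
    else:
        filter_cmd = 'grep "{}"'.format('\\|'.join(keywords))
    rc, out = run_command(listing_cmd + ' | ' + filter_cmd)
    return out if rc == 0 else None


def nvidia_smi_command():
    # nvidia-smi is usually not on PATH on Windows, so quote its
    # default NVSMI install location (same idea as get_nvidia_smi above).
    if get_platform() == 'win32':
        return '"C:\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi"'
    return 'nvidia-smi'


if __name__ == '__main__':
    # The patch pipes 'pip list --format=legacy'; plain 'pip list' is used
    # here so the sketch also runs with newer pip versions.
    print(filter_packages('pip list', ['torch', 'numpy']))
    rc, gpus = run_command(nvidia_smi_command() + ' -L')
    print(gpus if rc == 0 else 'Could not collect')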
