Avoid configuring ROCm if USE_CUDA is on. (#26910)

Summary:
Move the resolution of the conflict between `USE_CUDA` and `USE_ROCM` to CMake, so as to achieve the following:

- If both `USE_CUDA=ON` with CUDA found and `USE_ROCM=ON` with ROCm found --> fatal error
- If either `USE_CUDA=ON` with CUDA found or `USE_ROCM=ON` with ROCm found --> the respective GPU feature is ON
- Otherwise no GPU support
Pull Request resolved: https://github.com/pytorch/pytorch/pull/26910

Differential Revision: D17738652

Pulled By: ezyang

fbshipit-source-id: 8e07cc7e922e0abda24a6518119c28952276064e
This commit is contained in:
Hong Xu
2019-10-03 08:27:23 -07:00
committed by Facebook Github Bot
parent 5b5f398dd4
commit 8fbefa06f6
2 changed files with 8 additions and 4 deletions

View File

@ -55,11 +55,15 @@ add_subdirectory(src/THNN)
IF(USE_ROCM)
include(LoadHIP)
if (NOT PYTORCH_FOUND_HIP)
MESSAGE(FATAL_ERROR
"Could not find HIP installation")
set(USE_ROCM OFF)
endif()
ENDIF()
# Both CUDA and ROCM are enabled and found. Report an error.
if(USE_CUDA AND USE_ROCM)
message(FATAL_ERROR "Both CUDA and ROCm are enabled and found. PyTorch can only be built with either of them. Please turn one off by using either USE_CUDA=OFF or USE_ROCM=OFF.")
endif()
IF(MSVC)
# we want to respect the standard, and we are bored of those **** .
ADD_DEFINITIONS(-D_CRT_SECURE_NO_DEPRECATE=1)

View File

@ -5,7 +5,7 @@ import ctypes.util
from subprocess import Popen, PIPE
from . import which
from .env import IS_WINDOWS, IS_LINUX, IS_DARWIN, check_env_flag, check_negative_env_flag
from .env import IS_WINDOWS, IS_LINUX, IS_DARWIN, check_negative_env_flag
LINUX_HOME = '/usr/local/cuda'
WINDOWS_HOME = glob.glob('C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v*.*')
@ -53,7 +53,7 @@ def find_cuda_version(cuda_home):
if len(candidates) > 0:
return candidates[0]
if check_negative_env_flag('USE_CUDA') or check_env_flag('USE_ROCM'):
if check_negative_env_flag('USE_CUDA'):
USE_CUDA = False
CUDA_HOME = None
CUDA_VERSION = None