mirror of
https://github.com/pytorch/pytorch.git
synced 2025-11-12 14:54:55 +08:00
Refactor hotpatch_vars and apply it to libtorch (#14976)
Summary: Fixes #14801. Pull Request resolved: https://github.com/pytorch/pytorch/pull/14976 Differential Revision: D13485381 Pulled By: soumith fbshipit-source-id: 0af3c2e1b90988d56f6f85632328d1e4b788ffd2
This commit is contained in:
committed by
Facebook Github Bot
parent
656b565a0f
commit
d71fac20eb
24
setup.py
24
setup.py
@@ -145,29 +145,11 @@ import json
|
||||
import glob
|
||||
import importlib
|
||||
|
||||
from tools.setup_helpers.env import check_env_flag, check_negative_env_flag
|
||||
from tools.setup_helpers.env import (check_env_flag, check_negative_env_flag,
|
||||
hotpatch_build_env_vars)
|
||||
|
||||
|
||||
def hotpatch_var(var, prefix='USE_'):
    """Map legacy NO_<var>/WITH_<var> env flags onto <prefix><var>.

    A NO_<var> setting always takes precedence over WITH_<var>: a truthy
    NO_ disables the feature, a falsy NO_ enables it, and WITH_ maps its
    truthiness straight through. If neither flag is set, the environment
    is left untouched.
    """
    # NOTE(review): truthy/falsy semantics live in check_env_flag /
    # check_negative_env_flag, imported at the top of this file.
    negated = 'NO_' + var
    affirmed = 'WITH_' + var
    target = prefix + var
    if check_env_flag(negated):
        os.environ[target] = '0'
    elif check_negative_env_flag(negated):
        os.environ[target] = '1'
    elif check_env_flag(affirmed):
        os.environ[target] = '1'
    elif check_negative_env_flag(affirmed):
        os.environ[target] = '0'
|
||||
|
||||
# Before we run the setup_helpers, let's look for NO_* and WITH_*
# variables and hotpatch environment with the USE_* equivalent
use_env_vars = ['CUDA', 'CUDNN', 'FBGEMM', 'MIOPEN', 'MKLDNN', 'NNPACK', 'DISTRIBUTED',
                'OPENCV', 'TENSORRT', 'QNNPACK', 'FFMPEG', 'SYSTEM_NCCL',
                'GLOO_IBVERBS']
# Plain loops: the previous list(map(...)) form built a throwaway list
# purely for hotpatch_var's side effects on os.environ.
for var in use_env_vars:
    hotpatch_var(var)

# Also hotpatch a few with BUILD_* equivalent
build_env_vars = ['BINARY', 'TEST', 'CAFFE2_OPS']
# Likewise, a list comprehension evaluated only for side effects is
# replaced with an explicit loop.
for var in build_env_vars:
    hotpatch_var(var, 'BUILD_')
|
||||
hotpatch_build_env_vars()
|
||||
|
||||
from tools.setup_helpers.cuda import USE_CUDA, CUDA_HOME, CUDA_VERSION
|
||||
from tools.setup_helpers.build import (BUILD_BINARY, BUILD_TEST,
|
||||
|
||||
@@ -4,9 +4,13 @@ import shlex
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from setup_helpers.env import check_env_flag, hotpatch_build_env_vars
|
||||
|
||||
|
||||
hotpatch_build_env_vars()
|
||||
|
||||
from setup_helpers.cuda import USE_CUDA
|
||||
from setup_helpers.dist_check import USE_DISTRIBUTED, USE_GLOO_IBVERBS, IS_LINUX
|
||||
from setup_helpers.env import check_env_flag
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Placeholder for future interface. For now just gives a nice -h.
|
||||
|
||||
@@ -27,3 +27,27 @@ def gather_paths(env_vars):
|
||||
|
||||
def lib_paths_from_base(base_path):
    """Return the candidate library directories under *base_path*.

    The candidates are base_path/lib/x64, base_path/lib and
    base_path/lib64, in that order.
    """
    candidates = []
    for subdir in ('lib/x64', 'lib', 'lib64'):
        candidates.append(os.path.join(base_path, subdir))
    return candidates
|
||||
|
||||
|
||||
def hotpatch_var(var, prefix='USE_'):
    """Translate legacy NO_<var>/WITH_<var> flags into <prefix><var>.

    The first matching check wins, so an explicit NO_<var> setting always
    takes precedence over WITH_<var>. When no flag is set, os.environ is
    left unchanged.
    """
    # Each entry: (flag name to inspect, predicate, value to write).
    checks = (
        ('NO_' + var, check_env_flag, '0'),
        ('NO_' + var, check_negative_env_flag, '1'),
        ('WITH_' + var, check_env_flag, '1'),
        ('WITH_' + var, check_negative_env_flag, '0'),
    )
    for flag, predicate, value in checks:
        if predicate(flag):
            os.environ[prefix + var] = value
            break
|
||||
|
||||
|
||||
def hotpatch_build_env_vars():
    """Hotpatch legacy NO_*/WITH_* env vars to their USE_*/BUILD_* forms.

    Before the setup_helpers run, look for NO_* and WITH_* environment
    variables and patch os.environ with the USE_* (or BUILD_*) equivalent
    via hotpatch_var. Mutates os.environ; returns None.
    """
    # Before we run the setup_helpers, let's look for NO_* and WITH_*
    # variables and hotpatch environment with the USE_* equivalent
    use_env_vars = ['CUDA', 'CUDNN', 'FBGEMM', 'MIOPEN', 'MKLDNN', 'NNPACK', 'DISTRIBUTED',
                    'OPENCV', 'TENSORRT', 'QNNPACK', 'FFMPEG', 'SYSTEM_NCCL',
                    'GLOO_IBVERBS']
    # Plain loops instead of list(map(...)) / a side-effect list
    # comprehension: both built throwaway lists just to call hotpatch_var.
    for var in use_env_vars:
        hotpatch_var(var)

    # Also hotpatch a few with BUILD_* equivalent
    build_env_vars = ['BINARY', 'TEST', 'CAFFE2_OPS']
    for var in build_env_vars:
        hotpatch_var(var, 'BUILD_')
|
||||
|
||||
Reference in New Issue
Block a user