[CD] Enable pypi dependencies both for XPU linux and Windows whls (#141135)

Enable the XPU runtime PyPI packages as dependencies of the XPU CD wheels for both Linux and Windows.
Fixes https://github.com/pytorch/pytorch/issues/135867
Works toward https://github.com/pytorch/pytorch/issues/139722 and https://github.com/pytorch/pytorch/issues/114850
Pull Request resolved: https://github.com/pytorch/pytorch/pull/141135
Approved by: https://github.com/atalman
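
For context, a minimal sketch of how the change is expected to surface for someone installing an XPU wheel; the nightly index URL and the exact package names are assumptions taken from the pins added in this PR:

```bash
# Sketch only: install an XPU nightly wheel and check that the Intel runtime
# now arrives as ordinary PyPI packages instead of libraries bundled in torch.
# The index URL below is an assumption based on PyTorch's existing nightly layout.
pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/xpu

# The runtime dependencies pinned in this PR should show up as separate packages.
pip list | grep -E 'intel-(cmplr|sycl|pti)|tcmlib|umf'
```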
Author: chuanqiw
Authored: 2024-11-29 21:35:07 +00:00
Committed by: PyTorch MergeBot
Parent: 44707b0667
Commit: a23ac6f8bd
7 changed files with 140 additions and 36 deletions

View File

@@ -15,9 +15,12 @@ case "${GPU_ARCH_TYPE:-BLANK}" in
rocm)
bash "${SCRIPTPATH}/build_rocm.sh"
;;
cpu | cpu-cxx11-abi | cpu-s390x | xpu)
cpu | cpu-cxx11-abi | cpu-s390x)
bash "${SCRIPTPATH}/build_cpu.sh"
;;
xpu)
bash "${SCRIPTPATH}/build_xpu.sh"
;;
*)
echo "Un-recognized GPU_ARCH_TYPE '${GPU_ARCH_TYPE}', exiting..."
exit 1
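
With the dispatch above, an XPU wheel build is still driven through the same entry point; it just lands in its own script. A minimal invocation sketch, assuming the XPU manylinux builder image where /opt/intel/oneapi is available:

```bash
# Sketch only: run the manywheel entry point for an XPU build.
# Assumes the XPU builder container; GPU_ARCH_TYPE selects build_xpu.sh here.
GPU_ARCH_TYPE=xpu bash .ci/manywheel/build.sh
```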

View File

@@ -2,8 +2,6 @@
set -ex
GPU_ARCH_TYPE=${GPU_ARCH_TYPE:-cpu}
export TH_BINARY_BUILD=1
export USE_CUDA=0
@@ -17,22 +15,13 @@ if [[ -z "$EXTRA_CAFFE2_CMAKE_FLAGS" ]]; then
EXTRA_CAFFE2_CMAKE_FLAGS=()
fi
DIR_SUFFIX=cpu
if [[ "$GPU_ARCH_TYPE" == "xpu" ]]; then
DIR_SUFFIX=xpu
# Refer https://www.intel.com/content/www/us/en/developer/articles/tool/pytorch-prerequisites-for-intel-gpus.html
source /opt/intel/oneapi/compiler/latest/env/vars.sh
source /opt/intel/oneapi/pti/latest/env/vars.sh
export USE_STATIC_MKL=1
fi
WHEELHOUSE_DIR="wheelhouse$DIR_SUFFIX"
LIBTORCH_HOUSE_DIR="libtorch_house$DIR_SUFFIX"
WHEELHOUSE_DIR="wheelhousecpu"
LIBTORCH_HOUSE_DIR="libtorch_housecpu"
if [[ -z "$PYTORCH_FINAL_PACKAGE_DIR" ]]; then
if [[ -z "$BUILD_PYTHONLESS" ]]; then
PYTORCH_FINAL_PACKAGE_DIR="/remote/wheelhouse$DIR_SUFFIX"
PYTORCH_FINAL_PACKAGE_DIR="/remote/wheelhousecpu"
else
PYTORCH_FINAL_PACKAGE_DIR="/remote/libtorch_house$DIR_SUFFIX"
PYTORCH_FINAL_PACKAGE_DIR="/remote/libtorch_housecpu"
fi
fi
mkdir -p "$PYTORCH_FINAL_PACKAGE_DIR" || true
@@ -60,24 +49,6 @@ DEPS_SONAME=(
"libgomp.so.1"
)
if [[ "$GPU_ARCH_TYPE" == "xpu" ]]; then
echo "Bundling with xpu support package libs."
DEPS_LIST+=(
"/opt/intel/oneapi/compiler/latest/lib/libOpenCL.so.1"
"/opt/intel/oneapi/compiler/latest/lib/libsvml.so"
"/opt/intel/oneapi/compiler/latest/lib/libirng.so"
"/opt/intel/oneapi/compiler/latest/lib/libimf.so"
"/opt/intel/oneapi/compiler/latest/lib/libintlc.so.5"
)
DEPS_SONAME+=(
"libOpenCL.so.1"
"libsvml.so"
"libirng.so"
"libimf.so"
"libintlc.so.5"
)
fi
rm -rf /usr/local/cuda*
SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

.ci/manywheel/build_xpu.sh (new executable file, 108 lines added)
View File

@@ -0,0 +1,108 @@
#!/usr/bin/env bash
set -ex
export TH_BINARY_BUILD=1
export USE_CUDA=0
# Keep an array of cmake variables to add to
if [[ -z "$CMAKE_ARGS" ]]; then
# These are passed to tools/build_pytorch_libs.sh::build()
CMAKE_ARGS=()
fi
if [[ -z "$EXTRA_CAFFE2_CMAKE_FLAGS" ]]; then
# These are passed to tools/build_pytorch_libs.sh::build_caffe2()
EXTRA_CAFFE2_CMAKE_FLAGS=()
fi
# Refer https://www.intel.com/content/www/us/en/developer/articles/tool/pytorch-prerequisites-for-intel-gpus.html
source /opt/intel/oneapi/compiler/latest/env/vars.sh
source /opt/intel/oneapi/pti/latest/env/vars.sh
source /opt/intel/oneapi/umf/latest/env/vars.sh
export USE_STATIC_MKL=1
WHEELHOUSE_DIR="wheelhousexpu"
LIBTORCH_HOUSE_DIR="libtorch_housexpu"
if [[ -z "$PYTORCH_FINAL_PACKAGE_DIR" ]]; then
if [[ -z "$BUILD_PYTHONLESS" ]]; then
PYTORCH_FINAL_PACKAGE_DIR="/remote/wheelhousexpu"
else
PYTORCH_FINAL_PACKAGE_DIR="/remote/libtorch_housexpu"
fi
fi
mkdir -p "$PYTORCH_FINAL_PACKAGE_DIR" || true
OS_NAME=$(awk -F= '/^NAME/{print $2}' /etc/os-release)
if [[ "$OS_NAME" == *"CentOS Linux"* ]]; then
LIBGOMP_PATH="/usr/lib64/libgomp.so.1"
elif [[ "$OS_NAME" == *"Red Hat Enterprise Linux"* ]]; then
LIBGOMP_PATH="/usr/lib64/libgomp.so.1"
elif [[ "$OS_NAME" == *"AlmaLinux"* ]]; then
LIBGOMP_PATH="/usr/lib64/libgomp.so.1"
elif [[ "$OS_NAME" == *"Ubuntu"* ]]; then
if [[ "$(uname -m)" == "s390x" ]]; then
LIBGOMP_PATH="/usr/lib/s390x-linux-gnu/libgomp.so.1"
else
LIBGOMP_PATH="/usr/lib/x86_64-linux-gnu/libgomp.so.1"
fi
fi
DEPS_LIST=(
"$LIBGOMP_PATH"
"/opt/intel/oneapi/compiler/latest/lib/libOpenCL.so.1"
)
DEPS_SONAME=(
"libgomp.so.1"
"libOpenCL.so.1"
)
if [[ -z "$PYTORCH_EXTRA_INSTALL_REQUIREMENTS" ]]; then
echo "Bundling with xpu support package libs."
DEPS_LIST+=(
"/opt/intel/oneapi/compiler/latest/lib/libsycl.so.8"
"/opt/intel/oneapi/compiler/latest/lib/libur_loader.so.0"
"/opt/intel/oneapi/compiler/latest/lib/libur_adapter_level_zero.so.0"
"/opt/intel/oneapi/compiler/latest/lib/libur_adapter_opencl.so.0"
"/opt/intel/oneapi/compiler/latest/lib/libsvml.so"
"/opt/intel/oneapi/compiler/latest/lib/libirng.so"
"/opt/intel/oneapi/compiler/latest/lib/libimf.so"
"/opt/intel/oneapi/compiler/latest/lib/libintlc.so.5"
"/opt/intel/oneapi/pti/latest/lib/libpti_view.so.0.10"
"/opt/intel/oneapi/umf/latest/lib/libumf.so.0"
"/opt/intel/oneapi/tcm/latest/lib/libhwloc.so.15"
)
DEPS_SONAME+=(
"libsycl.so.8"
"libur_loader.so.0"
"libur_adapter_level_zero.so.0"
"libur_adapter_opencl.so.0"
"libsvml.so"
"libirng.so"
"libimf.so"
"libintlc.so.5"
"libpti_view.so.0.10"
"libumf.so.0"
"libhwloc.so.15"
)
else
echo "Using xpu runtime libs from pypi."
XPU_RPATHS=(
'$ORIGIN/../../../..'
)
XPU_RPATHS=$(IFS=: ; echo "${XPU_RPATHS[*]}")
export C_SO_RPATH=$XPU_RPATHS':$ORIGIN:$ORIGIN/lib'
export LIB_SO_RPATH=$XPU_RPATHS':$ORIGIN'
export FORCE_RPATH="--force-rpath"
fi
rm -rf /usr/local/cuda*
SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
if [[ -z "$BUILD_PYTHONLESS" ]]; then
BUILD_SCRIPT=build_common.sh
else
BUILD_SCRIPT=build_libtorch.sh
fi
source ${SOURCE_DIR}/${BUILD_SCRIPT}
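
The PYTORCH_EXTRA_INSTALL_REQUIREMENTS branch above is the core of the change: when the requirements are set, the oneAPI libraries are not bundled and the wheel instead relies on rpath entries that point back into the Python environment where the intel-* runtime wheels install their shared libraries. A quick way to sanity-check the resulting rpath after a build (a sketch; the library path is an assumption about the wheel layout):

```bash
# Sketch only: inspect the RPATH/RUNPATH written into the XPU library when the
# PyPI-runtime path is taken. '$ORIGIN/../../../..' walks up from torch/lib
# toward the environment's lib directory, where the intel-* runtime wheels are
# expected to place their shared libraries (layout assumption).
readelf -d torch/lib/libtorch_xpu.so | grep -E 'RPATH|RUNPATH'
```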

View File

@@ -8,6 +8,7 @@ export VC_YEAR=2019
if [[ "$DESIRED_CUDA" == 'xpu' ]]; then
export VC_YEAR=2022
export XPU_VERSION=2025.0
fi
pushd "$BUILDER_ROOT"

View File

@@ -77,6 +77,15 @@ PYTORCH_EXTRA_INSTALL_REQUIREMENTS = {
"nvidia-nvtx-cu12==12.6.77; platform_system == 'Linux' and platform_machine == 'x86_64' | "
"nvidia-nvjitlink-cu12==12.6.85; platform_system == 'Linux' and platform_machine == 'x86_64'"
),
"xpu": (
"intel-cmplr-lib-rt==2025.0.2 | "
"intel-cmplr-lib-ur==2025.0.2 | "
"intel-cmplr-lic-rt==2025.0.2 | "
"intel-sycl-rt==2025.0.2 | "
"tcmlib==1.2.0 | "
"umf==0.9.1 | "
"intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'"
),
}
@@ -434,8 +443,10 @@ def generate_wheels_matrix(
".", "_"
),
"pytorch_extra_install_requirements": (
PYTORCH_EXTRA_INSTALL_REQUIREMENTS["12.4"]
if os != "linux" and gpu_arch_type != "xpu"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS["xpu"]
if gpu_arch_type == "xpu"
else PYTORCH_EXTRA_INSTALL_REQUIREMENTS["12.4"]
if os != "linux"
else ""
),
}
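
The pipe-separated string added to PYTORCH_EXTRA_INSTALL_REQUIREMENTS above is a single matrix field holding several PEP 508 requirements; downstream tooling splits it on the | separator (the splitting itself is not part of this diff). A trivial sketch of unpacking such a value for inspection:

```bash
# Sketch only: turn the pipe-separated matrix value into one requirement per line.
reqs="intel-cmplr-lib-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | umf==0.9.1"
echo "$reqs" | tr '|' '\n' | sed 's/^ *//; s/ *$//'
```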

View File

@@ -634,6 +634,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_9-xpu
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_9-xpu-test: # Testing
@@ -1320,6 +1321,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_10-xpu
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_10-xpu-test: # Testing
@@ -2076,6 +2078,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_11-xpu
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_11-xpu-test: # Testing
@@ -2762,6 +2765,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_12-xpu
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_12-xpu-test: # Testing
@@ -3228,6 +3232,7 @@ jobs:
runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
build_name: manywheel-py3_13-xpu
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_13-xpu-test: # Testing

View File

@@ -860,6 +860,7 @@ jobs:
GPU_ARCH_TYPE: xpu
SKIP_ALL_TESTS: 1
DESIRED_PYTHON: "3.9"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
steps:
- name: Display EC2 information
shell: bash
@@ -1929,6 +1930,7 @@ jobs:
GPU_ARCH_TYPE: xpu
SKIP_ALL_TESTS: 1
DESIRED_PYTHON: "3.10"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
steps:
- name: Display EC2 information
shell: bash
@@ -2998,6 +3000,7 @@ jobs:
GPU_ARCH_TYPE: xpu
SKIP_ALL_TESTS: 1
DESIRED_PYTHON: "3.11"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
steps:
- name: Display EC2 information
shell: bash
@@ -4067,6 +4070,7 @@ jobs:
GPU_ARCH_TYPE: xpu
SKIP_ALL_TESTS: 1
DESIRED_PYTHON: "3.12"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
steps:
- name: Display EC2 information
shell: bash
@@ -5136,6 +5140,7 @@ jobs:
GPU_ARCH_TYPE: xpu
SKIP_ALL_TESTS: 1
DESIRED_PYTHON: "3.13"
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: intel-cmplr-lib-rt==2025.0.2 | intel-cmplr-lib-ur==2025.0.2 | intel-cmplr-lic-rt==2025.0.2 | intel-sycl-rt==2025.0.2 | tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.0; platform_system == 'Linux' and platform_machine == 'x86_64'
steps:
- name: Display EC2 information
shell: bash
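
The generated-*-nightly.yml entries above are produced from the matrix script rather than edited by hand; after changing generate_binary_build_matrix.py they need to be regenerated so the YAML stays in sync. A hedged sketch of that step (the script path reflects the usual PyTorch repo layout and may differ):

```bash
# Sketch only: regenerate the nightly binary workflows from the build matrix.
python3 .github/scripts/generate_ci_workflows.py
```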