Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
[BE] Remove conda from scripts and build files Part 2 (#145015)

Continuation of https://github.com/pytorch/pytorch/pull/144870. Remove conda logic from scripts:
1. Remove the conda build path from the Triton wheel build script
2. Remove the conda check from setup.py
3. Remove conda from the release scripts
4. Delete read_conda_versions.sh, which is unused (checked via git grep)

Related to: https://github.com/pytorch/pytorch/issues/138506
Pull Request resolved: https://github.com/pytorch/pytorch/pull/145015
Approved by: https://github.com/malfet, https://github.com/Skylion007
Committed by: PyTorch MergeBot
Parent: b7af210d8d
Commit: a215e174a1
.github/scripts/build_triton_wheel.py (vendored, 52 lines changed)
@@ -52,7 +52,6 @@ def build_triton(
     *,
     version: str,
     commit_hash: str,
-    build_conda: bool = False,
     device: str = "cuda",
     py_version: Optional[str] = None,
     release: bool = False,
@@ -83,55 +82,6 @@ def build_triton(
     else:
         check_call(["git", "checkout", commit_hash], cwd=triton_basedir)
 
-    if build_conda:
-        with open(triton_basedir / "meta.yaml", "w") as meta:
-            print(
-                f"package:\n name: torchtriton\n version: {version}\n",
-                file=meta,
-            )
-            print("source:\n path: .\n", file=meta)
-            print(
-                "build:\n string: py{{py}}\n number: 1\n script: cd python; "
-                "python setup.py install --record=record.txt\n",
-                " script_env:\n - MAX_JOBS\n",
-                file=meta,
-            )
-            print(
-                "requirements:\n host:\n - python\n - setuptools\n - pybind11\n"
-                " run:\n - python\n - filelock\n - pytorch\n",
-                file=meta,
-            )
-            print(
-                "about:\n home: https://github.com/openai/triton\n license: MIT\n summary:"
-                " 'A language and compiler for custom Deep Learning operation'",
-                file=meta,
-            )
-
-        patch_init_py(
-            triton_pythondir / "triton" / "__init__.py",
-            version=f"{version}",
-        )
-        if py_version is None:
-            py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
-        check_call(
-            [
-                "conda",
-                "build",
-                "--python",
-                py_version,
-                "-c",
-                "pytorch-nightly",
-                "--output-folder",
-                tmpdir,
-                ".",
-            ],
-            cwd=triton_basedir,
-            env=env,
-        )
-        conda_path = next(iter(Path(tmpdir).glob("linux-64/torchtriton*.bz2")))
-        shutil.copy(conda_path, Path.cwd())
-        return Path.cwd() / conda_path.name
-
     # change built wheel name and version
     env["TRITON_WHEEL_NAME"] = triton_pkg_name
     if with_clang_ldd:
@@ -172,7 +122,6 @@ def main() -> None:
 
     parser = ArgumentParser("Build Triton binaries")
     parser.add_argument("--release", action="store_true")
-    parser.add_argument("--build-conda", action="store_true")
     parser.add_argument(
         "--device", type=str, default="cuda", choices=["cuda", "rocm", "xpu"]
     )
@@ -188,7 +137,6 @@ def main() -> None:
             args.commit_hash if args.commit_hash else read_triton_pin(args.device)
         ),
         version=args.triton_version,
-        build_conda=args.build_conda,
         py_version=args.py_version,
         release=args.release,
         with_clang_ldd=args.with_clang_ldd,
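After this change the Triton script has a single output path: it always produces a wheel. For orientation, a hedged sketch (not part of the diff) of invoking it with flags that remain visible in the hunks above:

    # Sketch only: build a Triton wheel for CUDA. The removed --build-conda flag
    # is gone; --device and --release are the argparse options shown in main() above.
    python .github/scripts/build_triton_wheel.py --device cuda --release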
read_conda_versions.sh (deleted file)
@@ -1,184 +0,0 @@
# Simple script used to easily search all packages in conda for their
# dependency requirements
# TODO also search through output of ldd
# TODO update conda info syntax for different channels

if [ -z "$CONDA_ROOT" ]; then
    # TODO create our own environment
    echo "Please set CONDA_ROOT so that I know where to search for conda libraries"
    echo "I expect CONDA_ROOT to be the path to the current conda environment."
    echo "Also FYI I will probably mess up the current conda environment."
    exit 1
fi

if [ -z "$1" ]; then
    echo "Please give me a package name to search for"
    exit 1
fi
PKG_NAME="$1"

if [ -n "$2" ]; then
    echo "Searching in channel $2"
    CONDA_CHANNEL="$2"
fi

# These are the packages of interest to search the dependencies for
# TODO use this
PACKAGES_OF_INTEREST=( libgcc-ng libprotobuf numpy )

# We will run `conda install` and `conda uninstall` a lot, but we don't want
# this very noisy output to clutter the user experience
VERBOSE_LOG='read_conda_versions.log'
echo "Conda install/uninstall log for $PKG_NAME" > $VERBOSE_LOG


#
# Build up the name of the installed library to call `nm` on
#
PKG_INSTALLED_LIB="$PKG_NAME"

# opencv installs a bunch of libraries. We'll just check libopencv_core
if [[ $PKG_NAME == opencv ]]; then
    PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}_core"
fi

# Most packages prepend a 'lib' to the package name, but libprotobuf is an
# exception
if [[ $PKG_NAME != lib* ]]; then
    PKG_INSTALLED_LIB="lib${PKG_INSTALLED_LIB}"
fi

# The shared library suffix differs on macOS an Linux
if [[ "$(uname)" == Darwin ]]; then
    PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}.dylib"
else
    PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}.so"
fi
echo "Determined the library name of $PKG_NAME to be $PKG_INSTALLED_LIB"
echo "Determined the library name of $PKG_NAME to be $PKG_INSTALLED_LIB" >> $VERBOSE_LOG


#
# Get all available packages with conda-search
#

# Split the output from conda search into an array, one line per package (plus
# the header)
conda_search_packages=()
while read -r line; do conda_search_packages+=("$line"); done <<< "$(conda search $PKG_NAME $CONDA_CHANNEL)"

### Typical `conda search` output looks like
### Loading channels: done
### Name Version Build Channel
### protobuf 2.6.1 py27_0 defaults
### 2.6.1 py27_1 defaults
### 3.2.0 py27_0 defaults
### 3.2.0 py35_0 defaults
### 3.2.0 py36_0 defaults
### 3.4.1 py27h66c1d77_0 defaults
### 3.4.1 py35h9d33684_0 defaults
### 3.4.1 py36h314970b_0 defaults
### 3.5.1 py27h0a44026_0 defaults
### 3.5.1 py35h0a44026_0 defaults
### 3.5.1 py36h0a44026_0 defaults
##
### Typical `conda info` output looks like
### protobuf 3.5.1 py36h0a44026_0
### -----------------------------
### file name : protobuf-3.5.1-py36h0a44026_0.tar.bz2
### name : protobuf
### version : 3.5.1
### build string: py36h0a44026_0
### build number: 0
### channel : https://repo.continuum.io/pkgs/main/osx-64
### size : 589 KB
### arch : None
### constrains : ()
### license : New BSD License
### license_family: BSD
### md5 : 7dbdb06612e21c42fbb8a62354e13e10
### platform : None
### subdir : osx-64
### timestamp : 1519951502766
### url : https://repo.continuum.io/pkgs/main/osx-64/protobuf-3.5.1-py36h0a44026_0.tar.bz2
### dependencies:
### libcxx >=4.0.1
### libprotobuf >=3.5.1,<3.6.0a0
### python >=3.6,<3.7.0a0
### six

# Echo what packages we'll look through.
echo "Processing these packages:"
for pkg in "${conda_search_packages[@]:2}"; do
    echo " $pkg"
done


#
# Look up each package in conda info, then install it and search the exported
# symbols for signs of cxx11
#
for pkg in "${conda_search_packages[@]:2}"; do
    echo "Processing $pkg" >> $VERBOSE_LOG

    # Split each line into an array and build the package specification
    # <package_name (1st line only)> maj.min.patch build_string channel_name
    line_parts=( $pkg )
    if [[ ${line_parts[0]} == $PKG_NAME ]]; then
        # First line of output
        PKG_VERSION="${line_parts[1]}"
        PKG_BUILD_STR="${line_parts[2]}"
    else
        PKG_VERSION="${line_parts[0]}"
        PKG_BUILD_STR="${line_parts[1]}"
    fi
    PKG_SPEC="$PKG_NAME=$PKG_VERSION=$PKG_BUILD_STR"

    # Output current pkg spec
    echo
    echo "${PKG_SPEC}:"
    echo "Determined that the package spec is $PKG_SPEC" >> $VERBOSE_LOG

    # Split the output of conda_info into an array of lines
    pkg_dependencies=()
    while read -r line; do pkg_dependencies+=("$line"); done <<< "$(conda info "$PKG_SPEC" $CONDA_CHANNEL)"

    # List all the listed dependencies in `conda info`
    if [ "${#pkg_dependencies[@]}" -gt 19 ]; then
        echo " Listed dependencies:"
        echo " Listed dependencies:" >> $VERBOSE_LOG
        for pkg_dependency in "${pkg_dependencies[@]:20}"; do
            echo " $pkg_dependency"
            echo " $pkg_dependency" >> $VERBOSE_LOG
        done
    else
        echo " No listed dependencies in conda-info" >> $VERBOSE_LOG
    fi

    # But sometimes (a lot of the time) the gcc with which a package was built
    # against is not listed in dependencies. So we try to figure it out manually
    # We install this exact package, and then grep the exported symbols for signs
    # of cxx11
    echo "Calling conda-uninstall on $PKG_NAME" >> $VERBOSE_LOG
    echo "conda uninstall -y $PKG_NAME --quiet" >> $VERBOSE_LOG
    conda uninstall -y "$PKG_NAME" --quiet >> $VERBOSE_LOG 2>&1

    echo "Calling conda-install on $PKG_SPEC" >> $VERBOSE_LOG
    echo "conda install -y $PKG_SPEC --quiet --no-deps $CONDA_CHANNEL" >> $VERBOSE_LOG
    conda install -y "$PKG_SPEC" --quiet --no-deps $CONDA_CHANNEL >> $VERBOSE_LOG 2>&1
    if [ $? -eq 0 ]; then
        # Only grep the exported symbols if the library was installed correctly

        MENTIONS_CXX11="$(nm "$CONDA_ROOT/lib/$PKG_INSTALLED_LIB" | grep cxx11 | wc -l)"
        if [ $MENTIONS_CXX11 -gt 0 ]; then
            echo " This package is built against the recent gcc ABI ($MENTIONS_CXX11 mentions of cxx11)"
            echo "$CONDA_ROOT/lib/$PKG_INSTALLED_LIB mentions cxx11 $MENTIONS_CXX11 times" >> $VERBOSE_LOG
        fi
    else
        echo "Error installing $PKG_SPEC , continuing"
        echo "Error installing $PKG_SPEC , continuing" >> $VERBOSE_LOG
    fi
done
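The one technique worth noting in this deleted helper is its ABI probe: after installing a package it counted how many exported symbols mention cxx11 to guess whether the library was built with the new libstdc++ ABI. A minimal standalone sketch of just that check, with a placeholder library path and no conda involvement:

    # Hedged sketch (not from the repo): guess whether a shared library was built
    # with the cxx11 (new libstdc++) ABI by counting cxx11 mentions in its symbols.
    lib="/usr/lib/x86_64-linux-gnu/libprotobuf.so"   # placeholder path
    mentions=$(nm -D "$lib" 2>/dev/null | grep -c cxx11 || true)
    if [ "${mentions:-0}" -gt 0 ]; then
        echo "$lib appears to use the cxx11 ABI ($mentions symbol mentions)"
    else
        echo "$lib shows no cxx11 symbol mentions (old ABI, stripped, or not C++)"
    fi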
(deleted file; the prune script invoked by the wrapper below)
@@ -1,52 +0,0 @@
#!/usr/bin/env bash

grab_prune_version() {
    conda search -c "${CHANNEL}" --platform "${platform}" "${PKG}" 2>/dev/null | \
        grep "${CHANNEL}" | \
        awk -F ' *' '{print $2}' | \
        uniq | \
        head -n -1 | \
        xargs
}

grab_latest_version() {
    conda search -c "${CHANNEL}" --platform "${platform}" "${PKG}" 2>/dev/null | \
        grep "${CHANNEL}" | \
        awk -F ' *' '{print $2}' | \
        uniq | \
        tail -n 1 | \
        xargs
}

grab_specs_for_version() {
    conda search -c "${CHANNEL}" --platform "${platform}" "${PKG}" 2>/dev/null | \
        grep "${CHANNEL}" | \
        grep "$1" | \
        awk -F ' *' '{print $3}' | \
        uniq | \
        xargs
}

set -eou pipefail

CHANNEL=${CHANNEL:-pytorch-nightly}
PKG=${PKG:-pytorch}
PLATFORMS=${PLATFORMS:-noarch osx-64 osx-arm64 linux-64 win-64}

for platform in ${PLATFORMS}; do
    latest_version="$(grab_latest_version || true)"
    specs_in_latest_version="$(grab_specs_for_version "${latest_version}" || true)"
    versions_to_prune="$(grab_prune_version || true)"
    for version in ${versions_to_prune}; do
        specs_in_prune_version="$(grab_specs_for_version "${version}" || true)"
        for spec in ${specs_in_prune_version}; do
            # If this spec is included in specs_in_latest_version, then remove it.
            if [[ "${specs_in_latest_version}" =~ ${spec} ]];then
                (
                    set -x
                    anaconda remove --force "${CHANNEL}/${PKG}/${version}/${platform}/${PKG}-${version}-${spec}.tar.bz2"
                )
            fi
        done
    done
done
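The selection logic above reduces to one idiom: with versions listed oldest to newest, `tail -n 1` is the latest version to keep and `head -n -1` (GNU coreutils) is everything older, which gets pruned. A tiny self-contained sketch of that split, using made-up version numbers:

    # Sketch only: split a version list into "latest" and "older, candidates to prune".
    versions=$(printf '%s\n' 1.0.0 1.1.0 1.2.0)    # placeholder versions, oldest first
    latest=$(echo "$versions" | tail -n 1)         # -> 1.2.0
    to_prune=$(echo "$versions" | head -n -1)      # -> 1.0.0 and 1.1.0 (GNU head)
    echo "keep:  $latest"
    echo "prune: $to_prune"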
(deleted file; wrapper that runs the prune script once per package)
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

set -euo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

CHANNEL=${CHANNEL:-pytorch-nightly}
PACKAGES=${PACKAGES:-pytorch}

for pkg in ${PACKAGES}; do
    echo "+ Attempting to prune: ${CHANNEL}/${pkg}"
    CHANNEL="${CHANNEL}" PKG="${pkg}" "${DIR}/prune.sh"
    echo
done
(deleted file; downloads conda packages from one channel and re-uploads them to another)
@@ -1,69 +0,0 @@
#!/usr/bin/env bash

set -eou pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "${DIR}/common_utils.sh"

# Allow for users to pass PACKAGE_NAME
# For use with other packages, i.e. torchvision, etc.
PACKAGE_NAME=${PACKAGE_NAME:-pytorch}
PYTORCH_CONDA_FROM=${PYTORCH_CONDA_FROM:-pytorch-test}
PYTORCH_CONDA_TO=${PYTORCH_CONDA_TO:-pytorch}
CONDA_PLATFORMS="linux-64 osx-64 win-64 noarch"

pytorch_version="$(get_pytorch_version)"

tmp_dir="$(mktemp -d)"
pushd "${tmp_dir}"
trap 'rm -rf ${tmp_dir}' EXIT

conda_search() {
    conda search -q "${PYTORCH_CONDA_FROM}::${PACKAGE_NAME}==${pytorch_version}" -c "${PYTORCH_CONDA_FROM}" --platform "${platform}" \
        | grep -e "^${PACKAGE_NAME}" \
        | awk -F ' *' '{print $3}' \
        | xargs -I % echo "https://anaconda.org/${PYTORCH_CONDA_FROM}/${PACKAGE_NAME}/${pytorch_version}/download/${platform}/${PACKAGE_NAME}-${pytorch_version}-%.tar.bz2"
}

pkgs_to_download=()
for platform in ${CONDA_PLATFORMS}; do
    pkgs_to_download+=($(\
        conda_search 2>/dev/null || true
    ))
    # Create directory where packages will eventually be downloaded
    mkdir -p "${platform}"
done

my_curl() {
    local dl_url=$1
    local start=$(date +%s)
    # downloads should be distinguished by platform which should be the second
    # to last field in the url, this is to avoid clobbering same named files
    # for different platforms
    dl_dir=$(echo "${dl_url}" | rev | cut -d'/' -f 2 | rev)
    dl_name=$(echo "${dl_url}" | rev | cut -d'/' -f 1 | rev)
    curl -fsSL -o "${dl_dir}/${dl_name}" "${dl_url}"
    local end=$(date +%s)
    local diff=$(( end - start ))
    echo "+ ${dl_url} took ${diff}s"
}
export -f my_curl

# Download all packages in parallel
printf '%s\n' "${pkgs_to_download[@]}" \
    | xargs -P 10 -I % bash -c '(declare -t my_curl); my_curl %'

# dry run by default
DRY_RUN=${DRY_RUN:-enabled}
ANACONDA="true anaconda"
if [[ $DRY_RUN = "disabled" ]]; then
    ANACONDA="anaconda"
fi
(
    # We use --skip here to avoid re-uploading files we've already uploaded
    set -x
    ${ANACONDA} upload --skip -u ${PYTORCH_CONDA_TO} $(find . -name '*.bz2')
)

popd
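The download section of this deleted script relies on a generic shell pattern rather than anything conda-specific: export a function, then fan the inputs out with `xargs -P` so transfers run in parallel. A stripped-down sketch of the same pattern with placeholder URLs:

    # Sketch only: parallel downloads via an exported function and xargs -P.
    fetch() { curl -fsSL -O "$1"; }
    export -f fetch
    printf '%s\n' \
        "https://example.com/a.tar.bz2" \
        "https://example.com/b.tar.bz2" \
        | xargs -P 4 -I % bash -c 'fetch "%"'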
@@ -26,38 +26,6 @@ restore_libtorch() {
     aws_promote libtorch-* libtorch
 }
 
-ANACONDA="true anaconda"
-if [[ ${DRY_RUN} = "disabled" ]]; then
-    ANACONDA="anaconda"
-fi
-PYTORCH_CONDA_TO=${PYTORCH_CONDA_TO:-pytorch-test}
-
-upload_conda() {
-    local pkg
-    pkg=${1}
-    (
-        set -x
-        ${ANACONDA} upload --skip -u "${PYTORCH_CONDA_TO}" "${pkg}"
-    )
-}
-
-export -f upload_conda
-
-restore_conda() {
-    TMP_DIR="$(mktemp -d)"
-    trap 'rm -rf ${TMP_DIR}' EXIT
-    (
-        set -x
-        aws s3 cp --recursive "${PYTORCH_S3_BACKUP_BUCKET}/conda" "${TMP_DIR}/"
-    )
-    export ANACONDA
-    export PYTORCH_CONDA_TO
-    # Should upload all bz2 packages in parallel for quick restoration
-    find "${TMP_DIR}" -name '*.bz2' -type f \
-        | xargs -P 10 -I % bash -c "(declare -t upload_conda); upload_conda %"
-}
-
 
 restore_wheels
 restore_libtorch
-restore_conda
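Both the deleted upload helper above and this hunk used the same dry-run idiom: prefix the real command with `true` so that, under `set -x`, the full command line is still printed but nothing is executed unless DRY_RUN is explicitly disabled. A minimal sketch of that idiom; UPLOAD_CMD, the channel, and the file name are placeholders:

    # Sketch only: the "true"-prefix dry-run toggle used by the removed scripts.
    DRY_RUN=${DRY_RUN:-enabled}
    UPLOAD_CMD="true anaconda"              # "true" swallows its arguments: a no-op
    if [[ ${DRY_RUN} = "disabled" ]]; then
        UPLOAD_CMD="anaconda"               # real upload only when explicitly requested
    fi
    (
        set -x
        ${UPLOAD_CMD} upload --skip -u some-channel some-package.tar.bz2
    )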
setup.py (2 lines changed)
@@ -699,7 +699,7 @@ class build_ext(setuptools.command.build_ext.build_ext):
         # It's an old-style class in Python 2.7...
         setuptools.command.build_ext.build_ext.run(self)
 
-        if IS_DARWIN and package_type != "conda":
+        if IS_DARWIN:
             self._embed_libomp()
 
         # Copy the essential export library to compile C++ extensions.
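With the conda package type gone, the libomp embedding step now runs for every macOS build. A hedged way to spot-check a macOS build afterwards; the library path below is an assumption about where the build puts its main dylib, so adjust it to your output:

    # Sketch only: list OpenMP-related linkage of a built library on macOS.
    otool -L torch/lib/libtorch_cpu.dylib | grep -i omp    # path is a guess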