Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-22 22:25:10 +08:00)

Compare commits: 92 commits, quint-bits...v2.1.1-rc1
Commits (SHA1):
cc54a5072e
3788d86e3e
1f0450eed2
9570baa150
b3b274ddcb
5bcfb1b9b4
c496f9a40b
39a66a66fe
ed87177528
c07240e5e4
bb96803a35
3002bf71e6
e7892b2e02
909fcf9b21
0bc598a604
dd7fb44d20
6026c29db0
0f9ac00ac6
209f2fa8ff
fa1db4310d
e6702486f6
e68aa76642
88cde0c37c
e4c42a93bc
7bcf7da3a2
1841d54370
fca42334be
539a971161
9287a0cf59
c464075d5d
1b4161c686
28220534de
da9639c752
e534243ec2
01fa8c140a
5aae979614
ced78cc2a7
d8db5808ce
889811ab5b
1191449343
6d9fad8474
ed62318bea
ee67c4dd6a
5529b81631
7e23b4907d
71c9d5c3a6
91e414957b
ce3ed7f293
bd372d460b
12b8c26f35
7397cf324c
fa8259db8d
d83c8287ea
ba19c52e31
c5c9536aa7
6b7a777661
ebd3224303
6e4ae13657
265e46e193
da7290dfbd
828992cf13
48246f3dfb
7d6971dcee
5417e23ba8
7a9101951d
03e7f0b99d
c0e7239f43
04c1e07fd7
cb4362ba5f
bddd30ca7a
9cc99906e9
a49fca4dd4
83964c761e
085bd1da62
90452f41e3
35c3d5a080
d07ac50e26
8a3b017769
a82894b0d3
050fc31538
b3cb05b396
fec68a2799
f139dda1cc
5252dfb762
da1ccca830
c9cbdaf24f
f187e42a54
9175987fcc
d8e6594fb8
f82c027774
6d20b39d3f
17f400404f
@@ -1 +1 @@
05d67b9418cacda0d356c2102d7c1a887948b013
34f8189eae57a23cc15b4b4f032fe25757e0db8e
@@ -7,18 +7,14 @@ source "$(dirname "${BASH_SOURCE[0]}")/common_utils.sh"

function install_huggingface() {
  local version
  version=$(get_pinned_commit huggingface)
  pip_install pandas
  pip_install scipy
  pip_install z3-solver
  pip_install pandas==2.0.3
  pip_install "transformers==${version}"
}

function install_timm() {
  local commit
  commit=$(get_pinned_commit timm)
  pip_install pandas
  pip_install scipy
  pip_install z3-solver
  pip_install pandas==2.0.3
  pip_install "git+https://github.com/rwightman/pytorch-image-models@${commit}"
}
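Both helpers follow the same pin-and-install pattern: read a pinned revision via get_pinned_commit, then install exactly that revision. A minimal stand-alone sketch of the idea (the pin-file location and layout here are assumptions for illustration, not the script's actual implementation):

# Hypothetical pin lookup: one <name>.txt file per dependency containing a pinned version.
get_pinned_commit() {
  cat "ci_commit_pins/${1}.txt"
}

version=$(get_pinned_commit huggingface)   # e.g. "4.31.0"
pip install "transformers==${version}"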
17  .ci/docker/common/install_onnx.sh  (Normal file → Executable file)
@@ -4,6 +4,10 @@ set -ex

source "$(dirname "${BASH_SOURCE[0]}")/common_utils.sh"

retry () {
  "$@" || (sleep 10 && "$@") || (sleep 20 && "$@") || (sleep 40 && "$@")
}

# A bunch of custom pip dependencies for ONNX
pip_install \
  beartype==0.10.4 \
@@ -18,22 +22,17 @@ pip_install \
# onnx-weekly. Otherwise, onnx-weekly could be
# overwritten by onnx.
pip_install \
  onnxruntime==1.15.1 \
  parameterized==0.8.1 \
  pytest-cov==4.0.0 \
  pytest-subtests==0.10.0 \
  tabulate==0.9.0 \
  transformers==4.31.0

# Using 1.15dev branch for the following not yet released features and fixes.
# - Segfault fix for shape inference.
# - Inliner to workaround ORT segfault.
pip_install onnx-weekly==1.15.0.dev20230717
pip_install coloredlogs packaging
retry pip_install -i https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/pypi/simple/ --no-cache-dir --no-input ort-nightly==1.16.0.dev20230908001

# TODO: change this when onnx-script is on testPypi
# pip_install onnxscript-preview==0.1.0.dev20230809 --no-deps
# NOTE: temp change for CI to run on unpublished onnxscript PR.
pip_install "onnxscript@git+https://github.com/microsoft/onnxscript@f69be19ebd3f2e0d7efe64b0c7be3329cbab3822" --no-deps
pip_install onnx==1.14.1
pip_install onnxscript-preview==0.1.0.dev20230828 --no-deps

# Cache the transformers model to be used later by ONNX tests. We need to run the transformers
# package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/
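The retry helper added at the top of this script simply re-runs the full command line after 10, 20, and 40 seconds before giving up, which is why the flaky ORT-Nightly index fetch above is wrapped in it. A small usage sketch (the helper is copied from the diff; the wrapped package is only an example):

retry () {
  "$@" || (sleep 10 && "$@") || (sleep 20 && "$@") || (sleep 40 && "$@")
}

# Any transient network failure gets three more chances; the last attempt's exit code wins.
retry pip install --no-cache-dir --no-input onnxruntime==1.15.1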
@@ -271,7 +271,12 @@ pytest-cpp==2.3.0
#Pinned versions: 2.3.0
#test that import:

z3-solver
z3-solver==4.12.2.0
#Description: The Z3 Theorem Prover Project
#Pinned versions:
#test that import:

tensorboard==2.13.0
#Description: Also included in .ci/docker/requirements-docs.txt
#Pinned versions:
#test that import: test_tensorboard
@@ -180,7 +180,7 @@ function install_numpy_pytorch_interop() {

function clone_pytorch_xla() {
  if [[ ! -d ./xla ]]; then
    git clone --recursive --quiet https://github.com/pytorch/xla.git
    git clone --recursive -b r2.1 https://github.com/pytorch/xla.git
    pushd xla
    # pin the xla hash so that we don't get broken by changes to xla
    git checkout "$(cat ../.github/ci_commit_pins/xla.txt)"
@@ -544,6 +544,10 @@ test_without_numpy() {
  python -c "import sys;sys.path.insert(0, 'fake_numpy');from unittest import TestCase;import torch;x=torch.randn(3,3);TestCase().assertRaises(RuntimeError, lambda: x.numpy())"
  # Regression test for https://github.com/pytorch/pytorch/issues/66353
  python -c "import sys;sys.path.insert(0, 'fake_numpy');import torch;print(torch.tensor([torch.tensor(0.), torch.tensor(1.)]))"
  # Regression test for https://github.com/pytorch/pytorch/issues/109387
  if [[ "${TEST_CONFIG}" == *dynamo* ]]; then
    python -c "import sys;sys.path.insert(0, 'fake_numpy');import torch;torch.compile(lambda x:print(x))('Hello World')"
  fi
  popd
}
@@ -35,7 +35,7 @@ if [[ "$BUILD_ENVIRONMENT" == *cuda* ]]; then
fi

# TODO: Move both of them to Windows AMI
python -m pip install pytest-rerunfailures==10.3 pytest-cpp==2.3.0
python -m pip install pytest-rerunfailures==10.3 pytest-cpp==2.3.0 tensorboard==2.13.0

# Install Z3 optional dependency for Windows builds.
python -m pip install z3-solver
@@ -62,7 +62,7 @@ git --no-pager log --max-count 1
popd

# Clone the Builder main repo
retry git clone -q https://github.com/pytorch/builder.git "$BUILDER_ROOT"
retry git clone -q https://github.com/pytorch/builder.git -b release/2.1 "$BUILDER_ROOT"
pushd "$BUILDER_ROOT"
echo "Using builder from "
git --no-pager log --max-count 1
@@ -90,7 +90,7 @@ if [[ "$PACKAGE_TYPE" == conda ]]; then
  if [[ "\${TORCH_CONDA_BUILD_FOLDER}" == "pytorch-nightly" ]]; then
    PYTORCH_CHANNEL="pytorch-nightly"
  fi
  retry conda install \${EXTRA_CONDA_FLAGS} -yq -c nvidia -c "\${PYTORCH_CHANNEL}" "pytorch-cuda=\${cu_ver}"
  retry conda install \${EXTRA_CONDA_FLAGS} -yq -c nvidia -c pytorch-test "pytorch-cuda=\${cu_ver}"
fi
conda install \${EXTRA_CONDA_FLAGS} -y "\$pkg" --offline
)
@@ -98,9 +98,9 @@ elif [[ "$PACKAGE_TYPE" != libtorch ]]; then
  if [[ "$(uname -m)" == aarch64 ]]; then
    # Using "extra-index-url" until all needed aarch64 dependencies are
    # added to "https://download.pytorch.org/whl/nightly/"
    pip install "\$pkg" --extra-index-url "https://download.pytorch.org/whl/nightly/${DESIRED_CUDA}"
    pip install "\$pkg" --extra-index-url "https://download.pytorch.org/whl/test/${DESIRED_CUDA}"
  else
    pip install "\$pkg" --index-url "https://download.pytorch.org/whl/nightly/${DESIRED_CUDA}"
    pip install "\$pkg" --index-url "https://download.pytorch.org/whl/test/${DESIRED_CUDA}"
  fi
  retry pip install -q numpy protobuf typing-extensions
fi
@@ -77,7 +77,9 @@ else
  export PYTORCH_BUILD_VERSION="${BASE_BUILD_VERSION}+$DESIRED_CUDA"
fi

if [[ -n "${PYTORCH_EXTRA_INSTALL_REQUIREMENTS:-}" ]]; then
# The build with with-pypi-cudnn suffix is only applicabe to
# pypi small wheel Linux x86 build
if [[ -n "${PYTORCH_EXTRA_INSTALL_REQUIREMENTS:-}" ]] && [[ "$(uname)" == 'Linux' && "$(uname -m)" == "x86_64" ]]; then
  export PYTORCH_BUILD_VERSION="${PYTORCH_BUILD_VERSION}-with-pypi-cudnn"
fi
@@ -11,7 +11,7 @@ PKG_DIR=${PKG_DIR:-/tmp/workspace/final_pkgs}
# currently set within `designate_upload_channel`
UPLOAD_CHANNEL=${UPLOAD_CHANNEL:-nightly}
# Designates what subfolder to put packages into
UPLOAD_SUBFOLDER=${UPLOAD_SUBFOLDER:-cpu}
UPLOAD_SUBFOLDER=${UPLOAD_SUBFOLDER:-}
UPLOAD_BUCKET="s3://pytorch"
BACKUP_BUCKET="s3://pytorch-backup"
BUILD_NAME=${BUILD_NAME:-}
@@ -64,12 +64,17 @@ s3_upload() {
  local pkg_type
  extension="$1"
  pkg_type="$2"
  s3_dir="${UPLOAD_BUCKET}/${pkg_type}/${UPLOAD_CHANNEL}/${UPLOAD_SUBFOLDER}/"
  s3_root_dir="${UPLOAD_BUCKET}/${pkg_type}/${UPLOAD_CHANNEL}"
  if [[ -z ${UPLOAD_SUBFOLDER:-} ]]; then
    s3_upload_dir="${s3_root_dir}/"
  else
    s3_upload_dir="${s3_root_dir}/${UPLOAD_SUBFOLDER}/"
  fi
  (
    for pkg in ${PKG_DIR}/*.${extension}; do
      (
        set -x
        ${AWS_S3_CP} --no-progress --acl public-read "${pkg}" "${s3_dir}"
        ${AWS_S3_CP} --no-progress --acl public-read "${pkg}" "${s3_upload_dir}"
      )
    done
  )
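The rewritten s3_upload derives the destination from UPLOAD_SUBFOLDER instead of always appending it, so an empty subfolder uploads to the channel root rather than to a path with a dangling component. A hedged sketch of just that path logic, with illustrative values (the real values come from the surrounding script):

UPLOAD_BUCKET="s3://pytorch"
UPLOAD_CHANNEL="test"
pkg_type="whl"
s3_root_dir="${UPLOAD_BUCKET}/${pkg_type}/${UPLOAD_CHANNEL}"
if [[ -z ${UPLOAD_SUBFOLDER:-} ]]; then
  s3_upload_dir="${s3_root_dir}/"                          # s3://pytorch/whl/test/
else
  s3_upload_dir="${s3_root_dir}/${UPLOAD_SUBFOLDER}/"      # e.g. s3://pytorch/whl/test/cpu/
fi
echo "${s3_upload_dir}"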
@@ -82,15 +87,17 @@ pip install -q awscli
case "${PACKAGE_TYPE}" in
  conda)
    conda_upload
    # Fetch platform (eg. win-64, linux-64, etc.) from index file
    # Because there's no actual conda command to read this
    subdir=$(\
      tar -xOf ${PKG_DIR}/*.bz2 info/index.json \
        | grep subdir \
        | cut -d ':' -f2 \
        | sed -e 's/[[:space:]]//' -e 's/"//g' -e 's/,//' \
    )
    BACKUP_DIR="conda/${subdir}"
    for conda_archive in ${PKG_DIR}/*.tar.bz2; do
      # Fetch platform (eg. win-64, linux-64, etc.) from index file because
      # there's no actual conda command to read this
      subdir=$(\
        tar -xOf "${conda_archive}" info/index.json \
          | grep subdir \
          | cut -d ':' -f2 \
          | sed -e 's/[[:space:]]//' -e 's/"//g' -e 's/,//' \
      )
      BACKUP_DIR="conda/${subdir}"
    done
    ;;
  libtorch)
    s3_upload "zip" "libtorch"
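The new loop reads the conda platform ("subdir", e.g. linux-64 or win-64) out of each archive's embedded info/index.json rather than globbing ${PKG_DIR}/*.bz2 once. For comparison, a hedged one-liner doing the same extraction with jq instead of grep/cut/sed (jq is not what the script uses, and the archive name is a made-up example):

tar -xOf "/tmp/workspace/final_pkgs/pytorch-2.1.1-py3.10_cpu_0.tar.bz2" info/index.json | jq -r '.subdir'
# prints the platform string, e.g. "linux-64"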
2  .github/ci_commit_pins/xla.txt  (vendored)
@@ -1 +1 @@
e1ee592d9806216d7ac0bb711cae6307b0c5b68a
r2.1
1  .github/merge_rules.yaml  (vendored)
@@ -7,6 +7,7 @@
- docs/source/onnx.rst
- docs/source/onnx*
- docs/source/scripts/onnx/**
- docs/source/_static/img/onnx/**
- scripts/onnx/**
- test/onnx/**
- tools/onnx/**
@@ -25,3 +25,4 @@ sympy==1.11.1
pytest-cpp==2.3.0
rockset==1.0.3
z3-solver==4.12.2.0
tensorboard==2.13.0
17  .github/scripts/build_triton_wheel.py  (vendored)
@@ -60,12 +60,18 @@ def build_triton(
    build_conda: bool = False,
    build_rocm: bool = False,
    py_version: Optional[str] = None,
    release: bool = False,
) -> Path:
    env = os.environ.copy()
    if "MAX_JOBS" not in env:
        max_jobs = os.cpu_count() or 1
        env["MAX_JOBS"] = str(max_jobs)

    if not release:
        # Nightly binaries include the triton commit hash, i.e. 2.1.0+e6216047b8
        # while release build should only include the version, i.e. 2.1.0
        version = f"{version}+{commit_hash[:10]}"

    with TemporaryDirectory() as tmpdir:
        triton_basedir = Path(tmpdir) / "triton"
        triton_pythondir = triton_basedir / "python"
@@ -80,7 +86,7 @@ def build_triton(
    if build_conda:
        with open(triton_basedir / "meta.yaml", "w") as meta:
            print(
                f"package:\n name: torchtriton\n version: {version}+{commit_hash[:10]}\n",
                f"package:\n name: torchtriton\n version: {version}\n",
                file=meta,
            )
            print("source:\n path: .\n", file=meta)
@@ -103,7 +109,7 @@ def build_triton(

        patch_init_py(
            triton_pythondir / "triton" / "__init__.py",
            version=f"{version}+{commit_hash[:10]}",
            version=f"{version}",
        )
        if py_version is None:
            py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
@@ -129,11 +135,11 @@ def build_triton(
        patch_setup_py(
            triton_pythondir / "setup.py",
            name=triton_pkg_name,
            version=f"{version}+{commit_hash[:10]}",
            version=f"{version}",
        )
        patch_init_py(
            triton_pythondir / "triton" / "__init__.py",
            version=f"{version}+{commit_hash[:10]}",
            version=f"{version}",
        )

        if build_rocm:
@@ -157,12 +163,14 @@ def main() -> None:
    from argparse import ArgumentParser

    parser = ArgumentParser("Build Triton binaries")
    parser.add_argument("--release", action="store_true")
    parser.add_argument("--build-conda", action="store_true")
    parser.add_argument("--build-rocm", action="store_true")
    parser.add_argument("--py-version", type=str)
    parser.add_argument("--commit-hash", type=str)
    parser.add_argument("--triton-version", type=str, default=read_triton_version())
    args = parser.parse_args()

    build_triton(
        build_rocm=args.build_rocm,
        commit_hash=args.commit_hash
@@ -171,6 +179,7 @@ def main() -> None:
        version=args.triton_version,
        build_conda=args.build_conda,
        py_version=args.py_version,
        release=args.release,
    )
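The net effect of the new --release flag is on the version string written into meta.yaml, setup.py, and triton/__init__.py: nightly builds carry a truncated commit hash, release builds carry the bare version. A bash sketch of that naming rule (the hash value is a placeholder; the "2.1.0+e6216047b8" form comes from the comment in the diff):

TRITON_VERSION="2.1.0"
COMMIT_HASH="e6216047b8000000000000000000000000000000"   # placeholder full SHA
RELEASE="${RELEASE:-0}"
if [[ "${RELEASE}" != "1" ]]; then
  TRITON_VERSION="${TRITON_VERSION}+${COMMIT_HASH:0:10}"  # nightly: 2.1.0+e6216047b8
fi
echo "${TRITON_VERSION}"                                  # release: 2.1.0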
29  .github/scripts/ensure_actions_will_cancel.py  (vendored)
@@ -1,6 +1,5 @@
#!/usr/bin/env python3

import argparse
import sys

from pathlib import Path
@@ -10,9 +9,11 @@ import yaml

REPO_ROOT = Path(__file__).resolve().parent.parent.parent
WORKFLOWS = REPO_ROOT / ".github" / "workflows"
EXPECTED_GROUP = (
EXPECTED_GROUP_PREFIX = (
    "${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}"
    "-${{ github.event_name == 'workflow_dispatch' }}"
)
EXPECTED_GROUP = (
    EXPECTED_GROUP_PREFIX + "-${{ github.event_name == 'workflow_dispatch' }}"
)

@@ -26,15 +27,8 @@ def should_check(filename: Path) -> bool:

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Ensure all relevant GitHub actions jobs will be cancelled based on a concurrency key"
    )
    args = parser.parse_args()

    files = list(WORKFLOWS.glob("*.yml"))

    errors_found = False
    files = [f for f in files if should_check(f)]
    files = [f for f in WORKFLOWS.glob("*.yml") if should_check(f)]
    names = set()
    for filename in files:
        with open(filename) as f:
@@ -46,7 +40,18 @@ if __name__ == "__main__":
            errors_found = True
        names.add(name)
        actual = data.get("concurrency", {})
        if not actual.get("group", "").startswith(EXPECTED_GROUP):
        if filename.name == "create_release.yml":
            if not actual.get("group", "").startswith(EXPECTED_GROUP_PREFIX):
                print(
                    f"'concurrency' incorrect or not found in '{filename.relative_to(REPO_ROOT)}'",
                    file=sys.stderr,
                )
                print(
                    f"concurrency group should start with {EXPECTED_GROUP_PREFIX} but found {actual.get('group', None)}",
                    file=sys.stderr,
                )
                errors_found = True
        elif not actual.get("group", "").startswith(EXPECTED_GROUP):
            print(
                f"'concurrency' incorrect or not found in '{filename.relative_to(REPO_ROOT)}'",
                file=sys.stderr,
5  .github/scripts/filter_test_configs.py  (vendored)
@@ -62,9 +62,10 @@ SUPPORTED_PERIODICAL_MODES: Dict[str, Callable[[Optional[str]], bool]] = {
}

# The link to the published list of disabled jobs
DISABLED_JOBS_URL = "https://ossci-metrics.s3.amazonaws.com/disabled-jobs.json"
# Pinning Disabled and Unstable job to Oct 4, 2023.
DISABLED_JOBS_URL = "https://ossci-metrics.s3.amazonaws.com/disabled-jobs.json?versionId=EniFrNbB6taGjwKyN94j4oqUeeN8ALfI"
# and unstable jobs
UNSTABLE_JOBS_URL = "https://ossci-metrics.s3.amazonaws.com/unstable-jobs.json"
UNSTABLE_JOBS_URL = "https://ossci-metrics.s3.amazonaws.com/unstable-jobs.json?versionId=2voGK5DSv0Hzvxhc23ChGcOLEBIO2vHf"

# Some constants used to handle disabled and unstable jobs
JOB_NAME_SEP = "/"
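Appending ?versionId=... pins the CI to one specific S3 object version of the disabled/unstable job lists, so later uploads to the same key cannot silently change behaviour on the release branch. A hedged way to confirm the pinned object is still fetchable (URL taken verbatim from the diff; curl is not part of this script):

curl -fsS "https://ossci-metrics.s3.amazonaws.com/disabled-jobs.json?versionId=EniFrNbB6taGjwKyN94j4oqUeeN8ALfI" \
  -o /tmp/disabled-jobs.json && wc -c /tmp/disabled-jobs.json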
30  .github/scripts/generate_binary_build_matrix.py  (vendored)
@@ -24,6 +24,21 @@ CPU_CXX11_ABI_ARCH = ["cpu-cxx11-abi"]

CPU_AARCH64_ARCH = ["cpu-aarch64"]

PYTORCH_EXTRA_INSTALL_REQUIREMENTS = (
    "nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | " # noqa: B950
    "nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | "
    "triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'"
)


def arch_type(arch_version: str) -> str:
    if arch_version in CUDA_ARCHES:
@@ -238,17 +253,7 @@ def generate_wheels_matrix(
                    "devtoolset": "",
                    "container_image": WHEEL_CONTAINER_IMAGES[arch_version],
                    "package_type": package_type,
                    "pytorch_extra_install_requirements": "nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | " # noqa: B950
                    "nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | "
                    "nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'",
                    "pytorch_extra_install_requirements": PYTORCH_EXTRA_INSTALL_REQUIREMENTS,
                    "build_name": f"{package_type}-py{python_version}-{gpu_arch_type}{gpu_arch_version}-with-pypi-cudnn".replace( # noqa: B950
                        ".", "_"
                    ),
@@ -273,6 +278,9 @@ def generate_wheels_matrix(
                    "build_name": f"{package_type}-py{python_version}-{gpu_arch_type}{gpu_arch_version}".replace(
                        ".", "_"
                    ),
                    "pytorch_extra_install_requirements": PYTORCH_EXTRA_INSTALL_REQUIREMENTS
                    if os != "linux"
                    else "",
                }
            )
    return ret
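PYTORCH_EXTRA_INSTALL_REQUIREMENTS is one long string of PEP 508 requirement specifiers joined with " | "; hoisting it into a module-level constant removes the duplicated inline copy in generate_wheels_matrix. How downstream build scripts split it is not shown in this diff; a hedged bash sketch of one way to turn such a string back into individual specifiers:

REQS="nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' | triton==2.1.0; platform_system == 'Linux'"
IFS='|' read -r -a parts <<< "${REQS}"
for r in "${parts[@]}"; do
  r="${r#"${r%%[![:space:]]*}"}"   # trim leading whitespace left by the separator
  r="${r%"${r##*[![:space:]]}"}"   # trim trailing whitespace
  printf '%s\n' "${r}"
done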
2
.github/templates/common.yml.j2
vendored
2
.github/templates/common.yml.j2
vendored
@ -8,7 +8,7 @@
|
||||
# NOTE: If testing pytorch/builder changes you can change this variable to change what pytorch/builder reference
|
||||
# the binary builds will check out
|
||||
{%- set builder_repo = "pytorch/builder" -%}
|
||||
{%- set builder_branch = "main" -%}
|
||||
{%- set builder_branch = "release/2.1" -%}
|
||||
|
||||
{%- macro concurrency(build_environment) -%}
|
||||
concurrency:
|
||||
|
@ -55,12 +55,12 @@ jobs:
|
||||
uses: ./.github/workflows/_binary-build-linux.yml
|
||||
with:!{{ upload.binary_env_as_input(config) }}
|
||||
{%- if "aarch64" in build_environment %}
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
{%- endif %}
|
||||
build_name: !{{ config["build_name"] }}
|
||||
build_environment: !{{ build_environment }}
|
||||
{%- if config.pytorch_extra_install_requirements is defined %}
|
||||
{%- if config.pytorch_extra_install_requirements is defined and config.pytorch_extra_install_requirements|d('')|length > 0 %}
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: !{{ config.pytorch_extra_install_requirements }}
|
||||
{%- endif %}
|
||||
secrets:
|
||||
@ -74,7 +74,7 @@ jobs:
|
||||
build_name: !{{ config["build_name"] }}
|
||||
build_environment: !{{ build_environment }}
|
||||
{%- if "aarch64" in build_environment %}
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
{%- elif config["gpu_arch_type"] == "rocm" %}
|
||||
runs_on: linux.rocm.gpu
|
||||
@ -97,13 +97,13 @@ jobs:
|
||||
with:
|
||||
name: !{{ config["build_name"] }}
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch") }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch", checkout_pr_head=False) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch, checkout_pr_head=False) }}
|
||||
- name: ROCm set GPU_FLAG
|
||||
run: |
|
||||
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
|
||||
- name: Pull Docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: !{{ config["container_image"] }}
|
||||
- name: Test Pytorch binary
|
||||
|
@ -61,6 +61,9 @@ jobs:
|
||||
runs-on: macos-12-xl
|
||||
timeout-minutes: !{{ common.timeout_minutes }}
|
||||
!{{ upload.binary_env(config, true) }}
|
||||
{%- if config.pytorch_extra_install_requirements is defined and config.pytorch_extra_install_requirements|d('')|length > 0 %}
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: !{{ config.pytorch_extra_install_requirements }}
|
||||
{%- endif %}
|
||||
# For sccache access (only on non-forked PRs)
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.MACOS_SCCACHE_S3_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.MACOS_SCCACHE_S3_SECRET_ACCESS_KEY }}
|
||||
@ -74,8 +77,8 @@ jobs:
|
||||
/bin/bash "${RUNNER_TEMP}/conda.sh" -b -p "${RUNNER_TEMP}/anaconda"
|
||||
echo "${RUNNER_TEMP}/anaconda/bin" >> "${GITHUB_PATH}"
|
||||
echo "DEVELOPER_DIR=/Applications/Xcode_13.3.1.app/Contents/Developer" >> "${GITHUB_ENV}"
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch") }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch", checkout_pr_head=False) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch, checkout_pr_head=False) }}
|
||||
- name: Install sccache (only for non-forked PRs, and pushes to trunk)
|
||||
uses: nick-fields/retry@v2.8.2
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
|
2
.github/templates/upload.yml.j2
vendored
2
.github/templates/upload.yml.j2
vendored
@ -67,6 +67,6 @@
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
{%- endmacro %}
|
||||
|
@ -59,11 +59,14 @@ jobs:
|
||||
runs-on: windows.4xlarge.nonephemeral
|
||||
timeout-minutes: !{{ common.timeout_minutes }}
|
||||
!{{ upload.binary_env(config, True) }}
|
||||
{%- if config.pytorch_extra_install_requirements is defined and config.pytorch_extra_install_requirements|d('')|length > 0 %}
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: !{{ config.pytorch_extra_install_requirements }}
|
||||
{%- endif %}
|
||||
steps:
|
||||
!{{ common.setup_ec2_windows() }}
|
||||
!{{ set_runner_specific_vars() }}
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch") }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch", checkout_pr_head=False) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch, checkout_pr_head=False) }}
|
||||
- name: Populate binary env
|
||||
shell: bash
|
||||
run: |
|
||||
@ -102,8 +105,8 @@ jobs:
|
||||
with:
|
||||
name: !{{ config["build_name"] }}
|
||||
path: "${{ env.PYTORCH_FINAL_PACKAGE_DIR }}"
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch") }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="pytorch", checkout_pr_head=False) }}
|
||||
!{{ common.checkout(deep_clone=False, directory="builder", repository=common.builder_repo, branch=common.builder_branch, checkout_pr_head=False) }}
|
||||
- name: Populate binary env
|
||||
shell: bash
|
||||
run: |
|
||||
|
12
.github/workflows/_android-build-test.yml
vendored
12
.github/workflows/_android-build-test.yml
vendored
@ -36,7 +36,7 @@ jobs:
|
||||
keep-going: ${{ steps.filter.outputs.keep-going }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -58,25 +58,25 @@ jobs:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image-name }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
@ -140,5 +140,5 @@ jobs:
|
||||
if: always()
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
12
.github/workflows/_android-full-build-test.yml
vendored
12
.github/workflows/_android-full-build-test.yml
vendored
@ -36,7 +36,7 @@ jobs:
|
||||
keep-going: ${{ steps.filter.outputs.keep-going }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -58,25 +58,25 @@ jobs:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image-name }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
@ -185,5 +185,5 @@ jobs:
|
||||
if: always()
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
14
.github/workflows/_bazel-build-test.yml
vendored
14
.github/workflows/_bazel-build-test.yml
vendored
@ -41,7 +41,7 @@ jobs:
|
||||
reenabled-issues: ${{ steps.filter.outputs.reenabled-issues }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -63,30 +63,30 @@ jobs:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image-name }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
- name: Install nvidia driver, nvidia-docker runtime, set GPU_FLAG
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@release/2.1
|
||||
if: ${{ inputs.cuda-version != 'cpu' }}
|
||||
|
||||
- name: Output disk space left
|
||||
@ -197,5 +197,5 @@ jobs:
|
||||
file-suffix: bazel-${{ github.job }}_${{ steps.get-job-id.outputs.job-id }}
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
15
.github/workflows/_binary-build-linux.yml
vendored
15
.github/workflows/_binary-build-linux.yml
vendored
@ -139,12 +139,12 @@ jobs:
|
||||
run: env
|
||||
|
||||
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.github-token }}
|
||||
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: ${{ inputs.build_environment == 'linux-aarch64-binary-manywheel' }}
|
||||
|
||||
@ -159,10 +159,12 @@ jobs:
|
||||
- name: Clean workspace
|
||||
shell: bash
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
rm -rf "${GITHUB_WORKSPACE}"
|
||||
mkdir "${GITHUB_WORKSPACE}"
|
||||
|
||||
if [[ inputs.build_environment == 'linux-aarch64-binary-manywheel' ]]; then
|
||||
if [[ ${{ inputs.build_environment }} == 'linux-aarch64-binary-manywheel' ]]; then
|
||||
rm -rf "${RUNNER_TEMP}/artifacts"
|
||||
mkdir "${RUNNER_TEMP}/artifacts"
|
||||
fi
|
||||
@ -170,7 +172,6 @@ jobs:
|
||||
- name: Checkout PyTorch to pytorch dir
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -184,7 +185,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder to builder dir
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -210,7 +211,7 @@ jobs:
|
||||
|
||||
- name: Pull Docker image
|
||||
if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' }}
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ inputs.DOCKER_IMAGE }}
|
||||
|
||||
@ -267,7 +268,7 @@ jobs:
|
||||
|
||||
- name: Teardown Linux
|
||||
if: always()
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
|
||||
- name: Chown workspace
|
||||
if: always()
|
||||
|
13
.github/workflows/_binary-test-linux.yml
vendored
13
.github/workflows/_binary-test-linux.yml
vendored
@ -127,13 +127,13 @@ jobs:
|
||||
} >> "${GITHUB_ENV} }}"
|
||||
|
||||
- name: "[FB EMPLOYEES] Enable SSH (Click me for login details)"
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.github-token }}
|
||||
|
||||
# Setup the environment
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: ${{ inputs.build_environment == 'linux-aarch64-binary-manywheel' }}
|
||||
|
||||
@ -154,7 +154,6 @@ jobs:
|
||||
- name: Checkout PyTorch to pytorch dir
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
|
||||
@ -167,7 +166,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder to builder dir
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -198,12 +197,12 @@ jobs:
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
|
||||
- name: Install nvidia driver, nvidia-docker runtime, set GPU_FLAG
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@release/2.1
|
||||
if: ${{ inputs.GPU_ARCH_TYPE == 'cuda' && steps.filter.outputs.is-test-matrix-empty == 'False' }}
|
||||
|
||||
- name: Pull Docker image
|
||||
if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' }}
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ inputs.DOCKER_IMAGE }}
|
||||
|
||||
@ -213,7 +212,7 @@ jobs:
|
||||
|
||||
- name: Teardown Linux
|
||||
if: always()
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
|
||||
- name: Chown workspace
|
||||
if: always()
|
||||
|
4
.github/workflows/_binary-upload.yml
vendored
4
.github/workflows/_binary-upload.yml
vendored
@ -97,7 +97,7 @@ jobs:
|
||||
SHA1: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: true
|
||||
|
||||
@ -121,7 +121,7 @@ jobs:
|
||||
shell: bash -e -l {0}
|
||||
run: |
|
||||
# reference ends with an RC suffix
|
||||
if [[ ${GITHUB_REF_NAME} = *-rc[0-9]* ]]; then
|
||||
if [[ "${GITHUB_REF_NAME}" = *-rc[0-9]* ]]; then
|
||||
echo "UPLOAD_CHANNEL=test" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
|
6
.github/workflows/_buck-build-test.yml
vendored
6
.github/workflows/_buck-build-test.yml
vendored
@ -22,7 +22,7 @@ jobs:
|
||||
keep-going: ${{ steps.filter.outputs.keep-going }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -43,7 +43,7 @@ jobs:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Set up JDK 8
|
||||
uses: actions/setup-java@v3
|
||||
@ -52,7 +52,7 @@ jobs:
|
||||
distribution: 'temurin'
|
||||
|
||||
- name: Setup miniconda
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: 3.8
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}
|
||||
|
10
.github/workflows/_docs.yml
vendored
10
.github/workflows/_docs.yml
vendored
@ -66,7 +66,7 @@ jobs:
|
||||
name: build-docs-${{ matrix.docs_type }}-${{ inputs.push }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
instructions: |
|
||||
@ -77,19 +77,19 @@ jobs:
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
@ -187,5 +187,5 @@ jobs:
|
||||
s3-prefix: pytorch/pytorch/${{ github.event.pull_request.number }}/functorchdocs
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
371
.github/workflows/_ios-build-test.yml
vendored
371
.github/workflows/_ios-build-test.yml
vendored
@ -7,14 +7,6 @@ on:
|
||||
required: true
|
||||
type: string
|
||||
description: Top-level label for what's being built/tested.
|
||||
ios-platform:
|
||||
required: true
|
||||
type: string
|
||||
description: Which iOS platform to build for.
|
||||
ios-arch:
|
||||
required: true
|
||||
type: string
|
||||
description: Which iOS arch to build for.
|
||||
sync-tag:
|
||||
required: false
|
||||
type: string
|
||||
@ -31,8 +23,6 @@ on:
|
||||
env:
|
||||
GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
BUILD_ENVIRONMENT: ${{ inputs.build-environment }}
|
||||
IOS_PLATFORM: ${{ inputs.ios-platform }}
|
||||
IOS_ARCH: ${{ inputs.ios-arch }}
|
||||
|
||||
jobs:
|
||||
filter:
|
||||
@ -43,7 +33,7 @@ jobs:
|
||||
keep-going: ${{ steps.filter.outputs.keep-going }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -63,33 +53,30 @@ jobs:
|
||||
matrix: ${{ fromJSON(needs.filter.outputs.test-matrix) }}
|
||||
fail-fast: false
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
IOS_PLATFORM: ${{ matrix.ios_platform }}
|
||||
IOS_ARCH: ${{ matrix.ios_arch }}
|
||||
BUILD_LITE_INTERPRETER: ${{ matrix.use_lite_interpreter }}
|
||||
USE_PYTORCH_METAL: ${{ matrix.use_metal }}
|
||||
USE_COREML_DELEGATE: ${{ matrix.use_coreml }}
|
||||
CUSTOM_OP_LIST: ${{ matrix.use_custom_op_list }}
|
||||
# TODO: Bump it to 2.2.0 after cherry pick this or figure out a better way
|
||||
# to get this version instead of hard coding it here
|
||||
PYTORCH_VERSION: 2.1.0
|
||||
timeout-minutes: 240
|
||||
steps:
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Populate CI build options
|
||||
shell: bash
|
||||
run: |
|
||||
# Most builds use the lite interpreter, if certain builds shouldn't
|
||||
# build the lite interpreter this env variable should get over-written
|
||||
# in the following case statement
|
||||
echo "BUILD_LITE_INTERPRETER=1" >> "${GITHUB_ENV}"
|
||||
set -ex
|
||||
|
||||
case ${BUILD_ENVIRONMENT} in
|
||||
*metal*)
|
||||
echo "USE_PYTORCH_METAL=1" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
*full_jit*)
|
||||
echo "BUILD_LITE_INTERPRETER=0" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
*custom*)
|
||||
echo "SELECTED_OP_LIST=${GITHUB_WORKSPACE}/ios/TestApp/custom_build/mobilenetv2.yaml" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
*coreml*)
|
||||
echo "USE_COREML_DELEGATE=1" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
esac
|
||||
if [ -n "${CUSTOM_OP_LIST:-}" ]; then
|
||||
echo "SELECTED_OP_LIST=${GITHUB_WORKSPACE}/ios/TestApp/custom_build/${CUSTOM_OP_LIST}" >> "${GITHUB_ENV}"
|
||||
fi
|
||||
|
||||
- name: Install brew dependencies
|
||||
uses: nick-fields/retry@v2.8.2
|
||||
@ -102,7 +89,7 @@ jobs:
|
||||
brew install libtool
|
||||
|
||||
- name: Setup miniconda for iOS
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: "3.9"
|
||||
environment-file: .github/requirements/conda-env-iOS
|
||||
@ -116,54 +103,67 @@ jobs:
|
||||
retry_wait_seconds: 90
|
||||
command: |
|
||||
set -x
|
||||
cd ios/TestApp
|
||||
# install fastlane
|
||||
|
||||
pushd ios/TestApp
|
||||
# Install fastlane
|
||||
sudo gem install bundler && bundle install
|
||||
bundle update fastlane
|
||||
popd
|
||||
|
||||
- name: Build PyTorch Mobile Runtime
|
||||
- name: Build PyTorch mobile runtime
|
||||
shell: bash
|
||||
run: |
|
||||
set -eux
|
||||
# shellcheck disable=SC1091
|
||||
export TCLLIBPATH="/usr/local/lib"
|
||||
python -VV
|
||||
${CONDA_RUN} scripts/build_ios.sh
|
||||
|
||||
- name: Build TestApp
|
||||
if: inputs.ios-platform == 'SIMULATOR'
|
||||
if: matrix.ios_platform == 'SIMULATOR'
|
||||
timeout-minutes: 15
|
||||
run: |
|
||||
# run the ruby build script
|
||||
# Run the ruby build script
|
||||
if ! [ -x "$(command -v xcodebuild)" ]; then
|
||||
echo 'Error: xcodebuild is not installed.'
|
||||
exit 1
|
||||
fi
|
||||
ruby scripts/xcode_build.rb -i build_ios/install -x ios/TestApp/TestApp.xcodeproj -p "${IOS_PLATFORM}"
|
||||
|
||||
- name: Run Simulator Tests
|
||||
if: inputs.ios-platform == 'SIMULATOR'
|
||||
- name: Run simulator tests
|
||||
if: matrix.ios_platform == 'SIMULATOR'
|
||||
shell: bash
|
||||
run: |
|
||||
set -eux
|
||||
# shellcheck disable=SC1091
|
||||
# use the pytorch nightly build to generate models
|
||||
${CONDA_RUN} pip3 install --pre torch torchvision torchaudio -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
|
||||
# generate models for differnet backends
|
||||
cd "${GITHUB_WORKSPACE}/ios/TestApp/benchmark"
|
||||
# Use the pytorch nightly build to generate models
|
||||
${CONDA_RUN} pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cpu
|
||||
|
||||
# Generate models for differnet backends
|
||||
pushd "${GITHUB_WORKSPACE}/ios/TestApp/benchmark"
|
||||
mkdir -p ../models
|
||||
# NB: Both of the following scripts only export models with lite interpreter
|
||||
if [ "${USE_COREML_DELEGATE}" == 1 ]; then
|
||||
${CONDA_RUN} python coreml_backend.py
|
||||
else
|
||||
cd "${GITHUB_WORKSPACE}"
|
||||
pushd "${GITHUB_WORKSPACE}"
|
||||
${CONDA_RUN} python test/mobile/model_test/gen_test_model.py ios-test
|
||||
popd
|
||||
fi
|
||||
cd "${GITHUB_WORKSPACE}/ios/TestApp/benchmark"
|
||||
|
||||
if [ "${BUILD_LITE_INTERPRETER}" == 1 ]; then
|
||||
echo "Setting up the TestApp for LiteInterpreter"
|
||||
ruby setup.rb --lite 1
|
||||
else
|
||||
# Generate some models for JIT without lite interpreter
|
||||
${CONDA_RUN} python trace_model.py
|
||||
|
||||
echo "Setting up the TestApp for Full JIT"
|
||||
ruby setup.rb
|
||||
fi
|
||||
cd "${GITHUB_WORKSPACE}/ios/TestApp"
|
||||
# instruments -s -devices
|
||||
popd
|
||||
|
||||
pushd "${GITHUB_WORKSPACE}/ios/TestApp"
|
||||
# Instruments -s -devices
|
||||
if [ "${BUILD_LITE_INTERPRETER}" == 1 ]; then
|
||||
if [ "${USE_COREML_DELEGATE}" == 1 ]; then
|
||||
bundle exec fastlane scan --only_testing TestAppTests/TestAppTests/testCoreML
|
||||
@ -173,9 +173,282 @@ jobs:
|
||||
else
|
||||
bundle exec fastlane scan --only_testing TestAppTests/TestAppTests/testFullJIT
|
||||
fi
|
||||
popd
|
||||
|
||||
- name: Dump Simulator Tests On a Failure
|
||||
if: failure() && inputs.ios-platform == 'SIMULATOR'
|
||||
- name: Dump simulator tests on failure
|
||||
if: failure() && matrix.ios_platform == 'SIMULATOR'
|
||||
run: |
|
||||
echo "Simulator Tests Logs:"
|
||||
cat /Users/runner/Library/Logs/scan/*.log
|
||||
|
||||
- name: Prepare the build artifacts for upload
|
||||
shell: bash
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
# The structure of the folder is as follows:
|
||||
#
|
||||
# RUNNER_TEMP/
|
||||
# └── IOS_ARCH/
|
||||
# ├── LICENSE
|
||||
# ├── install
|
||||
# │ ├── include
|
||||
# │ │ └── headers
|
||||
# │ └── lib
|
||||
# │ ├── libXNNPACK.a
|
||||
# │ ├── libc10.a
|
||||
# │ ├── libclog.a
|
||||
# │ ├── libcpuinfo.a
|
||||
# │ ├── libeigen_blas.a
|
||||
# │ ├── libpthreadpool.a
|
||||
# │ ├── libpytorch_qnnpack.a
|
||||
# │ ├── libtorch.a
|
||||
# │ └── libtorch_cpu.a
|
||||
# ├── src
|
||||
# │ └── LibTorch-Lite.h
|
||||
# └── version.txt
|
||||
SETUP_DIR="${RUNNER_TEMP}/${IOS_ARCH}"
|
||||
mkdir -p "${SETUP_DIR}/src"
|
||||
|
||||
cp -R "${GITHUB_WORKSPACE}/build_ios/install" "${SETUP_DIR}"
|
||||
# Copy the umbrella header and license
|
||||
if [ "${BUILD_LITE_INTERPRETER}" == 1 ]; then
|
||||
cp "${GITHUB_WORKSPACE}/ios/LibTorch-Lite.h" "${SETUP_DIR}/src"
|
||||
else
|
||||
cp "${GITHUB_WORKSPACE}/ios/LibTorch.h" "${SETUP_DIR}/src"
|
||||
fi
|
||||
|
||||
# Copy license and version
|
||||
cp "${GITHUB_WORKSPACE}/LICENSE" "${SETUP_DIR}"
|
||||
echo "${PYTORCH_VERSION}" > "${SETUP_DIR}"/version.txt
|
||||
|
||||
# Save the podspec for the upload job later
|
||||
if [ "${BUILD_LITE_INTERPRETER}" == "1" ]; then
|
||||
DATE=$(date -u +%Y%m%d)
|
||||
cp "${GITHUB_WORKSPACE}"/ios/LibTorch-Lite-Nightly.podspec.template "${SETUP_DIR}"/LibTorch-Lite-Nightly.podspec
|
||||
sed -i '' -e "s/IOS_NIGHTLY_BUILD_VERSION/${PYTORCH_VERSION}.${DATE}/g" "${SETUP_DIR}"/LibTorch-Lite-Nightly.podspec
|
||||
|
||||
cp "${GITHUB_WORKSPACE}"/ios/LibTorch-Lite.podspec.template "${SETUP_DIR}"/LibTorch-Lite.podspec
|
||||
sed -i '' -e "s/IOS_BUILD_VERSION/${PYTORCH_VERSION}/g" "${SETUP_DIR}"/LibTorch-Lite.podspec
|
||||
else
|
||||
# NB: There is no nightly build without lite interpreter atm
|
||||
cp "${GITHUB_WORKSPACE}"/ios/LibTorch.podspec.template "${SETUP_DIR}"/LibTorch.podspec
|
||||
sed -i '' -e "s/IOS_BUILD_VERSION/${PYTORCH_VERSION}/g" "${SETUP_DIR}"/LibTorch.podspec
|
||||
fi
|
||||
|
||||
pushd "${SETUP_DIR}"
|
||||
# NB: It's important to zip all the files before uploading because the GHA will upload
|
||||
# all files sequentially which is both slow and has too many requests. More info is at
|
||||
# https://github.com/actions/upload-artifact#too-many-uploads-resulting-in-429-responses
|
||||
zip -r "${IOS_ARCH}.zip" install src version.txt LICENSE ./*.podspec
|
||||
popd
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: pytorch-ios-build-artifacts-${{ matrix.ios_arch }}
|
||||
if-no-files-found: error
|
||||
path: ${{ runner.temp }}/${{ matrix.ios_arch }}/${{ matrix.ios_arch }}.zip
|
||||
|
||||
upload-ios-artifacts:
|
||||
# NB: this job run on GitHub MacOS ephemeral runner so that it can use lipo
|
||||
# to create the fat iOS binaries for both x86_64 and arm64
|
||||
runs-on: macos-12
|
||||
needs: build
|
||||
# NB: Only upload release build, if we need it, we could also turn on nightly here
|
||||
environment: ${{ (github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || startsWith(github.event.ref, 'refs/tags/v'))) && 'ios-upload' || '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# For awscli S3 upload
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
cache: pip
|
||||
|
||||
# For cocoapods pod upload
|
||||
- uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: '3.2'
|
||||
bundler-cache: true
|
||||
|
||||
- name: Download arm64 artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: pytorch-ios-build-artifacts-arm64
|
||||
|
||||
- name: Download x86_64 artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: pytorch-ios-build-artifacts-x86_64
|
||||
|
||||
- name: Unzip arm64 and x86_64 artifacts
|
||||
shell: bash
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
for ARCH in "arm64" "x86_64"; do
|
||||
TMP_DIR="${RUNNER_TEMP}/${ARCH}"
|
||||
mkdir -p "${TMP_DIR}"
|
||||
|
||||
cp "${ARCH}.zip" "${TMP_DIR}"
|
||||
|
||||
pushd "${TMP_DIR}"
|
||||
unzip -o "${ARCH}.zip"
|
||||
popd
|
||||
done
|
||||
|
||||
- name: Prepare the artifact
|
||||
env:
|
||||
IS_NIGHTLY: ${{ github.event.ref == 'refs/heads/nightly' }}
|
||||
shell: bash
|
||||
working-directory: ${{ runner.temp }}/arm64
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
DEST_DIR="${RUNNER_TEMP}"/ios
|
||||
echo "DEST_DIR=${DEST_DIR}" >> "$GITHUB_ENV"
|
||||
|
||||
# Prepare all the sub directories
|
||||
mkdir -p "${DEST_DIR}"/install/lib
|
||||
|
||||
# Copy header and share files, arm64 or x86_64 both work
|
||||
cp -R install/include "${DEST_DIR}"/install
|
||||
cp -R install/share "${DEST_DIR}"/install
|
||||
# The last dash is important to copy only files under src
|
||||
cp -R src "${DEST_DIR}"
|
||||
cp LICENSE "${DEST_DIR}"
|
||||
|
||||
if [ "${IS_NIGHTLY}" == true ]; then
|
||||
PYTORCH_VERSION=$(cat version.txt)
|
||||
DATE=$(date -u +%Y%m%d)
|
||||
echo "${PYTORCH_VERSION}.${DATE}" > "${DEST_DIR}"/version.txt
|
||||
else
|
||||
cp version.txt "${DEST_DIR}"
|
||||
fi
|
||||
PYTORCH_VERSION=$(cat "${DEST_DIR}"/version.txt)
|
||||
echo "PYTORCH_VERSION=${PYTORCH_VERSION}" >> "$GITHUB_ENV"
|
||||
|
||||
pushd install/lib
|
||||
# shellcheck disable=SC2207
|
||||
LIBRARIES=($(ls ./*.a))
|
||||
popd
|
||||
|
||||
for LIB in "${LIBRARIES[@]}"; do
|
||||
FROM_LIBS=("${RUNNER_TEMP}"/arm64/install/lib/"${LIB}" "${RUNNER_TEMP}"/x86_64/install/lib/"${LIB}")
|
||||
# Create a fat binary for both arm64 and x86_64
|
||||
lipo -create "${FROM_LIBS[@]}" -o "${DEST_DIR}"/install/lib/"${LIB}"
|
||||
# Print the info
|
||||
lipo -i "${DEST_DIR}"/install/lib/"${LIB}"
|
||||
done
|
||||
|
||||
BUILD_LITE_INTERPRETER=1
|
||||
if [ -f "${RUNNER_TEMP}"/arm64/LibTorch.podspec ]; then
|
||||
# If LibTorch.podspec is used instead of LibTorch-Lite.podspec, the artifact is built
|
||||
# without lite interpreter
|
||||
BUILD_LITE_INTERPRETER=0
|
||||
fi
|
||||
echo "BUILD_LITE_INTERPRETER=${BUILD_LITE_INTERPRETER}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Prepare the podspec
|
||||
env:
|
||||
IS_NIGHTLY: ${{ github.event.ref == 'refs/heads/nightly' }}
|
||||
shell: bash
|
||||
working-directory: ${{ env.DEST_DIR }}
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
ARTIFACT_NAME=libtorch
|
||||
SPEC_NAME=LibTorch
|
||||
|
||||
if [ "${BUILD_LITE_INTERPRETER}" == "1" ]; then
|
||||
ARTIFACT_NAME="${ARTIFACT_NAME}_lite_ios"
|
||||
SPEC_NAME="${SPEC_NAME}-Lite"
|
||||
else
|
||||
ARTIFACT_NAME="${ARTIFACT_NAME}_ios"
|
||||
fi
|
||||
|
||||
if [ "${IS_NIGHTLY}" == true ]; then
|
||||
ARTIFACT_NAME="${ARTIFACT_NAME}_nightly_${PYTORCH_VERSION}.zip"
|
||||
SPEC_NAME="${SPEC_NAME}-Nightly"
|
||||
else
|
||||
ARTIFACT_NAME="${ARTIFACT_NAME}_${PYTORCH_VERSION}.zip"
|
||||
fi
|
||||
|
||||
SPEC_NAME_WITH_VERSION="${SPEC_NAME}-${PYTORCH_VERSION}.podspec"
|
||||
SPEC_NAME="${SPEC_NAME}.podspec"
|
||||
|
||||
# Also copy the spec file
|
||||
cp "${RUNNER_TEMP}"/arm64/"${SPEC_NAME}" "${SPEC_NAME_WITH_VERSION}"
|
||||
|
||||
# NB: It's important to zip all the files before uploading because the GHA will upload
|
||||
# all files sequentially which is both slow and has too many requests. More info is at
|
||||
# https://github.com/actions/upload-artifact#too-many-uploads-resulting-in-429-responses
|
||||
zip -r "${ARTIFACT_NAME}" install src version.txt LICENSE
|
||||
|
||||
{
|
||||
echo "ARTIFACT_NAME=${ARTIFACT_NAME}"
|
||||
echo "SPEC_NAME_WITH_VERSION=${SPEC_NAME_WITH_VERSION}"
|
||||
echo "SPEC_NAME=${SPEC_NAME}"
|
||||
} >> "$GITHUB_ENV"
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: pytorch-ios-artifacts
|
||||
if-no-files-found: error
|
||||
path: ${{ env.DEST_DIR }}/${{ env.ARTIFACT_NAME }}
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: pytorch-ios-podspec
|
||||
if-no-files-found: error
|
||||
path: ${{ env.DEST_DIR }}/${{ env.SPEC_NAME_WITH_VERSION }}
|
||||
|
||||
      - name: Set DRY_RUN
        if: ${{ github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || (startsWith(github.event.ref, 'refs/tags/v'))) }}
        shell: bash
        run: |
          echo "DRY_RUN=disabled" >> "$GITHUB_ENV"

      - name: Upload the artifact to S3
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
          IS_NIGHTLY: ${{ github.event.ref == 'refs/heads/nightly' }}
        shell: bash
        working-directory: ${{ env.DEST_DIR }}
        run: |
          set -eux

          pip install -q awscli==1.29.40

          DRY_RUN=${DRY_RUN:-enabled}
          AWS_S3_CP="aws s3 cp --dryrun"
          if [ "${DRY_RUN}" == "disabled" ]; then
            AWS_S3_CP="aws s3 cp"
          fi

          if [ "${IS_NIGHTLY}" == true ]; then
            BUCKET_NAME="ossci-ios-build"
          else
            BUCKET_NAME="ossci-ios"
          fi

          ${AWS_S3_CP} "${ARTIFACT_NAME}" "s3://${BUCKET_NAME}/" --acl public-read
          ${AWS_S3_CP} "${SPEC_NAME_WITH_VERSION}" "s3://${BUCKET_NAME}/" --acl public-read
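The step above defaults to a no-op: uploads run as `aws s3 cp --dryrun` and only become real when DRY_RUN is explicitly disabled on nightly or release-tag pushes. A small self-contained sketch of the same gate, with a hypothetical file and bucket rather than the real release targets:

# Sketch only: FILE_TO_UPLOAD and BUCKET_NAME are placeholders.
set -eux

DRY_RUN=${DRY_RUN:-enabled}          # safe default: nothing is copied
AWS_S3_CP="aws s3 cp --dryrun"
if [ "${DRY_RUN}" == "disabled" ]; then
  AWS_S3_CP="aws s3 cp"              # only trusted pushes flip this off
fi

FILE_TO_UPLOAD="artifact.zip"
BUCKET_NAME="my-example-bucket"

# With DRY_RUN=enabled this only prints "(dryrun) upload: ..." and copies nothing.
${AWS_S3_CP} "${FILE_TO_UPLOAD}" "s3://${BUCKET_NAME}/" --acl public-read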
      - name: Upload the artifact to cocoapods (nightly only)
        env:
          # We need to set this secret to upload to cocoapods. However, we might want
          # to NOT set this for PROD release so that we can upload the artifacts manually
          COCOAPODS_TRUNK_TOKEN: ${{ secrets.COCOAPODS_TRUNK_TOKEN || '' }}
        if: ${{ github.event_name == 'push' && github.event.ref == 'refs/heads/nightly' && env.COCOAPODS_TRUNK_TOKEN != '' }}
        shell: bash
        working-directory: ${{ runner.temp }}/arm64
        run: |
          set -eux

          gem install cocoapods

          pod trunk me
          # Upload the spec to cocoapods
          pod trunk push --verbose --allow-warnings --use-libraries --skip-import-validation "${SPEC_NAME}"
10 .github/workflows/_linux-build.yml vendored
@ -73,7 +73,7 @@ jobs:
|
||||
test-matrix: ${{ steps.filter.outputs.test-matrix }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
@ -82,19 +82,19 @@ jobs:
|
||||
# checkout because when we run this action we don't *have* a local
|
||||
# checkout. In other cases you should prefer a local checkout.
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image-name }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
@ -192,5 +192,5 @@ jobs:
|
||||
path: sccache-stats-*.json
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
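The change running through this and the following workflow files is the same release-branch pin: every reusable action reference is moved from @main to @release/2.1. A hedged sketch of how such a sweep could be done locally, assuming GNU sed; the expression and glob are illustrative, not the actual release tooling:

# Sketch only: bulk-pin action refs in the workflow files, then verify nothing still points at @main.
cd .github/workflows
sed -i \
  -e 's#pytorch/test-infra/.github/actions/\([a-z-]*\)@main#pytorch/test-infra/.github/actions/\1@release/2.1#g' \
  -e 's#pytorch/pytorch/.github/actions/checkout-pytorch@main#pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1#g' \
  ./*.yml
grep -n "actions/.*@main" ./*.yml || echo "all action refs pinned"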
12 .github/workflows/_linux-test.yml vendored
@ -57,7 +57,7 @@ jobs:
|
||||
timeout-minutes: ${{ inputs.timeout-minutes }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
if: ${{ !contains(matrix.runner, 'gcp.a100') }}
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
@ -66,25 +66,25 @@ jobs:
|
||||
docker exec -it $(docker container ps --format '{{.ID}}') bash
|
||||
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
- name: Install nvidia driver, nvidia-docker runtime, set GPU_FLAG
|
||||
id: install-nvidia-driver
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-nvidia@release/2.1
|
||||
if: contains(inputs.build-environment, 'cuda') && !contains(matrix.config, 'nogpu')
|
||||
|
||||
- name: Lock NVIDIA A100 40GB Frequency
|
||||
@ -292,7 +292,7 @@ jobs:
|
||||
path: ./**/core.[1-9]*
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
||||
# NB: We are currently having an intermittent GPU-related issue on G5 runners with
|
||||
|
10 .github/workflows/_mac-build.yml vendored
@ -71,11 +71,11 @@ jobs:
|
||||
test-matrix: ${{ steps.filter.outputs.test-matrix }}
|
||||
steps:
|
||||
- name: Clean up disk space before running MacOS workflow
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@main
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@release/2.1
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Set xcode version
|
||||
env:
|
||||
@ -87,7 +87,7 @@ jobs:
|
||||
|
||||
- name: Setup miniconda
|
||||
if: inputs.environment-file == ''
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}
|
||||
@ -97,7 +97,7 @@ jobs:
|
||||
# environment even though the arch is x86-64
|
||||
- name: Setup miniconda using the provided environment file
|
||||
if: inputs.environment-file != ''
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
environment-file: ${{ inputs.environment-file }}
|
||||
@ -206,4 +206,4 @@ jobs:
|
||||
- name: Clean up disk space
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@main
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@release/2.1
|
||||
|
4 .github/workflows/_mac-test-mps.yml vendored
@ -41,7 +41,7 @@ jobs:
|
||||
reenabled-issues: ${{ steps.filter.outputs.reenabled-issues }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -85,7 +85,7 @@ jobs:
|
||||
use-gha: true
|
||||
|
||||
- name: Setup miniconda
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}
|
||||
|
8 .github/workflows/_mac-test.yml vendored
@ -69,11 +69,11 @@ jobs:
|
||||
done
|
||||
|
||||
- name: Clean up disk space before running MacOS workflow
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@main
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@release/2.1
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Download build artifacts
|
||||
uses: ./.github/actions/download-build-artifacts
|
||||
@ -82,7 +82,7 @@ jobs:
|
||||
use-gha: true
|
||||
|
||||
- name: Setup miniconda
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}
|
||||
@ -205,4 +205,4 @@ jobs:
|
||||
- name: Clean up disk space
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@main
|
||||
uses: pytorch/test-infra/.github/actions/check-disk-space@release/2.1
|
||||
|
6 .github/workflows/_rocm-test.yml vendored
@ -48,7 +48,7 @@ jobs:
|
||||
steps:
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: true
|
||||
|
||||
@ -57,12 +57,12 @@ jobs:
|
||||
|
||||
- name: Calculate docker image
|
||||
id: calculate-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ inputs.docker-image }}
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }}
|
||||
|
||||
|
26 .github/workflows/_run_android_tests.yml vendored
@ -22,7 +22,7 @@ jobs:
|
||||
keep-going: ${{ steps.filter.outputs.keep-going }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
@ -41,17 +41,25 @@ jobs:
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.filter.outputs.test-matrix) }}
|
||||
fail-fast: false
|
||||
# NB: This job can only run on GitHub Linux runner atm. This is an ok thing though
|
||||
# because that runner is ephemeral and could access upload secrets
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
# GitHub runner installs Android SDK on this path
|
||||
ANDROID_ROOT: /usr/local/lib/android
|
||||
ANDROID_NDK_VERSION: '21.4.7075529'
|
||||
BUILD_LITE_INTERPRETER: ${{ matrix.use_lite_interpreter }}
|
||||
# 4 of them are supported atm: armeabi-v7a, arm64-v8a, x86, x86_64
|
||||
SUPPORT_ABI: '${{ matrix.support_abi }}'
|
||||
steps:
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup miniconda
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-miniconda@release/2.1
|
||||
with:
|
||||
python-version: 3.8
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}
|
||||
environment-file: .github/requirements/conda-env-${{ runner.os }}-${{ runner.arch }}.txt
|
||||
|
||||
- name: Install NDK
|
||||
uses: nick-fields/retry@v2.8.2
|
||||
@ -60,12 +68,12 @@ jobs:
|
||||
max_attempts: 3
|
||||
retry_wait_seconds: 90
|
||||
command: |
|
||||
set -eux
|
||||
|
||||
# Install NDK 21 after GitHub update
|
||||
# https://github.com/actions/virtual-environments/issues/5595
|
||||
ANDROID_ROOT="/usr/local/lib/android"
|
||||
ANDROID_SDK_ROOT="${ANDROID_ROOT}/sdk"
|
||||
ANDROID_NDK="${ANDROID_SDK_ROOT}/ndk-bundle"
|
||||
ANDROID_NDK_VERSION="21.4.7075529"
|
||||
|
||||
SDKMANAGER="${ANDROID_SDK_ROOT}/cmdline-tools/latest/bin/sdkmanager"
|
||||
# NB: This step downloads and installs NDK, thus it could be flaky.
|
||||
@ -86,8 +94,10 @@ jobs:
|
||||
|
||||
- name: Build PyTorch Android
|
||||
run: |
|
||||
set -eux
|
||||
|
||||
echo "CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname "$(which conda)")/../"}" >> "${GITHUB_ENV}"
|
||||
${CONDA_RUN} ./scripts/build_pytorch_android.sh x86
|
||||
${CONDA_RUN} ./scripts/build_pytorch_android.sh "${SUPPORT_ABI}"
|
||||
|
||||
- name: Run tests
|
||||
uses: reactivecircus/android-emulator-runner@v2
|
||||
|
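The NDK install step above wraps sdkmanager in a retry because the download is flaky. A minimal sketch of installing a pinned NDK by hand, assuming the Android command-line tools already live under the runner layout shown in the diff (adjust the paths for other machines):

# Sketch only: install a specific side-by-side NDK and confirm it is present.
set -eux

ANDROID_ROOT="/usr/local/lib/android"
ANDROID_SDK_ROOT="${ANDROID_ROOT}/sdk"
ANDROID_NDK_VERSION="21.4.7075529"
SDKMANAGER="${ANDROID_SDK_ROOT}/cmdline-tools/latest/bin/sdkmanager"

# Accept licenses non-interactively, then install the pinned NDK
yes | "${SDKMANAGER}" --licenses > /dev/null || true
"${SDKMANAGER}" --install "ndk;${ANDROID_NDK_VERSION}"

# List installed NDKs so a wrong or missing version fails loudly
ls "${ANDROID_SDK_ROOT}/ndk"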
6 .github/workflows/_win-build.yml vendored
@ -60,10 +60,10 @@ jobs:
|
||||
git config --global core.fsmonitor false
|
||||
|
||||
- name: Clean up leftover processes on non-ephemeral Windows runner
|
||||
uses: pytorch/test-infra/.github/actions/cleanup-runner@main
|
||||
uses: pytorch/test-infra/.github/actions/cleanup-runner@release/2.1
|
||||
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
instructions: |
|
||||
@ -78,7 +78,7 @@ jobs:
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: true
|
||||
|
||||
|
6 .github/workflows/_win-test.yml vendored
@ -48,10 +48,10 @@ jobs:
|
||||
git config --global core.fsmonitor false
|
||||
|
||||
- name: Clean up leftover processes on non-ephemeral Windows runner
|
||||
uses: pytorch/test-infra/.github/actions/cleanup-runner@main
|
||||
uses: pytorch/test-infra/.github/actions/cleanup-runner@release/2.1
|
||||
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
instructions: |
|
||||
@ -67,7 +67,7 @@ jobs:
|
||||
|
||||
# [see note: pytorch repo ref]
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
no-sudo: true
|
||||
|
||||
|
48 .github/workflows/build-android-binaries.yml vendored Normal file
@ -0,0 +1,48 @@
|
||||
name: Build Android binaries
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- nightly
|
||||
tags:
|
||||
# NOTE: Binary build pipelines should only get triggered on release candidate builds
|
||||
# Release candidate tags look like: v1.11.0-rc1
|
||||
- v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
|
||||
paths:
|
||||
- .github/workflows/build-android-binaries.yml
|
||||
- .github/workflows/_run_android_tests.yml
|
||||
- android/**
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/build-android-binaries.yml
|
||||
- .github/workflows/_run_android_tests.yml
|
||||
- android/**
|
||||
# NB: We can use this workflow dispatch to test and build the binaries manually
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
android-build-test:
|
||||
name: android-build-test
|
||||
uses: ./.github/workflows/_run_android_tests.yml
|
||||
with:
|
||||
test-matrix: |
|
||||
{ include: [
|
||||
{ config: 'default',
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: 'ubuntu-20.04-16x',
|
||||
use_lite_interpreter: 1,
|
||||
support_abi: 'armeabi-v7a,arm64-v8a,x86,x86_64',
|
||||
},
|
||||
{ config: 'default',
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: 'ubuntu-20.04-16x',
|
||||
use_lite_interpreter: 0,
|
||||
support_abi: 'armeabi-v7a,arm64-v8a,x86,x86_64',
|
||||
},
|
||||
]}
|
70 .github/workflows/build-ios-binaries.yml vendored Normal file
@ -0,0 +1,70 @@
|
||||
name: Build iOS binaries
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- nightly
|
||||
tags:
|
||||
# NOTE: Binary build pipelines should only get triggered on release candidate builds
|
||||
# Release candidate tags look like: v1.11.0-rc1
|
||||
- v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
|
||||
paths:
|
||||
- .github/workflows/build-ios-binaries.yml
|
||||
- .github/workflows/_ios-build-test.yml
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/build-ios-binaries.yml
|
||||
- .github/workflows/_ios-build-test.yml
|
||||
# NB: We can use this workflow dispatch to test and build iOS binaries manually
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
use_lite_interpreter:
|
||||
description: "Use PyTorch lite interpreter?"
|
||||
type: string
|
||||
default: 1
|
||||
use_coreml:
|
||||
description: "Use Apple Core ML?"
|
||||
type: string
|
||||
default: 1
|
||||
use_custom_op_list:
|
||||
description: "Specify the custom ops list to include in the binaries"
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# TODO: Figure out how to migrate this job to M1 runner
|
||||
ios-build-test:
|
||||
name: ios-build-test
|
||||
uses: ./.github/workflows/_ios-build-test.yml
|
||||
with:
|
||||
build-environment: ios-build-test
|
||||
sync-tag: ios-build-test
|
||||
test-matrix: |
|
||||
{ include: [
|
||||
{ config: "default",
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: "macos-12",
|
||||
ios_platform: "SIMULATOR",
|
||||
ios_arch: "x86_64",
|
||||
use_lite_interpreter: ${{ inputs.use_lite_interpreter || 1 }},
|
||||
use_metal: 0,
|
||||
use_coreml: ${{ inputs.use_coreml || 1 }},
|
||||
use_custom_op_list: ${{ inputs.use_custom_op_list || '' }}
|
||||
},
|
||||
{ config: "default",
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: "macos-12",
|
||||
ios_platform: "OS",
|
||||
ios_arch: "arm64",
|
||||
use_lite_interpreter: ${{ inputs.use_lite_interpreter || 1 }},
|
||||
use_metal: 1,
|
||||
use_coreml: ${{ inputs.use_coreml || 1 }},
|
||||
use_custom_op_list: ${{ inputs.use_custom_op_list || '' }}
|
||||
}
|
||||
]}
|
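Because the new workflow exposes workflow_dispatch inputs, a manual iOS binary build can be queued from the GitHub CLI. A hedged example, assuming `gh` is authenticated against the repository and the chosen ref actually carries this workflow file:

# Sketch only: the --ref value is illustrative.
gh workflow run build-ios-binaries.yml \
  --ref nightly \
  -f use_lite_interpreter=1 \
  -f use_coreml=1 \
  -f use_custom_op_list=""

# Follow the run that was just queued
gh run list --workflow=build-ios-binaries.yml --limit 1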
192 .github/workflows/build-triton-wheel.yml vendored
@ -3,7 +3,11 @@ name: Build Triton wheels
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release/2.1
|
||||
tags:
|
||||
# NOTE: Binary build pipelines should only get triggered on release candidate builds
|
||||
# Release candidate tags look like: v1.11.0-rc1
|
||||
- v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
|
||||
paths:
|
||||
- .github/workflows/build-triton-wheel.yml
|
||||
- .github/scripts/build_triton_wheel.py
|
||||
@ -43,12 +47,12 @@ jobs:
|
||||
BUILD_DEVICE: ${{ matrix.device }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
|
||||
@ -56,11 +60,13 @@ jobs:
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Pull Docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ env.DOCKER_IMAGE }}
|
||||
|
||||
- name: Build Triton wheel
|
||||
env:
|
||||
IS_RELEASE_TAG: ${{ startsWith(github.event.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
set -x
|
||||
mkdir -p "${RUNNER_TEMP}/artifacts/"
|
||||
@ -98,64 +104,75 @@ jobs:
|
||||
BUILD_ROCM="--build-rocm"
|
||||
fi
|
||||
|
||||
RELEASE=""
|
||||
if [[ "${IS_RELEASE_TAG}" == true ]]; then
|
||||
RELEASE="--release"
|
||||
fi
|
||||
|
||||
docker exec -t "${container_name}" yum install -y zlib-devel zip
|
||||
docker exec -t "${container_name}" "${PYTHON_EXECUTABLE}" -m pip install -U setuptools==67.4.0
|
||||
docker exec -t "${container_name}" "${PYTHON_EXECUTABLE}" /pytorch/.github/scripts/build_triton_wheel.py $BUILD_ROCM
|
||||
docker exec -t "${container_name}" "${PYTHON_EXECUTABLE}" /pytorch/.github/scripts/build_triton_wheel.py $BUILD_ROCM $RELEASE
|
||||
docker exec -t "${container_name}" chown -R 1000.1000 /artifacts
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-wheel-${{ matrix.py_vers }}"
|
||||
# NB: Use the same name here and all wheels can be downloaded by referring to the same artifact
|
||||
name: pytorch-triton-wheel
|
||||
if-no-files-found: error
|
||||
path:
|
||||
${{ runner.temp }}/artifacts/*
|
||||
path: ${{ runner.temp }}/artifacts/*
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
||||
upload-wheel:
|
||||
runs-on: linux.20_04.4x
|
||||
runs-on: ubuntu-22.04
|
||||
needs: build-wheel
|
||||
container:
|
||||
image: continuumio/miniconda3:4.12.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.github-token }}
|
||||
environment: ${{ (github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || startsWith(github.event.ref, 'refs/tags/v'))) && 'conda-aws-upload' || '' }}
|
||||
steps:
|
||||
- name: Download Build Artifacts (3.8)
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Download Build Artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-wheel-3.8"
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
- name: Download Build Artifacts (3.9)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-wheel-3.9"
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
- name: Download Build Artifacts (3.10)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-wheel-3.10"
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
- name: Download Build Artifacts (3.11)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-wheel-3.11"
|
||||
path: "${{ runner.temp }}/artifacts/"
|
||||
- name: Upload binaries
|
||||
if: ${{ github.event_name == 'push' && github.event.ref == 'refs/heads/main' }}
|
||||
env:
|
||||
PKG_DIR: "${{ runner.temp }}/artifacts"
|
||||
# When running these on pull_request events these should be blank
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_UPDATE_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_UPDATE_SECRET_ACCESS_KEY }}
|
||||
UPLOAD_BUCKET: "s3://pytorch"
|
||||
name: pytorch-triton-wheel
|
||||
path: ${{ runner.temp }}/artifacts/
|
||||
|
||||
- name: Set DRY_RUN (only for tagged pushes)
|
||||
if: ${{ github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || (startsWith(github.event.ref, 'refs/tags/v'))) }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -ex
|
||||
pip install -q awscli
|
||||
s3_dir="${UPLOAD_BUCKET}/whl/nightly/"
|
||||
for pkg in "${PKG_DIR}/"*.whl; do
|
||||
aws s3 cp --no-progress --acl public-read "${pkg}" "${s3_dir}"
|
||||
done
|
||||
echo "DRY_RUN=disabled" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Set UPLOAD_CHANNEL (only for tagged pushes)
|
||||
if: ${{ github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -ex
|
||||
|
||||
# reference ends with an RC suffix
|
||||
if [[ "${GITHUB_REF_NAME}" = *-rc[0-9]* ]]; then
|
||||
echo "UPLOAD_CHANNEL=test" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
# NB: This step is gated by DRY_RUN, which is enabled everywhere except nightly and release branches
|
||||
- name: Upload binaries
|
||||
env:
|
||||
PACKAGE_TYPE: wheel
|
||||
# The UPLOAD_SUBFOLDER needs to be empty here so that triton wheels are uploaded
|
||||
# to nightly or test
|
||||
UPLOAD_SUBFOLDER: ""
|
||||
PKG_DIR: ${{ runner.temp }}/artifacts
|
||||
# When running these on pull_request events these should be blank
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -ex
|
||||
bash .circleci/scripts/binary_upload.sh
|
||||
|
||||
build-conda:
|
||||
name: "Build Triton Conda"
|
||||
runs-on: [self-hosted, linux.2xlarge]
|
||||
@ -164,19 +181,17 @@ jobs:
|
||||
matrix:
|
||||
py_vers: [ "3.8", "3.9", "3.10", "3.11" ]
|
||||
timeout-minutes: 40
|
||||
environment: ${{ (github.event_name == 'push' && github.event.ref == 'refs/heads/main') && 'conda-aws-upload' || '' }}
|
||||
env:
|
||||
DOCKER_IMAGE: pytorch/conda-builder:cpu
|
||||
PY_VERS: ${{ matrix.py_vers }}
|
||||
ANACONDA_API_TOKEN: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
|
||||
@ -184,11 +199,13 @@ jobs:
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Pull Docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ env.DOCKER_IMAGE }}
|
||||
|
||||
- name: Build Triton conda package
|
||||
env:
|
||||
IS_RELEASE_TAG: ${{ startsWith(github.event.ref, 'refs/tags/v') }}
|
||||
run: |
|
||||
set -x
|
||||
mkdir -p "${RUNNER_TEMP}/artifacts/"
|
||||
@ -198,31 +215,76 @@ jobs:
|
||||
-v "${GITHUB_WORKSPACE}:/pytorch" \
|
||||
-v "${RUNNER_TEMP}/artifacts:/artifacts" \
|
||||
-w /artifacts/ \
|
||||
-e ANACONDA_API_TOKEN \
|
||||
"${DOCKER_IMAGE}" \
|
||||
)
|
||||
|
||||
RELEASE=""
|
||||
if [[ "${IS_RELEASE_TAG}" == true ]]; then
|
||||
RELEASE="--release"
|
||||
fi
|
||||
|
||||
docker exec -t "${container_name}" yum install -y llvm11 llvm11-devel llvm11-static llvm11-libs zlib-devel
|
||||
docker exec -t "${container_name}" python /pytorch/.github/scripts/build_triton_wheel.py --build-conda --py-version="${PY_VERS}"
|
||||
|
||||
- name: Upload artifacts to Anaconda
|
||||
if: ${{ github.event_name == 'push' && github.event.ref == 'refs/heads/main' }}
|
||||
run: |
|
||||
container_name=$(docker container ps --format '{{.ID}}')
|
||||
docker exec -t "${container_name}" sh -c "anaconda upload /artifacts/torch*.tar.bz2 -u pytorch-nightly --label main --no-progress --force"
|
||||
|
||||
- name: Chown artifacts
|
||||
run: |
|
||||
container_name=$(docker container ps --format '{{.ID}}')
|
||||
docker exec -t "${container_name}" python /pytorch/.github/scripts/build_triton_wheel.py --build-conda --py-version="${PY_VERS}" $RELEASE
|
||||
docker exec -t "${container_name}" chown -R 1000.1000 /artifacts
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "pytorch-triton-conda-${{ matrix.py_vers }}"
|
||||
# NB: Use the same name here and all wheels can be downloaded by referring to the same artifact
|
||||
name: pytorch-triton-conda
|
||||
if-no-files-found: error
|
||||
path:
|
||||
${{ runner.temp }}/artifacts/*
|
||||
path: ${{ runner.temp }}/artifacts/*
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
||||
upload-conda:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: build-conda
|
||||
container:
|
||||
image: continuumio/miniconda3:4.12.0
|
||||
environment: ${{ (github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || startsWith(github.event.ref, 'refs/tags/v'))) && 'conda-aws-upload' || '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Download Build Artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: pytorch-triton-conda
|
||||
path: ${{ runner.temp }}/artifacts/
|
||||
|
||||
      - name: Set DRY_RUN (only for tagged pushes)
        if: ${{ github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || (startsWith(github.event.ref, 'refs/tags/v'))) }}
        shell: bash
        run: |
          echo "DRY_RUN=disabled" >> "$GITHUB_ENV"

      - name: Set UPLOAD_CHANNEL (only for tagged pushes)
        if: ${{ github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') }}
        shell: bash
        run: |
          set -ex

          # reference ends with an RC suffix
          if [[ "${GITHUB_REF_NAME}" = *-rc[0-9]* ]]; then
            echo "UPLOAD_CHANNEL=test" >> "$GITHUB_ENV"
          fi

      # NB: This step is gated by DRY_RUN, which is enabled everywhere except nightly and release branches
      - name: Upload binaries to Anaconda
        env:
          PACKAGE_TYPE: conda
          PKG_DIR: ${{ runner.temp }}/artifacts
          # When running these on pull_request events these should be blank
          CONDA_PYTORCHBOT_TOKEN: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
          CONDA_PYTORCHBOT_TOKEN_TEST: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
        shell: bash
        run: |
          set -ex

          if [[ "${UPLOAD_CHANNEL}" = "nightly" ]]; then
            export ANACONDA_API_TOKEN="${CONDA_PYTORCHBOT_TOKEN}"
          else
            export ANACONDA_API_TOKEN="${CONDA_PYTORCHBOT_TOKEN_TEST}"
          fi
          bash .circleci/scripts/binary_upload.sh
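The glob test above is what routes release candidates to the test channel. A short illustration with hypothetical ref names (the real default channel comes from the upload script, not from this snippet):

# Sketch only: shows how the *-rc[0-9]* pattern classifies refs.
for ref in v2.1.1-rc1 v2.1.1 nightly; do
  if [[ "${ref}" = *-rc[0-9]* ]]; then
    echo "${ref}: matches the rc pattern -> UPLOAD_CHANNEL=test"
  else
    echo "${ref}: no rc suffix -> UPLOAD_CHANNEL left to the upload script's default"
  fi
done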
2 .github/workflows/check-labels.yml vendored
@ -29,7 +29,7 @@ jobs:
|
||||
runs-on: linux.20_04.4x
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
fetch-depth: 1
|
||||
|
@ -10,7 +10,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Run close_nonexistent_disable_issues.py
|
||||
env:
|
||||
|
7 .github/workflows/create_release.yml vendored
@ -2,10 +2,9 @@ name: Create Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ['v*']
|
||||
branches:
|
||||
- main
|
||||
- nightly
|
||||
- release/*
|
||||
release:
|
||||
types: [published]
|
||||
pull_request:
|
||||
@ -31,6 +30,8 @@ jobs:
|
||||
run: |
|
||||
tag_or_branch="${PT_GITHUB_REF#refs/tags/}"
|
||||
tag_or_branch="${tag_or_branch#refs/heads/}"
|
||||
# replace directory separators with _ in branch name
|
||||
tag_or_branch="${tag_or_branch//\//_}"
|
||||
echo "PT_RELEASE_NAME=pytorch-$tag_or_branch" >> "$GITHUB_ENV"
|
||||
echo "PT_RELEASE_FILE=pytorch-$tag_or_branch.tar.gz" >> "$GITHUB_ENV"
|
||||
- name: Create source distribution
|
||||
@ -52,5 +53,5 @@ jobs:
|
||||
files: ${{env.PT_RELEASE_FILE}}
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name }}
|
||||
cancel-in-progress: true
|
||||
|
8 .github/workflows/docker-builds.yml vendored
@ -61,21 +61,21 @@ jobs:
|
||||
# [see note: pytorch repo ref]
|
||||
# deep clone (fetch-depth 0) required for git merge-base
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
|
||||
- name: Build docker image
|
||||
id: build-docker-image
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/calculate-docker-image@release/2.1
|
||||
with:
|
||||
docker-image-name: ${{ matrix.docker-image-name }}
|
||||
always-rebuild: true
|
||||
push: true
|
||||
|
||||
- name: Pull docker image
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
|
||||
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
|
||||
with:
|
||||
docker-image: ${{ steps.build-docker-image.outputs.docker-image }}
|
||||
|
||||
@ -105,5 +105,5 @@ jobs:
|
||||
if: always()
|
||||
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
34 .github/workflows/docker-release.yml vendored
@ -2,6 +2,15 @@ name: Build Official Docker Images
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
channel:
|
||||
description: "Channel to use (nightly, test, release)"
|
||||
required: false
|
||||
type: choice
|
||||
default: test
|
||||
options:
|
||||
- release
|
||||
- test
|
||||
pull_request:
|
||||
paths:
|
||||
- Dockerfile
|
||||
@ -15,6 +24,7 @@ on:
|
||||
- v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+
|
||||
- ciflow/nightly/*
|
||||
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
|
||||
cancel-in-progress: true
|
||||
@ -26,7 +36,7 @@ env:
|
||||
DOCKER_REGISTRY: ghcr.io
|
||||
NO_BUILD_SUFFIX: true
|
||||
USE_BUILDX: 1
|
||||
WITH_PUSH: ${{ github.event_name == 'push' && (github.event.ref == 'refs/heads/nightly' || (startsWith(github.event.ref, 'refs/tags/') && !startsWith(github.event.ref, 'refs/tags/ciflow/'))) }}
|
||||
WITH_PUSH: ${{ inputs.channel == 'release' }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@ -45,9 +55,10 @@ jobs:
|
||||
env:
|
||||
BUILD_IMAGE_TYPE: ${{ matrix.image_type }}
|
||||
BUILD_PLATFORMS: ${{ matrix.platform }}
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
steps:
|
||||
- name: Setup SSH (Click me for login details)
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@main
|
||||
uses: pytorch/test-infra/.github/actions/setup-ssh@release/2.1
|
||||
with:
|
||||
github-secret: ${{ secrets.GITHUB_TOKEN }}
|
||||
# [see note: pytorch repo ref]
|
||||
@ -60,7 +71,7 @@ jobs:
|
||||
- name: Setup Linux
|
||||
uses: ./.github/actions/setup-linux
|
||||
- name: Login to GitHub Container Registry
|
||||
if: ${{ env.WITH_PUSH == 'true' }}
|
||||
if: ${{ inputs.channel == 'release' }}
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
@ -81,13 +92,18 @@ jobs:
|
||||
# To get QEMU binaries in our PATH
|
||||
echo "${RUNNER_TEMP}/bin" >> "${GITHUB_PATH}"
|
||||
# Generate PyTorch version to use
|
||||
echo "PYTORCH_VERSION=$(python3 .github/scripts/generate_pytorch_version.py)" >> "${GITHUB_ENV}"
|
||||
- name: Setup nightly specific variables
|
||||
if: ${{ github.event.ref == 'refs/heads/nightly' || startsWith(github.event.ref, 'refs/tags/ciflow/nightly/') }}
|
||||
if [[ ${CHANNEL} == "release" ]]; then
|
||||
echo "PYTORCH_VERSION=2.1.0" >> "${GITHUB_ENV}"
|
||||
else
|
||||
echo "PYTORCH_VERSION=$(python3 .github/scripts/generate_pytorch_version.py)" >> "${GITHUB_ENV}"
|
||||
fi
|
||||
- name: Setup release specific variables
|
||||
run: |
|
||||
{
|
||||
echo "DOCKER_IMAGE=pytorch-nightly";
|
||||
echo "INSTALL_CHANNEL=pytorch-nightly";
|
||||
echo "INSTALL_CHANNEL=pytorch-test";
|
||||
if [[ ${CHANNEL} == "release" ]]; then
|
||||
echo "INSTALL_CHANNEL=pytorch";
|
||||
fi
|
||||
echo "TRITON_VERSION=$(cut -f 1 .ci/docker/triton_version.txt)+$(cut -c -10 .ci/docker/ci_commit_pins/triton.txt)";
|
||||
} >> "${GITHUB_ENV}"
|
||||
- name: Run docker build / push
|
||||
@ -109,5 +125,5 @@ jobs:
|
||||
ghcr.io/pytorch/pytorch-nightly:latest
|
||||
docker push ghcr.io/pytorch/pytorch-nightly:latest
|
||||
- name: Teardown Linux
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@main
|
||||
uses: pytorch/test-infra/.github/actions/teardown-linux@release/2.1
|
||||
if: always()
|
||||
|
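The docker-release changes above replace the push-ref trigger with an explicit channel input and derive PYTORCH_VERSION and INSTALL_CHANNEL from it. A condensed sketch of that selection, with the values copied from the diff and the surrounding step plumbing omitted:

# Sketch only: mirrors the channel handling added in docker-release.yml.
CHANNEL="${CHANNEL:-test}"            # workflow_dispatch input, defaults to "test"

if [[ "${CHANNEL}" == "release" ]]; then
  PYTORCH_VERSION="2.1.0"             # pinned for the release channel
  INSTALL_CHANNEL="pytorch"
else
  # test channel: version comes from the helper script, packages from pytorch-test
  PYTORCH_VERSION="$(python3 .github/scripts/generate_pytorch_version.py)"
  INSTALL_CHANNEL="pytorch-test"
fi

echo "PYTORCH_VERSION=${PYTORCH_VERSION}"
echo "INSTALL_CHANNEL=${INSTALL_CHANNEL}"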
28 .github/workflows/generated-linux-aarch64-binary-manywheel-nightly.yml generated vendored
@ -49,10 +49,11 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu-aarch64
|
||||
DOCKER_IMAGE: pytorch/manylinuxaarch64-builder:cpu-aarch64
|
||||
DESIRED_PYTHON: "3.8"
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
build_name: manywheel-py3_8-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
manywheel-py3_8-cpu-aarch64-test: # Testing
|
||||
@ -71,7 +72,7 @@ jobs:
|
||||
DESIRED_PYTHON: "3.8"
|
||||
build_name: manywheel-py3_8-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@ -93,7 +94,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
manywheel-py3_9-cpu-aarch64-build:
|
||||
@ -109,10 +110,11 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu-aarch64
|
||||
DOCKER_IMAGE: pytorch/manylinuxaarch64-builder:cpu-aarch64
|
||||
DESIRED_PYTHON: "3.9"
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
build_name: manywheel-py3_9-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
manywheel-py3_9-cpu-aarch64-test: # Testing
|
||||
@ -131,7 +133,7 @@ jobs:
|
||||
DESIRED_PYTHON: "3.9"
|
||||
build_name: manywheel-py3_9-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@ -153,7 +155,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
manywheel-py3_10-cpu-aarch64-build:
|
||||
@ -169,10 +171,11 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu-aarch64
|
||||
DOCKER_IMAGE: pytorch/manylinuxaarch64-builder:cpu-aarch64
|
||||
DESIRED_PYTHON: "3.10"
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
build_name: manywheel-py3_10-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
manywheel-py3_10-cpu-aarch64-test: # Testing
|
||||
@ -191,7 +194,7 @@ jobs:
|
||||
DESIRED_PYTHON: "3.10"
|
||||
build_name: manywheel-py3_10-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@ -213,7 +216,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
manywheel-py3_11-cpu-aarch64-build:
|
||||
@ -229,10 +232,11 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu-aarch64
|
||||
DOCKER_IMAGE: pytorch/manylinuxaarch64-builder:cpu-aarch64
|
||||
DESIRED_PYTHON: "3.11"
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
build_name: manywheel-py3_11-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
manywheel-py3_11-cpu-aarch64-test: # Testing
|
||||
@ -251,7 +255,7 @@ jobs:
|
||||
DESIRED_PYTHON: "3.11"
|
||||
build_name: manywheel-py3_11-cpu-aarch64
|
||||
build_environment: linux-aarch64-binary-manywheel
|
||||
runs_on: linux.t4g.2xlarge
|
||||
runs_on: linux.arm64.2xlarge
|
||||
ALPINE_IMAGE: "arm64v8/alpine"
|
||||
secrets:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@ -273,5 +277,5 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
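The long PYTORCH_EXTRA_INSTALL_REQUIREMENTS strings added above rely on PEP 508 environment markers, so the CUDA wheels are only pulled in on Linux x86_64 and are skipped on these aarch64 builds. A brief, hedged illustration (assuming a recent pip; the package pin is copied from the diff):

# Sketch only: on a non-x86_64 host pip reports that the marker does not match and installs nothing extra.
pip install "nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64'"

# Inspect what the marker evaluates against on the current machine
python3 -c "import platform; print(platform.system(), platform.machine())"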
24 .github/workflows/generated-linux-binary-conda-nightly.yml generated vendored
@ -90,7 +90,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_8-cuda11_8-build:
|
||||
@ -150,7 +150,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_8-cuda12_1-build:
|
||||
@ -210,7 +210,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_9-cpu-build:
|
||||
@ -267,7 +267,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_9-cuda11_8-build:
|
||||
@ -327,7 +327,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_9-cuda12_1-build:
|
||||
@ -387,7 +387,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_10-cpu-build:
|
||||
@ -444,7 +444,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_10-cuda11_8-build:
|
||||
@ -504,7 +504,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_10-cuda12_1-build:
|
||||
@ -564,7 +564,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_11-cpu-build:
|
||||
@ -621,7 +621,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_11-cuda11_8-build:
|
||||
@ -681,7 +681,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
||||
conda-py3_11-cuda12_1-build:
|
||||
@ -741,5 +741,5 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
52 .github/workflows/generated-linux-binary-libtorch-cxx11-abi-nightly.yml generated vendored
@ -93,7 +93,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-shared-without-deps-cxx11-abi-build:
@ -153,7 +153,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-static-with-deps-cxx11-abi-build:
@ -213,7 +213,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-static-without-deps-cxx11-abi-build:
@ -273,7 +273,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-shared-with-deps-cxx11-abi-build:
@ -336,7 +336,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-shared-without-deps-cxx11-abi-build:
@ -399,7 +399,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-static-with-deps-cxx11-abi-build:
@ -462,7 +462,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-static-without-deps-cxx11-abi-build:
@ -525,7 +525,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-shared-with-deps-cxx11-abi-build:
@ -588,7 +588,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-shared-without-deps-cxx11-abi-build:
@ -651,7 +651,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-static-with-deps-cxx11-abi-build:
@ -714,7 +714,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-static-without-deps-cxx11-abi-build:
@ -777,7 +777,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_5-shared-with-deps-cxx11-abi-build:
@ -828,7 +828,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -840,7 +839,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -854,7 +853,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/libtorch-cxx11-builder:rocm5.5
- name: Test Pytorch binary
@ -881,7 +880,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_5-static-with-deps-cxx11-abi-build:
@ -932,7 +931,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -944,7 +942,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -958,7 +956,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/libtorch-cxx11-builder:rocm5.5
- name: Test Pytorch binary
@ -985,7 +983,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_6-shared-with-deps-cxx11-abi-build:
@ -1036,7 +1034,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1048,7 +1045,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1062,7 +1059,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/libtorch-cxx11-builder:rocm5.6
- name: Test Pytorch binary
@ -1089,7 +1086,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_6-static-with-deps-cxx11-abi-build:
@ -1140,7 +1137,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1152,7 +1148,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1166,7 +1162,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/libtorch-cxx11-builder:rocm5.6
- name: Test Pytorch binary
@ -1193,5 +1189,5 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml
52 .github/workflows/generated-linux-binary-libtorch-pre-cxx11-nightly.yml generated vendored
@ -93,7 +93,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-shared-without-deps-pre-cxx11-build:
@ -153,7 +153,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-static-with-deps-pre-cxx11-build:
@ -213,7 +213,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cpu-static-without-deps-pre-cxx11-build:
@ -273,7 +273,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-shared-with-deps-pre-cxx11-build:
@ -336,7 +336,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-shared-without-deps-pre-cxx11-build:
@ -399,7 +399,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-static-with-deps-pre-cxx11-build:
@ -462,7 +462,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda11_8-static-without-deps-pre-cxx11-build:
@ -525,7 +525,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-shared-with-deps-pre-cxx11-build:
@ -588,7 +588,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-shared-without-deps-pre-cxx11-build:
@ -651,7 +651,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-static-with-deps-pre-cxx11-build:
@ -714,7 +714,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-cuda12_1-static-without-deps-pre-cxx11-build:
@ -777,7 +777,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_5-shared-with-deps-pre-cxx11-build:
@ -828,7 +828,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -840,7 +839,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -854,7 +853,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -881,7 +880,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_5-static-with-deps-pre-cxx11-build:
@ -932,7 +931,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -944,7 +942,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -958,7 +956,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -985,7 +983,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_6-shared-with-deps-pre-cxx11-build:
@ -1036,7 +1034,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1048,7 +1045,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1062,7 +1059,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -1089,7 +1086,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

libtorch-rocm5_6-static-with-deps-pre-cxx11-build:
@ -1140,7 +1137,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1152,7 +1148,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1166,7 +1162,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -1193,5 +1189,5 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml
2 .github/workflows/generated-linux-binary-manywheel-main.yml generated vendored
@ -86,7 +86,7 @@ jobs:
DESIRED_PYTHON: "3.8"
build_name: manywheel-py3_8-cuda12_1-with-pypi-cudnn
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_8-cuda12_1-with-pypi-cudnn-test: # Testing
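The PYTORCH_EXTRA_INSTALL_REQUIREMENTS value above (and its counterparts in the nightly file below) is a single pipe-separated list of PEP 508 requirement strings, each gated by environment markers; this diff appends a triton==2.1.0 entry with the same Linux/x86_64 markers. A minimal sketch of how such marker-gated entries behave, assuming the value is split on " | " before being handed to pip; the REQS constant below is an illustrative excerpt, not taken verbatim from the workflow:

from packaging.requirements import Requirement

# Illustrative excerpt of the pipe-separated value; the real string also
# carries the nvidia-* pins shown in the diff above.
REQS = (
    "nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64'"
    " | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'"
)

for raw in REQS.split(" | "):
    req = Requirement(raw)
    # Marker.evaluate() tests the requirement against the current platform,
    # so these pins are installed on Linux/x86_64 and skipped elsewhere.
    wanted = req.marker is None or req.marker.evaluate()
    print(f"{req.name}{req.specifier}: {'install' if wanted else 'skip'}")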
104 .github/workflows/generated-linux-binary-manywheel-nightly.yml generated vendored
@ -90,7 +90,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-cpu-cxx11-abi-build:
@ -150,7 +150,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-cuda11_8-build:
@ -210,7 +210,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-cuda12_1-with-pypi-cudnn-build:
@ -229,7 +229,7 @@ jobs:
DESIRED_PYTHON: "3.8"
build_name: manywheel-py3_8-cuda12_1-with-pypi-cudnn
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_8-cuda12_1-with-pypi-cudnn-test: # Testing
@ -271,7 +271,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-cuda12_1-build:
@ -331,7 +331,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-rocm5_5-build:
@ -380,7 +380,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -392,7 +391,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -406,7 +405,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -432,7 +431,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_8-rocm5_6-build:
@ -481,7 +480,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -493,7 +491,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -507,7 +505,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -533,7 +531,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-cpu-build:
@ -590,7 +588,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-cpu-cxx11-abi-build:
@ -650,7 +648,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-cuda11_8-build:
@ -710,7 +708,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-cuda12_1-with-pypi-cudnn-build:
@ -729,7 +727,7 @@ jobs:
DESIRED_PYTHON: "3.9"
build_name: manywheel-py3_9-cuda12_1-with-pypi-cudnn
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_9-cuda12_1-with-pypi-cudnn-test: # Testing
@ -771,7 +769,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-cuda12_1-build:
@ -831,7 +829,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-rocm5_5-build:
@ -880,7 +878,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -892,7 +889,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -906,7 +903,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -932,7 +929,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_9-rocm5_6-build:
@ -981,7 +978,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -993,7 +989,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1007,7 +1003,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -1033,7 +1029,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-cpu-build:
@ -1090,7 +1086,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-cpu-cxx11-abi-build:
@ -1150,7 +1146,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-cuda11_8-build:
@ -1210,7 +1206,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-cuda12_1-with-pypi-cudnn-build:
@ -1229,7 +1225,7 @@ jobs:
DESIRED_PYTHON: "3.10"
build_name: manywheel-py3_10-cuda12_1-with-pypi-cudnn
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_10-cuda12_1-with-pypi-cudnn-test: # Testing
@ -1271,7 +1267,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-cuda12_1-build:
@ -1331,7 +1327,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-rocm5_5-build:
@ -1380,7 +1376,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1392,7 +1387,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1406,7 +1401,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -1432,7 +1427,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_10-rocm5_6-build:
@ -1481,7 +1476,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1493,7 +1487,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1507,7 +1501,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -1533,7 +1527,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-cpu-build:
@ -1590,7 +1584,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-cpu-cxx11-abi-build:
@ -1650,7 +1644,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-cuda11_8-build:
@ -1710,7 +1704,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-cuda12_1-with-pypi-cudnn-build:
@ -1729,7 +1723,7 @@ jobs:
DESIRED_PYTHON: "3.11"
build_name: manywheel-py3_11-cuda12_1-with-pypi-cudnn
build_environment: linux-binary-manywheel
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64'
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
secrets:
github-token: ${{ secrets.GITHUB_TOKEN }}
manywheel-py3_11-cuda12_1-with-pypi-cudnn-test: # Testing
@ -1771,7 +1765,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-cuda12_1-build:
@ -1831,7 +1825,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-rocm5_5-build:
@ -1880,7 +1874,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1892,7 +1885,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -1906,7 +1899,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.5
- name: Test Pytorch binary
@ -1932,7 +1925,7 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml

manywheel-py3_11-rocm5_6-build:
@ -1981,7 +1974,6 @@ jobs:
- name: Checkout PyTorch
uses: malfet/checkout@silent-checkout
with:
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
submodules: recursive
path: pytorch
quiet-checkout: true
@ -1993,7 +1985,7 @@ jobs:
- name: Checkout pytorch/builder
uses: malfet/checkout@silent-checkout
with:
ref: main
ref: release/2.1
submodules: recursive
repository: pytorch/builder
path: builder
@ -2007,7 +1999,7 @@ jobs:
run: |
echo "GPU_FLAG=--device=/dev/mem --device=/dev/kfd --device=/dev/dri --group-add video --group-add daemon" >> "${GITHUB_ENV}"
- name: Pull Docker image
uses: pytorch/test-infra/.github/actions/pull-docker-image@main
uses: pytorch/test-infra/.github/actions/pull-docker-image@release/2.1
with:
docker-image: pytorch/manylinux-builder:rocm5.6
- name: Test Pytorch binary
@ -2033,5 +2025,5 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-macos-arm64-binary-conda-nightly.yml  (generated, vendored, 20 changed lines)

The same three hunks repeat for each of the conda-py3_8-cpu, conda-py3_9-cpu, conda-py3_10-cpu and conda-py3_11-cpu build/upload jobs
(-75,7 +75,6; -87,7 +86,7; -144,7 +143,7; -187,7 +186,6; -199,7 +197,7; -256,7 +254,7; -299,7 +297,6; -311,7 +308,7; -368,7 +365,7; -411,7 +408,6; -423,7 +419,7; -480,5 +476,5):

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-macos-arm64-binary-wheel-nightly.yml  (generated, vendored, 24 changed lines)

The same four hunks repeat for each of the wheel-py3_8-cpu, wheel-py3_9-cpu, wheel-py3_10-cpu and wheel-py3_11-cpu build/upload jobs
(-48,6 +48,7; -75,7 +76,6; -87,7 +87,7; -144,7 +144,7; -160,6 +160,7; -187,7 +188,6; -199,7 +199,7; -256,7 +256,7; -272,6 +272,7; -299,7 +300,6; -311,7 +311,7; -368,7 +368,7; -384,6 +384,7; -411,7 +412,6; -423,7 +423,7; -480,5 +480,5):

      GPU_ARCH_TYPE: cpu
      SKIP_ALL_TESTS: 1
      DESIRED_PYTHON: "3.8"  # "3.9", "3.10", "3.11" in the later jobs
      PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
      # For sccache access (only on non-forked PRs)
      AWS_ACCESS_KEY_ID: ${{ secrets.MACOS_SCCACHE_S3_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.MACOS_SCCACHE_S3_SECRET_ACCESS_KEY }}

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-macos-binary-conda-nightly.yml  (generated, vendored, 20 changed lines)

The same three hunks repeat for each of the conda-py3_8-cpu, conda-py3_9-cpu, conda-py3_10-cpu and conda-py3_11-cpu build/upload jobs
(-73,7 +73,6; -85,7 +84,7; -142,7 +141,7; -185,7 +184,6; -197,7 +195,7; -254,7 +252,7; -297,7 +295,6; -309,7 +306,7; -366,7 +363,7; -409,7 +406,6; -421,7 +417,7; -478,5 +474,5):

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-macos-binary-libtorch-cxx11-abi-nightly.yml  (generated, vendored, 20 changed lines)

The same three hunks repeat for each of the libtorch-cpu-shared-with-deps, shared-without-deps, static-with-deps and static-without-deps cxx11-abi build/upload jobs
(-77,7 +77,6; -89,7 +88,7; -147,7 +146,7; -194,7 +193,6; -206,7 +204,7; -264,7 +262,7; -311,7 +309,6; -323,7 +320,7; -381,7 +378,7; -428,7 +425,6; -440,7 +436,7; -498,5 +494,5):

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-macos-binary-wheel-nightly.yml  (generated, vendored, 24 changed lines)

The same four hunks repeat for each of the wheel-py3_8-cpu, wheel-py3_9-cpu, wheel-py3_10-cpu and wheel-py3_11-cpu build/upload jobs
(-46,6 +46,7; -73,7 +74,6; -85,7 +85,7; -142,7 +142,7; -158,6 +158,7; -185,7 +186,6; -197,7 +197,7; -254,7 +254,7; -270,6 +270,7; -297,7 +298,6; -309,7 +309,7; -366,7 +366,7; -382,6 +382,7; -409,7 +410,6; -421,7 +421,7; -478,5 +478,5):

      GPU_ARCH_TYPE: cpu
      SKIP_ALL_TESTS: 1
      DESIRED_PYTHON: "3.8"  # "3.9", "3.10", "3.11" in the later jobs
      PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
      # For sccache access (only on non-forked PRs)
      AWS_ACCESS_KEY_ID: ${{ secrets.MACOS_SCCACHE_S3_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.MACOS_SCCACHE_S3_SECRET_ACCESS_KEY }}

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
.github/workflows/generated-windows-binary-conda-nightly.yml  (generated, vendored, 96 changed lines)

The same pattern repeats for every conda-py3_{8,9,10,11}-{cpu,cuda11_8,cuda12_1} configuration: the "Checkout PyTorch" and "Checkout pytorch/builder" hunks appear twice per configuration (build and test job) and the conda-pytorchbot-token hunk once (upload job)
(first hunks -92,7 +92,6; -104,7 +103,7; -208,7 +207,6; -220,7 +218,7; -268,7 +266,7, continuing through -2910,5 +2886,5):

      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true

      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
          ref: main
          ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder

      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
6
.github/workflows/generated-windows-binary-libtorch-debug-main.yml
generated
vendored
6
.github/workflows/generated-windows-binary-libtorch-debug-main.yml
generated
vendored
@@ -89,7 +89,6 @@ jobs:
      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true
@@ -101,7 +100,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
+         ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder
The same two hunks repeat at -209,7 +208,6 and -221,7 +219,7 later in this file.
96  .github/workflows/generated-windows-binary-libtorch-debug-nightly.yml  generated  vendored
@@ -96,7 +96,6 @@ jobs:
      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true
@@ -108,7 +107,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
+         ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder
@@ -280,7 +278,7 @@ jobs:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
-     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
+     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
  libtorch-cpu-shared-without-deps-debug-build:
    if: ${{ github.repository_owner == 'pytorch' }}
The same three changes (the Checkout PyTorch hunk, the builder ref switch from main to release/2.1, and the conda-pytorchbot-token switch to CONDA_PYTORCHBOT_TOKEN_TEST) repeat in the build, test, and upload jobs of every libtorch debug configuration in this file: cpu, cuda11_8, and cuda12_1, each in shared/static and with-deps/without-deps variants, through the final hunk at -3054,5 +3030,5.
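For the upload jobs, the recurring token hunk amounts to the block sketched below once the change is applied. This is a minimal sketch, assuming the diffed keys sit under a secrets: mapping of a reusable-workflow call (the diff shows only the key/value lines, not their parent key); the workflow name, trigger, and job id are placeholders.

name: example-binary-upload             # placeholder name, not from the diff
on: workflow_dispatch                   # placeholder trigger
jobs:
  example-upload:                       # placeholder job id
    if: ${{ github.repository_owner == 'pytorch' }}
    uses: ./.github/workflows/_binary-upload.yml
    secrets:                            # assumption: the diffed keys are grouped here
      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
      conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}   # was CONDA_PYTORCHBOT_TOKEN before this change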
6  .github/workflows/generated-windows-binary-libtorch-release-main.yml  generated  vendored
@@ -89,7 +89,6 @@ jobs:
      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true
@@ -101,7 +100,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
+         ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder
The same two hunks repeat at -209,7 +208,6 and -221,7 +219,7 later in this file.
96  .github/workflows/generated-windows-binary-libtorch-release-nightly.yml  generated  vendored
@@ -96,7 +96,6 @@ jobs:
      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true
@@ -108,7 +107,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
+         ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder
@@ -280,7 +278,7 @@ jobs:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
-     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
+     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
  libtorch-cpu-shared-without-deps-release-build:
    if: ${{ github.repository_owner == 'pytorch' }}
The same three changes (the Checkout PyTorch hunk, the builder ref switch from main to release/2.1, and the conda-pytorchbot-token switch to CONDA_PYTORCHBOT_TOKEN_TEST) repeat in the build, test, and upload jobs of every libtorch release configuration in this file: cpu, cuda11_8, and cuda12_1, each in shared/static and with-deps/without-deps variants, through the final hunk at -3054,5 +3030,5.
108  .github/workflows/generated-windows-binary-wheel-nightly.yml  generated  vendored
@@ -46,6 +46,7 @@ jobs:
        GPU_ARCH_TYPE: cpu
        SKIP_ALL_TESTS: 1
        DESIRED_PYTHON: "3.8"
+       PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
      steps:
        - name: Display EC2 information
          shell: bash
@@ -92,7 +93,6 @@ jobs:
      - name: Checkout PyTorch
        uses: malfet/checkout@silent-checkout
        with:
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
          submodules: recursive
          path: pytorch
          quiet-checkout: true
@@ -104,7 +104,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
+         ref: release/2.1
          submodules: recursive
          repository: pytorch/builder
          path: builder
@@ -268,7 +267,7 @@ jobs:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
      aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
-     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
+     conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
    uses: ./.github/workflows/_binary-upload.yml
  wheel-py3_8-cuda11_8-build:
    if: ${{ github.repository_owner == 'pytorch' }}
The same set of changes (the added PYTORCH_EXTRA_INSTALL_REQUIREMENTS env entry, the Checkout PyTorch hunk, the builder ref switch from main to release/2.1, and the conda-pytorchbot-token switch to CONDA_PYTORCHBOT_TOKEN_TEST) repeats in the build, test, and upload jobs for wheel-py3_8-cuda11_8, wheel-py3_8-cuda12_1, wheel-py3_9-cpu, wheel-py3_9-cuda11_8, and wheel-py3_9-cuda12_1.
@@ -1304,7 +1299,7 @@ jobs:
      - name: Checkout pytorch/builder
        uses: malfet/checkout@silent-checkout
        with:
-         ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1409,7 +1404,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -1421,7 +1415,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1470,7 +1464,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_10-cpu-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -1486,6 +1480,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.10"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -1532,7 +1527,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -1544,7 +1538,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1648,7 +1642,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -1660,7 +1653,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1708,7 +1701,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_10-cuda11_8-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -1725,6 +1718,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cuda
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.10"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -1771,7 +1765,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -1783,7 +1776,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1888,7 +1881,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -1900,7 +1892,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -1949,7 +1941,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_10-cuda12_1-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -1966,6 +1958,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cuda
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.10"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -2012,7 +2005,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2024,7 +2016,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2129,7 +2121,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2141,7 +2132,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2190,7 +2181,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_11-cpu-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -2206,6 +2197,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cpu
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.11"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -2252,7 +2244,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2264,7 +2255,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2368,7 +2359,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2380,7 +2370,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2428,7 +2418,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_11-cuda11_8-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -2445,6 +2435,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cuda
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.11"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -2491,7 +2482,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2503,7 +2493,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2608,7 +2598,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2620,7 +2609,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2669,7 +2658,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
wheel-py3_11-cuda12_1-build:
|
||||
if: ${{ github.repository_owner == 'pytorch' }}
|
||||
@ -2686,6 +2675,7 @@ jobs:
|
||||
GPU_ARCH_TYPE: cuda
|
||||
SKIP_ALL_TESTS: 1
|
||||
DESIRED_PYTHON: "3.11"
|
||||
PYTORCH_EXTRA_INSTALL_REQUIREMENTS: nvidia-cuda-nvrtc-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-runtime-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cuda-cupti-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cudnn-cu12==8.9.2.26; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cublas-cu12==12.1.3.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cufft-cu12==11.0.2.54; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-curand-cu12==10.3.2.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusolver-cu12==11.4.5.107; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-cusparse-cu12==12.1.0.106; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nccl-cu12==2.18.1; platform_system == 'Linux' and platform_machine == 'x86_64' | nvidia-nvtx-cu12==12.1.105; platform_system == 'Linux' and platform_machine == 'x86_64' | triton==2.1.0; platform_system == 'Linux' and platform_machine == 'x86_64'
|
||||
steps:
|
||||
- name: Display EC2 information
|
||||
shell: bash
|
||||
@ -2732,7 +2722,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2744,7 +2733,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2849,7 +2838,6 @@ jobs:
|
||||
- name: Checkout PyTorch
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
submodules: recursive
|
||||
path: pytorch
|
||||
quiet-checkout: true
|
||||
@ -2861,7 +2849,7 @@ jobs:
|
||||
- name: Checkout pytorch/builder
|
||||
uses: malfet/checkout@silent-checkout
|
||||
with:
|
||||
ref: main
|
||||
ref: release/2.1
|
||||
submodules: recursive
|
||||
repository: pytorch/builder
|
||||
path: builder
|
||||
@ -2910,5 +2898,5 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
aws-pytorch-uploader-access-key-id: ${{ secrets.AWS_PYTORCH_UPLOADER_ACCESS_KEY_ID }}
|
||||
aws-pytorch-uploader-secret-access-key: ${{ secrets.AWS_PYTORCH_UPLOADER_SECRET_ACCESS_KEY }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }}
|
||||
conda-pytorchbot-token: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }}
|
||||
uses: ./.github/workflows/_binary-upload.yml
|
||||
|
2
.github/workflows/lint-bc.yml
vendored
2
.github/workflows/lint-bc.yml
vendored
@ -26,7 +26,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Run BC Lint Action
|
||||
uses: pytorch/test-infra/.github/actions/bc-lint@main
|
||||
uses: pytorch/test-infra/.github/actions/bc-lint@release/2.1
|
||||
with:
|
||||
repo: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
base_sha: ${{ github.event.pull_request.base.sha }}
|
||||
|
21
.github/workflows/lint.yml
vendored
21
.github/workflows/lint.yml
vendored
@ -15,12 +15,13 @@ on:
|
||||
# When any other step fails, it's job will be retried once by retryBot.
|
||||
jobs:
|
||||
lintrunner:
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.1
|
||||
with:
|
||||
runner: linux.2xlarge
|
||||
docker-image: pytorch-linux-focal-linter
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
test-infra-ref: 'release/2.1'
|
||||
script: |
|
||||
# The generic Linux job chooses to use base env, not the one setup by the image
|
||||
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
|
||||
@ -62,12 +63,13 @@ jobs:
|
||||
exit $RC
|
||||
|
||||
quick-checks:
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.1
|
||||
with:
|
||||
runner: linux.2xlarge
|
||||
docker-image: pytorch-linux-focal-linter
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
test-infra-ref: 'release/2.1'
|
||||
script: |
|
||||
# The generic Linux job chooses to use base env, not the one setup by the image
|
||||
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
|
||||
@ -103,7 +105,7 @@ jobs:
|
||||
if: github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'skip-pr-sanity-checks')
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
fetch-depth: -1
|
||||
@ -116,12 +118,13 @@ jobs:
|
||||
bash .github/scripts/pr-sanity-check.sh
|
||||
|
||||
workflow-checks:
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.1
|
||||
with:
|
||||
runner: linux.2xlarge
|
||||
docker-image: pytorch-linux-focal-linter
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
test-infra-ref: 'release/2.1'
|
||||
script: |
|
||||
# The generic Linux job chooses to use base env, not the one setup by the image
|
||||
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
|
||||
@ -151,12 +154,13 @@ jobs:
|
||||
exit $RC
|
||||
|
||||
toc:
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.1
|
||||
with:
|
||||
runner: linux.2xlarge
|
||||
docker-image: pytorch-linux-focal-linter
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
test-infra-ref: 'release/2.1'
|
||||
script: |
|
||||
# The generic Linux job chooses to use base env, not the one setup by the image
|
||||
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
|
||||
@ -189,12 +193,13 @@ jobs:
|
||||
test-tools:
|
||||
name: Test tools
|
||||
if: ${{ github.repository == 'pytorch/pytorch' }}
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
|
||||
uses: pytorch/test-infra/.github/workflows/linux_job.yml@release/2.1
|
||||
with:
|
||||
runner: linux.2xlarge
|
||||
docker-image: pytorch-linux-focal-linter
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
|
||||
test-infra-ref: 'release/2.1'
|
||||
script: |
|
||||
# The generic Linux job chooses to use base env, not the one setup by the image
|
||||
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
|
||||
@ -210,7 +215,7 @@ jobs:
|
||||
runs-on: linux.20_04.4x
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
fetch-depth: 1
|
||||
@ -240,7 +245,7 @@ jobs:
|
||||
# [see note: pytorch repo ref]
|
||||
# deep clone (fetch-depth 0) required, to allow us to use git log
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
fetch-depth: 1
|
||||
|
@ -21,7 +21,7 @@ jobs:
|
||||
environment: upload-stats
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
|
55
.github/workflows/periodic.yml
vendored
55
.github/workflows/periodic.yml
vendored
@ -112,30 +112,38 @@ jobs:
|
||||
cuda-version: "11.8"
|
||||
test-matrix: ${{ needs.win-vs2019-cuda11_8-py3-build.outputs.test-matrix }}
|
||||
|
||||
ios-12-5-1-x86-64-coreml:
|
||||
name: ios-12-5-1-x86-64-coreml
|
||||
# TODO: Figure out how to migrate this job to M1 runner
|
||||
ios-build-test:
|
||||
name: ios-build-test
|
||||
if: github.event_name != 'schedule' || github.event.schedule == '45 0,8,16 * * 1-5' || github.event.schedule == '45 4 * * 0,6'
|
||||
uses: ./.github/workflows/_ios-build-test.yml
|
||||
with:
|
||||
build-environment: ios-12-5-1-x86-64-coreml
|
||||
ios-platform: SIMULATOR
|
||||
ios-arch: x86_64
|
||||
build-environment: ios-build-test
|
||||
sync-tag: ios-build-test
|
||||
test-matrix: |
|
||||
{ include: [
|
||||
{ config: "default", shard: 1, num_shards: 1, runner: "macos-12" },
|
||||
]}
|
||||
|
||||
ios-12-5-1-arm64-custom-ops:
|
||||
name: ios-12-5-1-arm64-custom-ops
|
||||
if: github.event_name != 'schedule' || github.event.schedule == '45 0,8,16 * * 1-5' || github.event.schedule == '45 4 * * 0,6'
|
||||
uses: ./.github/workflows/_ios-build-test.yml
|
||||
with:
|
||||
build-environment: ios-12-5-1-arm64-custom-ops
|
||||
ios-platform: OS
|
||||
ios-arch: arm64
|
||||
test-matrix: |
|
||||
{ include: [
|
||||
{ config: "default", shard: 1, num_shards: 1, runner: "macos-12" },
|
||||
{ config: "default",
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: "macos-12",
|
||||
ios_platform: "SIMULATOR",
|
||||
ios_arch: "x86_64",
|
||||
use_lite_interpreter: 1,
|
||||
use_metal: 0,
|
||||
use_coreml: 1,
|
||||
use_custom_op_list: ""
|
||||
},
|
||||
{ config: "default",
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: "macos-12",
|
||||
ios_platform: "OS",
|
||||
ios_arch: "arm64",
|
||||
use_lite_interpreter: 1,
|
||||
use_metal: 1,
|
||||
use_coreml: 1,
|
||||
use_custom_op_list: "mobilenetv2.yaml"
|
||||
}
|
||||
]}
|
||||
|
||||
buck-build-test:
|
||||
@ -180,7 +188,14 @@ jobs:
|
||||
with:
|
||||
test-matrix: |
|
||||
{ include: [
|
||||
{ config: "default", shard: 1, num_shards: 1, runner: "ubuntu-20.04-16x" },
|
||||
{ config: "default",
|
||||
shard: 1,
|
||||
num_shards: 1,
|
||||
runner: "ubuntu-20.04-16x"
|
||||
use_lite_interpreter: 1,
|
||||
# Just set x86 for testing here
|
||||
support_abi: x86,
|
||||
},
|
||||
]}
|
||||
|
||||
linux-vulkan-focal-py3_11-clang10-build:
|
||||
|
2
.github/workflows/update_pytorch_labels.yml
vendored
2
.github/workflows/update_pytorch_labels.yml
vendored
@ -14,7 +14,7 @@ jobs:
|
||||
if: ${{ github.repository == 'pytorch/pytorch' }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: false
|
||||
|
2
.github/workflows/upload-alerts.yml
vendored
2
.github/workflows/upload-alerts.yml
vendored
@ -44,7 +44,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
uses: pytorch/test-infra/.github/actions/upload-alerts@main
|
||||
uses: pytorch/test-infra/.github/actions/upload-alerts@release/2.1
|
||||
with:
|
||||
alerts: '${{ steps.alert_creation_step.outputs.script-output }}'
|
||||
organization: "pytorch"
|
||||
|
2
.github/workflows/upload-test-stats.yml
vendored
2
.github/workflows/upload-test-stats.yml
vendored
@ -37,7 +37,7 @@ jobs:
|
||||
run: echo "${TRIGGERING_WORKFLOW}"
|
||||
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
|
@ -29,7 +29,7 @@ jobs:
|
||||
name: Upload dynamo performance stats for ${{ github.event.workflow_run.id }}, attempt ${{ github.event.workflow_run.run_attempt }}
|
||||
steps:
|
||||
- name: Checkout PyTorch
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@main
|
||||
uses: pytorch/pytorch/.github/actions/checkout-pytorch@release/2.1
|
||||
with:
|
||||
submodules: false
|
||||
fetch-depth: 1
|
||||
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -364,3 +364,7 @@ venv/
|
||||
# Log files
|
||||
*.log
|
||||
sweep/
|
||||
|
||||
# Android build artifacts
|
||||
android/pytorch_android/.cxx
|
||||
android/pytorch_android_torchvision/.cxx
|
||||
|
@ -73,8 +73,8 @@ ARG TARGETPLATFORM
|
||||
|
||||
# On arm64 we can only install wheel packages.
|
||||
RUN case ${TARGETPLATFORM} in \
|
||||
"linux/arm64") pip install --extra-index-url https://download.pytorch.org/whl/cpu/ torch torchvision torchaudio torchtext ;; \
|
||||
*) /opt/conda/bin/conda install -c "${INSTALL_CHANNEL}" -c "${CUDA_CHANNEL}" -y "python=${PYTHON_VERSION}" pytorch torchvision torchaudio torchtext "pytorch-cuda=$(echo $CUDA_VERSION | cut -d'.' -f 1-2)" ;; \
|
||||
"linux/arm64") pip install --extra-index-url https://download.pytorch.org/whl/cpu/ torch torchvision torchaudio ;; \
|
||||
*) /opt/conda/bin/conda install -c "${INSTALL_CHANNEL}" -c "${CUDA_CHANNEL}" -y "python=${PYTHON_VERSION}" pytorch torchvision torchaudio "pytorch-cuda=$(echo $CUDA_VERSION | cut -d'.' -f 1-2)" ;; \
|
||||
esac && \
|
||||
/opt/conda/bin/conda clean -ya
|
||||
RUN /opt/conda/bin/pip install torchelastic
|
||||
|
@ -1,6 +1,6 @@
|
||||
ABI_FILTERS=armeabi-v7a,arm64-v8a,x86,x86_64
|
||||
|
||||
VERSION_NAME=2.1.0-SNAPSHOT
|
||||
VERSION_NAME=2.1.0
|
||||
GROUP=org.pytorch
|
||||
MAVEN_GROUP=org.pytorch
|
||||
SONATYPE_STAGING_PROFILE=orgpytorch
|
||||
|
@ -41,6 +41,7 @@ android {
|
||||
println 'Build pytorch_jni'
|
||||
exclude 'org/pytorch/LiteModuleLoader.java'
|
||||
exclude 'org/pytorch/LiteNativePeer.java'
|
||||
exclude 'org/pytorch/LitePyTorchAndroid.java'
|
||||
} else {
|
||||
println 'Build pytorch_jni_lite'
|
||||
}
|
||||
|
@ -17,6 +17,7 @@ sourceSets {
|
||||
java {
|
||||
srcDir '../src/main/java'
|
||||
exclude 'org/pytorch/PyTorchAndroid.java'
|
||||
exclude 'org/pytorch/LitePyTorchAndroid.java'
|
||||
exclude 'org/pytorch/LiteModuleLoader.java'
|
||||
exclude 'org/pytorch/LiteNativePeer.java'
|
||||
}
|
||||
|
@ -10,6 +10,7 @@ import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import org.junit.Test;
|
||||
import org.junit.Ignore;
|
||||
|
||||
public abstract class PytorchTestBase {
|
||||
private static final String TEST_MODULE_ASSET_NAME = "android_api_module.ptl";
|
||||
@ -413,7 +414,10 @@ public abstract class PytorchTestBase {
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore
|
||||
public void testSpectralOps() throws IOException {
|
||||
// NB: This model fails without lite interpreter. The error is as follows:
|
||||
// RuntimeError: stft requires the return_complex parameter be given for real inputs
|
||||
runModel("spectral_ops");
|
||||
}
|
||||
|
||||
|
@ -10,12 +10,6 @@
|
||||
#include <fbjni/fbjni.h>
|
||||
|
||||
#include "pytorch_jni_common.h"
|
||||
#if defined(__ANDROID__)
|
||||
#ifndef USE_PTHREADPOOL
|
||||
#define USE_PTHREADPOOL
|
||||
#endif /* USE_PTHREADPOOL */
|
||||
#include <caffe2/utils/threadpool/pthreadpool-cpp.h>
|
||||
#endif
|
||||
|
||||
namespace pytorch_jni {
|
||||
|
||||
@ -666,32 +660,4 @@ at::IValue JIValue::JIValueToAtIValue(
|
||||
typeCode);
|
||||
}
|
||||
|
||||
#if defined(__ANDROID__)
|
||||
class PyTorchAndroidJni : public facebook::jni::JavaClass<PyTorchAndroidJni> {
|
||||
public:
|
||||
constexpr static auto kJavaDescriptor = "Lorg/pytorch/PyTorchAndroid;";
|
||||
|
||||
static void registerNatives() {
|
||||
javaClassStatic()->registerNatives({
|
||||
makeNativeMethod(
|
||||
"nativeSetNumThreads", PyTorchAndroidJni::setNumThreads),
|
||||
});
|
||||
}
|
||||
|
||||
static void setNumThreads(facebook::jni::alias_ref<jclass>, jint numThreads) {
|
||||
caffe2::pthreadpool()->set_thread_count(numThreads);
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
void common_registerNatives() {
|
||||
static const int once = []() {
|
||||
#if defined(__ANDROID__)
|
||||
pytorch_jni::PyTorchAndroidJni::registerNatives();
|
||||
#endif
|
||||
return 0;
|
||||
}();
|
||||
((void)once);
|
||||
}
|
||||
|
||||
} // namespace pytorch_jni
|
||||
|
@ -17,6 +17,11 @@
|
||||
#include <android/asset_manager.h>
|
||||
#include <android/asset_manager_jni.h>
|
||||
#include <android/log.h>
|
||||
|
||||
#ifndef USE_PTHREADPOOL
|
||||
#define USE_PTHREADPOOL
|
||||
#endif /* USE_PTHREADPOOL */
|
||||
#include <caffe2/utils/threadpool/pthreadpool-cpp.h>
|
||||
#endif
|
||||
|
||||
namespace pytorch_jni {
|
||||
@ -235,6 +240,34 @@ class PytorchJni : public facebook::jni::HybridClass<PytorchJni> {
|
||||
}
|
||||
};
|
||||
|
||||
#if defined(__ANDROID__)
|
||||
class PyTorchAndroidJni : public facebook::jni::JavaClass<PyTorchAndroidJni> {
|
||||
public:
|
||||
constexpr static auto kJavaDescriptor = "Lorg/pytorch/PyTorchAndroid;";
|
||||
|
||||
static void registerNatives() {
|
||||
javaClassStatic()->registerNatives({
|
||||
makeNativeMethod(
|
||||
"nativeSetNumThreads", PyTorchAndroidJni::setNumThreads),
|
||||
});
|
||||
}
|
||||
|
||||
static void setNumThreads(facebook::jni::alias_ref<jclass>, jint numThreads) {
|
||||
caffe2::pthreadpool()->set_thread_count(numThreads);
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
void common_registerNatives() {
|
||||
static const int once = []() {
|
||||
#if defined(__ANDROID__)
|
||||
pytorch_jni::PyTorchAndroidJni::registerNatives();
|
||||
#endif
|
||||
return 0;
|
||||
}();
|
||||
((void)once);
|
||||
}
|
||||
|
||||
} // namespace pytorch_jni
|
||||
|
||||
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
|
||||
|
@ -18,6 +18,11 @@
|
||||
#include <android/asset_manager.h>
|
||||
#include <android/asset_manager_jni.h>
|
||||
#include <android/log.h>
|
||||
|
||||
#ifndef USE_PTHREADPOOL
|
||||
#define USE_PTHREADPOOL
|
||||
#endif /* USE_PTHREADPOOL */
|
||||
#include <caffe2/utils/threadpool/pthreadpool-cpp.h>
|
||||
#endif
|
||||
|
||||
namespace pytorch_jni {
|
||||
@ -199,6 +204,34 @@ class PytorchJni : public facebook::jni::HybridClass<PytorchJni> {
|
||||
}
|
||||
};
|
||||
|
||||
#if defined(__ANDROID__)
|
||||
class PyTorchAndroidJni : public facebook::jni::JavaClass<PyTorchAndroidJni> {
|
||||
public:
|
||||
constexpr static auto kJavaDescriptor = "Lorg/pytorch/LitePyTorchAndroid;";
|
||||
|
||||
static void registerNatives() {
|
||||
javaClassStatic()->registerNatives({
|
||||
makeNativeMethod(
|
||||
"nativeSetNumThreads", PyTorchAndroidJni::setNumThreads),
|
||||
});
|
||||
}
|
||||
|
||||
static void setNumThreads(facebook::jni::alias_ref<jclass>, jint numThreads) {
|
||||
caffe2::pthreadpool()->set_thread_count(numThreads);
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
void common_registerNatives() {
|
||||
static const int once = []() {
|
||||
#if defined(__ANDROID__)
|
||||
pytorch_jni::PyTorchAndroidJni::registerNatives();
|
||||
#endif
|
||||
return 0;
|
||||
}();
|
||||
((void)once);
|
||||
}
|
||||
|
||||
} // namespace pytorch_jni
|
||||
|
||||
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
|
||||
|
@ -0,0 +1,50 @@
|
||||
package org.pytorch;
|
||||
|
||||
import android.content.res.AssetManager;
|
||||
import com.facebook.jni.annotations.DoNotStrip;
|
||||
import com.facebook.soloader.nativeloader.NativeLoader;
|
||||
import com.facebook.soloader.nativeloader.SystemDelegate;
|
||||
|
||||
public final class LitePyTorchAndroid {
|
||||
static {
|
||||
if (!NativeLoader.isInitialized()) {
|
||||
NativeLoader.init(new SystemDelegate());
|
||||
}
|
||||
NativeLoader.loadLibrary("pytorch_jni_lite");
|
||||
PyTorchCodegenLoader.loadNativeLibs();
|
||||
}
|
||||
|
||||
/**
|
||||
* Attention: This is not recommended way of loading production modules, as prepackaged assets
|
||||
* increase apk size etc. For production usage consider using loading from file on the disk {@link
|
||||
* org.pytorch.Module#load(String)}.
|
||||
*
|
||||
* <p>This method is meant to use in tests and demos.
|
||||
*/
|
||||
public static Module loadModuleFromAsset(
|
||||
final AssetManager assetManager, final String assetName, final Device device) {
|
||||
return new Module(new LiteNativePeer(assetName, assetManager, device));
|
||||
}
|
||||
|
||||
public static Module loadModuleFromAsset(
|
||||
final AssetManager assetManager, final String assetName) {
|
||||
return new Module(new LiteNativePeer(assetName, assetManager, Device.CPU));
|
||||
}
|
||||
|
||||
/**
|
||||
* Globally sets the number of threads used on native side. Attention: Has global effect, all
|
||||
* modules use one thread pool with specified number of threads.
|
||||
*
|
||||
* @param numThreads number of threads, must be positive number.
|
||||
*/
|
||||
public static void setNumThreads(int numThreads) {
|
||||
if (numThreads < 1) {
|
||||
throw new IllegalArgumentException("Number of threads cannot be less than 1");
|
||||
}
|
||||
|
||||
nativeSetNumThreads(numThreads);
|
||||
}
|
||||
|
||||
@DoNotStrip
|
||||
private static native void nativeSetNumThreads(int numThreads);
|
||||
}
|
@ -10,7 +10,7 @@ public final class PyTorchAndroid {
|
||||
if (!NativeLoader.isInitialized()) {
|
||||
NativeLoader.init(new SystemDelegate());
|
||||
}
|
||||
NativeLoader.loadLibrary("pytorch_jni_lite");
|
||||
NativeLoader.loadLibrary("pytorch_jni");
|
||||
PyTorchCodegenLoader.loadNativeLibs();
|
||||
}
|
||||
|
||||
|
@ -41,6 +41,11 @@ android {
|
||||
buildConfigField("long[]", "INPUT_TENSOR_SHAPE", "new long[]{1, 3, 224, 224}")
|
||||
buildConfigField("boolean", "NATIVE_BUILD", 'false')
|
||||
buildConfigField("boolean", "USE_VULKAN_DEVICE", 'false')
|
||||
buildConfigField(
|
||||
"int",
|
||||
"BUILD_LITE_INTERPRETER",
|
||||
System.env.BUILD_LITE_INTERPRETER != null ? System.env.BUILD_LITE_INTERPRETER : "1"
|
||||
)
|
||||
addManifestPlaceholders([APP_NAME: "@string/app_name", MAIN_ACTIVITY: "org.pytorch.testapp.MainActivity"])
|
||||
}
|
||||
buildTypes {
|
||||
@ -63,14 +68,15 @@ android {
|
||||
mnet {
|
||||
dimension "model"
|
||||
applicationIdSuffix ".mnet"
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"mnet.pt\"")
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"mobilenet_v2.ptl\"")
|
||||
addManifestPlaceholders([APP_NAME: "MNET"])
|
||||
buildConfigField("String", "LOGCAT_TAG", "\"pytorch-mnet\"")
|
||||
}
|
||||
// NB: This is not working atm https://github.com/pytorch/pytorch/issues/102966
|
||||
mnetVulkan {
|
||||
dimension "model"
|
||||
applicationIdSuffix ".mnet_vulkan"
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"mnet_vulkan.pt\"")
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"mobilenet_v2_vulkan.ptl\"")
|
||||
buildConfigField("boolean", "USE_VULKAN_DEVICE", 'true')
|
||||
addManifestPlaceholders([APP_NAME: "MNET_VULKAN"])
|
||||
buildConfigField("String", "LOGCAT_TAG", "\"pytorch-mnet-vulkan\"")
|
||||
@ -78,7 +84,7 @@ android {
|
||||
resnet18 {
|
||||
dimension "model"
|
||||
applicationIdSuffix ".resnet18"
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"resnet18.pt\"")
|
||||
buildConfigField("String", "MODULE_ASSET_NAME", "\"resnet18.ptl\"")
|
||||
addManifestPlaceholders([APP_NAME: "RN18"])
|
||||
buildConfigField("String", "LOGCAT_TAG", "\"pytorch-resnet18\"")
|
||||
}
|
||||
@ -149,8 +155,8 @@ dependencies {
|
||||
//nativeBuildImplementation(name: 'pytorch_android_torchvision-release', ext: 'aar')
|
||||
//extractForNativeBuild(name: 'pytorch_android-release', ext: 'aar')
|
||||
|
||||
nightlyImplementation 'org.pytorch:pytorch_android:1.12.0-SNAPSHOT'
|
||||
nightlyImplementation 'org.pytorch:pytorch_android_torchvision:1.12.0-SNAPSHOT'
|
||||
nightlyImplementation 'org.pytorch:pytorch_android:2.2.0-SNAPSHOT'
|
||||
nightlyImplementation 'org.pytorch:pytorch_android_torchvision:2.2.0-SNAPSHOT'
|
||||
|
||||
aarImplementation(name:'pytorch_android', ext:'aar')
|
||||
aarImplementation(name:'pytorch_android_torchvision', ext:'aar')
|
||||
|
@ -1,6 +1,7 @@
|
||||
package org.pytorch.testapp;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.AssetManager;
|
||||
import android.os.Bundle;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
@ -16,6 +17,8 @@ import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.nio.FloatBuffer;
|
||||
import org.pytorch.Device;
|
||||
import org.pytorch.IValue;
|
||||
@ -42,7 +45,13 @@ public class MainActivity extends AppCompatActivity {
|
||||
new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
final Result result = doModuleForward();
|
||||
final Result result;
|
||||
try {
|
||||
result = doModuleForward();
|
||||
} catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException |
|
||||
InvocationTargetException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
runOnUiThread(
|
||||
new Runnable() {
|
||||
@Override
|
||||
@ -118,7 +127,7 @@ public class MainActivity extends AppCompatActivity {
|
||||
|
||||
@WorkerThread
|
||||
@Nullable
|
||||
protected Result doModuleForward() {
|
||||
protected Result doModuleForward() throws ClassNotFoundException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
|
||||
if (mModule == null) {
|
||||
final long[] shape = BuildConfig.INPUT_TENSOR_SHAPE;
|
||||
long numElements = 1;
|
||||
@ -129,12 +138,29 @@ public class MainActivity extends AppCompatActivity {
|
||||
mInputTensor =
|
||||
Tensor.fromBlob(
|
||||
mInputTensorBuffer, BuildConfig.INPUT_TENSOR_SHAPE, MemoryFormat.CHANNELS_LAST);
|
||||
PyTorchAndroid.setNumThreads(1);
|
||||
mModule =
|
||||
BuildConfig.USE_VULKAN_DEVICE
|
||||
? PyTorchAndroid.loadModuleFromAsset(
|
||||
getAssets(), BuildConfig.MODULE_ASSET_NAME, Device.VULKAN)
|
||||
: PyTorchAndroid.loadModuleFromAsset(getAssets(), BuildConfig.MODULE_ASSET_NAME);
|
||||
|
||||
Class ptAndroid;
|
||||
if (BuildConfig.BUILD_LITE_INTERPRETER == 1) {
|
||||
ptAndroid = Class.forName("org.pytorch.LitePyTorchAndroid");
|
||||
}
|
||||
else {
|
||||
ptAndroid = Class.forName("org.pytorch.PyTorchAndroid");
|
||||
}
|
||||
|
||||
Method setNumThreads = ptAndroid.getMethod("setNumThreads", int.class);
|
||||
setNumThreads.invoke(null,1);
|
||||
|
||||
Method loadModuleFromAsset = ptAndroid.getMethod(
|
||||
"loadModuleFromAsset",
|
||||
AssetManager.class,
|
||||
String.class,
|
||||
Device.class
|
||||
);
|
||||
mModule = (Module) (BuildConfig.USE_VULKAN_DEVICE
|
||||
? loadModuleFromAsset.invoke(
|
||||
null, getAssets(), BuildConfig.MODULE_ASSET_NAME, Device.VULKAN)
|
||||
: loadModuleFromAsset.invoke(
|
||||
null, getAssets(), BuildConfig.MODULE_ASSET_NAME, Device.CPU));
|
||||
}
|
||||
|
||||
final long startTime = SystemClock.elapsedRealtime();
|
||||
|
@ -125,6 +125,7 @@ file(GLOB native_ao_sparse_h
|
||||
"native/ao_sparse/quantized/cpu/*.h")
|
||||
file(GLOB native_quantized_h "native/quantized/*.h" "native/quantized/cpu/*.h" "native/quantized/cudnn/*.h")
|
||||
file(GLOB native_cpu_h "native/cpu/*.h")
|
||||
file(GLOB native_utils_h "native/utils/*.h")
|
||||
|
||||
file(GLOB native_cuda_cu "native/cuda/*.cu")
|
||||
file(GLOB native_cuda_cpp "native/cuda/*.cpp")
|
||||
@ -540,7 +541,7 @@ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/cmake-exports/ATenConfig.cmake"
|
||||
|
||||
set(INSTALL_HEADERS ${base_h} ${ATen_CORE_HEADERS})
|
||||
if(NOT INTERN_BUILD_MOBILE)
|
||||
list(APPEND INSTALL_HEADERS ${native_h} ${native_cpu_h} ${native_ao_sparse_h} ${native_quantized_h} ${cuda_h} ${native_cuda_h} ${native_hip_h} ${cudnn_h} ${hip_h} ${mps_h} ${native_mps_h} ${miopen_h})
|
||||
list(APPEND INSTALL_HEADERS ${native_h} ${native_cpu_h} ${native_ao_sparse_h} ${native_quantized_h} ${cuda_h} ${native_cuda_h} ${native_hip_h} ${cudnn_h} ${hip_h} ${mps_h} ${native_mps_h} ${native_utils_h} ${miopen_h})
|
||||
# Metal
|
||||
if(USE_PYTORCH_METAL_EXPORT)
|
||||
# Add files needed from exporting metal models(optimized_for_mobile)
|
||||
|
@ -371,6 +371,22 @@ inline void deprecated_AT_DISPATCH_ALL_TYPES_AND_HALF_AND_COMPLEX() {}
|
||||
AT_DISPATCH_CASE_FLOATING_AND_COMPLEX_TYPES_AND3( \
|
||||
SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, __VA_ARGS__))
|
||||
|
||||
#define AT_DISPATCH_CASE_FLOATING_AND_COMPLEX_TYPES_AND4( \
|
||||
SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, SCALARTYPE4, ...) \
|
||||
AT_DISPATCH_CASE_FLOATING_AND_COMPLEX_TYPES(__VA_ARGS__) \
|
||||
AT_DISPATCH_CASE(SCALARTYPE1, __VA_ARGS__) \
|
||||
AT_DISPATCH_CASE(SCALARTYPE2, __VA_ARGS__) \
|
||||
AT_DISPATCH_CASE(SCALARTYPE3, __VA_ARGS__) \
|
||||
AT_DISPATCH_CASE(SCALARTYPE4, __VA_ARGS__)
|
||||
|
||||
#define AT_DISPATCH_FLOATING_AND_COMPLEX_TYPES_AND4( \
|
||||
SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, SCALARTYPE4, TYPE, NAME, ...) \
|
||||
AT_DISPATCH_SWITCH( \
|
||||
TYPE, \
|
||||
NAME, \
|
||||
AT_DISPATCH_CASE_FLOATING_AND_COMPLEX_TYPES_AND4( \
|
||||
SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, SCALARTYPE4, __VA_ARGS__))
|
||||
|
||||
#define AT_DISPATCH_CASE_INTEGRAL_TYPES(...) \
|
||||
AT_DISPATCH_CASE(at::ScalarType::Byte, __VA_ARGS__) \
|
||||
AT_DISPATCH_CASE(at::ScalarType::Char, __VA_ARGS__) \
|
||||
|
@ -161,8 +161,10 @@ CUDA_STUB3(cuModuleGetFunction, CUfunction *, CUmodule, const char *);
|
||||
CUDA_STUB4(cuOccupancyMaxActiveBlocksPerMultiprocessor, int *, CUfunction, int, size_t);
|
||||
CUDA_STUB2(cuGetErrorString, CUresult, const char **);
|
||||
CUDA_STUB1(cuCtxGetCurrent, CUcontext *);
|
||||
CUDA_STUB1(cuCtxSetCurrent, CUcontext);
|
||||
CUDA_STUB1(cuModuleUnload, CUmodule);
|
||||
CUDA_STUB3(cuDevicePrimaryCtxGetState, CUdevice, unsigned int *, int *);
|
||||
CUDA_STUB2(cuDevicePrimaryCtxRetain, CUcontext *, CUdevice);
|
||||
CUDA_STUB4(cuLinkCreate, unsigned int, CUjit_option *, void **, CUlinkState *);
|
||||
CUDA_STUB3(cuLinkComplete, CUlinkState, void **, size_t *);
|
||||
CUDA_STUB3(cuFuncSetAttribute, CUfunction, CUfunction_attribute, int);
|
||||
|
@ -51,8 +51,10 @@ namespace at { namespace cuda {
|
||||
_(cuLaunchKernel) \
|
||||
_(cuLaunchCooperativeKernel) \
|
||||
_(cuCtxGetCurrent) \
|
||||
_(cuCtxSetCurrent) \
|
||||
_(cuModuleUnload) \
|
||||
_(cuDevicePrimaryCtxGetState) \
|
||||
_(cuDevicePrimaryCtxRetain) \
|
||||
_(cuLinkCreate) \
|
||||
_(cuLinkAddData) \
|
||||
_(cuLinkComplete) \
|
||||
|
@ -389,8 +389,9 @@ static inline bool mkldnn_conv_use_channels_last(const at::Tensor& input, const
|
||||
(input_memory_format == at::MemoryFormat::ChannelsLast) ||
|
||||
(weight_memory_format == at::MemoryFormat::ChannelsLast);
|
||||
|
||||
// TODO: add channels last 3d support
|
||||
bool can_use_mkldnn_channels_last_3d = false;
|
||||
bool can_use_mkldnn_channels_last_3d =
|
||||
(input_memory_format == at::MemoryFormat::ChannelsLast3d) ||
|
||||
(weight_memory_format == at::MemoryFormat::ChannelsLast3d);
|
||||
|
||||
return can_use_mkldnn_channels_last_2d || can_use_mkldnn_channels_last_3d;
|
||||
}
|
||||
|
@ -508,9 +508,6 @@ struct ConvParams {
|
||||
if (transposed && is_output_padding_big()) {
|
||||
return false;
|
||||
}
|
||||
if (transposed && groups > 1 && at::symint::size<T>(input, 1) == groups) {
|
||||
return false;
|
||||
}
|
||||
if (input.device().is_cpu() && input.scalar_type() == kBFloat16 && mkldnn_bf16_device_check()) {
|
||||
return true;
|
||||
}
|
||||
|
@ -253,7 +253,9 @@ static Tensor & copy_impl(Tensor & self, const Tensor & src, bool non_blocking)
|
||||
self.storage_offset() == src.storage_offset() &&
|
||||
self.strides().equals(src.strides()) &&
|
||||
self.sizes().equals(src.sizes()) &&
|
||||
self.scalar_type() == src.scalar_type()
|
||||
self.scalar_type() == src.scalar_type() &&
|
||||
self.is_conj() == src.is_conj() &&
|
||||
self.is_neg() == src.is_neg()
|
||||
);
|
||||
if (is_same_data) {
|
||||
return self;
|
||||
|
@ -10,6 +10,7 @@
|
||||
#include <ATen/native/SpectralOpsUtils.h>
|
||||
#include <ATen/native/cuda/CuFFTUtils.h>
|
||||
#include <ATen/native/cuda/CuFFTPlanCache.h>
|
||||
#include <ATen/cuda/nvrtc_stub/ATenNVRTC.h>
|
||||
#include <c10/util/irange.h>
|
||||
|
||||
#ifndef AT_PER_OPERATOR_HEADERS
|
||||
@ -27,7 +28,6 @@
|
||||
#include <cufftXt.h>
|
||||
|
||||
#include <cmath>
|
||||
#include <vector>
|
||||
|
||||
|
||||
namespace at::native {
|
||||
@ -304,6 +304,17 @@ static const Tensor& _exec_fft(Tensor& out, const Tensor& self, IntArrayRef out_
|
||||
CUFFT_CHECK(cufftSetWorkArea(plan, workspace.mutable_data_ptr()));
|
||||
|
||||
// execute transform plan
|
||||
#if !defined(USE_ROCM)
|
||||
CUcontext pctx = nullptr;
|
||||
at::globalContext().getNVRTC().cuCtxGetCurrent(&pctx);
|
||||
if (C10_UNLIKELY(!pctx)) {
|
||||
// workaround for corner case where a primary context exists but is not
|
||||
// the current context
|
||||
TORCH_WARN_ONCE("Attempting to run cuFFT, but there was no current CUDA context! Attempting to set the primary context...");
|
||||
at::globalContext().getNVRTC().cuDevicePrimaryCtxRetain(&pctx, 0);
|
||||
at::globalContext().getNVRTC().cuCtxSetCurrent(pctx);
|
||||
}
|
||||
#endif /* !defined(USE_ROCM) */
|
||||
exec_cufft_plan(*config, input.data_ptr(), out.data_ptr(), forward);
|
||||
|
||||
// Inplace reshaping to original batch shape and inverting the dimension permutation
|
||||
|
@ -727,7 +727,7 @@ Tensor _mkldnn_convolution_transpose(
|
||||
|
||||
if (bias.defined()) {
|
||||
const ideep::tensor b = itensor_from_tensor(bias);
|
||||
ideep::convolution_transpose_forward::compute(
|
||||
ideep::convolution_transpose_forward::compute_v3(
|
||||
x,
|
||||
w,
|
||||
b,
|
||||
@ -738,9 +738,10 @@ Tensor _mkldnn_convolution_transpose(
|
||||
padding_r(padding_expanded, output_padding_expanded),
|
||||
dilation.vec(),
|
||||
groups,
|
||||
use_channels_last,
|
||||
op_attr);
|
||||
} else {
|
||||
ideep::convolution_transpose_forward::compute(
|
||||
ideep::convolution_transpose_forward::compute_v3(
|
||||
x,
|
||||
w,
|
||||
output_sizes,
|
||||
@ -750,6 +751,7 @@ Tensor _mkldnn_convolution_transpose(
|
||||
padding_r(padding_expanded, output_padding_expanded),
|
||||
dilation.vec(),
|
||||
groups,
|
||||
use_channels_last,
|
||||
op_attr);
|
||||
}
|
||||
if (input.is_mkldnn()) {
|
||||
@ -988,7 +990,7 @@ Tensor mkldnn_convolution_transpose_backward_input(
|
||||
grad_input.resize_(input_size, memory_format);
|
||||
grad_x = itensor_from_tensor(grad_input);
|
||||
}
|
||||
ideep::convolution_transpose_backward_data::compute(
|
||||
ideep::convolution_transpose_backward_data::compute_v3(
|
||||
grad_y,
|
||||
w,
|
||||
input_size.vec(),
|
||||
@ -997,7 +999,8 @@ Tensor mkldnn_convolution_transpose_backward_input(
|
||||
padding.vec(),
|
||||
padding_r(padding, output_padding),
|
||||
dilation.vec(),
|
||||
groups);
|
||||
groups,
|
||||
is_channels_last);
|
||||
|
||||
if (grad_output.is_mkldnn()) {
|
||||
return MKLDNNTensor(grad_x, grad_output.options());
|
||||
@ -1024,7 +1027,7 @@ std::tuple<Tensor,Tensor> mkldnn_convolution_transpose_backward_weights(
|
||||
|
||||
ideep::tensor grad_w, grad_b;
|
||||
if (bias_defined) {
|
||||
ideep::convolution_transpose_backward_weights::compute(
|
||||
ideep::convolution_transpose_backward_weights::compute_v3(
|
||||
x,
|
||||
grad_y,
|
||||
weight_size.vec(),
|
||||
@ -1034,9 +1037,10 @@ std::tuple<Tensor,Tensor> mkldnn_convolution_transpose_backward_weights(
|
||||
padding.vec(),
|
||||
padding_r(padding, output_padding),
|
||||
dilation.vec(),
|
||||
groups);
|
||||
groups,
|
||||
is_channels_last);
|
||||
} else {
|
||||
ideep::convolution_transpose_backward_weights::compute(
|
||||
ideep::convolution_transpose_backward_weights::compute_v3(
|
||||
x,
|
||||
grad_y,
|
||||
weight_size.vec(),
|
||||
@ -1045,7 +1049,8 @@ std::tuple<Tensor,Tensor> mkldnn_convolution_transpose_backward_weights(
|
||||
padding.vec(),
|
||||
padding_r(padding, output_padding),
|
||||
dilation.vec(),
|
||||
groups);
|
||||
groups,
|
||||
is_channels_last);
|
||||
}
|
||||
|
||||
if (!is_channels_last) {
|
||||
@@ -1061,18 +1066,21 @@ std::tuple<Tensor,Tensor> mkldnn_convolution_transpose_backward_weights(
 }

 std::tuple<Tensor, Tensor, Tensor> mkldnn_convolution_transpose_backward(
-    const Tensor& input, const Tensor& grad_output_t, const Tensor& weight,
+    const Tensor& input_t, const Tensor& grad_output_t, const Tensor& weight_t,
     IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups,
     std::array<bool,3> output_mask)
 {
-  bool is_channels_last = mkldnn_conv_use_channels_last(input, weight);
-  auto memory_format = mkldnn_convolution_memory_format(input.ndimension(), is_channels_last);
+  bool is_channels_last = mkldnn_conv_use_channels_last(input_t, weight_t);
+  auto memory_format = mkldnn_convolution_memory_format(input_t.ndimension(), is_channels_last);
+  Tensor grad_output = grad_output_t.is_mkldnn() ? grad_output_t : grad_output_t.contiguous(memory_format);
+  auto input = input_t.is_mkldnn() ? input_t : input_t.contiguous(memory_format);
+  auto weight = weight_t.is_mkldnn() ? weight_t : weight_t.contiguous(memory_format);
   int64_t dim = input.ndimension() - 2;
   const auto padding_expanded = expand_param_if_needed(padding, "padding", dim);
   const auto stride_expanded = expand_param_if_needed(stride, "stride", dim);
   const auto dilation_expanded = expand_param_if_needed(dilation, "dilation", dim);
   const auto output_padding_expanded = expand_param_if_needed(output_padding, "output_padding", dim);

   Tensor grad_input, grad_weight, grad_bias;
   if (output_mask[0]) {
     grad_input = mkldnn_convolution_transpose_backward_input(
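The pattern across these hunks is consistent: the oneDNN (ideep) calls move from compute to compute_v3 and receive an explicit channels-last flag, and the backward entry point now makes its inputs contiguous in the chosen memory format before dispatching. As a hedged, illustrative sketch (not part of the diff) of the user-level path these changes serve, the shapes and module parameters below are assumptions, and whether the mkldnn kernel is actually selected depends on the build and inputs:

# Hedged sketch: ConvTranspose2d forward/backward with channels_last inputs
# on CPU, the path the mkldnn_convolution_transpose changes above target.
import torch

x = torch.randn(2, 8, 16, 16).to(memory_format=torch.channels_last).requires_grad_(True)
deconv = torch.nn.ConvTranspose2d(8, 4, kernel_size=3, stride=2)

y = deconv(x)           # forward path
y.sum().backward()      # backward_input / backward_weights paths
print(y.is_contiguous(memory_format=torch.channels_last))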
@@ -293,7 +293,8 @@ at::Tensor& mps_copy_(at::Tensor& dst, const at::Tensor& src, bool non_blocking)
       dst.resize_as_(src);
     }

-    TORCH_CHECK(dst.dim() >= src.dim());
+    TORCH_CHECK(
+        dst.dim() >= src.dim(), "Destination ", dst.sym_sizes(), " doesn't match the broadcast shape ", src.sym_sizes());
     if (dst.dim() > src.dim()) {
       needs_broadcasting = true;
     } else {
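This hunk only improves diagnostics: copying a higher-dimensional source into a lower-dimensional destination on MPS now reports both shapes instead of failing with a bare assertion. A hedged sketch of a copy that violates the dst.dim() >= src.dim() condition is below; the shapes are assumptions, and depending on how the copy is routed the error may also be raised earlier by the generic broadcast check rather than by this TORCH_CHECK:

# Hedged sketch: shape-mismatched copy on MPS (no-op on machines without MPS).
import torch

if torch.backends.mps.is_available():
    dst = torch.empty(4, device="mps")        # 1-D destination
    src = torch.ones(2, 4, device="mps")      # 2-D source
    try:
        dst.copy_(src)                        # dst.dim() < src.dim()
    except RuntimeError as e:
        print(e)                              # message now includes both shapes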
@@ -16,15 +16,14 @@ namespace at::native {
 Scalar _local_scalar_dense_mps(const Tensor& self) {
   Scalar r;

-  auto output = at::empty_like(self, TensorOptions(kCPU));
-  mps::mps_copy_(output, self, false);
   AT_DISPATCH_ALL_TYPES_AND_COMPLEX_AND3(at::ScalarType::Half,
                                          at::ScalarType::Bool,
                                          at::ScalarType::BFloat16,
                                          self.scalar_type(),
                                          "_local_scalar_dense_mps",
                                          [&] {
+                                           Tensor output = at::empty({1}, TensorOptions(at::CPU(self.scalar_type())));
+
+                                           mps::mps_copy_(output, self, false);
                                            scalar_t value = *output.data_ptr<scalar_t>();
                                            r = Scalar(value);
                                          });
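Here the one-element CPU staging tensor is allocated inside the dtype dispatch with self's scalar type, instead of an empty_like copy created up front. A hedged sketch of the call that lands in _local_scalar_dense_mps (.item() on an MPS tensor); the dtype list is an assumption for illustration:

# Hedged sketch: .item() on an MPS tensor exercises _local_scalar_dense_mps.
# Guarded so it is a no-op on machines without MPS.
import torch

if torch.backends.mps.is_available():
    for dtype in (torch.float16, torch.int32, torch.bool):
        t = torch.ones((), dtype=dtype, device="mps")
        print(dtype, t.item())   # staging buffer now matches t's scalar type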
@@ -53,9 +53,9 @@ set(CMAKE_RANLIB ranlib CACHE FILEPATH "" FORCE)
 set(PKG_CONFIG_EXECUTABLE pkg-config CACHE FILEPATH "" FORCE)

 # Setup iOS platform unless specified manually with IOS_PLATFORM
-if(NOT DEFINED IOS_PLATFORM)
+if(NOT IOS_PLATFORM)
   set(IOS_PLATFORM "OS")
-endif(NOT DEFINED IOS_PLATFORM)
+endif(NOT IOS_PLATFORM)
 set(IOS_PLATFORM ${IOS_PLATFORM} CACHE STRING "Type of iOS Platform")

 # Check the platform selection and setup for developer root
@@ -118,9 +118,9 @@ set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib" ".so" ".a")
 # (where install_name_tool was hardcoded) and where CMAKE_INSTALL_NAME_TOOL isn't in the cache
 # and still cmake didn't fail in CMakeFindBinUtils.cmake (because it isn't rerun)
 # hardcode CMAKE_INSTALL_NAME_TOOL here to install_name_tool, so it behaves as it did before, Alex
-if(NOT DEFINED CMAKE_INSTALL_NAME_TOOL)
+if(NOT CMAKE_INSTALL_NAME_TOOL)
   find_program(CMAKE_INSTALL_NAME_TOOL install_name_tool)
-endif(NOT DEFINED CMAKE_INSTALL_NAME_TOOL)
+endif(NOT CMAKE_INSTALL_NAME_TOOL)

 # Setup iOS deployment target
 set(IOS_DEPLOYMENT_TARGET ${IOS_DEPLOYMENT_TARGET} CACHE STRING "Minimum iOS version")
@@ -130,17 +130,17 @@ set(IOS_DEPLOYMENT_TARGET ${IOS_DEPLOYMENT_TARGET} CACHE STRING "Minimum iOS ver
 exec_program(/usr/bin/xcode-select ARGS -print-path OUTPUT_VARIABLE CMAKE_XCODE_DEVELOPER_DIR)
 set(XCODE_POST_43_ROOT "${CMAKE_XCODE_DEVELOPER_DIR}/Platforms/${IOS_PLATFORM_LOCATION}/Developer")
 set(XCODE_PRE_43_ROOT "/Developer/Platforms/${IOS_PLATFORM_LOCATION}/Developer")
-if(NOT DEFINED CMAKE_IOS_DEVELOPER_ROOT)
+if(NOT CMAKE_IOS_DEVELOPER_ROOT)
   if(EXISTS ${XCODE_POST_43_ROOT})
     set(CMAKE_IOS_DEVELOPER_ROOT ${XCODE_POST_43_ROOT})
   elseif(EXISTS ${XCODE_PRE_43_ROOT})
     set(CMAKE_IOS_DEVELOPER_ROOT ${XCODE_PRE_43_ROOT})
   endif(EXISTS ${XCODE_POST_43_ROOT})
-endif(NOT DEFINED CMAKE_IOS_DEVELOPER_ROOT)
+endif(NOT CMAKE_IOS_DEVELOPER_ROOT)
 set(CMAKE_IOS_DEVELOPER_ROOT ${CMAKE_IOS_DEVELOPER_ROOT} CACHE PATH "Location of iOS Platform")

 # Find and use the most recent iOS sdk unless specified manually with CMAKE_IOS_SDK_ROOT
-if(NOT DEFINED CMAKE_IOS_SDK_ROOT)
+if(NOT CMAKE_IOS_SDK_ROOT)
   file(GLOB _CMAKE_IOS_SDKS "${CMAKE_IOS_DEVELOPER_ROOT}/SDKs/*")
   if(_CMAKE_IOS_SDKS)
     list(SORT _CMAKE_IOS_SDKS)
@@ -150,7 +150,7 @@ if(NOT DEFINED CMAKE_IOS_SDK_ROOT)
     message(FATAL_ERROR "No iOS SDK's found in default search path ${CMAKE_IOS_DEVELOPER_ROOT}. Manually set CMAKE_IOS_SDK_ROOT or install the iOS SDK.")
   endif(_CMAKE_IOS_SDKS)
   message(STATUS "Toolchain using default iOS SDK: ${CMAKE_IOS_SDK_ROOT}")
-endif(NOT DEFINED CMAKE_IOS_SDK_ROOT)
+endif(NOT CMAKE_IOS_SDK_ROOT)
 set(CMAKE_IOS_SDK_ROOT ${CMAKE_IOS_SDK_ROOT} CACHE PATH "Location of the selected iOS SDK")

 # Set the sysroot default to the most recent SDK
Some files were not shown because too many files have changed in this diff.