- onnx==1.15.0rc1
- onnxscript==0.1.0.dev20231006
- ort-nightly==1.17.0.dev20231005001

Pull Request resolved: https://github.com/pytorch/pytorch/pull/110663
Approved by: https://github.com/ezyang, https://github.com/thiagocrepaldi
51 lines
1.7 KiB
Bash
Executable File
#!/bin/bash
set -ex
source "$(dirname "${BASH_SOURCE[0]}")/common_utils.sh"
retry () {
  "$@" || (sleep 10 && "$@") || (sleep 20 && "$@") || (sleep 40 && "$@")
}
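# retry runs the given command once and, on failure, retries it up to three
# more times after sleeping 10s, 20s, and 40s; it fails only if every attempt
# fails. A minimal usage sketch (the package name below is a placeholder, not
# part of this script):
#   retry pip_install --no-cache-dir some-flaky-package==1.0.0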
# A bunch of custom pip dependencies for ONNX
pip_install \
  beartype==0.15.0 \
  filelock==3.9.0 \
  flatbuffers==2.0 \
  mock==5.0.1 \
  ninja==1.10.2 \
  networkx==2.0 \
  numpy==1.24.2
# ONNXRuntime should be installed before installing
# onnx-weekly. Otherwise, onnx-weekly could be
# overwritten by onnx.
pip_install \
  parameterized==0.8.1 \
  pytest-cov==4.0.0 \
  pytest-subtests==0.10.0 \
  tabulate==0.9.0 \
  transformers==4.32.1
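# One reading of the ordering note above (an interpretation, not stated
# verbatim in this script): if ONNXRuntime were installed after the ONNX
# pre-release package, its dependency resolution could pull in the stable
# 'onnx' distribution and silently replace the pre-release one, so the
# runtime is installed first below.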
pip_install coloredlogs packaging
retry pip_install -i https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/pypi/simple/ --no-cache-dir --no-input ort-nightly==1.17.0.dev20231005006
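# -i points pip at the ORT-Nightly package feed; --no-cache-dir disables pip's
# local cache and --no-input disables interactive prompts. Wrapping the call in
# retry guards against transient failures of that external feed (the rationale
# for retry here is an assumption; the script does not state it).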
pip_install -i https://test.pypi.org/simple/ onnx==1.15.0rc1
pip_install onnxscript==0.1.0.dev20231006 --no-deps
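# Hedged note: --no-deps installs only the onnxscript package itself, so its
# dependency metadata cannot override the onnx/ort versions pinned above; this
# reading of why the flag is used here is an assumption.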
# Cache the transformers model to be used later by ONNX tests. We need to run the transformers
# package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/
IMPORT_SCRIPT_FILENAME="/tmp/onnx_import_script.py"
as_jenkins echo 'import transformers; transformers.AutoModel.from_pretrained("sshleifer/tiny-gpt2"); transformers.AutoTokenizer.from_pretrained("sshleifer/tiny-gpt2");' > "${IMPORT_SCRIPT_FILENAME}"
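# The generated /tmp/onnx_import_script.py holds the one-liner above; written
# out, it is equivalent to:
#   import transformers
#   transformers.AutoModel.from_pretrained("sshleifer/tiny-gpt2")
#   transformers.AutoTokenizer.from_pretrained("sshleifer/tiny-gpt2")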
# Need a PyTorch version for transformers to work
pip_install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu
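# This CPU-only nightly torch wheel is installed just so the transformers
# import below can run; it is uninstalled again in the cleanup step at the
# end of this script.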
# Very weird quoting behavior here https://github.com/conda/conda/issues/10972,
# so echo the command to a file and run the file instead
conda_run python "${IMPORT_SCRIPT_FILENAME}"
# Cleaning up
conda_run pip uninstall -y torch
rm "${IMPORT_SCRIPT_FILENAME}" || true