Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 12:54:11 +08:00)
The Rank and IsScalar functions are no longer used in the torchlib. Requires onnxscript v0.5.4.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/165156
Approved by: https://github.com/Skylion007, https://github.com/cyyever
39 lines · 1.3 KiB · Bash · Executable File

#!/bin/bash

set -ex

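# common_utils.sh provides the as_jenkins, pip_install, and conda_run helpers used below.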
source "$(dirname "${BASH_SOURCE[0]}")/common_utils.sh"

# Run the given command; on failure, retry up to three more times,
# sleeping 10, 20, then 40 seconds between attempts.
retry () {
  "$@" || (sleep 10 && "$@") || (sleep 20 && "$@") || (sleep 40 && "$@")
}
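# Hypothetical usage sketch (retry is not invoked directly in this file):
#   retry pip_install onnxruntime==1.23.0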

# ONNXRuntime should be installed before installing
# onnx-weekly. Otherwise, onnx-weekly could be
# overwritten by onnx.
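# The packages below are test-time dependencies for the ONNX tests; transformers is
# also used further down to pre-download a model into the cache.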
pip_install \
  parameterized==0.8.1 \
  pytest-cov==4.0.0 \
  pytest-subtests==0.10.0 \
  tabulate==0.9.0 \
  transformers==4.36.2

pip_install coloredlogs packaging
pip_install onnxruntime==1.23.0
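# onnxscript is pinned to 0.5.4, the version required by the change described above.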
pip_install onnxscript==0.5.4

# Cache the transformers model to be used later by ONNX tests. We need to run the transformers
# package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/
IMPORT_SCRIPT_FILENAME="/tmp/onnx_import_script.py"
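# hf-internal-testing/tiny-random-gptj is a tiny, randomly initialized GPT-J test fixture,
# so the download stays small.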
as_jenkins echo 'import transformers; transformers.GPTJForCausalLM.from_pretrained("hf-internal-testing/tiny-random-gptj");' > "${IMPORT_SCRIPT_FILENAME}"

# Need a PyTorch version for transformers to work
pip_install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu
# conda run has very weird quoting behavior (https://github.com/conda/conda/issues/10972),
# so echo the command to a file and run the file instead.
conda_run python "${IMPORT_SCRIPT_FILENAME}"

# Clean up: torch was only installed to download the model, so uninstall it
# and remove the temporary import script.
conda_run pip uninstall -y torch
rm "${IMPORT_SCRIPT_FILENAME}" || true