diff --git a/.ci/docker/common/install_onnx.sh b/.ci/docker/common/install_onnx.sh
index 9f23feb5adfa..183b5b65c90a 100755
--- a/.ci/docker/common/install_onnx.sh
+++ b/.ci/docker/common/install_onnx.sh
@@ -19,8 +19,8 @@ pip_install \
   transformers==4.36.2
 
 pip_install coloredlogs packaging
-pip_install onnxruntime==1.22.1
-pip_install onnxscript==0.4.0
+pip_install onnxruntime==1.23.0
+pip_install onnxscript==0.5.3
 
 # Cache the transformers model to be used later by ONNX tests. We need to run the transformers
 # package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/
diff --git a/.ci/docker/requirements-ci.txt b/.ci/docker/requirements-ci.txt
index 9a5166ea5fc3..53ece6ef377a 100644
--- a/.ci/docker/requirements-ci.txt
+++ b/.ci/docker/requirements-ci.txt
@@ -341,7 +341,7 @@ onnx==1.18.0
 #Pinned versions:
 #test that import:
 
-onnxscript==0.4.0
+onnxscript==0.5.3
 #Description: Required by mypy and test_public_bindings.py when checking torch.onnx._internal
 #Pinned versions:
 #test that import: