Pin transformers version in cpu-torch-latest due to multiprocessing error. (#6823)

This is a copy of https://github.com/microsoft/DeepSpeed/pull/6820 for
the cpu-torch-latest tests.

This PR will revert/fix the following:
https://github.com/microsoft/DeepSpeed/pull/6822
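
For reference, a rough local equivalent of the pin added in the diff below is to install transformers directly from the known-good SHA using pip's git support. This is an illustrative sketch only, not what the workflow does; the CI step instead clones the repo and checks out the SHA:

    # Sketch only: install transformers pinned to the known-good SHA shown in the
    # diff below, via pip's VCS support (assumes git and pip are on PATH).
    pip install "git+https://github.com/huggingface/transformers@6c3f168b3"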
Author: Logan Adams
Date: 2024-12-05 12:16:46 -08:00
Committed by: GitHub
Parent: 2ea181f0c3
Commit: 95ead2a055


@@ -37,6 +37,15 @@ jobs:
           python -c "import torch; print('torch:', torch.__version__, torch)"
           python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
+      - name: Install transformers
+        run: |
+          git clone https://github.com/huggingface/transformers
+          cd transformers
+          # if needed switch to the last known good SHA until transformers@master is fixed
+          git checkout 6c3f168b3
+          git rev-parse --short HEAD
+          pip install .
       - name: Install deepspeed
         run: |
           pip install .[dev,autotuning]
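
After the workflow's Install transformers step, a quick sanity check can confirm which transformers build actually landed in the environment. A minimal sketch, mirroring the python -c checks already used for torch above (the exact version string reported at that SHA is not assumed here):

    # Sketch: confirm the installed transformers build after the pin.
    pip show transformers
    python -c "import transformers; print('transformers:', transformers.__version__)"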