74
74
libswresample-dev
75
75
libswscale-dev
76
76
pciutils
77
+ python3-dev
77
78
TORCH_INDEX : ' --pre --index-url https://download.pytorch.org/whl/nightly/xpu'
78
79
AGENT_TOOLSDIRECTORY : /tmp/xpu-tool
79
80
@@ -154,8 +155,12 @@ jobs:
154
155
env :
155
156
PYTORCH_DEBUG_XPU_FALLBACK : ' 1'
156
157
TRANSFORMERS_TEST_DEVICE_SPEC : ' spec.py'
157
- # enable pytest parallel run, and continue others if meets crash case such as segmentation fault
158
- PYTEST_ADDOPTS : -rsf --timeout 600 --timeout_method=thread --dist worksteal ${{ needs.prepare.outputs.pytest_extra_args }}
158
+ # Usage of `--dist loadfile` is a must as HF tests have complex setups including
159
+ # setUpClass and @first_run clauses. So the 'loadfile' strategy allows minimizing
160
+ # the scope of race conditions. Besides, that's how HF Transformers recommends running
161
+ # tests and how they run them in their own CI.
162
+ # See: https://github.com/huggingface/transformers/blob/v4.56.2/CONTRIBUTING.md?plain=1#L312
163
+ PYTEST_ADDOPTS : -rsf --timeout 600 --timeout_method=thread --dist loadfile ${{ needs.prepare.outputs.pytest_extra_args }}
159
164
strategy :
160
165
fail-fast : false
161
166
max-parallel : 1
@@ -224,21 +229,9 @@ jobs:
224
229
fi
225
230
- name : Prepare OS environment
226
231
run : |
227
- # as jobs might run in parallel on the same system, apt-get might
228
- # step into the lock hold by other job
229
- start_time=$SECONDS
230
- while ! sudo apt-get update; do
231
- sleep 1;
232
- if (( $SECONDS - start_time > 60 )); then false; fi
233
- done
234
- while ! sudo apt-get install -y $PACKAGES; do
235
- sleep 1;
236
- if (( $SECONDS - start_time > 60 )); then false; fi
237
- done
238
- while ! git lfs install; do
239
- sleep 1;
240
- if (( $SECONDS - start_time > 60 )); then false; fi
241
- done
232
+ sudo apt-get update
233
+ sudo apt-get install -y $PACKAGES
234
+ git lfs install
242
235
- name : Setup python-${{ env.python }}
243
236
uses : actions/setup-python@v5
244
237
with :
@@ -250,12 +243,17 @@ jobs:
250
243
pip install -U pip wheel setuptools
251
244
- name : Prepare pytorch and deps
252
245
run : |
253
- pip install junitparser
254
246
pip install $TORCH_INDEX \
255
247
torch==${{ needs.prepare.outputs.torch }} \
256
248
torchvision==${{ needs.prepare.outputs.torchvision }} \
257
249
torchaudio==${{ needs.prepare.outputs.torchaudio }} \
258
250
pytorch-triton-xpu==${{needs.prepare.outputs.triton }}
251
+ pip install \
252
+ junitparser \
253
+ pytest \
254
+ pytest-timeout \
255
+ pytest-xdist \
256
+ pytest-shard
259
257
- name : Prepare Transformers
260
258
run : |
261
259
pwd
@@ -281,8 +279,6 @@ jobs:
281
279
xpu-smi discovery -y --json --dump -1
282
280
- name : Sanity check installed packages
283
281
run : |
284
- # Use latest pytest
285
- pip install -U pytest pytest-timeout pytest-xdist pytest-shard
286
282
# These checks are to exit earlier if for any reason Transformers
287
283
# reinstalled torch packages back to CUDA versions (not expected).
288
284
pip show torch | grep Version | grep xpu
0 commit comments