-
-
Notifications
You must be signed in to change notification settings - Fork 845
434 lines (374 loc) · 15 KB
/
tests-integration-nightly.yml
File metadata and controls
434 lines (374 loc) · 15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
name: Integration Tests (Downstream)

# Nightly smoke tests: run the bnb-specific test suites from transformers,
# accelerate, and peft against the latest main-branch bnb wheel. Catches
# downstream breakage before it reaches users.
#
# bnb is installed from the `continuous-release_main` pre-release which
# python-package.yml publishes on every push to main — no duplicate build.
#
# See agents/integration_tests_guide.md for background.

on:
  workflow_dispatch:
  pull_request:
    paths:
      - '.github/workflows/tests-integration-nightly.yml'
      - 'scripts/integration_test_report.py'
  # schedule:
  #   - cron: "30 3 * * *"  # enable once stable; runs after python-package + tests-nightly

# One run per ref: a newer push cancels an in-flight run on the same branch/PR.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  PYTHON_VERSION: "3.10"
  TORCH_VERSION: "2.9.1"
  PYPI_INDEX: "https://download.pytorch.org/whl/cu128"
  # Pre-release wheel published by python-package.yml on every push to main.
  BNB_WHEEL_URL: "https://github.com/bitsandbytes-foundation/bitsandbytes/releases/download/continuous-release_main/bitsandbytes-1.33.7.preview-py3-none-manylinux_2_24_x86_64.whl"
  # Test jobs only ever use the first one or two GPUs on the runner.
  CUDA_VISIBLE_DEVICES: "0,1"
jobs:
  # ─── Downstream test jobs ─────────────────────────────────────────────────
  # Each job:
  #   1. Installs torch, then bnb from the continuous-release wheel
  #   2. Installs the downstream lib (latest release from PyPI)
  #   3. Clones the matching version tag for the test files
  #   4. Runs the library's bnb-specific tests with --junitxml
  #   5. Uploads the XML + full log as an artifact for the report job
  #
  # Runner matching rationale (see integration_tests_guide.md):
  #   transformers CI runs on T4 → we use T4
  #   accelerate / peft CI runs on L4 → closest bnb equivalent is A10
  # This reduces spurious failures from expected values calibrated on their runners.

  test-transformers:
    name: Transformers bnb tests (single GPU)
    if: github.repository == 'bitsandbytes-foundation/bitsandbytes'
    runs-on: bandb-aws-g5-4xlarge-plus-use1-public-80  # A10G (matches transformers CI)
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install torch + bnb (from continuous-release)
        run: |
          pip install torch==${TORCH_VERSION} --index-url ${PYPI_INDEX}
          pip install "bitsandbytes[test] @ ${BNB_WHEEL_URL}"

      - name: Install transformers and clone matching tag
        run: |
          pip install transformers accelerate
          TRANSFORMERS_VERSION=$(pip show transformers | awk '/^Version:/ {print $2}')
          echo "Installed transformers v${TRANSFORMERS_VERSION}"
          git clone --depth=1 --branch "v${TRANSFORMERS_VERSION}" \
            https://github.com/huggingface/transformers.git /tmp/transformers

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run transformers bnb tests
        working-directory: /tmp/transformers
        env:
          RUN_SLOW: "1"
        # pipefail so a pytest failure is not masked by the `tee` exit status.
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest tests/quantization/bnb/ \
            -v \
            -k "not MultiGpu and not multi_gpu" \
            --junitxml=${GITHUB_WORKSPACE}/reports/transformers.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/transformers.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-transformers
          path: reports/
          retention-days: 7

  test-transformers-multigpu:
    name: Transformers bnb tests (multi GPU)
    if: false  # disabled until bandb-aws-g6-12xlarge-plus runner is provisioned
    runs-on: bandb-aws-g6-12xlarge-plus-use1-public-80  # 4× L4 (2 used)
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install torch + bnb (from continuous-release)
        run: |
          pip install torch==${TORCH_VERSION} --index-url ${PYPI_INDEX}
          pip install "bitsandbytes[test] @ ${BNB_WHEEL_URL}"

      - name: Install transformers and clone matching tag
        run: |
          pip install transformers accelerate
          TRANSFORMERS_VERSION=$(pip show transformers | awk '/^Version:/ {print $2}')
          echo "Installed transformers v${TRANSFORMERS_VERSION}"
          git clone --depth=1 --branch "v${TRANSFORMERS_VERSION}" \
            https://github.com/huggingface/transformers.git /tmp/transformers

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run transformers bnb tests (multi-GPU only)
        working-directory: /tmp/transformers
        env:
          RUN_SLOW: "1"
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest tests/quantization/bnb/ \
            -v \
            -k "MultiGpu or multi_gpu" \
            --junitxml=${GITHUB_WORKSPACE}/reports/transformers-multigpu.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/transformers-multigpu.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-transformers-multigpu
          path: reports/
          retention-days: 7

  test-diffusers:
    name: Diffusers bnb tests
    if: github.repository == 'bitsandbytes-foundation/bitsandbytes'
    runs-on: bandb-aws-g6e-4xlarge-plus-use1-public-80  # L40S (matches diffusers CI)
    # Run inside the diffusers CI image so torch/diffusers versions match their CI.
    container:
      image: diffusers/diffusers-pytorch-cuda
      options: --gpus all --shm-size "16gb" --ipc host
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Install bnb (from continuous-release) over the image's version
        run: |
          pip install --force-reinstall --no-deps ${BNB_WHEEL_URL}

      - name: Clone diffusers matching installed version
        run: |
          DIFFUSERS_VERSION=$(pip show diffusers | awk '/^Version:/ {print $2}')
          echo "Installed diffusers v${DIFFUSERS_VERSION}"
          git clone --depth=1 --branch "v${DIFFUSERS_VERSION}" \
            https://github.com/huggingface/diffusers.git /tmp/diffusers

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run diffusers bnb tests
        working-directory: /tmp/diffusers
        env:
          RUN_SLOW: "1"
          CUBLAS_WORKSPACE_CONFIG: ":16:8"
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest \
            -m bitsandbytes \
            tests/ \
            -v \
            --junitxml=${GITHUB_WORKSPACE}/reports/diffusers.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/diffusers.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-diffusers
          path: reports/
          retention-days: 7

  test-axolotl:
    name: Axolotl bnb kernel tests
    if: github.repository == 'bitsandbytes-foundation/bitsandbytes'
    runs-on: bandb-aws-g5-4xlarge-plus-use1-public-80  # A10G
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install torch + bnb (from continuous-release)
        run: |
          pip install torch==${TORCH_VERSION} --index-url ${PYPI_INDEX}
          pip install "bitsandbytes[test] @ ${BNB_WHEEL_URL}"

      - name: Install axolotl and clone matching tag
        run: |
          pip install axolotl transformers accelerate peft
          AXOLOTL_VERSION=$(pip show axolotl | awk '/^Version:/ {print $2}')
          echo "Installed axolotl v${AXOLOTL_VERSION}"
          git clone --depth=1 --branch "v${AXOLOTL_VERSION}" \
            https://github.com/axolotl-ai-cloud/axolotl.git /tmp/axolotl

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run axolotl bnb kernel tests
        working-directory: /tmp/axolotl
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest \
            tests/e2e/kernels/test_quantize.py \
            tests/e2e/kernels/test_lora.py \
            "tests/e2e/kernels/test_lora_features.py::TestQuantizedModels" \
            -v \
            --junitxml=${GITHUB_WORKSPACE}/reports/axolotl.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/axolotl.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-axolotl
          path: reports/
          retention-days: 7

  test-peft:
    name: PEFT bnb tests (single GPU)
    if: github.repository == 'bitsandbytes-foundation/bitsandbytes'
    runs-on: bandb-aws-g6-4xlarge-plus-use1-public-80  # L4 (matches peft CI)
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install torch + bnb (from continuous-release)
        run: |
          pip install torch==${TORCH_VERSION} --index-url ${PYPI_INDEX}
          pip install "bitsandbytes[test] @ ${BNB_WHEEL_URL}"

      - name: Install peft and clone matching tag
        run: |
          pip install "peft[test]" transformers accelerate
          PEFT_VERSION=$(pip show peft | awk '/^Version:/ {print $2}')
          echo "Installed peft v${PEFT_VERSION}"
          git clone --depth=1 --branch "v${PEFT_VERSION}" \
            https://github.com/huggingface/peft.git /tmp/peft

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run peft bnb tests
        working-directory: /tmp/peft
        env:
          IS_GITHUB_CI: "1"
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest \
            -m single_gpu_tests \
            -k PeftBnbGPUExampleTests \
            tests/test_gpu_examples.py \
            -v \
            --junitxml=${GITHUB_WORKSPACE}/reports/peft.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/peft.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-peft
          path: reports/
          retention-days: 7

  test-peft-multigpu:
    name: PEFT bnb tests (multi GPU)
    if: false  # disabled until bandb-aws-g6-12xlarge-plus runner is provisioned
    runs-on: bandb-aws-g6-12xlarge-plus-use1-public-80  # 4× L4
    steps:
      - name: Show GPU information
        run: nvidia-smi

      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install torch + bnb (from continuous-release)
        run: |
          pip install torch==${TORCH_VERSION} --index-url ${PYPI_INDEX}
          pip install "bitsandbytes[test] @ ${BNB_WHEEL_URL}"

      - name: Install peft and clone matching tag
        run: |
          pip install "peft[test]" transformers accelerate
          PEFT_VERSION=$(pip show peft | awk '/^Version:/ {print $2}')
          echo "Installed peft v${PEFT_VERSION}"
          git clone --depth=1 --branch "v${PEFT_VERSION}" \
            https://github.com/huggingface/peft.git /tmp/peft

      - name: Show environment
        run: |
          pip list
          python -m torch.utils.collect_env

      - name: Run peft bnb tests
        working-directory: /tmp/peft
        env:
          IS_GITHUB_CI: "1"
        shell: bash -o pipefail {0}
        run: |
          mkdir -p ${GITHUB_WORKSPACE}/reports
          python -m pytest \
            -m multi_gpu_tests \
            -k PeftBnbGPUExampleTests \
            tests/test_gpu_examples.py \
            -v \
            --junitxml=${GITHUB_WORKSPACE}/reports/peft-multigpu.xml \
            -o junit_logging=all \
            2>&1 | tee ${GITHUB_WORKSPACE}/reports/peft-multigpu.log

      - name: Upload JUnit XML and log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: reports-peft-multigpu
          path: reports/
          retention-days: 7

  # ─── Consolidated report ──────────────────────────────────────────────────
  # Runs after all test jobs finish (success or failure).
  # Downloads the JUnit XMLs, runs our report script, writes to the job
  # summary, uploads artifacts, and posts a consolidated message to
  # #bnb-daily-ci-collab on Slack.
  report:
    name: Consolidated report
    needs: [test-transformers, test-transformers-multigpu, test-diffusers, test-axolotl, test-peft, test-peft-multigpu]
    if: always() && github.repository == 'bitsandbytes-foundation/bitsandbytes'
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Download all report artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts
          pattern: reports-*

      - name: Consolidate XMLs into reports/
        run: |
          mkdir -p reports
          # Each artifact lands in artifacts/reports-<suite>/ — flatten to reports/<suite>.xml
          find artifacts -name '*.xml' -exec cp {} reports/ \;
          find artifacts -name '*.log' -exec cp {} reports/ \;
          ls -la reports/

      - name: Generate consolidated report + post to Slack
        env:
          SLACK_API_TOKEN: ${{ secrets.SLACK_CIFEEDBACK_BOT_TOKEN }}
        run: |
          pip install slack_sdk
          python scripts/integration_test_report.py \
            --reports-dir reports/ \
            --output consolidated_report.md \
            --slack-channel bnb-daily-ci-collab

      - name: Write to job summary
        if: always()
        run: |
          cat consolidated_report.md >> $GITHUB_STEP_SUMMARY

      - name: Upload consolidated report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: consolidated-report
          path: |
            consolidated_report.md
            reports/
          retention-days: 14