@@ -1251,87 +1251,132 @@ jobs:
   # TODO: simplify the following workflows using a matrix
   # TODO: run lighter CI on PRs and the full CI only on master (if needed)
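   # One possible matrix shape for the four CPU jobs below (untested sketch;
   # it assumes ci/run.sh keeps its current GG_BUILD_* switches):
   #
   #   ggml-ci-cpu:
   #     strategy:
   #       matrix:
   #         include:
   #           - { runner: ubuntu-22.04,     key: x64-cpu-low-perf,    env: "GG_BUILD_LOW_PERF=1" }
   #           - { runner: ubuntu-22.04-arm, key: arm64-cpu-low-perf,  env: "GG_BUILD_LOW_PERF=1" }
   #           - { runner: ubuntu-22.04,     key: x64-cpu-high-perf,   env: "" }
   #           - { runner: ubuntu-22.04-arm, key: arm64-cpu-high-perf, env: "GG_BUILD_NO_SVE=1 GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1" }
   #     runs-on: ${{ matrix.runner }}
   #     steps:
   #       ...
   #       - name: Test
   #         run: |
   #           LLAMA_ARG_THREADS=$(nproc) ${{ matrix.env }} bash ./ci/run.sh ./tmp/results ./tmp/mnt
   #
   # For the second TODO, the heavier jobs could be gated with something like
   # `if: github.ref == 'refs/heads/master'`, keeping only the low-perf variants on PRs.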
   ggml-ci-x64-cpu-low-perf:
-    runs-on: [self-hosted, Linux, X64, CPU, low-perf]
+    runs-on: ubuntu-22.04

     steps:
       - name: Clone
         id: checkout
         uses: actions/checkout@v4

+      - name: ccache
+        uses: ggml-org/ccache-action@v1.2.16
+        with:
+          key: ggml-ci-x64-cpu-low-perf
+          evict-old-files: 1d
+
+      - name: Dependencies
+        id: depends
+        run: |
+          sudo apt-get update
+          sudo apt-get install build-essential libcurl4-openssl-dev
+
       - name: Test
         id: ggml-ci
         run: |
-          bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
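+          # ci/run.sh takes two directories: a results path and a "mnt" path
+          # where models are downloaded and cached. LLAMA_ARG_THREADS caps the
+          # thread count, and GG_BUILD_LOW_PERF=1 presumably selects the
+          # reduced test set that fits these small hosted runners.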

   ggml-ci-arm64-cpu-low-perf:
-    runs-on: [self-hosted, Linux, ARM64, CPU, low-perf]
+    runs-on: ubuntu-22.04-arm

     steps:
       - name: Clone
         id: checkout
         uses: actions/checkout@v4

+      - name: ccache
+        uses: ggml-org/ccache-action@v1.2.16
+        with:
+          key: ggml-ci-arm64-cpu-low-perf
+          evict-old-files: 1d
+
+      - name: Dependencies
+        id: depends
+        run: |
+          sudo apt-get update
+          sudo apt-get install build-essential libcurl4-openssl-dev
+
       - name: Test
         id: ggml-ci
         run: |
-          bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_LOW_PERF=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

   ggml-ci-x64-cpu-high-perf:
-    runs-on: [self-hosted, Linux, X64, CPU, high-perf]
+    runs-on: ubuntu-22.04

     steps:
       - name: Clone
         id: checkout
         uses: actions/checkout@v4

+      - name: ccache
+        uses: ggml-org/ccache-action@v1.2.16
+        with:
+          key: ggml-ci-x64-cpu-high-perf
+          evict-old-files: 1d
+
+      - name: Dependencies
+        id: depends
+        run: |
+          sudo apt-get update
+          sudo apt-get install build-essential libcurl4-openssl-dev
+
       - name: Test
         id: ggml-ci
         run: |
-          bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          LLAMA_ARG_THREADS=$(nproc) bash ./ci/run.sh ./tmp/results ./tmp/mnt

   ggml-ci-arm64-cpu-high-perf:
-    runs-on: [self-hosted, Linux, ARM64, CPU, high-perf]
+    runs-on: ubuntu-22.04-arm

     steps:
       - name: Clone
         id: checkout
         uses: actions/checkout@v4

-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-v100-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+      - name: ccache
+        uses: ggml-org/ccache-action@v1.2.16
+        with:
+          key: ggml-ci-arm64-cpu-high-perf
+          evict-old-files: 1d

-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
+      - name: Dependencies
+        id: depends
+        run: |
+          sudo apt-get update
+          sudo apt-get install build-essential libcurl4-openssl-dev

       - name: Test
         id: ggml-ci
         run: |
-          nvidia-smi
-          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_SVE=1 GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

-  ggml-ci-x64-nvidia-v100-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-arm64-cpu-high-perf-sve:
+    runs-on: ubuntu-22.04-arm

     steps:
       - name: Clone
         id: checkout
         uses: actions/checkout@v4

+      - name: ccache
+        uses: ggml-org/ccache-action@v1.2.16
+        with:
+          key: ggml-ci-arm64-cpu-high-perf-sve
+          evict-old-files: 1d
+
+      - name: Dependencies
+        id: depends
+        run: |
+          sudo apt-get update
+          sudo apt-get install build-essential libcurl4-openssl-dev
+
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          LLAMA_ARG_THREADS=$(nproc) GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt

-  ggml-ci-x64-nvidia-t4-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
+  ggml-ci-x64-nvidia-cuda:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]

     steps:
       - name: Clone
@@ -1344,8 +1389,8 @@ jobs:
           nvidia-smi
           GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

-  ggml-ci-x64-nvidia-t4-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
+  ggml-ci-x64-nvidia-vulkan-cm:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]

     steps:
       - name: Clone
@@ -1355,11 +1400,11 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

-  ggml-ci-x64-nvidia-t4-vulkan-coopmat1:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
+  ggml-ci-x64-nvidia-vulkan-cm2:
+    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]
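+    # Presumably: the "cm" job above forces the coopmat1 path by setting
+    # GGML_VK_DISABLE_COOPMAT2=1, while this "cm2" job needs a runner whose
+    # Vulkan driver exposes coopmat2 support (VK_NV_cooperative_matrix2),
+    # hence the extra COOPMAT2 label.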

     steps:
       - name: Clone
@@ -1369,8 +1414,8 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

   ggml-ci-x64-cpu-amx:
     runs-on: [self-hosted, Linux, X64, CPU, AMX]
@@ -1385,21 +1430,36 @@ jobs:
         run: |
           bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

-  ggml-ci-x64-amd-v710-vulkan:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+  # ggml-ci-x64-amd-vulkan:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+  #
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+  #
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         vulkaninfo --summary
+  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+  #
+  # ggml-ci-x64-amd-rocm:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+  #
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+  #
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         amd-smi static
+  #         GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

-  ggml-ci-x64-amd-v710-rocm:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
+  ggml-ci-mac-metal:
+    runs-on: [self-hosted, macOS, ARM64]

     steps:
       - name: Clone
@@ -1409,9 +1469,9 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          GG_BUILD_METAL=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp

-  ggml-ci-mac-metal:
+  ggml-ci-mac-vulkan:
     runs-on: [self-hosted, macOS, ARM64]

     steps:
@@ -1422,18 +1482,5 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          GG_BUILD_METAL=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
-
-  # TODO: install vulkan drivers
-  # ggml-ci-mac-vulkan:
-  #   runs-on: [self-hosted, macOS, ARM64]
-  #
-  #   steps:
-  #     - name: Clone
-  #       id: checkout
-  #       uses: actions/checkout@v4
-  #
-  #     - name: Test
-  #       id: ggml-ci
-  #       run: |
-  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
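+          # (--summary keeps the driver dump short; full vulkaninfo output is
+          # very verbose. Vulkan on these Apple Silicon runners presumably goes
+          # through a layered implementation such as MoltenVK, which is what
+          # resolved the old "install vulkan drivers" TODO.)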