@@ -1302,8 +1302,8 @@ jobs:
         run: |
           GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-cuda:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1316,8 +1316,8 @@ jobs:
           nvidia-smi
           GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-vulkan-cm:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1327,25 +1327,11 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-t4-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          nvidia-smi
-          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-t4-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
+  ggml-ci-x64-nvidia-vulkan-cm2:
+    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]
 
     steps:
       - name: Clone
@@ -1355,23 +1341,9 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
+          vulkaninfo --summary
           GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-t4-vulkan-coopmat1:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
   ggml-ci-x64-cpu-amx:
     runs-on: [self-hosted, Linux, X64, CPU, AMX]
 
@@ -1385,31 +1357,33 @@ jobs:
         run: |
           bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-amd-v710-vulkan:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-amd-v710-rocm:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+  # ggml-ci-x64-amd-vulkan:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+  #
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+  #
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         vulkaninfo --summary
+  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+  #
+  # ggml-ci-x64-amd-rocm:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+  #
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+  #
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         amd-smi static
+  #         GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
   ggml-ci-mac-metal:
     runs-on: [self-hosted, macOS, ARM64]
@@ -1435,4 +1409,5 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
+          vulkaninfo --summary
           GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
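For reference, the `GG_BUILD_*` toggles above are plain environment variables consumed by `ci/run.sh`, so a job can be reproduced outside the runners. A minimal sketch of a local Vulkan run, assuming a llama.cpp checkout, the Vulkan SDK installed, and placeholder scratch directories (not the runners' mounts):

```bash
# Sketch: run the ggml-ci suite locally with the same toggles the workflow uses.
# Assumes a llama.cpp checkout and a Vulkan-capable GPU; paths are placeholders.
mkdir -p ~/results/llama.cpp ~/mnt/llama.cpp

# Sanity-check the GPU first, as the CI jobs do.
vulkaninfo --summary

# Vulkan build; GGML_VK_DISABLE_COOPMAT2=1 mirrors the non-coopmat2 (cm) job.
GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 \
    bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
```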