@@ -1302,8 +1302,8 @@ jobs:
         run: |
           GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-cuda:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1316,8 +1316,8 @@ jobs:
           nvidia-smi
           GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-vulkan-cm:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1327,25 +1327,11 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-t4-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          nvidia-smi
-          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-t4-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
+  ggml-ci-x64-nvidia-vulkan-cm2:
+    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]
 
     steps:
      - name: Clone
@@ -1355,23 +1341,9 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
+          vulkaninfo --summary
           GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-t4-vulkan-coopmat1:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
   ggml-ci-x64-cpu-amx:
     runs-on: [self-hosted, Linux, X64, CPU, AMX]
 
@@ -1385,31 +1357,33 @@ jobs:
         run: |
           bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-amd-v710-vulkan:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-amd-v710-rocm:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+#   ggml-ci-x64-amd-vulkan:
+#     runs-on: [self-hosted, Linux, X64, AMD]
+#
+#     steps:
+#       - name: Clone
+#         id: checkout
+#         uses: actions/checkout@v4
+#
+#       - name: Test
+#         id: ggml-ci
+#         run: |
+#           vulkaninfo --summary
+#           GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+#
+#   ggml-ci-x64-amd-rocm:
+#     runs-on: [self-hosted, Linux, X64, AMD]
+#
+#     steps:
+#       - name: Clone
+#         id: checkout
+#         uses: actions/checkout@v4
+#
+#       - name: Test
+#         id: ggml-ci
+#         run: |
+#           amd-smi static
+#           GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
   ggml-ci-mac-metal:
     runs-on: [self-hosted, macOS, ARM64]
@@ -1435,4 +1409,5 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
+          vulkaninfo --summary
           GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp