Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-29 08:41:22 +00:00)

	ci : disable AMD workflows + update NVIDIA workflows (#16200)
* ci : disable AMD workflows + update NVIDIA workflows
* cont : fixes
* cont : update nvidia vulkan workflows
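All of the jobs touched by this change call the same ci/run.sh entry point and differ only in the build flags they export and the runner labels they target. As a rough local sketch (flags and the two path arguments are taken verbatim from the workflow below; the arguments appear to be a results/output directory and a mount/scratch directory on the self-hosted runners, so substitute paths that exist on your own machine), the three updated NVIDIA jobs are equivalent to:

    # CUDA job (ggml-ci-x64-nvidia-cuda)
    GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

    # Vulkan job forcing the coopmat1 path (ggml-ci-x64-nvidia-vulkan-cm)
    GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

    # Vulkan job on a coopmat2-capable runner (ggml-ci-x64-nvidia-vulkan-cm2)
    GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp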
1 changed file, 119 changed lines: .github/workflows/build.yml (vendored)
@@ -1302,8 +1302,8 @@ jobs:
         run: |
           GG_BUILD_NO_BF16=1 GG_BUILD_EXTRA_TESTS_0=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-cuda:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1316,8 +1316,8 @@ jobs:
           nvidia-smi
           GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-nvidia-v100-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, V100]
+  ggml-ci-x64-nvidia-vulkan-cm:
+    runs-on: [self-hosted, Linux, X64, NVIDIA]
 
     steps:
       - name: Clone
@@ -1327,51 +1327,23 @@ jobs:
       - name: Test
         id: ggml-ci
         run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-t4-cuda:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          nvidia-smi
-          GG_BUILD_CUDA=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-t4-vulkan:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          vulkaninfo
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-nvidia-t4-vulkan-coopmat1:
-    runs-on: [self-hosted, Linux, X64, NVIDIA, T4]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          vulkaninfo
+          vulkaninfo --summary
           GG_BUILD_VULKAN=1 GGML_VK_DISABLE_COOPMAT2=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+
+  ggml-ci-x64-nvidia-vulkan-cm2:
+    runs-on: [self-hosted, Linux, X64, NVIDIA, COOPMAT2]
+
+    steps:
+      - name: Clone
+        id: checkout
+        uses: actions/checkout@v4
+
+      - name: Test
+        id: ggml-ci
+        run: |
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
   ggml-ci-x64-cpu-amx:
     runs-on: [self-hosted, Linux, X64, CPU, AMX]
 
@@ -1385,31 +1357,33 @@ jobs:
         run: |
           bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-amd-v710-vulkan:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-amd-v710-rocm:
-    runs-on: [self-hosted, Linux, X64, AMD, V710]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+#  ggml-ci-x64-amd-vulkan:
+#    runs-on: [self-hosted, Linux, X64, AMD]
+#
+#    steps:
+#      - name: Clone
+#        id: checkout
+#        uses: actions/checkout@v4
+#
+#      - name: Test
+#        id: ggml-ci
+#        run: |
+#          vulkaninfo --summary
+#          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+#
+#  ggml-ci-x64-amd-rocm:
+#    runs-on: [self-hosted, Linux, X64, AMD]
+#
+#    steps:
+#      - name: Clone
+#        id: checkout
+#        uses: actions/checkout@v4
+#
+#      - name: Test
+#        id: ggml-ci
+#        run: |
+#          amd-smi static
+#          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
   ggml-ci-mac-metal:
     runs-on: [self-hosted, macOS, ARM64]
@@ -1435,4 +1409,5 @@ jobs:
      - name: Test
        id: ggml-ci
        run: |
+          vulkaninfo --summary
          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp ~/mnt/llama.cpp
Georgi Gerganov