Mirror of https://github.com/ggml-org/llama.cpp.git, synced 2025-10-31 08:51:55 +00:00
	ci : fix windows build and release (#14431)
.github/workflows/build.yml (vendored): 18 lines changed
@@ -664,7 +664,7 @@ jobs:
           ./build-xcframework.sh
 
   windows-msys2:
-    runs-on: windows-latest
+    runs-on: windows-2025
 
     strategy:
       fail-fast: false
@@ -714,7 +714,7 @@ jobs:
             cmake --build build --config ${{ matrix.build }} -j $(nproc)
 
   windows-latest-cmake:
-    runs-on: windows-latest
+    runs-on: windows-2025
 
     env:
       OPENBLAS_VERSION: 0.3.23
@@ -725,16 +725,22 @@ jobs:
       matrix:
         include:
           - build: 'cpu-x64 (static)'
+            arch: 'x64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
           - build: 'openblas-x64'
+            arch: 'x64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
           - build: 'vulkan-x64'
+            arch: 'x64'
             defines: '-DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_VULKAN=ON'
           - build: 'llvm-arm64'
+            arch: 'arm64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON'
           - build: 'llvm-arm64-opencl-adreno'
+            arch: 'arm64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'
           # - build: 'kompute-x64'
+          #   arch: 'x64'
           #   defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON'
 
     steps:
@@ -805,6 +811,8 @@ jobs:
       - name: libCURL
         id: get_libcurl
         uses: ./.github/actions/windows-setup-curl
+        with:
+          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}
 
       - name: Build
         id: cmake_build
@@ -825,7 +833,7 @@ jobs:
 
       - name: Test
         id: cmake_test
-        if: ${{ matrix.build != 'llvm-arm64' && matrix.build != 'llvm-arm64-opencl-adreno' }}
+        if: ${{ matrix.arch == 'x64' }}
         run: |
           cd build
           ctest -L main -C Release --verbose --timeout 900
@@ -930,7 +938,7 @@ jobs:
           cmake --build build --config Release
 
   windows-latest-cmake-sycl:
-    runs-on: windows-latest
+    runs-on: windows-2022
 
     defaults:
       run:
@@ -964,7 +972,7 @@ jobs:
 
   windows-latest-cmake-hip:
     if: ${{ github.event.inputs.create_release != 'true' }}
-    runs-on: windows-latest
+    runs-on: windows-2022
 
     steps:
       - name: Clone
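The new `arch` field in the matrix is what the last two build.yml hunks key on: the libCURL setup action is given the matching package flavour (`win64` for x64, `win64a` for arm64), and the test step now runs only on x64 runners. Below is a minimal sketch of that GitHub Actions expression idiom (`${{ condition && value-a || value-b }}`), using a hypothetical two-entry matrix and echo-only steps rather than the real job:

jobs:
  arch-demo:                      # hypothetical job name, not part of the workflow above
    runs-on: windows-2025
    strategy:
      matrix:
        include:
          - build: 'cpu-x64'
            arch: 'x64'
          - build: 'llvm-arm64'
            arch: 'arm64'
    steps:
      - name: Pick libcurl flavour
        # resolves to 'win64' on x64 and 'win64a' on arm64, mirroring the `with: architecture:` input above
        run: echo "curl package: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}"

      - name: Test
        # gate native test execution on x64, as the Test step above now does
        if: ${{ matrix.arch == 'x64' }}
        run: echo "ctest would run here"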
							
								
								
									
.github/workflows/release.yml (vendored): 12 lines changed
@@ -235,7 +235,7 @@ jobs:
           name: llama-bin-ubuntu-vulkan-x64.zip
 
   windows-cpu:
-    runs-on: windows-latest
+    runs-on: windows-2025
 
     strategy:
       matrix:
@@ -271,7 +271,7 @@ jobs:
        env:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
-          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ${{ matrix.arch }}
+          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ${{ matrix.arch == 'x64' && 'x64' || 'amd64_arm64' }}
           cmake -S . -B build -G "Ninja Multi-Config" ^
             -D CMAKE_TOOLCHAIN_FILE=cmake/${{ matrix.arch }}-windows-llvm.cmake ^
             -DGGML_NATIVE=OFF ^
@@ -288,7 +288,7 @@ jobs:
          CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
        run: |
           Copy-Item $env:CURL_PATH\bin\libcurl-${{ matrix.arch }}.dll .\build\bin\Release\
-          Copy-Item "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Redist\MSVC\14.42.34433\debug_nonredist\${{ matrix.arch }}\Microsoft.VC143.OpenMP.LLVM\libomp140.${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }}.dll" .\build\bin\Release\
+          Copy-Item "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Redist\MSVC\14.44.35112\debug_nonredist\${{ matrix.arch }}\Microsoft.VC143.OpenMP.LLVM\libomp140.${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }}.dll" .\build\bin\Release\
           7z a llama-bin-win-cpu-${{ matrix.arch }}.zip .\build\bin\Release\*
 
       - name: Upload artifacts
@@ -298,7 +298,7 @@ jobs:
           name: llama-bin-win-cpu-${{ matrix.arch }}.zip
 
   windows:
-    runs-on: windows-latest
+    runs-on: windows-2025
 
     env:
       OPENBLAS_VERSION: 0.3.23
@@ -448,7 +448,7 @@ jobs:
           name: cudart-llama-bin-win-cuda-${{ matrix.cuda }}-x64.zip
 
   windows-sycl:
-    runs-on: windows-latest
+    runs-on: windows-2022
 
     defaults:
       run:
@@ -520,7 +520,7 @@ jobs:
           name: llama-bin-win-sycl-x64.zip
 
   windows-hip:
-    runs-on: windows-latest
+    runs-on: windows-2022
 
     strategy:
       matrix:
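In release.yml the same per-arch selection is applied inside the cmd build step: vcvarsall.bat is initialised with `x64` for native x64 builds and `amd64_arm64` when cross-compiling to arm64, and the LLVM OpenMP runtime is now copied from the MSVC 14.44.35112 redist. A self-contained sketch of that vcvarsall mapping follows; the job name is hypothetical, the body is echo-only, it assumes the Visual Studio 2022 Enterprise install path used in the diff, and the %VSCMD_ARG_TGT_ARCH% echo relies on the variable vcvarsall normally exports:

jobs:
  windows-cpu-sketch:             # hypothetical name, not the real windows-cpu job
    runs-on: windows-2025
    strategy:
      matrix:
        arch: [x64, arm64]
    defaults:
      run:
        shell: cmd
    steps:
      - name: Initialise MSVC environment
        run: |
          rem x64 uses the native toolchain; arm64 cross-compiles from an amd64 host, hence amd64_arm64
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ${{ matrix.arch == 'x64' && 'x64' || 'amd64_arm64' }}
          echo target architecture: %VSCMD_ARG_TGT_ARCH%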
					Sigbjørn Skjæret