mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-30)

Commit 201294ae17
Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images, re-using llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}`; the builds are fast and the resulting images work as expected.
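For orientation, these attributes would typically be exposed through a small `lib.makeScope`-based package set. The sketch below shows one plausible wiring; the file names, the `llamaVersion` argument, and the `interactive = false` override for `docker-min` are assumptions, not taken verbatim from this commit:

```nix
# Hypothetical scope wiring; file names and arguments are assumptions.
{
  lib,
  newScope,
  llamaVersion ? "0.0.0",
}:

lib.makeScope newScope (self: {
  inherit llamaVersion;

  # The main package, reused by every image variant.
  llama-cpp = self.callPackage ./package.nix { };

  # OCI images built with dockerTools (the expression shown below).
  docker = self.callPackage ./docker.nix { };
  docker-min = self.callPackage ./docker.nix { interactive = false; };

  # Singularity/apptainer image.
  sif = self.callPackage ./sif.nix { };
})
```

With a scope like this, `nix build .#llamaPackages.docker` produces a tarball that `docker load < result` can ingest, as the comments in the expression below spell out.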
		
			
				
	
	
		
38 lines · 850 B · Nix
{
  lib,
  dockerTools,
  buildEnv,
  llama-cpp,
  interactive ? true,
  coreutils,
}:

# A tar that can be fed into `docker load`:
#
# $ nix build .#llamaPackages.docker
# $ docker load < result

# For details and variations cf.
# - https://nixos.org/manual/nixpkgs/unstable/#ssec-pkgs-dockerTools-buildLayeredImage
# - https://discourse.nixos.org/t/a-faster-dockertools-buildimage-prototype/16922
# - https://nixery.dev/

# Approximate (compressed) sizes, at the time of writing, are:
#
# .#llamaPackages.docker: 125M;
# .#llamaPackagesCuda.docker: 537M;
# .#legacyPackages.aarch64-linux.llamaPackagesXavier.docker: 415M.

dockerTools.buildLayeredImage {
  name = llama-cpp.pname;
  tag = "latest";

  contents =
    [ llama-cpp ]
    ++ lib.optionals interactive [
      coreutils
      dockerTools.binSh
      dockerTools.caCertificates
    ];
}
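The `sif` attribute mentioned in the description is not shown here. A counterpart expression could look roughly like the sketch below, built on nixpkgs' `singularity-tools.buildImage`; the argument set, the `interactive` flag, and the size constants are assumptions rather than the commit's actual code:

```nix
# Hypothetical sif.nix counterpart; arguments and sizes are assumptions.
{
  lib,
  singularity-tools,
  llama-cpp,
  bashInteractive,
  interactive ? false,
}:

singularity-tools.buildImage {
  inherit (llama-cpp) name;
  contents = [ llama-cpp ] ++ lib.optionals interactive [ bashInteractive ];

  # The image is assembled inside a VM with a fixed-size scratch disk,
  # so these limits are deliberately generous.
  diskSize = 4096;
  memSize = 4096;
}
```

The `interactive` switch mirrors the `interactive ? true` argument in the Docker expression above, which only adds coreutils, `/bin/sh`, and CA certificates on top of `llama-cpp`.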