Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-11-04 09:32:00 +00:00)
server : replace behave with pytest
* fix test on windows
* misc
* add more tests
* more tests
* styling
* log less, fix embd test
* added all sequential tests
* fix coding style
* fix save slot test
* add parallel completion test
* fix parallel test
* remove feature files
* update test docs
* no cache_prompt for some tests
* add test_cache_vs_nocache_prompt
		
			
				
	
	
		
		
	
	
67 lines · 1.3 KiB · Nix
{
  lib,
  stdenv,
  buildPythonPackage,
  poetry-core,
  mkShell,
  python3Packages,
  gguf-py,
}@inputs:

let
  llama-python-deps = with python3Packages; [
    numpy
    sentencepiece
    transformers
    protobuf
    torchWithoutCuda
    gguf-py
    tqdm

    # for scripts/compare-llama-bench.py
    gitpython
    tabulate

    # for examples/pydantic-models-to-grammar-examples.py
    docstring-parser
    pydantic

  ];

  llama-python-test-deps = with python3Packages; [
    # Server bench
    matplotlib

    # server tests
    openai
    pytest
    prometheus-client
  ];
in

buildPythonPackage ({
  pname = "llama-scripts";
  version = "0.0.0";
  pyproject = true;

  # NOTE: The files filtered out here are not visible in the build sandbox, nor do
  # they affect the output hash. They can be modified without triggering a rebuild.
  src = lib.cleanSourceWith {
    filter =
      name: type:
      let
        any = builtins.any (x: x);
        baseName = builtins.baseNameOf name;
      in
      any [
        (lib.hasSuffix ".py" name)
        (baseName == "README.md")
        (baseName == "pyproject.toml")
      ];
    src = lib.cleanSource ../../.;
  };
  nativeBuildInputs = [ poetry-core ];
  nativeCheckInputs = llama-python-test-deps;
  dependencies = llama-python-deps;
})
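For context, a minimal sketch of how an expression like this is typically consumed: the attribute set declared at the top is filled in by callPackage, with gguf-py supplied explicitly since it is not an attribute callPackage can resolve from the package set on its own. The file names and relative paths below are illustrative assumptions, not taken from the repository.

# Hypothetical consumer expression; file names and relative paths are assumptions.
# callPackage fills in lib, stdenv, buildPythonPackage, poetry-core, mkShell and
# python3Packages from the surrounding package set, while gguf-py is passed
# explicitly (assumed here to be defined by a sibling expression in the repo).
{ python3Packages }:

python3Packages.callPackage ./python-scripts.nix {
  gguf-py = python3Packages.callPackage ./package-gguf-py.nix { }; # assumed sibling expression
}

Note the split between dependencies and nativeCheckInputs: the pytest, openai and prometheus-client stack is only available during the check phase, so the server-test tooling stays out of the runtime closure of the packaged scripts.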